diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 87a1729d2b41..6d3e488bb082 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -15,7 +15,7 @@ env: CARGO_NET_RETRY: 10 CI: 1 RUST_BACKTRACE: short - RUSTFLAGS: "-D warnings -W unreachable-pub -W bare-trait-objects" + RUSTFLAGS: "-D warnings -D elided_lifetimes_in_paths -D explicit_outlives_requirements -D unsafe_op_in_unsafe_fn -D unused_extern_crates -D unused_lifetimes -D unreachable_pub" RUSTUP_MAX_RETRIES: 10 jobs: diff --git a/.typos.toml b/.typos.toml index c2e8b265218f..e7e764ce0351 100644 --- a/.typos.toml +++ b/.typos.toml @@ -14,6 +14,8 @@ extend-ignore-re = [ "\\w*\\.{3,4}\\w*", '"flate2"', "raison d'ĂȘtre", + "inout", + "optin" ] [default.extend-words] diff --git a/.vscode/launch.json b/.vscode/launch.json index c353737a35a8..e83c03796a4b 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -18,7 +18,8 @@ "args": [ // "--user-data-dir=${workspaceFolder}/target/code", "--disable-extensions", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" @@ -36,7 +37,8 @@ "runtimeExecutable": "${execPath}", "args": [ "--disable-extensions", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" @@ -57,7 +59,8 @@ "runtimeExecutable": "${execPath}", "args": [ "--disable-extensions", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" @@ -79,7 +82,8 @@ "runtimeExecutable": "${execPath}", "args": [ "--disable-extension", "rust-lang.rust-analyzer", - "--extensionDevelopmentPath=${workspaceFolder}/editors/code" + "--extensionDevelopmentPath=${workspaceFolder}/editors/code", + "--log rust-lang.rust-analyzer:debug" ], "outFiles": [ "${workspaceFolder}/editors/code/out/**/*.js" diff --git a/Cargo.lock b/Cargo.lock index 56deb1b7a191..8fee2b64fd3c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] @@ -28,9 +28,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.83" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "arbitrary" @@ -52,16 +52,16 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" -version = "0.3.71" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.32.2", + "object 0.35.0", 
"rustc-demangle", ] @@ -70,6 +70,7 @@ name = "base-db" version = "0.0.0" dependencies = [ "cfg", + "intern", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "lz4_flex", "rustc-hash", @@ -103,9 +104,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "camino" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c" +checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" dependencies = [ "serde", ] @@ -135,9 +136,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" +checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" [[package]] name = "cfg" @@ -146,10 +147,11 @@ dependencies = [ "arbitrary", "derive_arbitrary", "expect-test", - "mbe", + "intern", "oorandom", "rustc-hash", "syntax", + "syntax-bridge", "tt", ] @@ -167,9 +169,9 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" [[package]] name = "chalk-derive" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92a0aedc4ac2adc5c0b7dc9ec38c5c816284ad28da6d4ecd01873b9683f54972" +checksum = "9426c8fd0fe61c3da880b801d3b510524df17843a8f9ec1f5b9cec24fb7412df" dependencies = [ "proc-macro2", "quote", @@ -179,9 +181,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db18493569b190f7266a04901e520fc3a5c00564475154287906f8a27302c119" +checksum = "d5f2eb1cd6054da221bd1ac0197fb2fe5e2caf3dcb93619398fc1433f8f09093" dependencies = [ "bitflags 2.5.0", "chalk-derive", @@ -189,9 +191,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae4ba8ce5bd2e1b59f1f79495bc8704db09a8285e51cc5ddf01d9baee1bf447d" +checksum = "129dc03458f71cfb9c3cd621c9c68166a94e87b85b16ccd29af015d7ff9a1c61" dependencies = [ "chalk-derive", "chalk-ir", @@ -202,9 +204,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.97.0" +version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ec1b3b7f7b1ec38f099ef39c2bc3ea29335be1b8316d114baff46d96d131e9" +checksum = "d7e8a8c1e928f98cdf227b868416ef21dcd8cc3c61b347576d783713444d41c8" dependencies = [ "chalk-derive", "chalk-ir", @@ -221,11 +223,6 @@ name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" -dependencies = [ - "dashmap", - "once_cell", - "rustc-hash", -] [[package]] name = "cov-mark" @@ -235,18 +232,18 @@ checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a" [[package]] name = "crc32fast" -version = "1.4.0" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" -version = "0.5.12" +version = "0.5.13" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ "crossbeam-utils", ] @@ -272,9 +269,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "ctrlc" @@ -369,9 +366,9 @@ checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" [[package]] name = "either" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" +checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" [[package]] name = "ena" @@ -434,6 +431,7 @@ dependencies = [ "crossbeam-channel", "paths", "process-wrap", + "project-model", "rustc-hash", "serde", "serde_json", @@ -479,9 +477,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "hashbrown" @@ -548,14 +546,15 @@ dependencies = [ "limit", "mbe", "once_cell", - "profile", "ra-ap-rustc_abi", "ra-ap-rustc_parse_format", "rustc-hash", + "rustc_apfloat", "smallvec", "span", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tracing", @@ -584,6 +583,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "tracing", "triomphe", "tt", @@ -616,9 +616,10 @@ dependencies = [ "oorandom", "project-model", "ra-ap-rustc_abi", - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "ra-ap-rustc_pattern_analysis", "rustc-hash", + "rustc_apfloat", "scoped-tls", "smallvec", "span", @@ -664,6 +665,7 @@ dependencies = [ "profile", "pulldown-cmark", "pulldown-cmark-to-cmark", + "rustc_apfloat", "smallvec", "span", "stdx", @@ -809,7 +811,6 @@ checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown", - "serde", ] [[package]] @@ -839,6 +840,7 @@ dependencies = [ "dashmap", "hashbrown", "rustc-hash", + "sptr", "triomphe", ] @@ -917,9 +919,9 @@ dependencies = [ [[package]] name = "libmimalloc-sys" -version = "0.1.37" +version = "0.1.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81eb4061c0582dedea1cbc7aff2240300dd6982e0239d1c99e65c1dbf4a30ba7" +checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6" dependencies = [ "cc", "libc", @@ -972,6 +974,7 @@ dependencies = [ "crossbeam-channel", "hir-expand", "ide-db", + "intern", "itertools", "paths", "proc-macro-api", @@ -1048,12 +1051,14 @@ version = "0.0.0" dependencies = [ "arrayvec", "cov-mark", + "intern", "parser", "rustc-hash", "smallvec", "span", "stdx", "syntax", + "syntax-bridge", "test-utils", "tracing", "tt", @@ -1085,18 +1090,18 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.41" +version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f41a2280ded0da56c8cf898babb86e8f10651a34adcfff190ae9a1159c6908d" +checksum = 
"e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176" dependencies = [ "libmimalloc-sys", ] [[package]] name = "miniz_oxide" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" dependencies = [ "adler", ] @@ -1161,9 +1166,9 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.49.0" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c073d3c1930d0751774acf49e66653acecb416c3a54c6ec095a9b11caddb5a68" +checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14" dependencies = [ "windows-sys 0.48.0", ] @@ -1186,18 +1191,18 @@ dependencies = [ [[package]] name = "object" -version = "0.32.2" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d" dependencies = [ "memchr", ] [[package]] name = "object" -version = "0.33.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d" +checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" dependencies = [ "memchr", ] @@ -1222,9 +1227,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "parking_lot" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -1251,7 +1256,6 @@ dependencies = [ "expect-test", "limit", "ra-ap-rustc_lexer", - "sourcegen", "stdx", "tracing", ] @@ -1267,6 +1271,7 @@ name = "paths" version = "0.0.0" dependencies = [ "camino", + "serde", ] [[package]] @@ -1328,14 +1333,13 @@ version = "0.0.0" dependencies = [ "base-db", "indexmap", - "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "intern", "paths", "rustc-hash", "serde", "serde_json", "span", "stdx", - "text-size", "tracing", "tt", ] @@ -1346,8 +1350,8 @@ version = "0.0.0" dependencies = [ "base-db", "expect-test", + "intern", "libloading", - "mbe", "memmap2", "object 0.33.0", "paths", @@ -1357,6 +1361,7 @@ dependencies = [ "snap", "span", "stdx", + "syntax-bridge", "tt", ] @@ -1377,9 +1382,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.82" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" +checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" dependencies = [ "unicode-ident", ] @@ -1401,13 +1406,9 @@ name = "profile" version = "0.0.0" dependencies = [ "cfg-if", - "countme", - "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc", - "once_cell", "perf-event", "tikv-jemalloc-ctl", - "tracing", "windows-sys 0.52.0", ] @@ -1420,6 +1421,7 @@ dependencies = [ "cargo_metadata", "cfg", "expect-test", + "intern", "itertools", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", @@ -1490,21 +1492,10 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "80b1d613eee933486c0613a7bc26e515e46f43adf479d1edd5e537f983e9ce46" dependencies = [ "bitflags 2.5.0", - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "tracing", ] -[[package]] -name = "ra-ap-rustc_index" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ad68bacffb87dcdbb23a3ce11261375078aaa06b85d348c49f39ffd5510dc20" -dependencies = [ - "arrayvec", - "ra-ap-rustc_index_macros 0.44.0", - "smallvec", -] - [[package]] name = "ra-ap-rustc_index" version = "0.53.0" @@ -1512,22 +1503,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f072060ac77e9e1a02cc20028095993af7e72cc0804779c68bcbf47b16de49c9" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros 0.53.0", + "ra-ap-rustc_index_macros", "smallvec", ] -[[package]] -name = "ra-ap-rustc_index_macros" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8782aaf3a113837c533dfb1c45df91cd17e1fdd1d2f9a20c2e0d1976025c4f1f" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - [[package]] name = "ra-ap-rustc_index_macros" version = "0.53.0" @@ -1556,17 +1535,17 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70dad7a491c2554590222e0c9212dcb7c2e7aceb668875075012a35ea780d135" dependencies = [ - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "ra-ap-rustc_lexer", ] [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.44.0" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d63d1e1d5b2a13273cee1a10011147418f40e12b70f70578ce1dee0f1cafc334" +checksum = "34768e1faf88c31f2e9ad57b48318a52b507dafac0cddbf01b5d63bfc0b0a365" dependencies = [ - "ra-ap-rustc_index 0.44.0", + "ra-ap-rustc_index", "rustc-hash", "rustc_apfloat", "smallvec", @@ -1683,12 +1662,11 @@ dependencies = [ "ide", "ide-db", "ide-ssr", - "indexmap", + "intern", "itertools", "load-cargo", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", - "mbe", "memchr", "mimalloc", "nohash-hasher", @@ -1706,9 +1684,9 @@ dependencies = [ "semver", "serde", "serde_json", - "sourcegen", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tikv-jemallocator", @@ -1825,18 +1803,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.201" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", @@ -1868,9 +1846,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" dependencies = [ "serde", ] @@ -1892,9 +1870,9 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = 
"smol_str" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6845563ada680337a52d43bb0b29f396f2d911616f6573012645b9e3d048a49" +checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead" dependencies = [ "serde", ] @@ -1905,13 +1883,6 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" -[[package]] -name = "sourcegen" -version = "0.0.0" -dependencies = [ - "xshell", -] - [[package]] name = "span" version = "0.0.0" @@ -1926,6 +1897,12 @@ dependencies = [ "vfs", ] +[[package]] +name = "sptr" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -1948,9 +1925,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.63" +version = "2.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf5be731623ca1a1fb7d8be6f261a3be6d3e2337b8a1f97be944d020c8fcb704" +checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" dependencies = [ "proc-macro2", "quote", @@ -1983,6 +1960,7 @@ dependencies = [ "rayon", "rowan", "rustc-hash", + "rustc_apfloat", "smol_str", "stdx", "test-utils", @@ -1991,6 +1969,21 @@ dependencies = [ "triomphe", ] +[[package]] +name = "syntax-bridge" +version = "0.0.0" +dependencies = [ + "intern", + "parser", + "rustc-hash", + "span", + "stdx", + "syntax", + "test-utils", + "tracing", + "tt", +] + [[package]] name = "test-fixture" version = "0.0.0" @@ -1998,6 +1991,7 @@ dependencies = [ "base-db", "cfg", "hir-expand", + "intern", "rustc-hash", "span", "stdx", @@ -2010,6 +2004,7 @@ name = "test-utils" version = "0.0.0" dependencies = [ "dissimilar", + "paths", "profile", "rustc-hash", "stdx", @@ -2033,18 +2028,18 @@ checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "thiserror" -version = "1.0.60" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.60" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", @@ -2128,9 +2123,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" +checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" dependencies = [ "serde", "serde_spanned", @@ -2140,18 +2135,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" dependencies = [ "serde", ] [[package]] name = "toml_edit" 
-version = "0.22.12" +version = "0.22.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3328d4f68a705b2a4498da1d580585d39a6510f98318a2cec3018a7ec61ddef" +checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" dependencies = [ "indexmap", "serde", @@ -2225,9 +2220,9 @@ dependencies = [ [[package]] name = "tracing-tree" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65139ecd2c3f6484c3b99bc01c77afe21e95473630747c7aca525e78b0666675" +checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe" dependencies = [ "nu-ansi-term", "tracing-core", @@ -2237,9 +2232,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" +checksum = "1b2cb4fbb9995eeb36ac86fadf24031ccd58f99d6b4b2d7b911db70bddb80d90" dependencies = [ "serde", "stable_deref_trait", @@ -2250,7 +2245,8 @@ name = "tt" version = "0.0.0" dependencies = [ "arrayvec", - "smol_str", + "intern", + "ra-ap-rustc_lexer", "stdx", "text-size", ] @@ -2353,6 +2349,8 @@ dependencies = [ "crossbeam-channel", "notify", "paths", + "rayon", + "rustc-hash", "stdx", "tracing", "vfs", @@ -2578,9 +2576,9 @@ checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" -version = "0.6.8" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c52e9c97a68071b23e836c9380edae937f17b9c4667bd021973efc689f618d" +checksum = "56c52728401e1dc672a56e81e593e912aa54c78f40246869f78359a2bf24d29d" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index d77804a418e2..4e8348406984 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,6 +8,7 @@ rust-version = "1.78" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] +repository = "https://github.com/rust-lang/rust-analyzer" [profile.dev] debug = 1 @@ -50,7 +51,7 @@ debug = 2 [workspace.dependencies] # local crates base-db = { path = "./crates/base-db", version = "0.0.0" } -cfg = { path = "./crates/cfg", version = "0.0.0" } +cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] } flycheck = { path = "./crates/flycheck", version = "0.0.0" } hir = { path = "./crates/hir", version = "0.0.0" } hir-def = { path = "./crates/hir-def", version = "0.0.0" } @@ -77,6 +78,7 @@ salsa = { path = "./crates/salsa", version = "0.0.0" } span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } +syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" } text-edit = { path = "./crates/text-edit", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } @@ -87,10 +89,9 @@ ra-ap-rustc_lexer = { version = "0.53.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.53.0", default-features = false } ra-ap-rustc_index = { version = "0.53.0", default-features = false } ra-ap-rustc_abi = { version = "0.53.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.44.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. 
-sourcegen = { path = "./crates/sourcegen" } test-fixture = { path = "./crates/test-fixture" } test-utils = { path = "./crates/test-utils" } @@ -105,10 +106,10 @@ arrayvec = "0.7.4" bitflags = "2.4.1" cargo_metadata = "0.18.1" camino = "1.1.6" -chalk-solve = { version = "0.97.0", default-features = false } -chalk-ir = "0.97.0" -chalk-recursive = { version = "0.97.0", default-features = false } -chalk-derive = "0.97.0" +chalk-solve = { version = "0.98.0", default-features = false } +chalk-ir = "0.98.0" +chalk-recursive = { version = "0.98.0", default-features = false } +chalk-derive = "0.98.0" crossbeam-channel = "0.5.8" dissimilar = "1.0.7" dot = "0.1.4" @@ -164,14 +165,14 @@ xshell = "0.2.5" dashmap = { version = "=5.5.3", features = ["raw-api"] } [workspace.lints.rust] -bare_trait_objects = "warn" +# remember to update RUSTFLAGS in ci.yml if you add something here + elided_lifetimes_in_paths = "warn" -ellipsis_inclusive_range_patterns = "warn" explicit_outlives_requirements = "warn" +unsafe_op_in_unsafe_fn = "warn" unused_extern_crates = "warn" unused_lifetimes = "warn" unreachable_pub = "warn" -semicolon_in_expressions_from_macros = "warn" [workspace.lints.clippy] # FIXME Remove the tidy test once the lint table is stable diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml index 4ab99fc33c46..b17b08a720c9 100644 --- a/crates/base-db/Cargo.toml +++ b/crates/base-db/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "base-db" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Basic database traits for rust-analyzer. The concrete DB is defined by `ide` (aka `ra_ap_ide`)." authors.workspace = true edition.workspace = true @@ -27,6 +28,7 @@ stdx.workspace = true syntax.workspace = true vfs.workspace = true span.workspace = true +intern.workspace = true [lints] workspace = true diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs index 0fd54e1211cd..a9d91d64ceb6 100644 --- a/crates/base-db/src/change.rs +++ b/crates/base-db/src/change.rs @@ -7,7 +7,7 @@ use salsa::Durability; use triomphe::Arc; use vfs::FileId; -use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId}; +use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId}; /// Encapsulate a bunch of raw `.set` calls on the database. #[derive(Default)] @@ -50,7 +50,7 @@ impl FileChange { self.crate_graph = Some(graph); } - pub fn apply(self, db: &mut dyn SourceDatabaseExt) { + pub fn apply(self, db: &mut dyn SourceRootDatabase) { let _p = tracing::info_span!("FileChange::apply").entered(); if let Some(roots) = self.roots { for (idx, root) in roots.into_iter().enumerate() { diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 1d172ab9e40e..3616fa9fd868 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -9,16 +9,14 @@ use std::{fmt, mem, ops}; use cfg::CfgOptions; +use intern::Symbol; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::{FxHashMap, FxHashSet}; -use span::Edition; -use syntax::SmolStr; +use span::{Edition, EditionedFileId}; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; -// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, -// then the crate for the proc-macro hasn't been build yet as the build data is missing. 
-pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>; +pub type ProcMacroPaths = FxHashMap<CrateId, Result<(String, AbsPathBuf), String>>; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct SourceRootId(pub u32); @@ -99,8 +97,8 @@ impl fmt::Debug for CrateGraph { pub type CrateId = Idx<CrateData>; -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct CrateName(SmolStr); +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CrateName(Symbol); impl CrateName { /// Creates a crate name, checking for dashes in the string provided. @@ -110,16 +108,16 @@ impl CrateName { if name.contains('-') { Err(name) } else { - Ok(Self(SmolStr::new(name))) + Ok(Self(Symbol::intern(name))) } } /// Creates a crate name, unconditionally replacing the dashes with underscores. pub fn normalize_dashes(name: &str) -> CrateName { - Self(SmolStr::new(name.replace('-', "_"))) + Self(Symbol::intern(&name.replace('-', "_"))) } - pub fn as_smol_str(&self) -> &SmolStr { + pub fn symbol(&self) -> &Symbol { &self.0 } } @@ -133,7 +131,7 @@ impl fmt::Display for CrateName { impl ops::Deref for CrateName { type Target = str; fn deref(&self) -> &str { - &self.0 + self.0.as_str() } } @@ -141,11 +139,11 @@ impl ops::Deref for CrateName { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum CrateOrigin { /// Crates that are from the rustc workspace. - Rustc { name: String }, + Rustc { name: Symbol }, /// Crates that are workspace members. - Local { repo: Option<String>, name: Option<String> }, + Local { repo: Option<String>, name: Option<Symbol> }, /// Crates that are non member libraries. - Library { repo: Option<String>, name: String }, + Library { repo: Option<String>, name: Symbol }, /// Crates that are provided by the language, like std, core, proc-macro, ... Lang(LangCrateOrigin), } @@ -201,16 +199,16 @@ impl fmt::Display for LangCrateOrigin { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateDisplayName { // The name we use to display various paths (with `_`). crate_name: CrateName, // The name as specified in Cargo.toml (with `-`). 
- canonical_name: String, + canonical_name: Symbol, } impl CrateDisplayName { - pub fn canonical_name(&self) -> &str { + pub fn canonical_name(&self) -> &Symbol { &self.canonical_name } pub fn crate_name(&self) -> &CrateName { @@ -220,7 +218,7 @@ impl CrateDisplayName { impl From<CrateName> for CrateDisplayName { fn from(crate_name: CrateName) -> CrateDisplayName { - let canonical_name = crate_name.to_string(); + let canonical_name = crate_name.0.clone(); CrateDisplayName { crate_name, canonical_name } } } @@ -239,9 +237,9 @@ impl ops::Deref for CrateDisplayName { } impl CrateDisplayName { - pub fn from_canonical_name(canonical_name: String) -> CrateDisplayName { - let crate_name = CrateName::normalize_dashes(&canonical_name); - CrateDisplayName { crate_name, canonical_name } + pub fn from_canonical_name(canonical_name: &str) -> CrateDisplayName { + let crate_name = CrateName::normalize_dashes(canonical_name); + CrateDisplayName { crate_name, canonical_name: Symbol::intern(canonical_name) } } } @@ -662,6 +660,10 @@ impl CrateData { fn add_dep(&mut self, dep: Dependency) { self.dependencies.push(dep) } + + pub fn root_file_id(&self) -> EditionedFileId { + EditionedFileId::new(self.root_file_id, self.edition) + } } impl Extend<(String, String)> for Env { @@ -688,6 +690,14 @@ impl Env { pub fn extend_from_other(&mut self, other: &Env) { self.entries.extend(other.entries.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); } + + pub fn is_empty(&self) -> bool { + self.entries.is_empty() + } + + pub fn insert(&mut self, k: impl Into<String>, v: impl Into<String>) -> Option<String> { + self.entries.insert(k.into(), v.into()) + } } impl From<Env> for Vec<(String, String)> { @@ -698,6 +708,15 @@ impl From<Env> for Vec<(String, String)> { } } +impl<'a> IntoIterator for &'a Env { + type Item = (&'a String, &'a String); + type IntoIter = std::collections::hash_map::Iter<'a, String, String>; + + fn into_iter(self) -> Self::IntoIter { + self.entries.iter() + } +} + #[derive(Debug)] pub struct CyclicDependenciesError { path: Vec<(CrateId, Option<CrateName>)>, diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 96fbbc317d48..20ef45d0b372 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -1,13 +1,15 @@ //! base_db defines basic database traits. The concrete DB is defined by ide. - +// FIXME: Rename this crate, base db is non descriptive mod change; mod input; use std::panic; use salsa::Durability; +use span::EditionedFileId; use syntax::{ast, Parse, SourceFile, SyntaxError}; use triomphe::Arc; +use vfs::FileId; pub use crate::{ change::FileChange, @@ -18,8 +20,7 @@ pub use crate::{ }, }; pub use salsa::{self, Cancelled}; -pub use span::{FilePosition, FileRange}; -pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath}; +pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath}; pub use semver::{BuildMetadata, Prerelease, Version, VersionReq}; @@ -41,13 +42,11 @@ pub trait Upcast<T: ?Sized> { fn upcast(&self) -> &T; } -pub const DEFAULT_FILE_TEXT_LRU_CAP: usize = 16; -pub const DEFAULT_PARSE_LRU_CAP: usize = 128; -pub const DEFAULT_BORROWCK_LRU_CAP: usize = 2024; +pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16; +pub const DEFAULT_PARSE_LRU_CAP: u16 = 128; +pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024; pub trait FileLoader { - /// Text of the file. 
- fn file_text(&self, file_id: FileId) -> Arc; fn resolve_path(&self, path: AnchoredPath<'_>) -> Option; /// Crates whose root's source root is the same as the source root of `file_id` fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>; @@ -57,11 +56,19 @@ pub trait FileLoader { /// model. Everything else in rust-analyzer is derived from these queries. #[salsa::query_group(SourceDatabaseStorage)] pub trait SourceDatabase: FileLoader + std::fmt::Debug { + #[salsa::input] + fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; + + /// Text of the file. + #[salsa::lru] + fn file_text(&self, file_id: FileId) -> Arc; + /// Parses the file into the syntax tree. - fn parse(&self, file_id: FileId) -> Parse; + #[salsa::lru] + fn parse(&self, file_id: EditionedFileId) -> Parse; /// Returns the set of errors obtained from parsing the file including validation errors. - fn parse_errors(&self, file_id: FileId) -> Option>; + fn parse_errors(&self, file_id: EditionedFileId) -> Option>; /// The crate graph. #[salsa::input] @@ -82,14 +89,14 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option Parse { +fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse { let _p = tracing::info_span!("parse", ?file_id).entered(); + let (file_id, edition) = file_id.unpack(); let text = db.file_text(file_id); - // FIXME: Edition based parsing - SourceFile::parse(&text, span::Edition::CURRENT) + SourceFile::parse(&text, edition) } -fn parse_errors(db: &dyn SourceDatabase, file_id: FileId) -> Option> { +fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option> { let errors = db.parse(file_id).errors(); match &*errors { [] => None, @@ -97,15 +104,18 @@ fn parse_errors(db: &dyn SourceDatabase, file_id: FileId) -> Option Arc { + let bytes = db.compressed_file_text(file_id); + let bytes = + lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail"); + let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8"); + Arc::from(text) +} + /// We don't want to give HIR knowledge of source roots, hence we extract these /// methods into a separate DB. -#[salsa::query_group(SourceDatabaseExtStorage)] -pub trait SourceDatabaseExt: SourceDatabase { - #[salsa::input] - fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; - - fn file_text(&self, file_id: FileId) -> Arc; - +#[salsa::query_group(SourceRootDatabaseStorage)] +pub trait SourceRootDatabase: SourceDatabase { /// Path to a file, relative to the root of its source root. /// Source root of the file. 
#[salsa::input] @@ -118,15 +128,7 @@ pub trait SourceDatabaseExt: SourceDatabase { fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>; } -fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc { - let bytes = db.compressed_file_text(file_id); - let bytes = - lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail"); - let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8"); - Arc::from(text) -} - -pub trait SourceDatabaseExt2 { +pub trait SourceDatabaseFileInputExt { fn set_file_text(&mut self, file_id: FileId, text: &str) { self.set_file_text_with_durability(file_id, text, Durability::LOW); } @@ -139,7 +141,7 @@ pub trait SourceDatabaseExt2 { ); } -impl SourceDatabaseExt2 for Db { +impl SourceDatabaseFileInputExt for Db { fn set_file_text_with_durability( &mut self, file_id: FileId, @@ -156,7 +158,7 @@ impl SourceDatabaseExt2 for Db { } } -fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> { +fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> { let graph = db.crate_graph(); let mut crates = graph .iter() @@ -170,13 +172,12 @@ fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[Crat crates.into_iter().collect() } -/// Silly workaround for cyclic deps between the traits +// FIXME: Would be nice to get rid of this somehow +/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split +/// regarding FileLoader pub struct FileLoaderDelegate(pub T); -impl FileLoader for FileLoaderDelegate<&'_ T> { - fn file_text(&self, file_id: FileId) -> Arc { - SourceDatabaseExt::file_text(self.0, file_id) - } +impl FileLoader for FileLoaderDelegate<&'_ T> { fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { // FIXME: this *somehow* should be platform agnostic... let source_root = self.0.file_source_root(path.anchor); diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index 9b3a5026ac80..29b7ad6f8fe2 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "cfg" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Conditional compiling options, `cfg` attribute parser and evaluator for rust-analyzer." authors.workspace = true edition.workspace = true @@ -15,7 +16,8 @@ doctest = false rustc-hash.workspace = true # locals deps -tt.workspace = true +tt = { workspace = true, optional = true } +intern.workspace = true [dev-dependencies] expect-test = "1.4.1" @@ -27,7 +29,7 @@ arbitrary = "1.3.2" derive_arbitrary = "1.3.2" # local deps -mbe.workspace = true +syntax-bridge.workspace = true syntax.workspace = true [lints] diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index b7dbb7b5fdd7..35c0c89c70cf 100644 --- a/crates/cfg/src/cfg_expr.rs +++ b/crates/cfg/src/cfg_expr.rs @@ -2,20 +2,20 @@ //! //! See: -use std::{fmt, slice::Iter as SliceIter}; +use std::fmt; -use tt::SmolStr; +use intern::Symbol; /// A simple configuration value passed in from the outside. -#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum CfgAtom { /// eg. `#[cfg(test)]` - Flag(SmolStr), + Flag(Symbol), /// eg. `#[cfg(target_os = "linux")]` /// /// Note that a key can have multiple values that are all considered "active" at the same time. /// For example, `#[cfg(target_feature = "sse")]` and `#[cfg(target_feature = "sse2")]`. 
- KeyValue { key: SmolStr, value: SmolStr }, + KeyValue { key: Symbol, value: Symbol }, } impl fmt::Display for CfgAtom { @@ -32,8 +32,8 @@ impl fmt::Display for CfgAtom { pub enum CfgExpr { Invalid, Atom(CfgAtom), - All(Vec), - Any(Vec), + All(Box<[CfgExpr]>), + Any(Box<[CfgExpr]>), Not(Box), } @@ -44,6 +44,7 @@ impl From for CfgExpr { } impl CfgExpr { + #[cfg(feature = "tt")] pub fn parse(tt: &tt::Subtree) -> CfgExpr { next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid) } @@ -63,10 +64,14 @@ impl CfgExpr { } } } -fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { + +#[cfg(feature = "tt")] +fn next_cfg_expr(it: &mut std::slice::Iter<'_, tt::TokenTree>) -> Option { + use intern::sym; + let name = match it.next() { None => return None, - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(), Some(_) => return Some(CfgExpr::Invalid), }; @@ -77,10 +82,7 @@ fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => { it.next(); it.next(); - // FIXME: escape? raw string? - let value = - SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"')); - CfgAtom::KeyValue { key: name, value }.into() + CfgAtom::KeyValue { key: name, value: literal.symbol.clone() }.into() } _ => return Some(CfgExpr::Invalid), } @@ -88,11 +90,13 @@ fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option Some(tt::TokenTree::Subtree(subtree)) => { it.next(); let mut sub_it = subtree.token_trees.iter(); - let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)).collect(); - match name.as_str() { - "all" => CfgExpr::All(subs), - "any" => CfgExpr::Any(subs), - "not" => CfgExpr::Not(Box::new(subs.pop().unwrap_or(CfgExpr::Invalid))), + let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it)); + match name { + s if s == sym::all => CfgExpr::All(subs.collect()), + s if s == sym::any => CfgExpr::Any(subs.collect()), + s if s == sym::not => { + CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid))) + } _ => CfgExpr::Invalid, } } @@ -112,11 +116,11 @@ fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option impl arbitrary::Arbitrary<'_> for CfgAtom { fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result { if u.arbitrary()? { - Ok(CfgAtom::Flag(String::arbitrary(u)?.into())) + Ok(CfgAtom::Flag(Symbol::intern(<_>::arbitrary(u)?))) } else { Ok(CfgAtom::KeyValue { - key: String::arbitrary(u)?.into(), - value: String::arbitrary(u)?.into(), + key: Symbol::intern(<_>::arbitrary(u)?), + value: Symbol::intern(<_>::arbitrary(u)?), }) } } diff --git a/crates/cfg/src/dnf.rs b/crates/cfg/src/dnf.rs index fd80e1ebe683..f3ebca046509 100644 --- a/crates/cfg/src/dnf.rs +++ b/crates/cfg/src/dnf.rs @@ -27,7 +27,7 @@ struct Literal { } impl DnfExpr { - pub fn new(expr: CfgExpr) -> Self { + pub fn new(expr: &CfgExpr) -> Self { let builder = Builder { expr: DnfExpr { conjunctions: Vec::new() } }; builder.lower(expr) @@ -66,9 +66,9 @@ impl DnfExpr { } } - res.enabled.sort_unstable(); + res.enabled.sort_unstable_by(compare); res.enabled.dedup(); - res.disabled.sort_unstable(); + res.disabled.sort_unstable_by(compare); res.disabled.dedup(); Some(res) } @@ -114,14 +114,25 @@ impl DnfExpr { }; // Undo the FxHashMap randomization for consistent output. 
- diff.enable.sort_unstable(); - diff.disable.sort_unstable(); + diff.enable.sort_unstable_by(compare); + diff.disable.sort_unstable_by(compare); Some(diff) }) } } +fn compare(a: &CfgAtom, b: &CfgAtom) -> std::cmp::Ordering { + match (a, b) { + (CfgAtom::Flag(a), CfgAtom::Flag(b)) => a.as_str().cmp(b.as_str()), + (CfgAtom::Flag(_), CfgAtom::KeyValue { .. }) => std::cmp::Ordering::Less, + (CfgAtom::KeyValue { .. }, CfgAtom::Flag(_)) => std::cmp::Ordering::Greater, + (CfgAtom::KeyValue { key, value }, CfgAtom::KeyValue { key: key2, value: value2 }) => { + key.as_str().cmp(key2.as_str()).then(value.as_str().cmp(value2.as_str())) + } + } +} + impl fmt::Display for DnfExpr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.conjunctions.len() != 1 { @@ -143,9 +154,9 @@ impl fmt::Display for DnfExpr { } impl Conjunction { - fn new(parts: Vec) -> Self { + fn new(parts: Box<[CfgExpr]>) -> Self { let mut literals = Vec::new(); - for part in parts { + for part in parts.into_vec() { match part { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => { literals.push(Literal::new(part)); @@ -221,27 +232,28 @@ struct Builder { } impl Builder { - fn lower(mut self, expr: CfgExpr) -> DnfExpr { + fn lower(mut self, expr: &CfgExpr) -> DnfExpr { let expr = make_nnf(expr); let expr = make_dnf(expr); match expr { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => { - self.expr.conjunctions.push(Conjunction::new(vec![expr])); + self.expr.conjunctions.push(Conjunction::new(Box::new([expr]))); } CfgExpr::All(conj) => { self.expr.conjunctions.push(Conjunction::new(conj)); } - CfgExpr::Any(mut disj) => { + CfgExpr::Any(disj) => { + let mut disj = disj.into_vec(); disj.reverse(); while let Some(conj) = disj.pop() { match conj { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::All(_) | CfgExpr::Not(_) => { - self.expr.conjunctions.push(Conjunction::new(vec![conj])); + self.expr.conjunctions.push(Conjunction::new(Box::new([conj]))); } CfgExpr::Any(inner_disj) => { // Flatten. - disj.extend(inner_disj.into_iter().rev()); + disj.extend(inner_disj.into_vec().into_iter().rev()); } } } @@ -255,11 +267,11 @@ impl Builder { fn make_dnf(expr: CfgExpr) -> CfgExpr { match expr { CfgExpr::Invalid | CfgExpr::Atom(_) | CfgExpr::Not(_) => expr, - CfgExpr::Any(e) => flatten(CfgExpr::Any(e.into_iter().map(make_dnf).collect())), + CfgExpr::Any(e) => flatten(CfgExpr::Any(e.into_vec().into_iter().map(make_dnf).collect())), CfgExpr::All(e) => { - let e = e.into_iter().map(make_dnf).collect::>(); + let e = e.into_vec().into_iter().map(make_dnf).collect::>(); - flatten(CfgExpr::Any(distribute_conj(&e))) + flatten(CfgExpr::Any(distribute_conj(&e).into_boxed_slice())) } } } @@ -270,7 +282,7 @@ fn distribute_conj(conj: &[CfgExpr]) -> Vec { match rest { [head, tail @ ..] => match head { CfgExpr::Any(disj) => { - for part in disj { + for part in disj.iter() { with.push(part.clone()); go(out, with, tail); with.pop(); @@ -284,7 +296,7 @@ fn distribute_conj(conj: &[CfgExpr]) -> Vec { }, _ => { // Turn accumulated parts into a new conjunction. 
- out.push(CfgExpr::All(with.clone())); + out.push(CfgExpr::All(with.clone().into_boxed_slice())); } } } @@ -297,25 +309,27 @@ fn distribute_conj(conj: &[CfgExpr]) -> Vec { out } -fn make_nnf(expr: CfgExpr) -> CfgExpr { +fn make_nnf(expr: &CfgExpr) -> CfgExpr { match expr { - CfgExpr::Invalid | CfgExpr::Atom(_) => expr, - CfgExpr::Any(expr) => CfgExpr::Any(expr.into_iter().map(make_nnf).collect()), - CfgExpr::All(expr) => CfgExpr::All(expr.into_iter().map(make_nnf).collect()), - CfgExpr::Not(operand) => match *operand { - CfgExpr::Invalid | CfgExpr::Atom(_) => CfgExpr::Not(operand.clone()), // Original negated expr - CfgExpr::Not(expr) => { - // Remove double negation. - make_nnf(*expr) - } - // Convert negated conjunction/disjunction using DeMorgan's Law. - CfgExpr::Any(inner) => CfgExpr::All( - inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(), - ), - CfgExpr::All(inner) => CfgExpr::Any( - inner.into_iter().map(|expr| make_nnf(CfgExpr::Not(Box::new(expr)))).collect(), - ), - }, + CfgExpr::Invalid | CfgExpr::Atom(_) => expr.clone(), + CfgExpr::Any(expr) => CfgExpr::Any(expr.iter().map(make_nnf).collect()), + CfgExpr::All(expr) => CfgExpr::All(expr.iter().map(make_nnf).collect()), + CfgExpr::Not(operand) => make_nnf_neg(operand), + } +} + +fn make_nnf_neg(operand: &CfgExpr) -> CfgExpr { + match operand { + // Original negated expr + CfgExpr::Invalid => CfgExpr::Not(Box::new(CfgExpr::Invalid)), // Original negated expr + // Original negated expr + CfgExpr::Atom(atom) => CfgExpr::Not(Box::new(CfgExpr::Atom(atom.clone()))), + // Remove double negation. + CfgExpr::Not(expr) => make_nnf(expr), + // Convert negated conjunction/disjunction using DeMorgan's Law. + CfgExpr::Any(inner) => CfgExpr::All(inner.iter().map(make_nnf_neg).collect()), + // Convert negated conjunction/disjunction using DeMorgan's Law. + CfgExpr::All(inner) => CfgExpr::Any(inner.iter().map(make_nnf_neg).collect()), } } @@ -324,20 +338,22 @@ fn flatten(expr: CfgExpr) -> CfgExpr { match expr { CfgExpr::All(inner) => CfgExpr::All( inner - .into_iter() + .iter() .flat_map(|e| match e { - CfgExpr::All(inner) => inner, - _ => vec![e], + CfgExpr::All(inner) => inner.as_ref(), + _ => std::slice::from_ref(e), }) + .cloned() .collect(), ), CfgExpr::Any(inner) => CfgExpr::Any( inner - .into_iter() + .iter() .flat_map(|e| match e { - CfgExpr::Any(inner) => inner, - _ => vec![e], + CfgExpr::Any(inner) => inner.as_ref(), + _ => std::slice::from_ref(e), }) + .cloned() .collect(), ), _ => expr, diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index 8b30286a0a8a..e9daaf7de3c3 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -8,7 +8,8 @@ mod tests; use std::fmt; use rustc_hash::FxHashSet; -use tt::SmolStr; + +use intern::Symbol; pub use cfg_expr::{CfgAtom, CfgExpr}; pub use dnf::DnfExpr; @@ -48,11 +49,11 @@ impl CfgOptions { cfg.fold(&|atom| self.enabled.contains(atom)) } - pub fn insert_atom(&mut self, key: SmolStr) { + pub fn insert_atom(&mut self, key: Symbol) { self.enabled.insert(CfgAtom::Flag(key)); } - pub fn insert_key_value(&mut self, key: SmolStr, value: SmolStr) { + pub fn insert_key_value(&mut self, key: Symbol, value: Symbol) { self.enabled.insert(CfgAtom::KeyValue { key, value }); } @@ -66,19 +67,16 @@ impl CfgOptions { } } - pub fn get_cfg_keys(&self) -> impl Iterator { + pub fn get_cfg_keys(&self) -> impl Iterator { self.enabled.iter().map(|it| match it { CfgAtom::Flag(key) => key, CfgAtom::KeyValue { key, .. 
} => key, }) } - pub fn get_cfg_values<'a>( - &'a self, - cfg_key: &'a str, - ) -> impl Iterator + 'a { + pub fn get_cfg_values<'a>(&'a self, cfg_key: &'a str) -> impl Iterator + 'a { self.enabled.iter().filter_map(move |it| match it { - CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value), + CfgAtom::KeyValue { key, value } if cfg_key == key.as_str() => Some(value), _ => None, }) } @@ -110,6 +108,14 @@ impl<'a> IntoIterator for &'a CfgOptions { } } +impl FromIterator for CfgOptions { + fn from_iter>(iter: T) -> Self { + let mut options = CfgOptions::default(); + options.extend(iter); + options + } +} + #[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct CfgDiff { // Invariants: No duplicates, no atom that's both in `enable` and `disable`. diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index dddaf2cce182..6d87d83ad930 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,7 +1,11 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; -use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; +use intern::Symbol; use syntax::{ast, AstNode, Edition}; +use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; @@ -28,7 +32,7 @@ fn check_dnf(input: &str, expect: Expect) { DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); - let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); + let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); expect.assert_eq(&actual); } @@ -42,7 +46,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); - let dnf = DnfExpr::new(cfg); + let dnf = DnfExpr::new(&cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); expect.assert_eq(&why_inactive); } @@ -58,40 +62,51 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); - let dnf = DnfExpr::new(cfg); + let dnf = DnfExpr::new(&cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); assert_eq!(hints, expected_hints); } #[test] fn test_cfg_expr_parser() { - assert_parse_result("#![cfg(foo)]", CfgAtom::Flag("foo".into()).into()); - assert_parse_result("#![cfg(foo,)]", CfgAtom::Flag("foo".into()).into()); + assert_parse_result("#![cfg(foo)]", CfgAtom::Flag(Symbol::intern("foo")).into()); + assert_parse_result("#![cfg(foo,)]", CfgAtom::Flag(Symbol::intern("foo")).into()); assert_parse_result( "#![cfg(not(foo))]", - CfgExpr::Not(Box::new(CfgAtom::Flag("foo".into()).into())), + CfgExpr::Not(Box::new(CfgAtom::Flag(Symbol::intern("foo")).into())), ); assert_parse_result("#![cfg(foo(bar))]", CfgExpr::Invalid); // Only take the first - assert_parse_result(r#"#![cfg(foo, bar = "baz")]"#, CfgAtom::Flag("foo".into()).into()); + assert_parse_result( + r#"#![cfg(foo, bar = "baz")]"#, + CfgAtom::Flag(Symbol::intern("foo")).into(), + ); assert_parse_result( r#"#![cfg(all(foo, bar = "baz"))]"#, - CfgExpr::All(vec![ - CfgAtom::Flag("foo".into()).into(), - CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(), - ]), + CfgExpr::All( + vec![ + CfgAtom::Flag(Symbol::intern("foo")).into(), + CfgAtom::KeyValue { key: Symbol::intern("bar"), value: Symbol::intern("baz") } + .into(), + ] + .into_boxed_slice(), + ), ); assert_parse_result( 
r#"#![cfg(any(not(), all(), , bar = "baz",))]"#, - CfgExpr::Any(vec![ - CfgExpr::Not(Box::new(CfgExpr::Invalid)), - CfgExpr::All(vec![]), - CfgExpr::Invalid, - CfgAtom::KeyValue { key: "bar".into(), value: "baz".into() }.into(), - ]), + CfgExpr::Any( + vec![ + CfgExpr::Not(Box::new(CfgExpr::Invalid)), + CfgExpr::All(Box::new([])), + CfgExpr::Invalid, + CfgAtom::KeyValue { key: Symbol::intern("bar"), value: Symbol::intern("baz") } + .into(), + ] + .into_boxed_slice(), + ), ); } @@ -167,7 +182,7 @@ fn hints() { check_enable_hints("#![cfg(all(a, b))]", &opts, &["enable a and b"]); - opts.insert_atom("test".into()); + opts.insert_atom(Symbol::intern("test")); check_enable_hints("#![cfg(test)]", &opts, &[]); check_enable_hints("#![cfg(not(test))]", &opts, &["disable test"]); @@ -180,7 +195,7 @@ fn hints_impossible() { check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]); - opts.insert_atom("test".into()); + opts.insert_atom(Symbol::intern("test")); check_enable_hints("#![cfg(all(test, not(test)))]", &opts, &[]); } @@ -188,8 +203,8 @@ fn hints_impossible() { #[test] fn why_inactive() { let mut opts = CfgOptions::default(); - opts.insert_atom("test".into()); - opts.insert_atom("test2".into()); + opts.insert_atom(Symbol::intern("test")); + opts.insert_atom(Symbol::intern("test2")); check_why_inactive("#![cfg(a)]", &opts, expect![["a is disabled"]]); check_why_inactive("#![cfg(not(test))]", &opts, expect![["test is enabled"]]); @@ -231,6 +246,6 @@ fn proptest() { let mut u = Unstructured::new(&buf); let cfg = CfgExpr::arbitrary(&mut u).unwrap(); - DnfExpr::new(cfg); + DnfExpr::new(&cfg); } } diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index d81a5fe34006..a7f6e296d3b0 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "flycheck" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Functionality needed for rust-analyzer to run `cargo` commands in a background thread." authors.workspace = true edition.workspace = true @@ -24,6 +25,7 @@ process-wrap.workspace = true paths.workspace = true stdx.workspace = true toolchain.workspace = true +project-model.workspace = true [lints] workspace = true \ No newline at end of file diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index a257633516ce..3dd2a91d8fd9 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -20,10 +20,11 @@ pub use cargo_metadata::diagnostic::{ use toolchain::Tool; mod command; +pub mod project_json; mod test_runner; use command::{CommandHandle, ParseFromLine}; -pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState}; +pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState, TestTarget}; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub enum InvocationStrategy { @@ -240,7 +241,7 @@ enum FlycheckStatus { Finished, } -const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; +pub const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; impl FlycheckActor { fn new( @@ -426,6 +427,8 @@ impl FlycheckActor { } } + cmd.arg("--keep-going"); + options.apply_on_command(&mut cmd); (cmd, options.extra_args.clone()) } diff --git a/crates/flycheck/src/project_json.rs b/crates/flycheck/src/project_json.rs new file mode 100644 index 000000000000..b6e4495bc6d6 --- /dev/null +++ b/crates/flycheck/src/project_json.rs @@ -0,0 +1,152 @@ +//! A `cargo-metadata`-equivalent for non-Cargo build systems. 
+use std::{io, process::Command}; + +use crossbeam_channel::Sender; +use paths::{AbsPathBuf, Utf8Path, Utf8PathBuf}; +use project_model::ProjectJsonData; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::command::{CommandHandle, ParseFromLine}; + +pub const ARG_PLACEHOLDER: &str = "{arg}"; + +/// A command wrapper for getting a `rust-project.json`. +/// +/// This is analogous to `cargo-metadata`, but for non-Cargo build systems. +pub struct Discover { + command: Vec, + sender: Sender, +} + +#[derive(PartialEq, Clone, Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum DiscoverArgument { + Path(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), + Buildfile(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), +} + +fn serialize_abs_pathbuf(path: &AbsPathBuf, se: S) -> Result +where + S: serde::Serializer, +{ + let path: &Utf8Path = path.as_ref(); + se.serialize_str(path.as_str()) +} + +impl Discover { + /// Create a new [Discover]. + pub fn new(sender: Sender, command: Vec) -> Self { + Self { sender, command } + } + + /// Spawn the command inside [Discover] and report progress, if any. + pub fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result { + let command = &self.command[0]; + let args = &self.command[1..]; + + let args: Vec = args + .iter() + .map(|arg| { + if arg == ARG_PLACEHOLDER { + serde_json::to_string(&discover_arg).expect("Unable to serialize args") + } else { + arg.to_owned() + } + }) + .collect(); + + let mut cmd = Command::new(command); + cmd.args(args); + + Ok(DiscoverHandle { _handle: CommandHandle::spawn(cmd, self.sender.clone())? }) + } +} + +/// A handle to a spawned [Discover]. +#[derive(Debug)] +pub struct DiscoverHandle { + _handle: CommandHandle, +} + +/// An enum containing either progress messages, an error, +/// or the materialized `rust-project`. +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(tag = "kind")] +#[serde(rename_all = "snake_case")] +enum DiscoverProjectData { + Finished { buildfile: Utf8PathBuf, project: ProjectJsonData }, + Error { error: String, source: Option }, + Progress { message: String }, +} + +#[derive(Debug, PartialEq, Clone)] +pub enum DiscoverProjectMessage { + Finished { project: ProjectJsonData, buildfile: AbsPathBuf }, + Error { error: String, source: Option }, + Progress { message: String }, +} + +impl DiscoverProjectMessage { + fn new(data: DiscoverProjectData) -> Self { + match data { + DiscoverProjectData::Finished { project, buildfile, .. } => { + let buildfile = buildfile.try_into().expect("Unable to make path absolute"); + DiscoverProjectMessage::Finished { project, buildfile } + } + DiscoverProjectData::Error { error, source } => { + DiscoverProjectMessage::Error { error, source } + } + DiscoverProjectData::Progress { message } => { + DiscoverProjectMessage::Progress { message } + } + } + } +} + +impl ParseFromLine for DiscoverProjectMessage { + fn from_line(line: &str, _error: &mut String) -> Option { + // can the line even be deserialized as JSON? 
+        let Ok(data) = serde_json::from_str::<Value>(line) else {
+            let err = DiscoverProjectData::Error { error: line.to_owned(), source: None };
+            return Some(DiscoverProjectMessage::new(err));
+        };
+
+        let Ok(data) = serde_json::from_value::<DiscoverProjectData>(data) else {
+            return None;
+        };
+
+        let msg = DiscoverProjectMessage::new(data);
+        Some(msg)
+    }
+
+    fn from_eof() -> Option<Self> {
+        None
+    }
+}
+
+#[test]
+fn test_deserialization() {
+    let message = r#"
+    {"kind": "progress", "message":"querying build system","input":{"files":["src/main.rs"]}}
+    "#;
+    let message: DiscoverProjectData =
+        serde_json::from_str(message).expect("Unable to deserialize message");
+    assert!(matches!(message, DiscoverProjectData::Progress { .. }));
+
+    let message = r#"
+    {"kind": "error", "error":"failed to deserialize command output","source":"command"}
+    "#;
+
+    let message: DiscoverProjectData =
+        serde_json::from_str(message).expect("Unable to deserialize message");
+    assert!(matches!(message, DiscoverProjectData::Error { .. }));
+
+    let message = r#"
+    {"kind": "finished", "project": {"sysroot": "foo", "crates": [], "runnables": []}, "buildfile":"rust-analyzer/BUILD"}
+    "#;
+
+    let message: DiscoverProjectData =
+        serde_json::from_str(message).expect("Unable to deserialize message");
+    assert!(matches!(message, DiscoverProjectData::Finished { .. }));
+}
diff --git a/crates/flycheck/src/test_runner.rs b/crates/flycheck/src/test_runner.rs
index c136dd13664a..74ebca34103a 100644
--- a/crates/flycheck/src/test_runner.rs
+++ b/crates/flycheck/src/test_runner.rs
@@ -59,19 +59,38 @@ pub struct CargoTestHandle {
 }
 
 // Example of a cargo test command:
-// cargo test --workspace --no-fail-fast -- module::func -Z unstable-options --format=json
+// cargo test --workspace --no-fail-fast -- -Z unstable-options --format=json
+// or
+// cargo test --package my-package --no-fail-fast -- module::func -Z unstable-options --format=json
+
+#[derive(Debug)]
+pub enum TestTarget {
+    Workspace,
+    Package(String),
+}
 
 impl CargoTestHandle {
     pub fn new(
         path: Option<&str>,
         options: CargoOptions,
         root: &AbsPath,
+        test_target: TestTarget,
         sender: Sender<CargoTestMessage>,
     ) -> std::io::Result<Self> {
         let mut cmd = Command::new(Tool::Cargo.path());
         cmd.env("RUSTC_BOOTSTRAP", "1");
         cmd.arg("test");
-        cmd.arg("--workspace");
+
+        match &test_target {
+            TestTarget::Package(package) => {
+                cmd.arg("--package");
+                cmd.arg(package);
+            }
+            TestTarget::Workspace => {
+                cmd.arg("--workspace");
+            }
+        };
+
         // --no-fail-fast is needed to ensure that all requested tests will run
         cmd.arg("--no-fail-fast");
         cmd.arg("--manifest-path");
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index 41c59ea0d93f..5b9d227e3152 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "hir-def"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A crate-agnostic representation of Rust definitions for rust-analyzer."
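# Illustrative note (assumed names, not from this change): the `TestTarget` argument
# added to `CargoTestHandle::new` in crates/flycheck/src/test_runner.rs above selects
# between the two command shapes documented there, roughly:
#
#   TestTarget::Workspace               -> cargo test --workspace --no-fail-fast ...
#   TestTarget::Package("my-package")   -> cargo test --package my-package --no-fail-fast ...
#
# so a caller that knows which package owns the test under the cursor can avoid
# rebuilding the entire workspace. A hypothetical call site (options, root and sender
# assumed to be in scope):
#
#   CargoTestHandle::new(Some("module::func"), options, root, TestTarget::Package("my-package".to_owned()), sender)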
authors.workspace = true edition.workspace = true @@ -28,6 +29,7 @@ tracing.workspace = true smallvec.workspace = true hashbrown.workspace = true triomphe.workspace = true +rustc_apfloat = "0.2.0" ra-ap-rustc_parse_format.workspace = true ra-ap-rustc_abi.workspace = true @@ -37,7 +39,6 @@ stdx.workspace = true intern.workspace = true base-db.workspace = true syntax.workspace = true -profile.workspace = true hir-expand.workspace = true mbe.workspace = true cfg.workspace = true @@ -52,7 +53,7 @@ expect-test.workspace = true # local deps test-utils.workspace = true test-fixture.workspace = true - +syntax-bridge.workspace = true [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 184dab8367c1..198dc93f6b14 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -1,6 +1,6 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. -use std::{borrow::Cow, hash::Hash, ops, slice::Iter as SliceIter}; +use std::{borrow::Cow, hash::Hash, ops, slice}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; @@ -9,17 +9,18 @@ use hir_expand::{ attrs::{collect_attrs, Attr, AttrId, RawAttrs}, HirFileId, InFile, }; +use intern::{sym, Symbol}; use la_arena::{ArenaMap, Idx, RawIdx}; use mbe::DelimiterKind; use syntax::{ ast::{self, HasAttrs}, - AstPtr, SmolStr, + AstPtr, }; use triomphe::Arc; use crate::{ db::DefDatabase, - item_tree::{AttrOwner, Fields, ItemTreeNode}, + item_tree::{AttrOwner, FieldParent, ItemTreeNode}, lang_item::LangItem, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource}, @@ -75,40 +76,36 @@ impl Attrs { let mut res = ArenaMap::default(); let crate_graph = db.crate_graph(); - let (fields, item_tree, krate) = match v { + let item_tree; + let (parent, fields, krate) = match v { VariantId::EnumVariantId(it) => { let loc = it.lookup(db); let krate = loc.parent.lookup(db).container.krate; - let item_tree = loc.id.item_tree(db); + item_tree = loc.id.item_tree(db); let variant = &item_tree[loc.id.value]; - (variant.fields.clone(), item_tree, krate) + (FieldParent::Variant(loc.id.value), &variant.fields, krate) } VariantId::StructId(it) => { let loc = it.lookup(db); let krate = loc.container.krate; - let item_tree = loc.id.item_tree(db); + item_tree = loc.id.item_tree(db); let struct_ = &item_tree[loc.id.value]; - (struct_.fields.clone(), item_tree, krate) + (FieldParent::Struct(loc.id.value), &struct_.fields, krate) } VariantId::UnionId(it) => { let loc = it.lookup(db); let krate = loc.container.krate; - let item_tree = loc.id.item_tree(db); + item_tree = loc.id.item_tree(db); let union_ = &item_tree[loc.id.value]; - (union_.fields.clone(), item_tree, krate) + (FieldParent::Union(loc.id.value), &union_.fields, krate) } }; - let fields = match fields { - Fields::Record(fields) | Fields::Tuple(fields) => fields, - Fields::Unit => return Arc::new(res), - }; - let cfg_options = &crate_graph[krate].cfg_options; let mut idx = 0; - for field in fields { - let attrs = item_tree.attrs(db, krate, field.into()); + for (id, _field) in fields.iter().enumerate() { + let attrs = item_tree.attrs(db, krate, AttrOwner::make_field_indexed(parent, id)); if attrs.is_cfg_enabled(cfg_options) { res.insert(Idx::from_raw(RawIdx::from(idx)), attrs); idx += 1; @@ -120,12 +117,12 @@ impl Attrs { } impl Attrs { - pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> { + pub fn by_key<'attrs>(&'attrs self, key: &'attrs Symbol) -> AttrQuery<'_> { AttrQuery { attrs: self, key } } pub fn 
cfg(&self) -> Option { - let mut cfgs = self.by_key("cfg").tt_values().map(CfgExpr::parse); + let mut cfgs = self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse); let first = cfgs.next()?; match cfgs.next() { Some(second) => { @@ -137,7 +134,7 @@ impl Attrs { } pub fn cfgs(&self) -> impl Iterator + '_ { - self.by_key("cfg").tt_values().map(CfgExpr::parse) + self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse) } pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool { @@ -147,50 +144,50 @@ impl Attrs { } } - pub fn lang(&self) -> Option<&str> { - self.by_key("lang").string_value() + pub fn lang(&self) -> Option<&Symbol> { + self.by_key(&sym::lang).string_value() } pub fn lang_item(&self) -> Option { - self.by_key("lang").string_value().and_then(LangItem::from_str) + self.by_key(&sym::lang).string_value().and_then(LangItem::from_symbol) } pub fn has_doc_hidden(&self) -> bool { - self.by_key("doc").tt_values().any(|tt| { + self.by_key(&sym::doc).tt_values().any(|tt| { tt.delimiter.kind == DelimiterKind::Parenthesis && - matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden") + matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden) }) } pub fn has_doc_notable_trait(&self) -> bool { - self.by_key("doc").tt_values().any(|tt| { + self.by_key(&sym::doc).tt_values().any(|tt| { tt.delimiter.kind == DelimiterKind::Parenthesis && - matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "notable_trait") + matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait) }) } pub fn doc_exprs(&self) -> impl Iterator + '_ { - self.by_key("doc").tt_values().map(DocExpr::parse) + self.by_key(&sym::doc).tt_values().map(DocExpr::parse) } - pub fn doc_aliases(&self) -> impl Iterator + '_ { + pub fn doc_aliases(&self) -> impl Iterator + '_ { self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec()) } - pub fn export_name(&self) -> Option<&str> { - self.by_key("export_name").string_value() + pub fn export_name(&self) -> Option<&Symbol> { + self.by_key(&sym::export_name).string_value() } pub fn is_proc_macro(&self) -> bool { - self.by_key("proc_macro").exists() + self.by_key(&sym::proc_macro).exists() } pub fn is_proc_macro_attribute(&self) -> bool { - self.by_key("proc_macro_attribute").exists() + self.by_key(&sym::proc_macro_attribute).exists() } pub fn is_proc_macro_derive(&self) -> bool { - self.by_key("proc_macro_derive").exists() + self.by_key(&sym::proc_macro_derive).exists() } pub fn is_test(&self) -> bool { @@ -199,33 +196,37 @@ impl Attrs { .segments() .iter() .rev() - .zip(["core", "prelude", "v1", "test"].iter().rev()) - .all(|it| it.0.as_str() == Some(it.1)) + .zip( + [sym::core.clone(), sym::prelude.clone(), sym::v1.clone(), sym::test.clone()] + .iter() + .rev(), + ) + .all(|it| it.0 == it.1) }) } pub fn is_ignore(&self) -> bool { - self.by_key("ignore").exists() + self.by_key(&sym::ignore).exists() } pub fn is_bench(&self) -> bool { - self.by_key("bench").exists() + self.by_key(&sym::bench).exists() } pub fn is_unstable(&self) -> bool { - self.by_key("unstable").exists() + self.by_key(&sym::unstable).exists() } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum DocAtom { /// eg. `#[doc(hidden)]` - Flag(SmolStr), + Flag(Symbol), /// eg. 
`#[doc(alias = "it")]` /// /// Note that a key can have multiple values that are all considered "active" at the same time. /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`. - KeyValue { key: SmolStr, value: SmolStr }, + KeyValue { key: Symbol, value: Symbol }, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -234,7 +235,7 @@ pub enum DocExpr { /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]` Atom(DocAtom), /// eg. `#[doc(alias("x", "y"))]` - Alias(Vec), + Alias(Vec), } impl From for DocExpr { @@ -248,9 +249,9 @@ impl DocExpr { next_doc_expr(&mut tt.token_trees.iter()).unwrap_or(DocExpr::Invalid) } - pub fn aliases(&self) -> &[SmolStr] { + pub fn aliases(&self) -> &[Symbol] { match self { - DocExpr::Atom(DocAtom::KeyValue { key, value }) if key == "alias" => { + DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => { std::slice::from_ref(value) } DocExpr::Alias(aliases) => aliases, @@ -259,10 +260,10 @@ impl DocExpr { } } -fn next_doc_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { +fn next_doc_expr(it: &mut slice::Iter<'_, tt::TokenTree>) -> Option { let name = match it.next() { None => return None, - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(), Some(_) => return Some(DocExpr::Invalid), }; @@ -270,13 +271,14 @@ fn next_doc_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option let ret = match it.as_slice().first() { Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => { match it.as_slice().get(1) { - Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => { + Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + kind: tt::LitKind::Str, + .. + }))) => { it.next(); it.next(); - // FIXME: escape? raw string? - let value = - SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"')); - DocAtom::KeyValue { key: name, value }.into() + DocAtom::KeyValue { key: name, value: text.clone() }.into() } _ => return Some(DocExpr::Invalid), } @@ -284,8 +286,8 @@ fn next_doc_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option Some(tt::TokenTree::Subtree(subtree)) => { it.next(); let subs = parse_comma_sep(subtree); - match name.as_str() { - "alias" => DocExpr::Alias(subs), + match &name { + s if *s == sym::alias => DocExpr::Alias(subs), _ => DocExpr::Invalid, } } @@ -301,15 +303,16 @@ fn next_doc_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option Some(ret) } -fn parse_comma_sep(subtree: &tt::Subtree) -> Vec { +fn parse_comma_sep(subtree: &tt::Subtree) -> Vec { subtree .token_trees .iter() .filter_map(|tt| match tt { - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { - // FIXME: escape? raw string? - Some(SmolStr::new(lit.text.trim_start_matches('"').trim_end_matches('"'))) - } + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + kind: tt::LitKind::Str, + symbol, + .. 
+ })) => Some(symbol.clone()), _ => None, }) .collect() @@ -556,7 +559,7 @@ impl AttrSourceMap { #[derive(Debug, Clone, Copy)] pub struct AttrQuery<'attr> { attrs: &'attr Attrs, - key: &'static str, + key: &'attr Symbol, } impl<'attr> AttrQuery<'attr> { @@ -564,10 +567,14 @@ impl<'attr> AttrQuery<'attr> { self.attrs().filter_map(|attr| attr.token_tree_value()) } - pub fn string_value(self) -> Option<&'attr str> { + pub fn string_value(self) -> Option<&'attr Symbol> { self.attrs().find_map(|attr| attr.string_value()) } + pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> { + self.attrs().find_map(|attr| attr.string_value_with_span()) + } + pub fn string_value_unescape(self) -> Option> { self.attrs().find_map(|attr| attr.string_value_unescape()) } @@ -578,9 +585,7 @@ impl<'attr> AttrQuery<'attr> { pub fn attrs(self) -> impl Iterator + Clone { let key = self.key; - self.attrs - .iter() - .filter(move |attr| attr.path.as_ident().map_or(false, |s| s.to_smol_str() == key)) + self.attrs.iter().filter(move |attr| attr.path.as_ident().map_or(false, |s| *s == *key)) } /// Find string value for a specific key inside token tree @@ -589,14 +594,14 @@ impl<'attr> AttrQuery<'attr> { /// #[doc(html_root_url = "url")] /// ^^^^^^^^^^^^^ key /// ``` - pub fn find_string_value_in_tt(self, key: &'attr str) -> Option<&SmolStr> { + pub fn find_string_value_in_tt(self, key: &'attr Symbol) -> Option<&str> { self.tt_values().find_map(|tt| { let name = tt.token_trees.iter() - .skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, ..} )) if text == key)) + .skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == *key)) .nth(2); match name { - Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ ref text, ..}))) => Some(text), + Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()), _ => None } }) @@ -647,19 +652,23 @@ mod tests { //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. 
+ use intern::Symbol; + use span::EditionedFileId; use triomphe::Arc; - use base_db::FileId; use hir_expand::span_map::{RealSpanMap, SpanMap}; - use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode}; + use span::FileId; use syntax::{ast, AstNode, TextRange}; + use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; use crate::attr::{DocAtom, DocExpr}; fn assert_parse_result(input: &str, expected: DocExpr) { let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); + let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute( + EditionedFileId::current_edition(FileId::from_raw(0)), + ))); let tt = syntax_node_to_token_tree( tt.syntax(), map.as_ref(), @@ -672,24 +681,29 @@ mod tests { #[test] fn test_doc_expr_parser() { - assert_parse_result("#![doc(hidden)]", DocAtom::Flag("hidden".into()).into()); + assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into()); assert_parse_result( r#"#![doc(alias = "foo")]"#, - DocAtom::KeyValue { key: "alias".into(), value: "foo".into() }.into(), + DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(), ); - assert_parse_result(r#"#![doc(alias("foo"))]"#, DocExpr::Alias(["foo".into()].into())); + assert_parse_result( + r#"#![doc(alias("foo"))]"#, + DocExpr::Alias([Symbol::intern("foo")].into()), + ); assert_parse_result( r#"#![doc(alias("foo", "bar", "baz"))]"#, - DocExpr::Alias(["foo".into(), "bar".into(), "baz".into()].into()), + DocExpr::Alias( + [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(), + ), ); assert_parse_result( r#" #[doc(alias("Bar", "Qux"))] struct Foo;"#, - DocExpr::Alias(["Bar".into(), "Qux".into()].into()), + DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()), ); } } diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index ca4a3f5217cf..58812479ddf5 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -6,13 +6,14 @@ pub mod scope; #[cfg(test)] mod tests; -use std::ops::Index; +use std::ops::{Deref, Index}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; -use hir_expand::{name::Name, InFile}; -use la_arena::{Arena, ArenaMap}; +use hir_expand::{name::Name, ExpandError, InFile}; +use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use rustc_hash::FxHashMap; +use smallvec::SmallVec; use span::MacroFileId; use syntax::{ast, AstPtr, SyntaxNodePtr}; use triomphe::Arc; @@ -23,6 +24,7 @@ use crate::{ hir::{ dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId, RecordFieldPat, }, + item_tree::AttrOwner, nameres::DefMap, path::{ModPath, Path}, src::HasSource, @@ -91,6 +93,7 @@ pub struct BodySourceMap { label_map_back: ArenaMap, self_param: Option>>, + binding_definitions: FxHashMap>, /// We don't create explicit nodes for record fields (`S { record_field: 92 }`). /// Instead, we use id of expression (`92`) to identify the field. 
@@ -112,10 +115,10 @@ pub struct SyntheticSyntax; #[derive(Debug, Eq, PartialEq)] pub enum BodyDiagnostic { InactiveCode { node: InFile, cfg: CfgExpr, opts: CfgOptions }, - MacroError { node: InFile>, message: String }, - UnresolvedProcMacro { node: InFile>, krate: CrateId }, + MacroError { node: InFile>, err: ExpandError }, UnresolvedMacroCall { node: InFile>, path: ModPath }, UnreachableLabel { node: InFile>, name: Name }, + AwaitOutsideOfAsync { node: InFile>, location: String }, UndeclaredLabel { node: InFile>, name: Name }, } @@ -134,16 +137,23 @@ impl Body { let data = db.function_data(f); let f = f.lookup(db); let src = f.source(db); - params = src.value.param_list().map(|param_list| { + params = src.value.param_list().map(move |param_list| { let item_tree = f.id.item_tree(db); let func = &item_tree[f.id.value]; let krate = f.container.module(db).krate; let crate_graph = db.crate_graph(); ( param_list, - func.params.clone().map(move |param| { + (0..func.params.len()).map(move |idx| { item_tree - .attrs(db, krate, param.into()) + .attrs( + db, + krate, + AttrOwner::Param( + f.id.value, + Idx::from_raw(RawIdx::from(idx as u32)), + ), + ) .is_cfg_enabled(&crate_graph[krate].cfg_options) }), ) @@ -377,6 +387,10 @@ impl BodySourceMap { self.label_map_back[label] } + pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] { + self.binding_definitions.get(&binding).map_or(&[], Deref::deref) + } + pub fn node_label(&self, node: InFile<&ast::Label>) -> Option { let src = node.map(AstPtr::new); self.label_map.get(&src).cloned() @@ -428,6 +442,7 @@ impl BodySourceMap { expansions, format_args_template_map, diagnostics, + binding_definitions, } = self; format_args_template_map.shrink_to_fit(); expr_map.shrink_to_fit(); @@ -440,5 +455,6 @@ impl BodySourceMap { pat_field_map_back.shrink_to_fit(); expansions.shrink_to_fit(); diagnostics.shrink_to_fit(); + binding_definitions.shrink_to_fit(); } } diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index faba9050fc4d..abf789582920 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -4,19 +4,19 @@ use std::mem; use base_db::CrateId; +use either::Either; use hir_expand::{ - name::{name, AsName, Name}, - ExpandError, InFile, + name::{AsName, Name}, + InFile, }; -use intern::Interned; +use intern::{sym, Interned, Symbol}; use rustc_hash::FxHashMap; -use smallvec::SmallVec; use span::AstIdMap; use stdx::never; use syntax::{ ast::{ - self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName, - RangeItem, SlicePatComponents, + self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasGenericArgs, + HasLoopBody, HasName, RangeItem, SlicePatComponents, }, AstNode, AstPtr, AstToken as _, SyntaxNodePtr, }; @@ -72,6 +72,7 @@ pub(super) fn lower( is_lowering_coroutine: false, label_ribs: Vec::new(), current_binding_owner: None, + awaitable_context: None, } .collect(params, body, is_async_fn) } @@ -100,6 +101,8 @@ struct ExprCollector<'a> { // resolution label_ribs: Vec, current_binding_owner: Option, + + awaitable_context: Option, } #[derive(Clone, Debug)] @@ -135,6 +138,11 @@ impl RibKind { } } +enum Awaitable { + Yes, + No(&'static str), +} + #[derive(Debug, Default)] struct BindingList { map: FxHashMap, @@ -180,6 +188,18 @@ impl ExprCollector<'_> { body: Option, is_async_fn: bool, ) -> (Body, BodySourceMap) { + self.awaitable_context.replace(if is_async_fn { + Awaitable::Yes + } else { + match self.owner { + DefWithBodyId::FunctionId(..) 
=> Awaitable::No("non-async function"), + DefWithBodyId::StaticId(..) => Awaitable::No("static"), + DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => { + Awaitable::No("constant") + } + DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"), + } + }); if let Some((param_list, mut attr_enabled)) = param_list { let mut params = vec![]; if let Some(self_param) = @@ -187,8 +207,10 @@ impl ExprCollector<'_> { { let is_mutable = self_param.mut_token().is_some() && self_param.amp_token().is_none(); - let binding_id: la_arena::Idx = - self.alloc_binding(name![self], BindingAnnotation::new(is_mutable, false)); + let binding_id: la_arena::Idx = self.alloc_binding( + Name::new_symbol_root(sym::self_.clone()), + BindingAnnotation::new(is_mutable, false), + ); self.body.self_param = Some(binding_id); self.source_map.self_param = Some(self.expander.in_file(AstPtr::new(&self_param))); } @@ -278,27 +300,39 @@ impl ExprCollector<'_> { } Some(ast::BlockModifier::Async(_)) => { self.with_label_rib(RibKind::Closure, |this| { - this.collect_block_(e, |id, statements, tail| Expr::Async { - id, - statements, - tail, + this.with_awaitable_block(Awaitable::Yes, |this| { + this.collect_block_(e, |id, statements, tail| Expr::Async { + id, + statements, + tail, + }) }) }) } Some(ast::BlockModifier::Const(_)) => { self.with_label_rib(RibKind::Constant, |this| { - let (result_expr_id, prev_binding_owner) = - this.initialize_binding_owner(syntax_ptr); - let inner_expr = this.collect_block(e); - let it = this.db.intern_anonymous_const(ConstBlockLoc { - parent: this.owner, - root: inner_expr, - }); - this.body.exprs[result_expr_id] = Expr::Const(it); - this.current_binding_owner = prev_binding_owner; - result_expr_id + this.with_awaitable_block(Awaitable::No("constant block"), |this| { + let (result_expr_id, prev_binding_owner) = + this.initialize_binding_owner(syntax_ptr); + let inner_expr = this.collect_block(e); + let it = this.db.intern_anonymous_const(ConstBlockLoc { + parent: this.owner, + root: inner_expr, + }); + this.body.exprs[result_expr_id] = Expr::Const(it); + this.current_binding_owner = prev_binding_owner; + result_expr_id + }) }) } + // FIXME + Some(ast::BlockModifier::AsyncGen(_)) => { + self.with_awaitable_block(Awaitable::Yes, |this| this.collect_block(e)) + } + Some(ast::BlockModifier::Gen(_)) => self + .with_awaitable_block(Awaitable::No("non-async gen block"), |this| { + this.collect_block(e) + }), None => self.collect_block(e), }, ast::Expr::LoopExpr(e) => { @@ -464,6 +498,12 @@ impl ExprCollector<'_> { } ast::Expr::AwaitExpr(e) => { let expr = self.collect_expr_opt(e.expr()); + if let Awaitable::No(location) = self.is_lowering_awaitable_block() { + self.source_map.diagnostics.push(BodyDiagnostic::AwaitOutsideOfAsync { + node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)), + location: location.to_string(), + }); + } self.alloc_expr(Expr::Await { expr }, syntax_ptr) } ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e), @@ -522,7 +562,13 @@ impl ExprCollector<'_> { let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine); let prev_try_block_label = this.current_try_block_label.take(); - let body = this.collect_expr_opt(e.body()); + let awaitable = if e.async_token().is_some() { + Awaitable::Yes + } else { + Awaitable::No("non-async closure") + }; + let body = + this.with_awaitable_block(awaitable, |this| this.collect_expr_opt(e.body())); let closure_kind = if this.is_lowering_coroutine { let movability = if 
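// Illustrative sketch (not from this change): the source shapes the new `Awaitable`
// tracking distinguishes; the quoted strings are the values passed to `Awaitable::No(..)`
// above and surfaced by the `AwaitOutsideOfAsync` diagnostic:
//
//     async fn ok(fut: impl std::future::Future<Output = ()>) {
//         fut.await;                        // allowed: async fn
//     }
//     fn not_ok(fut: impl std::future::Future<Output = ()>) {
//         fut.await;                        // flagged: "non-async function"
//     }
//     fn closure(fut: impl std::future::Future<Output = ()>) {
//         let _ = || fut.await;             // flagged: "non-async closure"
//     }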
e.static_token().is_some() { @@ -987,20 +1033,11 @@ impl ExprCollector<'_> { } }; if record_diagnostics { - match &res.err { - Some(ExpandError::UnresolvedProcMacro(krate)) => { - self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro { - node: InFile::new(outer_file, syntax_ptr), - krate: *krate, - }); - } - Some(err) => { - self.source_map.diagnostics.push(BodyDiagnostic::MacroError { - node: InFile::new(outer_file, syntax_ptr), - message: err.to_string(), - }); - } - None => {} + if let Some(err) = res.err { + self.source_map.diagnostics.push(BodyDiagnostic::MacroError { + node: InFile::new(outer_file, syntax_ptr), + err, + }); } } @@ -1431,15 +1468,14 @@ impl ExprCollector<'_> { args: AstChildren, has_leading_comma: bool, binding_list: &mut BindingList, - ) -> (Box<[PatId]>, Option) { + ) -> (Box<[PatId]>, Option) { + let args: Vec<_> = args.map(|p| self.collect_pat_possibly_rest(p, binding_list)).collect(); // Find the location of the `..`, if there is one. Note that we do not // consider the possibility of there being multiple `..` here. - let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_))); + let ellipsis = args.iter().position(|p| p.is_right()).map(|it| it as u32); + // We want to skip the `..` pattern here, since we account for it above. - let mut args: Vec<_> = args - .filter(|p| !matches!(p, ast::Pat::RestPat(_))) - .map(|p| self.collect_pat(p, binding_list)) - .collect(); + let mut args: Vec<_> = args.into_iter().filter_map(Either::left).collect(); // if there is a leading comma, the user is most likely to type out a leading pattern // so we insert a missing pattern at the beginning for IDE features if has_leading_comma { @@ -1449,6 +1485,41 @@ impl ExprCollector<'_> { (args.into_boxed_slice(), ellipsis) } + // `collect_pat` rejects `ast::Pat::RestPat`, but it should be handled in some cases that + // it is the macro expansion result of an arg sub-pattern in a slice or tuple pattern. 
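// Illustrative sketch (not from this change): the case this helper handles is a rest
// pattern produced by macro expansion inside a slice or tuple pattern, e.g.:
//
//     macro_rules! rest { () => { .. } }
//     fn first(slice: &[i32]) -> Option<&i32> {
//         if let [first, rest!()] = slice { Some(first) } else { None }
//     }
//
// Previously `collect_pat` would reject the expanded `..`; routing it through
// `collect_pat_possibly_rest` roughly records it as the ellipsis position instead.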
+ fn collect_pat_possibly_rest( + &mut self, + pat: ast::Pat, + binding_list: &mut BindingList, + ) -> Either { + match &pat { + ast::Pat::RestPat(_) => Either::Right(()), + ast::Pat::MacroPat(mac) => match mac.macro_call() { + Some(call) => { + let macro_ptr = AstPtr::new(&call); + let src = self.expander.in_file(AstPtr::new(&pat)); + let pat = + self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| { + if let Some(expanded_pat) = expanded_pat { + this.collect_pat_possibly_rest(expanded_pat, binding_list) + } else { + Either::Left(this.missing_pat()) + } + }); + if let Some(pat) = pat.left() { + self.source_map.pat_map.insert(src, pat); + } + pat + } + None => { + let ptr = AstPtr::new(&pat); + Either::Left(self.alloc_pat(Pat::Missing, ptr)) + } + }, + _ => Either::Left(self.collect_pat(pat, binding_list)), + } + } + // endregion: patterns /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when @@ -1473,7 +1544,7 @@ impl ExprCollector<'_> { } fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) { - self.body.bindings[binding_id].definitions.push(pat_id); + self.source_map.binding_definitions.entry(binding_id).or_default().push(pat_id); } // region: labels @@ -1588,18 +1659,22 @@ impl ExprCollector<'_> { }); let mut mappings = vec![]; let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) { - Some((s, is_direct_literal)) => format_args::parse( - &s, - fmt_snippet, - args, - is_direct_literal, - |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), - |name, span| { - if let Some(span) = span { - mappings.push((span, name)) - } - }, - ), + Some((s, is_direct_literal)) => { + let call_ctx = self.expander.syntax_context(); + format_args::parse( + &s, + fmt_snippet, + args, + is_direct_literal, + |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), + |name, span| { + if let Some(span) = span { + mappings.push((span, name)) + } + }, + call_ctx, + ) + } None => FormatArgs { template: Default::default(), arguments: args.finish(), @@ -1617,30 +1692,29 @@ impl ExprCollector<'_> { } } - let lit_pieces = - fmt.template - .iter() - .enumerate() - .filter_map(|(i, piece)| { - match piece { - FormatArgsPiece::Literal(s) => Some( - self.alloc_expr_desugared(Expr::Literal(Literal::String(s.clone()))), - ), - &FormatArgsPiece::Placeholder(_) => { - // Inject empty string before placeholders when not already preceded by a literal piece. - if i == 0 - || matches!(fmt.template[i - 1], FormatArgsPiece::Placeholder(_)) - { - Some(self.alloc_expr_desugared(Expr::Literal(Literal::String( - "".into(), - )))) - } else { - None - } + let lit_pieces = fmt + .template + .iter() + .enumerate() + .filter_map(|(i, piece)| { + match piece { + FormatArgsPiece::Literal(s) => { + Some(self.alloc_expr_desugared(Expr::Literal(Literal::String(s.clone())))) + } + &FormatArgsPiece::Placeholder(_) => { + // Inject empty string before placeholders when not already preceded by a literal piece. 
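// Worked example (editorial, not from this change): for a template like
// `format_args!("{x}{y} tail")` the pieces are Placeholder(x), Placeholder(y),
// Literal(" tail"), so the loop above produces the literal pieces ["", "", " tail"]:
// an empty string is injected for the leading placeholder and for a placeholder that
// immediately follows another placeholder.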
+ if i == 0 || matches!(fmt.template[i - 1], FormatArgsPiece::Placeholder(_)) + { + Some(self.alloc_expr_desugared(Expr::Literal(Literal::String( + Symbol::empty(), + )))) + } else { + None } } - }) - .collect(); + } + }) + .collect(); let lit_pieces = self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: lit_pieces, is_assignee_expr: false, @@ -1723,14 +1797,18 @@ impl ExprCollector<'_> { // unsafe { ::core::fmt::UnsafeArg::new() } // ) - let Some(new_v1_formatted) = - LangItem::FormatArguments.ty_rel_path(self.db, self.krate, name![new_v1_formatted]) - else { + let Some(new_v1_formatted) = LangItem::FormatArguments.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::new_v1_formatted.clone()), + ) else { return self.missing_expr(); }; - let Some(unsafe_arg_new) = - LangItem::FormatUnsafeArg.ty_rel_path(self.db, self.krate, name![new]) - else { + let Some(unsafe_arg_new) = LangItem::FormatUnsafeArg.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::new.clone()), + ) else { return self.missing_expr(); }; let new_v1_formatted = self.alloc_expr_desugared(Expr::Path(new_v1_formatted)); @@ -1812,10 +1890,10 @@ impl ExprCollector<'_> { self.db, self.krate, match alignment { - Some(FormatAlignment::Left) => name![Left], - Some(FormatAlignment::Right) => name![Right], - Some(FormatAlignment::Center) => name![Center], - None => name![Unknown], + Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left.clone()), + Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right.clone()), + Some(FormatAlignment::Center) => Name::new_symbol_root(sym::Center.clone()), + None => Name::new_symbol_root(sym::Unknown.clone()), }, ); match align { @@ -1838,8 +1916,11 @@ impl ExprCollector<'_> { let width = self.make_count(width, argmap); let format_placeholder_new = { - let format_placeholder_new = - LangItem::FormatPlaceholder.ty_rel_path(self.db, self.krate, name![new]); + let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::new.clone()), + ); match format_placeholder_new { Some(path) => self.alloc_expr_desugared(Expr::Path(path)), None => self.missing_expr(), @@ -1883,11 +1964,14 @@ impl ExprCollector<'_> { *n as u128, Some(BuiltinUint::Usize), ))); - let count_is = - match LangItem::FormatCount.ty_rel_path(self.db, self.krate, name![Is]) { - Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)), - None => self.missing_expr(), - }; + let count_is = match LangItem::FormatCount.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::Is.clone()), + ) { + Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)), + None => self.missing_expr(), + }; self.alloc_expr_desugared(Expr::Call { callee: count_is, args: Box::new([args]), @@ -1905,7 +1989,7 @@ impl ExprCollector<'_> { let count_param = match LangItem::FormatCount.ty_rel_path( self.db, self.krate, - name![Param], + Name::new_symbol_root(sym::Param.clone()), ) { Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)), None => self.missing_expr(), @@ -1921,7 +2005,11 @@ impl ExprCollector<'_> { self.missing_expr() } } - None => match LangItem::FormatCount.ty_rel_path(self.db, self.krate, name![Implied]) { + None => match LangItem::FormatCount.ty_rel_path( + self.db, + self.krate, + Name::new_symbol_root(sym::Implied.clone()), + ) { Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)), None => self.missing_expr(), }, @@ -1942,18 +2030,18 @@ impl ExprCollector<'_> { let 
new_fn = match LangItem::FormatArgument.ty_rel_path( self.db, self.krate, - match ty { - Format(Display) => name![new_display], - Format(Debug) => name![new_debug], - Format(LowerExp) => name![new_lower_exp], - Format(UpperExp) => name![new_upper_exp], - Format(Octal) => name![new_octal], - Format(Pointer) => name![new_pointer], - Format(Binary) => name![new_binary], - Format(LowerHex) => name![new_lower_hex], - Format(UpperHex) => name![new_upper_hex], - Usize => name![from_usize], - }, + Name::new_symbol_root(match ty { + Format(Display) => sym::new_display.clone(), + Format(Debug) => sym::new_debug.clone(), + Format(LowerExp) => sym::new_lower_exp.clone(), + Format(UpperExp) => sym::new_upper_exp.clone(), + Format(Octal) => sym::new_octal.clone(), + Format(Pointer) => sym::new_pointer.clone(), + Format(Binary) => sym::new_binary.clone(), + Format(LowerHex) => sym::new_lower_hex.clone(), + Format(UpperHex) => sym::new_upper_hex.clone(), + Usize => sym::from_usize.clone(), + }), ) { Some(new_fn) => self.alloc_expr_desugared(Expr::Path(new_fn)), None => self.missing_expr(), @@ -2002,12 +2090,7 @@ impl ExprCollector<'_> { } fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId { - let binding = self.body.bindings.alloc(Binding { - name, - mode, - definitions: SmallVec::new(), - problems: None, - }); + let binding = self.body.bindings.alloc(Binding { name, mode, problems: None }); if let Some(owner) = self.current_binding_owner { self.body.binding_owners.insert(binding, owner); } @@ -2040,6 +2123,21 @@ impl ExprCollector<'_> { fn alloc_label_desugared(&mut self, label: Label) -> LabelId { self.body.labels.alloc(label) } + + fn is_lowering_awaitable_block(&self) -> &Awaitable { + self.awaitable_context.as_ref().unwrap_or(&Awaitable::No("unknown")) + } + + fn with_awaitable_block( + &mut self, + awaitable: Awaitable, + f: impl FnOnce(&mut Self) -> T, + ) -> T { + let orig = self.awaitable_context.replace(awaitable); + let res = f(self); + self.awaitable_context = orig; + res + } } fn comma_follows_token(t: Option) -> bool { diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index c48d16d05304..edaee60937d1 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -517,7 +517,7 @@ impl Printer<'_> { if i != 0 { w!(self, ", "); } - if *ellipsis == Some(i) { + if *ellipsis == Some(i as u32) { w!(self, ".., "); } self.print_pat(*pat); @@ -595,7 +595,7 @@ impl Printer<'_> { if i != 0 { w!(self, ", "); } - if *ellipsis == Some(i) { + if *ellipsis == Some(i as u32) { w!(self, ", .."); } self.print_pat(*arg); diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index fd685235e17d..bf201ca83479 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -288,8 +288,9 @@ fn compute_expr_scopes( #[cfg(test)] mod tests { - use base_db::{FileId, SourceDatabase}; + use base_db::SourceDatabase; use hir_expand::{name::AsName, InFile}; + use span::FileId; use syntax::{algo::find_node_at_offset, ast, AstNode}; use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; @@ -325,7 +326,7 @@ mod tests { let file_syntax = db.parse(file_id).syntax_node(); let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap(); - let function = find_function(&db, file_id); + let function = find_function(&db, file_id.file_id()); let scopes = db.expr_scopes(function.into()); let (_body, source_map) = db.body_with_source_map(function.into()); @@ 
-338,7 +339,7 @@ mod tests { let actual = scopes .scope_chain(scope) .flat_map(|scope| scopes.entries(scope)) - .map(|it| it.name().to_smol_str()) + .map(|it| it.name().as_str()) .collect::>() .join("\n"); let expected = expected.join("\n"); @@ -480,10 +481,10 @@ fn foo() { .expect("failed to find a name at the target offset"); let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap(); - let function = find_function(&db, file_id); + let function = find_function(&db, file_id.file_id()); let scopes = db.expr_scopes(function.into()); - let (body, source_map) = db.body_with_source_map(function.into()); + let (_, source_map) = db.body_with_source_map(function.into()); let expr_scope = { let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); @@ -494,7 +495,7 @@ fn foo() { let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap(); let pat_src = source_map - .pat_syntax(*body.bindings[resolved.binding()].definitions.first().unwrap()) + .pat_syntax(*source_map.binding_definitions[&resolved.binding()].first().unwrap()) .unwrap(); let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax()); diff --git a/crates/hir-def/src/builtin_type.rs b/crates/hir-def/src/builtin_type.rs index 61b2481978e2..14b9af84e6ff 100644 --- a/crates/hir-def/src/builtin_type.rs +++ b/crates/hir-def/src/builtin_type.rs @@ -5,7 +5,8 @@ use std::fmt; -use hir_expand::name::{name, AsName, Name}; +use hir_expand::name::{AsName, Name}; +use intern::{sym, Symbol}; /// Different signed int types. #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum BuiltinInt { @@ -30,8 +31,10 @@ pub enum BuiltinUint { #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum BuiltinFloat { + F16, F32, F64, + F128, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -46,59 +49,67 @@ pub enum BuiltinType { impl BuiltinType { #[rustfmt::skip] - pub const ALL: &'static [(Name, BuiltinType)] = &[ - (name![char], BuiltinType::Char), - (name![bool], BuiltinType::Bool), - (name![str], BuiltinType::Str), - - (name![isize], BuiltinType::Int(BuiltinInt::Isize)), - (name![i8], BuiltinType::Int(BuiltinInt::I8)), - (name![i16], BuiltinType::Int(BuiltinInt::I16)), - (name![i32], BuiltinType::Int(BuiltinInt::I32)), - (name![i64], BuiltinType::Int(BuiltinInt::I64)), - (name![i128], BuiltinType::Int(BuiltinInt::I128)), - - (name![usize], BuiltinType::Uint(BuiltinUint::Usize)), - (name![u8], BuiltinType::Uint(BuiltinUint::U8)), - (name![u16], BuiltinType::Uint(BuiltinUint::U16)), - (name![u32], BuiltinType::Uint(BuiltinUint::U32)), - (name![u64], BuiltinType::Uint(BuiltinUint::U64)), - (name![u128], BuiltinType::Uint(BuiltinUint::U128)), - - (name![f32], BuiltinType::Float(BuiltinFloat::F32)), - (name![f64], BuiltinType::Float(BuiltinFloat::F64)), - ]; + pub fn all_builtin_types() -> [(Name, BuiltinType); 19] { + [ + (Name::new_symbol_root(sym::char.clone()), BuiltinType::Char), + (Name::new_symbol_root(sym::bool.clone()), BuiltinType::Bool), + (Name::new_symbol_root(sym::str.clone()), BuiltinType::Str), + + (Name::new_symbol_root(sym::isize.clone()), BuiltinType::Int(BuiltinInt::Isize)), + (Name::new_symbol_root(sym::i8.clone()), BuiltinType::Int(BuiltinInt::I8)), + (Name::new_symbol_root(sym::i16.clone()), BuiltinType::Int(BuiltinInt::I16)), + (Name::new_symbol_root(sym::i32.clone()), BuiltinType::Int(BuiltinInt::I32)), + (Name::new_symbol_root(sym::i64.clone()), BuiltinType::Int(BuiltinInt::I64)), + 
(Name::new_symbol_root(sym::i128.clone()), BuiltinType::Int(BuiltinInt::I128)), + + (Name::new_symbol_root(sym::usize.clone()), BuiltinType::Uint(BuiltinUint::Usize)), + (Name::new_symbol_root(sym::u8.clone()), BuiltinType::Uint(BuiltinUint::U8)), + (Name::new_symbol_root(sym::u16.clone()), BuiltinType::Uint(BuiltinUint::U16)), + (Name::new_symbol_root(sym::u32.clone()), BuiltinType::Uint(BuiltinUint::U32)), + (Name::new_symbol_root(sym::u64.clone()), BuiltinType::Uint(BuiltinUint::U64)), + (Name::new_symbol_root(sym::u128.clone()), BuiltinType::Uint(BuiltinUint::U128)), + + (Name::new_symbol_root(sym::f16.clone()), BuiltinType::Float(BuiltinFloat::F16)), + (Name::new_symbol_root(sym::f32.clone()), BuiltinType::Float(BuiltinFloat::F32)), + (Name::new_symbol_root(sym::f64.clone()), BuiltinType::Float(BuiltinFloat::F64)), + (Name::new_symbol_root(sym::f128.clone()), BuiltinType::Float(BuiltinFloat::F128)), + ] + } pub fn by_name(name: &Name) -> Option { - Self::ALL.iter().find_map(|(n, ty)| if n == name { Some(*ty) } else { None }) + Self::all_builtin_types() + .iter() + .find_map(|(n, ty)| if n == name { Some(*ty) } else { None }) } } impl AsName for BuiltinType { fn as_name(&self) -> Name { match self { - BuiltinType::Char => name![char], - BuiltinType::Bool => name![bool], - BuiltinType::Str => name![str], + BuiltinType::Char => Name::new_symbol_root(sym::char.clone()), + BuiltinType::Bool => Name::new_symbol_root(sym::bool.clone()), + BuiltinType::Str => Name::new_symbol_root(sym::str.clone()), BuiltinType::Int(it) => match it { - BuiltinInt::Isize => name![isize], - BuiltinInt::I8 => name![i8], - BuiltinInt::I16 => name![i16], - BuiltinInt::I32 => name![i32], - BuiltinInt::I64 => name![i64], - BuiltinInt::I128 => name![i128], + BuiltinInt::Isize => Name::new_symbol_root(sym::isize.clone()), + BuiltinInt::I8 => Name::new_symbol_root(sym::i8.clone()), + BuiltinInt::I16 => Name::new_symbol_root(sym::i16.clone()), + BuiltinInt::I32 => Name::new_symbol_root(sym::i32.clone()), + BuiltinInt::I64 => Name::new_symbol_root(sym::i64.clone()), + BuiltinInt::I128 => Name::new_symbol_root(sym::i128.clone()), }, BuiltinType::Uint(it) => match it { - BuiltinUint::Usize => name![usize], - BuiltinUint::U8 => name![u8], - BuiltinUint::U16 => name![u16], - BuiltinUint::U32 => name![u32], - BuiltinUint::U64 => name![u64], - BuiltinUint::U128 => name![u128], + BuiltinUint::Usize => Name::new_symbol_root(sym::usize.clone()), + BuiltinUint::U8 => Name::new_symbol_root(sym::u8.clone()), + BuiltinUint::U16 => Name::new_symbol_root(sym::u16.clone()), + BuiltinUint::U32 => Name::new_symbol_root(sym::u32.clone()), + BuiltinUint::U64 => Name::new_symbol_root(sym::u64.clone()), + BuiltinUint::U128 => Name::new_symbol_root(sym::u128.clone()), }, BuiltinType::Float(it) => match it { - BuiltinFloat::F32 => name![f32], - BuiltinFloat::F64 => name![f64], + BuiltinFloat::F16 => Name::new_symbol_root(sym::f16.clone()), + BuiltinFloat::F32 => Name::new_symbol_root(sym::f32.clone()), + BuiltinFloat::F64 => Name::new_symbol_root(sym::f64.clone()), + BuiltinFloat::F128 => Name::new_symbol_root(sym::f128.clone()), }, } } @@ -132,6 +143,18 @@ impl BuiltinInt { }; Some(res) } + pub fn from_suffix_sym(suffix: &Symbol) -> Option { + let res = match suffix { + s if *s == sym::isize => Self::Isize, + s if *s == sym::i8 => Self::I8, + s if *s == sym::i16 => Self::I16, + s if *s == sym::i32 => Self::I32, + s if *s == sym::i64 => Self::I64, + s if *s == sym::i128 => Self::I128, + _ => return None, + }; + Some(res) + } } #[rustfmt::skip] 
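// Illustrative note (not from this change): with the `F16`/`F128` variants and the
// "f16"/"f128" suffixes handled in the surrounding hunks, literals such as the
// following now resolve to builtin float types:
//
//     let half = 1.0f16;    // BuiltinFloat::F16
//     let quad = 1.0f128;   // BuiltinFloat::F128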
@@ -149,14 +172,29 @@ impl BuiltinUint { }; Some(res) } + pub fn from_suffix_sym(suffix: &Symbol) -> Option { + let res = match suffix { + s if *s == sym::usize => Self::Usize, + s if *s == sym::u8 => Self::U8, + s if *s == sym::u16 => Self::U16, + s if *s == sym::u32 => Self::U32, + s if *s == sym::u64 => Self::U64, + s if *s == sym::u128 => Self::U128, + + _ => return None, + }; + Some(res) + } } #[rustfmt::skip] impl BuiltinFloat { pub fn from_suffix(suffix: &str) -> Option { let res = match suffix { + "f16" => BuiltinFloat::F16, "f32" => BuiltinFloat::F32, "f64" => BuiltinFloat::F64, + "f128" => BuiltinFloat::F128, _ => return None, }; Some(res) @@ -192,8 +230,10 @@ impl fmt::Display for BuiltinUint { impl fmt::Display for BuiltinFloat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { + BuiltinFloat::F16 => "f16", BuiltinFloat::F32 => "f32", BuiltinFloat::F64 => "f64", + BuiltinFloat::F128 => "f128", }) } } diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index 106109eb1844..0438278ca270 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -214,8 +214,8 @@ impl ChildBySource for GenericDefId { } let generic_params = db.generic_params(*self); - let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); - let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); + let mut toc_idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx); + let lts_idx_iter = generic_params.iter_lt().map(|(idx, _)| idx); // For traits the first type index is `Self`, skip it. if let GenericDefId::TraitId(_) = *self { diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 55043fdc4b05..c3c2e51fd038 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -6,7 +6,8 @@ use base_db::CrateId; use hir_expand::{ name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind, }; -use intern::Interned; +use intern::{sym, Interned, Symbol}; +use la_arena::{Idx, RawIdx}; use smallvec::SmallVec; use syntax::{ast, Parse}; use triomphe::Arc; @@ -38,8 +39,8 @@ pub struct FunctionData { pub ret_type: Interned, pub attrs: Attrs, pub visibility: RawVisibility, - pub abi: Option>, - pub legacy_const_generics_indices: Box<[u32]>, + pub abi: Option, + pub legacy_const_generics_indices: Option>>, pub rustc_allow_incoherent_impl: bool, flags: FnFlags, } @@ -58,52 +59,52 @@ impl FunctionData { let crate_graph = db.crate_graph(); let cfg_options = &crate_graph[krate].cfg_options; - let enabled_params = func - .params - .clone() - .filter(|¶m| item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options)); - - // If last cfg-enabled param is a `...` param, it's a varargs function. - let is_varargs = enabled_params - .clone() - .next_back() - .map_or(false, |param| item_tree[param].type_ref.is_none()); + let attr_owner = |idx| { + item_tree::AttrOwner::Param(loc.id.value, Idx::from_raw(RawIdx::from(idx as u32))) + }; let mut flags = func.flags; - if is_varargs { - flags |= FnFlags::IS_VARARGS; - } if flags.contains(FnFlags::HAS_SELF_PARAM) { // If there's a self param in the syntax, but it is cfg'd out, remove the flag. 
- let is_cfgd_out = match func.params.clone().next() { - Some(param) => { - !item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options) - } - None => { - stdx::never!("fn HAS_SELF_PARAM but no parameters allocated"); - true - } - }; + let is_cfgd_out = + !item_tree.attrs(db, krate, attr_owner(0usize)).is_cfg_enabled(cfg_options); if is_cfgd_out { cov_mark::hit!(cfgd_out_self_param); flags.remove(FnFlags::HAS_SELF_PARAM); } } + if flags.contains(FnFlags::IS_VARARGS) { + if let Some((_, param)) = func.params.iter().enumerate().rev().find(|&(idx, _)| { + item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options) + }) { + if param.type_ref.is_some() { + flags.remove(FnFlags::IS_VARARGS); + } + } else { + flags.remove(FnFlags::IS_VARARGS); + } + } let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); let legacy_const_generics_indices = attrs - .by_key("rustc_legacy_const_generics") + .by_key(&sym::rustc_legacy_const_generics) .tt_values() .next() .map(parse_rustc_legacy_const_generics) - .unwrap_or_default(); - let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists(); + .filter(|it| !it.is_empty()) + .map(Box::new); + let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); Arc::new(FunctionData { name: func.name.clone(), - params: enabled_params - .clone() - .filter_map(|id| item_tree[id].type_ref.clone()) + params: func + .params + .iter() + .enumerate() + .filter(|&(idx, _)| { + item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options) + }) + .filter_map(|(_, param)| param.type_ref.clone()) .collect(), ret_type: func.ret_type.clone(), attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()), @@ -150,7 +151,7 @@ fn parse_rustc_legacy_const_generics(tt: &crate::tt::Subtree) -> Box<[u32]> { let mut indices = Vec::new(); for args in tt.token_trees.chunks(2) { match &args[0] { - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.text.parse() { + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() { Ok(index) => indices.push(index), Err(_) => break, }, @@ -200,8 +201,8 @@ impl TypeAliasData { ModItem::from(loc.id.value).into(), ); let rustc_has_incoherent_inherent_impls = - attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); - let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists(); + attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists(); + let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); Arc::new(TypeAliasData { name: typ.name.clone(), @@ -223,6 +224,7 @@ pub struct TraitData { pub is_unsafe: bool, pub rustc_has_incoherent_inherent_impls: bool, pub skip_array_during_method_dispatch: bool, + pub skip_boxed_slice_during_method_dispatch: bool, pub fundamental: bool, pub visibility: RawVisibility, /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. 
`hir_ty` will ignore @@ -250,11 +252,20 @@ impl TraitData { let is_unsafe = tr_def.is_unsafe; let visibility = item_tree[tr_def.visibility].clone(); let attrs = item_tree.attrs(db, module_id.krate(), ModItem::from(tree_id.value).into()); - let skip_array_during_method_dispatch = - attrs.by_key("rustc_skip_array_during_method_dispatch").exists(); + let mut skip_array_during_method_dispatch = + attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists(); + let mut skip_boxed_slice_during_method_dispatch = false; + for tt in attrs.by_key(&sym::rustc_skip_during_method_dispatch).tt_values() { + for tt in tt.token_trees.iter() { + if let crate::tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt { + skip_array_during_method_dispatch |= ident.sym == sym::array; + skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice; + } + } + } let rustc_has_incoherent_inherent_impls = - attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); - let fundamental = attrs.by_key("fundamental").exists(); + attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists(); + let fundamental = attrs.by_key(&sym::fundamental).exists(); let mut collector = AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr)); collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items); @@ -269,6 +280,7 @@ impl TraitData { is_unsafe, visibility, skip_array_during_method_dispatch, + skip_boxed_slice_during_method_dispatch, rustc_has_incoherent_inherent_impls, fundamental, }), @@ -393,7 +405,7 @@ impl Macro2Data { let helpers = item_tree .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) - .by_key("rustc_builtin_macro") + .by_key(&sym::rustc_builtin_macro) .tt_values() .next() .and_then(|attr| parse_macro_name_and_helper_attrs(&attr.token_trees)) @@ -423,7 +435,7 @@ impl MacroRulesData { let macro_export = item_tree .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) - .by_key("macro_export") + .by_key(&sym::macro_export) .exists(); Arc::new(MacroRulesData { name: makro.name.clone(), macro_export }) @@ -485,7 +497,7 @@ impl ExternCrateDeclData { let name = extern_crate.name.clone(); let krate = loc.container.krate(); - let crate_id = if name == hir_expand::name![self] { + let crate_id = if name == sym::self_.clone() { Some(krate) } else { db.crate_def_map(krate) @@ -526,7 +538,7 @@ impl ConstData { let rustc_allow_incoherent_impl = item_tree .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into()) - .by_key("rustc_allow_incoherent_impl") + .by_key(&sym::rustc_allow_incoherent_impl) .exists(); Arc::new(ConstData { @@ -618,7 +630,8 @@ impl<'a> AssocItemCollector<'a> { if !attrs.is_cfg_enabled(self.expander.cfg_options()) { self.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id.local_id, - InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).erase()), + tree_id, + ModItem::from(item).into(), attrs.cfg().unwrap(), self.expander.cfg_options().clone(), )); @@ -644,22 +657,18 @@ impl<'a> AssocItemCollector<'a> { // crate failed), skip expansion like we would if it was // disabled. This is analogous to the handling in // `DefCollector::collect_macros`. 
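// Illustrative note (not from this change): the `TraitData` change above additionally
// understands the rustc-internal attribute form
//
//     #[rustc_skip_during_method_dispatch(array, boxed_slice)]
//
// OR-ing `skip_array_during_method_dispatch` and
// `skip_boxed_slice_during_method_dispatch` for the idents listed in parentheses,
// alongside the existing `#[rustc_skip_array_during_method_dispatch]` flag.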
- if exp.is_dummy() { - self.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + if let Some(err) = exp.as_expand_error(self.module_id.krate) { + self.diagnostics.push(DefDiagnostic::macro_error( self.module_id.local_id, - loc.kind, - loc.def.krate, + ast_id, + (*attr.path).clone(), + err, )); - - continue 'attrs; - } - if exp.is_disabled() { continue 'attrs; } } self.macro_calls.push((ast_id, call_id)); - let res = self.expander.enter_expand_id::(self.db, call_id); self.collect_macro_items(res); diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs index 0fe73418e51f..a70710e565c2 100644 --- a/crates/hir-def/src/data/adt.rs +++ b/crates/hir-def/src/data/adt.rs @@ -5,24 +5,20 @@ use bitflags::bitflags; use cfg::CfgOptions; use either::Either; -use hir_expand::{ - name::{AsName, Name}, - HirFileId, InFile, -}; -use intern::Interned; +use hir_expand::name::Name; +use intern::{sym, Interned}; use la_arena::Arena; use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; -use syntax::ast::{self, HasName, HasVisibility}; use triomphe::Arc; use crate::{ builtin_type::{BuiltinInt, BuiltinUint}, db::DefDatabase, - item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, + item_tree::{ + AttrOwner, Field, FieldParent, FieldsShape, ItemTree, ModItem, RawVisibilityId, TreeId, + }, lang_item::LangItem, - lower::LowerCtx, nameres::diagnostics::{DefDiagnostic, DefDiagnostics}, - trace::Trace, tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}, type_ref::TypeRef, visibility::RawVisibility, @@ -95,7 +91,7 @@ fn repr_from_value( item_tree: &ItemTree, of: AttrOwner, ) -> Option { - item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt) + item_tree.attrs(db, krate, of).by_key(&sym::repr).tt_values().find_map(parse_repr_tt) } fn parse_repr_tt(tt: &Subtree) -> Option { @@ -112,12 +108,12 @@ fn parse_repr_tt(tt: &Subtree) -> Option { let mut tts = tt.token_trees.iter().peekable(); while let Some(tt) = tts.next() { if let TokenTree::Leaf(Leaf::Ident(ident)) = tt { - flags.insert(match &*ident.text { - "packed" => { + flags.insert(match &ident.sym { + s if *s == sym::packed => { let pack = if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { - lit.text.parse().unwrap_or_default() + lit.symbol.as_str().parse().unwrap_or_default() } else { 0 } @@ -129,11 +125,11 @@ fn parse_repr_tt(tt: &Subtree) -> Option { Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack }); ReprFlags::empty() } - "align" => { + s if *s == sym::align => { if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { - if let Ok(align) = lit.text.parse() { + if let Ok(align) = lit.symbol.as_str().parse() { let align = Align::from_bytes(align).ok(); max_align = max_align.max(align); } @@ -141,13 +137,13 @@ fn parse_repr_tt(tt: &Subtree) -> Option { } ReprFlags::empty() } - "C" => ReprFlags::IS_C, - "transparent" => ReprFlags::IS_TRANSPARENT, - "simd" => ReprFlags::IS_SIMD, + s if *s == sym::C => ReprFlags::IS_C, + s if *s == sym::transparent => ReprFlags::IS_TRANSPARENT, + s if *s == sym::simd => ReprFlags::IS_SIMD, repr => { - if let Some(builtin) = BuiltinInt::from_suffix(repr) + if let Some(builtin) = BuiltinInt::from_suffix_sym(repr) .map(Either::Left) - .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right)) + .or_else(|| 
BuiltinUint::from_suffix_sym(repr).map(Either::Right)) { int = Some(match builtin { Either::Left(bi) => match bi { @@ -194,10 +190,10 @@ impl StructData { let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); let mut flags = StructFlags::NO_FLAGS; - if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() { + if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL; } - if attrs.by_key("fundamental").exists() { + if attrs.by_key(&sym::fundamental).exists() { flags |= StructFlags::IS_FUNDAMENTAL; } if let Some(lang) = attrs.lang_item() { @@ -211,20 +207,25 @@ impl StructData { } let strukt = &item_tree[loc.id.value]; - let (variant_data, diagnostics) = lower_fields( + let (data, diagnostics) = lower_fields( db, krate, - loc.id.file_id(), loc.container.local_id, + loc.id.tree_id(), &item_tree, &db.crate_graph()[krate].cfg_options, + FieldParent::Struct(loc.id.value), &strukt.fields, None, ); ( Arc::new(StructData { name: strukt.name.clone(), - variant_data: Arc::new(variant_data), + variant_data: Arc::new(match strukt.shape { + FieldsShape::Record => VariantData::Record(data), + FieldsShape::Tuple => VariantData::Tuple(data), + FieldsShape::Unit => VariantData::Unit, + }), repr, visibility: item_tree[strukt.visibility].clone(), flags, @@ -248,28 +249,29 @@ impl StructData { let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); let mut flags = StructFlags::NO_FLAGS; - if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() { + if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL; } - if attrs.by_key("fundamental").exists() { + if attrs.by_key(&sym::fundamental).exists() { flags |= StructFlags::IS_FUNDAMENTAL; } let union = &item_tree[loc.id.value]; - let (variant_data, diagnostics) = lower_fields( + let (data, diagnostics) = lower_fields( db, krate, - loc.id.file_id(), loc.container.local_id, + loc.id.tree_id(), &item_tree, &db.crate_graph()[krate].cfg_options, + FieldParent::Union(loc.id.value), &union.fields, None, ); ( Arc::new(StructData { name: union.name.clone(), - variant_data: Arc::new(variant_data), + variant_data: Arc::new(VariantData::Record(data)), repr, visibility: item_tree[union.visibility].clone(), flags, @@ -287,7 +289,7 @@ impl EnumData { let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let rustc_has_incoherent_inherent_impls = item_tree .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) - .by_key("rustc_has_incoherent_inherent_impls") + .by_key(&sym::rustc_has_incoherent_inherent_impls) .exists(); let enum_ = &item_tree[loc.id.value]; @@ -336,13 +338,14 @@ impl EnumVariantData { let item_tree = loc.id.item_tree(db); let variant = &item_tree[loc.id.value]; - let (var_data, diagnostics) = lower_fields( + let (data, diagnostics) = lower_fields( db, krate, - loc.id.file_id(), container.local_id, + loc.id.tree_id(), &item_tree, &db.crate_graph()[krate].cfg_options, + FieldParent::Variant(loc.id.value), &variant.fields, Some(item_tree[loc.parent.lookup(db).id.value].visibility), ); @@ -350,7 +353,11 @@ impl EnumVariantData { ( Arc::new(EnumVariantData { name: variant.name.clone(), - variant_data: Arc::new(var_data), + variant_data: Arc::new(match variant.shape { + FieldsShape::Record => VariantData::Record(data), + FieldsShape::Tuple 
=> VariantData::Tuple(data), + FieldsShape::Unit => VariantData::Unit, + }), }), DefDiagnostics::new(diagnostics), ) @@ -396,123 +403,35 @@ pub enum StructKind { Unit, } -pub(crate) fn lower_struct( - db: &dyn DefDatabase, - trace: &mut Trace>, - ast: &InFile, - krate: CrateId, - item_tree: &ItemTree, - fields: &Fields, -) -> StructKind { - let ctx = LowerCtx::new(db, ast.file_id); - - match (&ast.value, fields) { - (ast::StructKind::Tuple(fl), Fields::Tuple(fields)) => { - let cfg_options = &db.crate_graph()[krate].cfg_options; - for ((i, fd), item_tree_id) in fl.fields().enumerate().zip(fields.clone()) { - if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) { - continue; - } - - trace.alloc( - || Either::Left(fd.clone()), - || FieldData { - name: Name::new_tuple_field(i), - type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())), - visibility: RawVisibility::from_ast(db, fd.visibility(), &mut |range| { - ctx.span_map().span_for_range(range).ctx - }), - }, - ); - } - StructKind::Tuple - } - (ast::StructKind::Record(fl), Fields::Record(fields)) => { - let cfg_options = &db.crate_graph()[krate].cfg_options; - for (fd, item_tree_id) in fl.fields().zip(fields.clone()) { - if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) { - continue; - } - - trace.alloc( - || Either::Right(fd.clone()), - || FieldData { - name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing), - type_ref: Interned::new(TypeRef::from_ast_opt(&ctx, fd.ty())), - visibility: RawVisibility::from_ast(db, fd.visibility(), &mut |range| { - ctx.span_map().span_for_range(range).ctx - }), - }, - ); - } - StructKind::Record - } - _ => StructKind::Unit, - } -} - fn lower_fields( db: &dyn DefDatabase, krate: CrateId, - current_file_id: HirFileId, container: LocalModuleId, + tree_id: TreeId, item_tree: &ItemTree, cfg_options: &CfgOptions, - fields: &Fields, + parent: FieldParent, + fields: &[Field], override_visibility: Option, -) -> (VariantData, Vec) { +) -> (Arena, Vec) { let mut diagnostics = Vec::new(); - match fields { - Fields::Record(flds) => { - let mut arena = Arena::new(); - for field_id in flds.clone() { - let attrs = item_tree.attrs(db, krate, field_id.into()); - let field = &item_tree[field_id]; - if attrs.is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, field, override_visibility)); - } else { - diagnostics.push(DefDiagnostic::unconfigured_code( - container, - InFile::new( - current_file_id, - match field.ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }, - ), - attrs.cfg().unwrap(), - cfg_options.clone(), - )) - } - } - (VariantData::Record(arena), diagnostics) - } - Fields::Tuple(flds) => { - let mut arena = Arena::new(); - for field_id in flds.clone() { - let attrs = item_tree.attrs(db, krate, field_id.into()); - let field = &item_tree[field_id]; - if attrs.is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, field, override_visibility)); - } else { - diagnostics.push(DefDiagnostic::unconfigured_code( - container, - InFile::new( - current_file_id, - match field.ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }, - ), - attrs.cfg().unwrap(), - cfg_options.clone(), - )) - } - } - (VariantData::Tuple(arena), diagnostics) + let mut arena = Arena::new(); + for (idx, field) in fields.iter().enumerate() { + let attr_owner = AttrOwner::make_field_indexed(parent, idx); + let attrs = item_tree.attrs(db, krate, attr_owner); + if 
attrs.is_cfg_enabled(cfg_options) { + arena.alloc(lower_field(item_tree, field, override_visibility)); + } else { + diagnostics.push(DefDiagnostic::unconfigured_code( + container, + tree_id, + attr_owner, + attrs.cfg().unwrap(), + cfg_options.clone(), + )) } - Fields::Unit => (VariantData::Unit, diagnostics), } + (arena, diagnostics) } fn lower_field( diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 0eb9e7d30b25..56feb0163e13 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -1,10 +1,10 @@ //! Defines database & queries for name resolution. -use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast}; +use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use either::Either; use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId}; -use intern::Interned; +use intern::{sym, Interned}; use la_arena::ArenaMap; -use span::MacroCallId; +use span::{EditionedFileId, MacroCallId}; use syntax::{ast, AstPtr}; use triomphe::Arc; @@ -177,6 +177,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast (Arc, Arc); #[salsa::invoke(Body::body_query)] @@ -239,11 +240,14 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast bool; - fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>; + fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>; } // return: macro call id and include file id -fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> { +fn include_macro_invoc( + db: &dyn DefDatabase, + krate: CrateId, +) -> Vec<(MacroCallId, EditionedFileId)> { db.crate_def_map(krate) .modules .values() @@ -257,13 +261,13 @@ fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId } fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { - let file = db.crate_graph()[crate_id].root_file_id; + let file = db.crate_graph()[crate_id].root_file_id(); let item_tree = db.file_item_tree(file.into()); let attrs = item_tree.raw_attrs(AttrOwner::TopLevel); for attr in &**attrs { - match attr.path().as_ident().and_then(|id| id.as_text()) { - Some(ident) if ident == "no_std" => return true, - Some(ident) if ident == "cfg_attr" => {} + match attr.path().as_ident() { + Some(ident) if *ident == sym::no_std.clone() => return true, + Some(ident) if *ident == sym::cfg_attr.clone() => {} _ => continue, } @@ -278,7 +282,7 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { tt.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',')); for output in segments.skip(1) { match output { - [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std" => { + [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => { return true } _ => {} diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index dbf8e6b225c2..6d8b4445f75b 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -6,10 +6,11 @@ use base_db::CrateId; use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ - attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId, - InFile, MacroCallId, + attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind, + ExpandResult, HirFileId, InFile, Lookup, MacroCallId, }; use limit::Limit; +use span::SyntaxContextId; use syntax::{ast, Parse}; use triomphe::Arc; @@ -52,6 +53,11 @@ impl Expander { self.module.krate } + pub fn 
syntax_context(&self) -> SyntaxContextId { + // FIXME: + SyntaxContextId::ROOT + } + pub fn enter_expand( &mut self, db: &dyn DefDatabase, @@ -154,26 +160,30 @@ impl Expander { // so don't return overflow error here to avoid diagnostics duplication. cov_mark::hit!(overflow_but_not_me); return ExpandResult::ok(None); - } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { - self.recursion_depth = u32::MAX; - cov_mark::hit!(your_stack_belongs_to_me); - return ExpandResult::only_err(ExpandError::RecursionOverflow); } let ExpandResult { value, err } = op(self); let Some(call_id) = value else { return ExpandResult { value: None, err }; }; + if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { + self.recursion_depth = u32::MAX; + cov_mark::hit!(your_stack_belongs_to_me); + return ExpandResult::only_err(ExpandError::new( + db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2, + ExpandErrorKind::RecursionOverflow, + )); + } let macro_file = call_id.as_macro_file(); let res = db.parse_macro_expansion(macro_file); let err = err.or(res.err); ExpandResult { - value: match err { + value: match &err { // If proc-macro is disabled or unresolved, we want to expand to a missing expression // instead of an empty tree which might end up in an empty block. - Some(ExpandError::UnresolvedProcMacro(_)) => None, + Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)) => None, _ => (|| { let parse = res.value.0.cast::()?; diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 58a1872ef251..91594aecd044 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -2,10 +2,12 @@ use std::{cell::Cell, cmp::Ordering, iter}; +use base_db::{CrateId, CrateOrigin, LangCrateOrigin}; use hir_expand::{ - name::{known, AsName, Name}, + name::{AsName, Name}, Lookup, }; +use intern::sym; use rustc_hash::FxHashSet; use crate::{ @@ -36,7 +38,8 @@ pub fn find_path( // within block modules, forcing a `self` or `crate` prefix will not allow using inner items, so // default to plain paths. 
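The reworked `within_limit` above resolves the macro call first and only then checks the recursion limit, poisoning the depth counter once it overflows so nested calls bail out quietly. A self-contained sketch of that guard, with plain types standing in for the real expander and error machinery:

// Illustrative sketch: a depth counter checked against a fixed limit, with the
// counter poisoned (set to MAX) once the limit is hit so nested calls return early.
struct Expander {
    depth: u32,
    limit: u32,
}

#[derive(Debug, PartialEq)]
enum ExpandError {
    RecursionOverflow,
}

impl Expander {
    fn within_limit(
        &mut self,
        f: impl FnOnce() -> Option<u32>,
    ) -> Result<Option<u32>, ExpandError> {
        if self.depth == u32::MAX {
            // A parent call already overflowed; report nothing extra here.
            return Ok(None);
        }
        // Resolve the call first; only a successfully resolved call counts against the limit.
        let Some(call) = f() else { return Ok(None) };
        if self.depth + 1 > self.limit {
            self.depth = u32::MAX;
            return Err(ExpandError::RecursionOverflow);
        }
        self.depth += 1;
        Ok(Some(call))
    }
}

fn main() {
    let mut exp = Expander { depth: 0, limit: 1 };
    assert_eq!(exp.within_limit(|| Some(1)), Ok(Some(1)));
    assert_eq!(exp.within_limit(|| Some(2)), Err(ExpandError::RecursionOverflow));
    assert_eq!(exp.within_limit(|| Some(3)), Ok(None));
}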
- if item.module(db).is_some_and(ModuleId::is_within_block) { + let item_module = item.module(db)?; + if item_module.is_within_block() { prefix_kind = PrefixKind::Plain; } cfg.prefer_no_std = cfg.prefer_no_std || db.crate_supports_no_std(from.krate()); @@ -53,23 +56,17 @@ pub fn find_path( }, item, MAX_PATH_LEN, + db.crate_graph()[item_module.krate()].origin.is_lang(), ) } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] enum Stability { Unstable, Stable, } use Stability::*; -fn zip_stability(a: Stability, b: Stability) -> Stability { - match (a, b) { - (Stable, Stable) => Stable, - _ => Unstable, - } -} - const MAX_PATH_LEN: usize = 15; const FIND_PATH_FUEL: usize = 10000; @@ -107,12 +104,18 @@ struct FindPathCtx<'db> { } /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId -fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option { +fn find_path_inner( + ctx: &FindPathCtx<'_>, + item: ItemInNs, + max_len: usize, + is_std_item: bool, +) -> Option { // - if the item is a module, jump straight to module search - if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { - let mut visited_modules = FxHashSet::default(); - return find_path_for_module(ctx, &mut visited_modules, module_id, max_len) - .map(|(item, _)| item); + if !is_std_item { + if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { + return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) + .map(|choice| choice.path); + } } let may_be_in_scope = match ctx.prefix { @@ -130,14 +133,17 @@ fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Opt // - if the item is in the prelude, return the name from there if let Some(value) = find_in_prelude(ctx.db, ctx.from_def_map, item, ctx.from) { - return Some(value); + return Some(value.path); } if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { // - if the item is an enum variant, refer to it via the enum - if let Some(mut path) = - find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len) - { + if let Some(mut path) = find_path_inner( + ctx, + ItemInNs::Types(variant.lookup(ctx.db).parent.into()), + max_len, + is_std_item, + ) { path.push_segment(ctx.db.enum_variant_data(variant).name.clone()); return Some(path); } @@ -146,22 +152,42 @@ fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Opt // variant somewhere } - let mut visited_modules = FxHashSet::default(); + if is_std_item { + // The item we are searching for comes from the sysroot libraries, so skip prefer looking in + // the sysroot libraries directly. + // We do need to fallback as the item in question could be re-exported by another crate + // while not being a transitive dependency of the current crate. 
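Deriving `PartialOrd`/`Ord` on `Stability` (with `Unstable` declared before `Stable`) is what lets the removed `zip_stability` helper collapse into ordinary comparisons. A small standalone sketch of the equivalence:

// Illustrative sketch: with `Ord` derived, declaration order gives Unstable < Stable,
// so the old `zip_stability` is just `min`, and stability can lead a comparison chain.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Stability {
    Unstable,
    Stable,
}

fn main() {
    use Stability::*;
    // Combining two stabilities: any Unstable component makes the result Unstable.
    assert_eq!(Stable.min(Unstable), Unstable);
    assert_eq!(Stable.min(Stable), Stable);
    // And as the first key of a preference ordering:
    assert!(Unstable < Stable);
}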
+ if let Some(choice) = find_in_sysroot(ctx, &mut FxHashSet::default(), item, max_len) { + return Some(choice.path); + } + } - calculate_best_path(ctx, &mut visited_modules, item, max_len).map(|(item, _)| item) + let mut best_choice = None; + calculate_best_path(ctx, &mut FxHashSet::default(), item, max_len, &mut best_choice); + best_choice.map(|choice| choice.path) } #[tracing::instrument(skip_all)] fn find_path_for_module( ctx: &FindPathCtx<'_>, - visited_modules: &mut FxHashSet, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, module_id: ModuleId, + maybe_extern: bool, max_len: usize, -) -> Option<(ModPath, Stability)> { +) -> Option { + if max_len == 0 { + // recursive base case, we can't find a path of length 0 + return None; + } if let Some(crate_root) = module_id.as_crate_root() { - if crate_root == ctx.from.derive_crate_root() { + if !maybe_extern || crate_root == ctx.from.derive_crate_root() { // - if the item is the crate root, return `crate` - return Some((ModPath::from_segments(PathKind::Crate, None), Stable)); + return Some(Choice { + path: ModPath::from_segments(PathKind::Crate, None), + path_text_len: 5, + stability: Stable, + prefer_due_to_prelude: false, + }); } // - otherwise if the item is the crate root of a dependency crate, return the name from the extern prelude @@ -183,10 +209,12 @@ fn find_path_for_module( let kind = if name_already_occupied_in_type_ns { cov_mark::hit!(ambiguous_crate_start); PathKind::Abs + } else if ctx.cfg.prefer_absolute { + PathKind::Abs } else { PathKind::Plain }; - return Some((ModPath::from_segments(kind, iter::once(name.clone())), Stable)); + return Some(Choice::new(ctx.cfg.prefer_prelude, kind, name.clone(), Stable)); } } @@ -204,27 +232,39 @@ fn find_path_for_module( ); if let Some(scope_name) = scope_name { // - if the item is already in scope, return the name under which it is - return Some(( - ModPath::from_segments(ctx.prefix.path_kind(), iter::once(scope_name)), + return Some(Choice::new( + ctx.cfg.prefer_prelude, + ctx.prefix.path_kind(), + scope_name, Stable, )); } } // - if the module can be referenced as self, super or crate, do that - if let Some(mod_path) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) { - if ctx.prefix != PrefixKind::ByCrate || mod_path.kind == PathKind::Crate { - return Some((mod_path, Stable)); + if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) { + if ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate { + return Some(Choice { + path: ModPath::from_segments(kind, None), + path_text_len: path_kind_len(kind), + stability: Stable, + prefer_due_to_prelude: false, + }); } } // - if the module is in the prelude, return it by that path - if let Some(mod_path) = - find_in_prelude(ctx.db, ctx.from_def_map, ItemInNs::Types(module_id.into()), ctx.from) - { - return Some((mod_path, Stable)); + let item = ItemInNs::Types(module_id.into()); + if let Some(choice) = find_in_prelude(ctx.db, ctx.from_def_map, item, ctx.from) { + return Some(choice); } - calculate_best_path(ctx, visited_modules, ItemInNs::Types(module_id.into()), max_len) + let mut best_choice = None; + if maybe_extern { + calculate_best_path(ctx, visited_modules, item, max_len, &mut best_choice); + } else { + calculate_best_path_local(ctx, visited_modules, item, max_len, &mut best_choice); + } + best_choice } fn find_in_scope( @@ -249,7 +289,7 @@ fn find_in_prelude( local_def_map: &DefMap, item: ItemInNs, from: ModuleId, -) -> Option { +) -> Option { let (prelude_module, _) = 
local_def_map.prelude()?; let prelude_def_map = prelude_module.def_map(db); let prelude_scope = &prelude_def_map[prelude_module.local_id].scope; @@ -271,7 +311,7 @@ fn find_in_prelude( }); if found_and_same_def.unwrap_or(true) { - Some(ModPath::from_segments(PathKind::Plain, iter::once(name.clone()))) + Some(Choice::new(false, PathKind::Plain, name.clone(), Stable)) } else { None } @@ -281,7 +321,7 @@ fn is_kw_kind_relative_to_from( def_map: &DefMap, item: ModuleId, from: ModuleId, -) -> Option { +) -> Option { if item.krate != from.krate || item.is_within_block() || from.is_within_block() { return None; } @@ -289,14 +329,11 @@ fn is_kw_kind_relative_to_from( let from = from.local_id; if item == from { // - if the item is the module we're in, use `self` - Some(ModPath::from_segments(PathKind::SELF, None)) + Some(PathKind::SELF) } else if let Some(parent_id) = def_map[from].parent { if item == parent_id { // - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly) - Some(ModPath::from_segments( - if parent_id == DefMap::ROOT { PathKind::Crate } else { PathKind::Super(1) }, - None, - )) + Some(if parent_id == DefMap::ROOT { PathKind::Crate } else { PathKind::Super(1) }) } else { None } @@ -308,15 +345,11 @@ fn is_kw_kind_relative_to_from( #[tracing::instrument(skip_all)] fn calculate_best_path( ctx: &FindPathCtx<'_>, - visited_modules: &mut FxHashSet, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, item: ItemInNs, max_len: usize, -) -> Option<(ModPath, Stability)> { - if max_len <= 1 { - // recursive base case, we can't find a path prefix of length 0, one segment is occupied by - // the item's name itself. - return None; - } + best_choice: &mut Option, +) { let fuel = ctx.fuel.get(); if fuel == 0 { // we ran out of fuel, so we stop searching here @@ -325,144 +358,208 @@ fn calculate_best_path( item.krate(ctx.db), ctx.from.krate() ); - return None; + return; } ctx.fuel.set(fuel - 1); - let mut best_path = None; - let mut best_path_len = max_len; - let mut process = |mut path: (ModPath, Stability), name, best_path_len: &mut _| { - path.0.push_segment(name); - let new_path = match best_path.take() { - Some(best_path) => select_best_path(best_path, path, ctx.cfg), - None => path, - }; - if new_path.1 == Stable { - *best_path_len = new_path.0.len(); - } - match &mut best_path { - Some((old_path, old_stability)) => { - *old_path = new_path.0; - *old_stability = zip_stability(*old_stability, new_path.1); - } - None => best_path = Some(new_path), - } - }; - let db = ctx.db; - if item.krate(db) == Some(ctx.from.krate) { + if item.krate(ctx.db) == Some(ctx.from.krate) { // Item was defined in the same crate that wants to import it. It cannot be found in any // dependency in this case. - // FIXME: cache the `find_local_import_locations` output? - find_local_import_locations(db, item, ctx.from, ctx.from_def_map, |name, module_id| { - if !visited_modules.insert(module_id) { - return; - } - // we are looking for paths of length up to best_path_len, any longer will make it be - // less optimal. The -1 is due to us pushing name onto it afterwards. - if let Some(path) = - find_path_for_module(ctx, visited_modules, module_id, best_path_len - 1) - { - process(path, name.clone(), &mut best_path_len); - } - }) + calculate_best_path_local(ctx, visited_modules, item, max_len, best_choice) } else { // Item was defined in some upstream crate. This means that it must be exported from one, // too (unless we can't name it at all). 
It could *also* be (re)exported by the same crate // that wants to import it here, but we always prefer to use the external path here. - for dep in &db.crate_graph()[ctx.from.krate].dependencies { - let import_map = db.import_map(dep.crate_id); - let Some(import_info_for) = import_map.import_info_for(item) else { continue }; - for info in import_info_for { - if info.is_doc_hidden { - // the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate - continue; - } - - // Determine best path for containing module and append last segment from `info`. - // FIXME: we should guide this to look up the path locally, or from the same crate again? - let path = - find_path_for_module(ctx, visited_modules, info.container, best_path_len - 1); - let Some((path, path_stability)) = path else { - continue; - }; - cov_mark::hit!(partially_imported); - let path = ( - path, - zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }), - ); + ctx.db.crate_graph()[ctx.from.krate].dependencies.iter().for_each(|dep| { + find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id) + }); + } +} - process(path, info.name.clone(), &mut best_path_len); +fn find_in_sysroot( + ctx: &FindPathCtx<'_>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + item: ItemInNs, + max_len: usize, +) -> Option { + let crate_graph = ctx.db.crate_graph(); + let dependencies = &crate_graph[ctx.from.krate].dependencies; + let mut best_choice = None; + let mut search = |lang, best_choice: &mut _| { + if let Some(dep) = dependencies.iter().filter(|it| it.is_sysroot()).find(|dep| { + match crate_graph[dep.crate_id].origin { + CrateOrigin::Lang(l) => l == lang, + _ => false, } + }) { + find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id); + } + }; + if ctx.cfg.prefer_no_std { + search(LangCrateOrigin::Core, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; + } + search(LangCrateOrigin::Std, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; + } + } else { + search(LangCrateOrigin::Std, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; + } + search(LangCrateOrigin::Core, &mut best_choice); + if matches!(best_choice, Some(Choice { stability: Stable, .. })) { + return best_choice; } } - best_path + let mut best_choice = None; + dependencies.iter().filter(|it| it.is_sysroot()).for_each(|dep| { + find_in_dep(ctx, visited_modules, item, max_len, &mut best_choice, dep.crate_id); + }); + best_choice } -/// Select the best (most relevant) path between two paths. -/// This accounts for stability, path length whether, std should be chosen over alloc/core paths as -/// well as ignoring prelude like paths or not. 
-fn select_best_path( - old_path @ (_, old_stability): (ModPath, Stability), - new_path @ (_, new_stability): (ModPath, Stability), - cfg: ImportPathConfig, -) -> (ModPath, Stability) { - match (old_stability, new_stability) { - (Stable, Unstable) => return old_path, - (Unstable, Stable) => return new_path, - _ => {} - } - const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; - - let choose = |new: (ModPath, _), old: (ModPath, _)| { - let (new_path, _) = &new; - let (old_path, _) = &old; - let new_has_prelude = new_path.segments().iter().any(|seg| seg == &known::prelude); - let old_has_prelude = old_path.segments().iter().any(|seg| seg == &known::prelude); - match (new_has_prelude, old_has_prelude, cfg.prefer_prelude) { - (true, false, true) | (false, true, false) => new, - (true, false, false) | (false, true, true) => old, - // no prelude difference in the paths, so pick the shorter one - (true, true, _) | (false, false, _) => { - let new_path_is_shorter = new_path - .len() - .cmp(&old_path.len()) - .then_with(|| new_path.textual_len().cmp(&old_path.textual_len())) - .is_lt(); - if new_path_is_shorter { - new - } else { - old - } +fn find_in_dep( + ctx: &FindPathCtx<'_>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + item: ItemInNs, + max_len: usize, + best_choice: &mut Option, + dep: CrateId, +) { + let import_map = ctx.db.import_map(dep); + let Some(import_info_for) = import_map.import_info_for(item) else { + return; + }; + for info in import_info_for { + if info.is_doc_hidden { + // the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate + continue; + } + + // Determine best path for containing module and append last segment from `info`. + // FIXME: we should guide this to look up the path locally, or from the same crate again? + let choice = find_path_for_module( + ctx, + visited_modules, + info.container, + true, + best_choice.as_ref().map_or(max_len, |it| it.path.len()) - 1, + ); + let Some(mut choice) = choice else { + continue; + }; + cov_mark::hit!(partially_imported); + if info.is_unstable { + choice.stability = Unstable; + } + + Choice::try_select(best_choice, choice, ctx.cfg.prefer_prelude, info.name.clone()); + } +} + +fn calculate_best_path_local( + ctx: &FindPathCtx<'_>, + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + item: ItemInNs, + max_len: usize, + best_choice: &mut Option, +) { + // FIXME: cache the `find_local_import_locations` output? + find_local_import_locations( + ctx.db, + item, + ctx.from, + ctx.from_def_map, + visited_modules, + |visited_modules, name, module_id| { + // we are looking for paths of length up to best_path_len, any longer will make it be + // less optimal. The -1 is due to us pushing name onto it afterwards. 
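The comment above about passing `best_path_len - 1` reflects a shrinking length budget: the prefix search is given one segment less because the caller appends the item's own name afterwards. A toy sketch of that budgeting (not the actual search), under the assumption that a path is just a list of segment strings:

// Illustrative sketch: searching for a path within a segment budget. The prefix
// search gets `budget - 1` because one segment is reserved for the item's name.
fn find_prefix(modules: &[&str], target: &str, budget: usize) -> Option<Vec<String>> {
    if budget == 0 {
        return None;
    }
    // Toy "search": locate the target among top-level modules.
    modules.iter().find(|m| **m == target).map(|m| vec![m.to_string()])
}

fn find_item(modules: &[&str], container: &str, name: &str, budget: usize) -> Option<Vec<String>> {
    if budget == 0 {
        return None;
    }
    // Reserve one segment for `name` before searching for the containing module.
    let mut path = find_prefix(modules, container, budget - 1)?;
    path.push(name.to_string());
    Some(path)
}

fn main() {
    let modules = ["collections", "fmt"];
    assert_eq!(
        find_item(&modules, "collections", "HashMap", 2),
        Some(vec!["collections".to_string(), "HashMap".to_string()])
    );
    // A budget of 1 leaves no room for the containing module.
    assert_eq!(find_item(&modules, "collections", "HashMap", 1), None);
}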
+ if let Some(choice) = find_path_for_module( + ctx, + visited_modules, + module_id, + false, + best_choice.as_ref().map_or(max_len, |it| it.path.len()) - 1, + ) { + Choice::try_select(best_choice, choice, ctx.cfg.prefer_prelude, name.clone()); } + }, + ); +} + +struct Choice { + path: ModPath, + /// The length in characters of the path + path_text_len: usize, + /// The stability of the path + stability: Stability, + /// Whether this path contains a prelude segment and preference for it has been signaled + prefer_due_to_prelude: bool, +} + +impl Choice { + fn new(prefer_prelude: bool, kind: PathKind, name: Name, stability: Stability) -> Self { + Self { + path_text_len: path_kind_len(kind) + name.as_str().len(), + stability, + prefer_due_to_prelude: prefer_prelude && name == sym::prelude, + path: ModPath::from_segments(kind, iter::once(name)), } - }; + } - match (old_path.0.segments().first(), new_path.0.segments().first()) { - (Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => { - let rank = match cfg.prefer_no_std { - false => |name: &Name| match name { - name if name == &known::core => 0, - name if name == &known::alloc => 1, - name if name == &known::std => 2, - _ => unreachable!(), - }, - true => |name: &Name| match name { - name if name == &known::core => 2, - name if name == &known::alloc => 1, - name if name == &known::std => 0, - _ => unreachable!(), - }, - }; - let nrank = rank(new); - let orank = rank(old); - match nrank.cmp(&orank) { - Ordering::Less => old_path, - Ordering::Equal => choose(new_path, old_path), - Ordering::Greater => new_path, + fn push(mut self, prefer_prelude: bool, name: Name) -> Self { + self.path_text_len += name.as_str().len(); + self.prefer_due_to_prelude |= prefer_prelude && name == sym::prelude; + self.path.push_segment(name); + self + } + + fn try_select( + current: &mut Option, + mut other: Choice, + prefer_prelude: bool, + name: Name, + ) { + let Some(current) = current else { + *current = Some(other.push(prefer_prelude, name)); + return; + }; + match other + .stability + .cmp(¤t.stability) + .then_with(|| other.prefer_due_to_prelude.cmp(¤t.prefer_due_to_prelude)) + .then_with(|| (current.path.len()).cmp(&(other.path.len() + 1))) + { + Ordering::Less => return, + Ordering::Equal => { + other.path_text_len += name.as_str().len(); + if let Ordering::Less | Ordering::Equal = + current.path_text_len.cmp(&other.path_text_len) + { + return; + } + } + Ordering::Greater => { + other.path_text_len += name.as_str().len(); } } - _ => choose(new_path, old_path), + other.path.push_segment(name); + *current = other; + } +} + +fn path_kind_len(kind: PathKind) -> usize { + match kind { + PathKind::Plain => 0, + PathKind::Super(0) => 4, + PathKind::Super(s) => s as usize * 5, + PathKind::Crate => 5, + PathKind::Abs => 2, + PathKind::DollarCrate(_) => 0, } } @@ -472,7 +569,8 @@ fn find_local_import_locations( item: ItemInNs, from: ModuleId, def_map: &DefMap, - mut cb: impl FnMut(&Name, ModuleId), + visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, + mut cb: impl FnMut(&mut FxHashSet<(ItemInNs, ModuleId)>, &Name, ModuleId), ) { let _p = tracing::info_span!("find_local_import_locations").entered(); @@ -485,32 +583,29 @@ fn find_local_import_locations( let mut worklist = def_map[from.local_id] .children .values() - .map(|child| def_map.module_id(*child)) - // FIXME: do we need to traverse out of block expressions here? 
+ .map(|&child| def_map.module_id(child)) .chain(iter::successors(from.containing_module(db), |m| m.containing_module(db))) + .zip(iter::repeat(false)) .collect::>(); - let mut seen: FxHashSet<_> = FxHashSet::default(); let def_map = def_map.crate_root().def_map(db); - - while let Some(module) = worklist.pop() { - if !seen.insert(module) { - continue; // already processed this module + let mut block_def_map; + let mut cursor = 0; + + while let Some(&mut (module, ref mut processed)) = worklist.get_mut(cursor) { + cursor += 1; + if !visited_modules.insert((item, module)) { + // already processed this module + continue; } - let ext_def_map; - let data = if module.krate == from.krate { - if module.block.is_some() { - // Re-query the block's DefMap - ext_def_map = module.def_map(db); - &ext_def_map[module.local_id] - } else { - // Reuse the root DefMap - &def_map[module.local_id] - } + *processed = true; + let data = if module.block.is_some() { + // Re-query the block's DefMap + block_def_map = module.def_map(db); + &block_def_map[module.local_id] } else { - // The crate might reexport a module defined in another crate. - ext_def_map = module.def_map(db); - &ext_def_map[module.local_id] + // Reuse the root DefMap + &def_map[module.local_id] }; if let Some((name, vis, declared)) = data.scope.name_of(item) { @@ -533,18 +628,30 @@ fn find_local_import_locations( // the item and we're a submodule of it, so can we. // Also this keeps the cached data smaller. if declared || is_pub_or_explicit { - cb(name, module); + cb(visited_modules, name, module); } } } // Descend into all modules visible from `from`. for (module, vis) in data.scope.modules_in_scope() { + if module.krate != from.krate { + // We don't need to look at modules from other crates as our item has to be in the + // current crate + continue; + } + if visited_modules.contains(&(item, module)) { + continue; + } + if vis.is_visible_from(db, from) { - worklist.push(module); + worklist.push((module, false)); } } } + worklist.into_iter().filter(|&(_, processed)| processed).for_each(|(module, _)| { + visited_modules.remove(&(item, module)); + }); } #[cfg(test)] @@ -564,7 +671,13 @@ mod tests { /// item the `path` refers to returns that same path when called from the /// module the cursor is in. 
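The rewritten `find_local_import_locations` above walks its worklist with an index cursor (so queued entries stay addressable for the cleanup pass at the end) and uses a visited set to avoid re-processing modules. A standalone sketch of that traversal shape, using a plain `HashSet` and a toy edge list rather than `DefMap` data:

// Illustrative sketch: a worklist walked with an index cursor plus a visited set.
use std::collections::HashSet;

fn reachable(edges: &[(u32, u32)], start: u32) -> Vec<u32> {
    let mut visited: HashSet<u32> = HashSet::new();
    let mut worklist = vec![start];
    let mut cursor = 0;
    let mut out = Vec::new();
    while let Some(&node) = worklist.get(cursor) {
        cursor += 1;
        if !visited.insert(node) {
            // Already processed this node.
            continue;
        }
        out.push(node);
        for &(from, to) in edges {
            if from == node && !visited.contains(&to) {
                worklist.push(to);
            }
        }
    }
    out
}

fn main() {
    let edges = [(0, 1), (0, 2), (1, 2), (2, 0)];
    assert_eq!(reachable(&edges, 0), vec![0, 1, 2]);
}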
#[track_caller] - fn check_found_path_(ra_fixture: &str, path: &str, prefer_prelude: bool, expect: Expect) { + fn check_found_path_( + ra_fixture: &str, + path: &str, + prefer_prelude: bool, + prefer_absolute: bool, + expect: Expect, + ) { let (db, pos) = TestDB::with_position(ra_fixture); let module = db.module_at_position(pos); let parsed_path_file = @@ -604,7 +717,7 @@ mod tests { module, prefix, ignore_local_imports, - ImportPathConfig { prefer_no_std: false, prefer_prelude }, + ImportPathConfig { prefer_no_std: false, prefer_prelude, prefer_absolute }, ); format_to!( res, @@ -619,11 +732,15 @@ mod tests { } fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, false, expect); + check_found_path_(ra_fixture, path, false, false, expect); } fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, true, expect); + check_found_path_(ra_fixture, path, true, false, expect); + } + + fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) { + check_found_path_(ra_fixture, path, false, true, expect); } #[test] @@ -870,6 +987,39 @@ pub mod ast { ); } + #[test] + fn partially_imported_with_prefer_absolute() { + cov_mark::check!(partially_imported); + // Similar to partially_imported test case above, but with prefer_absolute enabled. + // Even if the actual imported item is in external crate, if the path to that item + // is starting from the imported name, then the path should not start from "::". + // i.e. The first line in the expected output should not start from "::". + check_found_path_absolute( + r#" +//- /main.rs crate:main deps:syntax + +use syntax::ast; +$0 + +//- /lib.rs crate:syntax +pub mod ast { + pub enum ModuleItem { + A, B, C, + } +} + "#, + "syntax::ast::ModuleItem", + expect![[r#" + Plain (imports ✔): ast::ModuleItem + Plain (imports ✖): ::syntax::ast::ModuleItem + ByCrate(imports ✔): crate::ast::ModuleItem + ByCrate(imports ✖): ::syntax::ast::ModuleItem + BySelf (imports ✔): self::ast::ModuleItem + BySelf (imports ✖): ::syntax::ast::ModuleItem + "#]], + ); + } + #[test] fn same_crate_reexport() { check_found_path( @@ -1469,8 +1619,6 @@ fn main() { #[test] fn from_inside_module() { - // This worked correctly, but the test suite logic was broken. 
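The tests above use inline snapshots via `expect![[...]]`. A minimal usage sketch, assuming the `expect-test` crate as a dev-dependency (as rust-analyzer's test suite does):

// Minimal expect-test usage sketch (assumes `expect-test` is available as a dev-dependency).
#[cfg(test)]
mod tests {
    use expect_test::expect;

    #[test]
    fn snapshot_example() {
        let actual = format!("{:?}", vec![1, 2, 3]);
        // On mismatch, rerunning with UPDATE_EXPECT=1 rewrites the literal in place.
        expect![[r#"[1, 2, 3]"#]].assert_eq(&actual);
    }
}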
- cov_mark::check!(submodule_in_testdb); check_found_path( r#" mod baz { @@ -1495,6 +1643,35 @@ mod bar { ) } + #[test] + fn from_inside_module2() { + check_found_path( + r#" +mod qux { + pub mod baz { + pub struct Foo {} + } + + mod bar { + fn bar() { + $0; + } + } +} + + "#, + "crate::qux::baz::Foo", + expect![[r#" + Plain (imports ✔): super::baz::Foo + Plain (imports ✖): super::baz::Foo + ByCrate(imports ✔): crate::qux::baz::Foo + ByCrate(imports ✖): crate::qux::baz::Foo + BySelf (imports ✔): super::baz::Foo + BySelf (imports ✖): super::baz::Foo + "#]], + ) + } + #[test] fn from_inside_module_with_inner_items() { check_found_path( @@ -1769,6 +1946,43 @@ pub mod foo { ); } + #[test] + fn respects_absolute_setting() { + let ra_fixture = r#" +//- /main.rs crate:main deps:krate +$0 +//- /krate.rs crate:krate +pub mod foo { + pub struct Foo; +} +"#; + check_found_path( + ra_fixture, + "krate::foo::Foo", + expect![[r#" + Plain (imports ✔): krate::foo::Foo + Plain (imports ✖): krate::foo::Foo + ByCrate(imports ✔): krate::foo::Foo + ByCrate(imports ✖): krate::foo::Foo + BySelf (imports ✔): krate::foo::Foo + BySelf (imports ✖): krate::foo::Foo + "#]], + ); + + check_found_path_absolute( + ra_fixture, + "krate::foo::Foo", + expect![[r#" + Plain (imports ✔): ::krate::foo::Foo + Plain (imports ✖): ::krate::foo::Foo + ByCrate(imports ✔): ::krate::foo::Foo + ByCrate(imports ✖): ::krate::foo::Foo + BySelf (imports ✔): ::krate::foo::Foo + BySelf (imports ✖): ::krate::foo::Foo + "#]], + ); + } + #[test] fn respect_segment_length() { check_found_path( @@ -1833,4 +2047,34 @@ pub fn c() {} "#]], ); } + + #[test] + fn prefer_long_std_over_short_extern() { + check_found_path( + r#" +//- /lib.rs crate:main deps:futures_lite,std,core +$0 +//- /futures_lite.rs crate:futures_lite deps:std,core +pub use crate::future::Future; +pub mod future { + pub use core::future::Future; +} +//- /std.rs crate:std deps:core +pub use core::future; +//- /core.rs crate:core +pub mod future { + pub trait Future {} +} +"#, + "core::future::Future", + expect![[r#" + Plain (imports ✔): std::future::Future + Plain (imports ✖): std::future::Future + ByCrate(imports ✔): std::future::Future + ByCrate(imports ✖): std::future::Future + BySelf (imports ✔): std::future::Future + BySelf (imports ✖): std::future::Future + "#]], + ); + } } diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index ca02501567c0..ebaaef66db6c 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -28,6 +28,7 @@ use crate::{ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; +/// The index of the self param in the generic of the non-parent definition. const SELF_PARAM_ID_IN_SELF: la_arena::Idx = LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0)); @@ -158,9 +159,9 @@ pub enum GenericParamDataRef<'a> { /// Data about the generic parameters of a function, struct, impl, etc. 
#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct GenericParams { - pub type_or_consts: Arena, - pub lifetimes: Arena, - pub where_predicates: Box<[WherePredicate]>, + type_or_consts: Arena, + lifetimes: Arena, + where_predicates: Box<[WherePredicate]>, } impl ops::Index for GenericParams { @@ -205,6 +206,219 @@ pub enum WherePredicateTypeTarget { TypeOrConstParam(LocalTypeOrConstParamId), } +impl GenericParams { + /// Number of Generic parameters (type_or_consts + lifetimes) + #[inline] + pub fn len(&self) -> usize { + self.type_or_consts.len() + self.lifetimes.len() + } + + #[inline] + pub fn len_lifetimes(&self) -> usize { + self.lifetimes.len() + } + + #[inline] + pub fn len_type_or_consts(&self) -> usize { + self.type_or_consts.len() + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + #[inline] + pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> { + self.where_predicates.iter() + } + + /// Iterator of type_or_consts field + #[inline] + pub fn iter_type_or_consts( + &self, + ) -> impl DoubleEndedIterator { + self.type_or_consts.iter() + } + + /// Iterator of lifetimes field + #[inline] + pub fn iter_lt( + &self, + ) -> impl DoubleEndedIterator { + self.lifetimes.iter() + } + + pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.type_param().is_some() { + Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.const_param().is_some() { + Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + #[inline] + pub fn trait_self_param(&self) -> Option { + if self.type_or_consts.is_empty() { + return None; + } + matches!( + self.type_or_consts[SELF_PARAM_ID_IN_SELF], + TypeOrConstParamData::TypeParamData(TypeParamData { + provenance: TypeParamProvenance::TraitSelf, + .. + }) + ) + .then(|| SELF_PARAM_ID_IN_SELF) + } + + pub fn find_lifetime_by_name( + &self, + name: &Name, + parent: GenericDefId, + ) -> Option { + self.lifetimes.iter().find_map(|(id, p)| { + if &p.name == name { + Some(LifetimeParamId { local_id: id, parent }) + } else { + None + } + }) + } + + pub(crate) fn generic_params_query( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> Interned { + let _p = tracing::info_span!("generic_params_query").entered(); + + let krate = def.krate(db); + let cfg_options = db.crate_graph(); + let cfg_options = &cfg_options[krate].cfg_options; + + // Returns the generic parameters that are enabled under the current `#[cfg]` options + let enabled_params = + |params: &Interned, item_tree: &ItemTree, parent: GenericModItem| { + let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); + let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); + let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); + + // In the common case, no parameters will by disabled by `#[cfg]` attributes. + // Therefore, make a first pass to check if all parameters are enabled and, if so, + // clone the `Interned` instead of recreating an identical copy. 
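`generic_params_query` above first checks whether every parameter survives `#[cfg]` filtering and, if so, clones the interned value instead of rebuilding an identical copy. A standalone sketch of that fast path, with `Arc` standing in for `Interned`:

// Illustrative sketch of the "clone if nothing is disabled" fast path.
use std::sync::Arc;

fn enabled_params(params: &Arc<Vec<(String, bool)>>) -> Arc<Vec<(String, bool)>> {
    // First pass: if every entry is enabled, share the existing allocation.
    if params.iter().all(|&(_, enabled)| enabled) {
        return Arc::clone(params);
    }
    // Slow path: rebuild with the disabled entries filtered out.
    Arc::new(params.iter().filter(|&&(_, enabled)| enabled).cloned().collect())
}

fn main() {
    let all_on = Arc::new(vec![("T".to_string(), true), ("'a".to_string(), true)]);
    let shared = enabled_params(&all_on);
    // Fast path: the same allocation is reused.
    assert!(Arc::ptr_eq(&all_on, &shared));

    let mixed = Arc::new(vec![("T".to_string(), true), ("U".to_string(), false)]);
    assert_eq!(enabled_params(&mixed).len(), 1);
}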
+ let all_type_or_consts_enabled = + params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx))); + let all_lifetimes_enabled = + params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx))); + + if all_type_or_consts_enabled && all_lifetimes_enabled { + params.clone() + } else { + Interned::new(GenericParams { + type_or_consts: all_type_or_consts_enabled + .then(|| params.type_or_consts.clone()) + .unwrap_or_else(|| { + params + .type_or_consts + .iter() + .filter(|&(idx, _)| enabled(attr_owner_ct(idx))) + .map(|(_, param)| param.clone()) + .collect() + }), + lifetimes: all_lifetimes_enabled + .then(|| params.lifetimes.clone()) + .unwrap_or_else(|| { + params + .lifetimes + .iter() + .filter(|&(idx, _)| enabled(attr_owner_lt(idx))) + .map(|(_, param)| param.clone()) + .collect() + }), + where_predicates: params.where_predicates.clone(), + }) + } + }; + fn id_to_generics( + db: &dyn DefDatabase, + id: impl for<'db> Lookup< + Database<'db> = dyn DefDatabase + 'db, + Data = impl ItemTreeLoc, + >, + enabled_params: impl Fn( + &Interned, + &ItemTree, + GenericModItem, + ) -> Interned, + ) -> Interned + where + FileItemTreeId: Into, + { + let id = id.lookup(db).item_tree_id(); + let tree = id.item_tree(db); + let item = &tree[id.value]; + enabled_params(item.generic_params(), &tree, id.value.into()) + } + + match def { + GenericDefId::FunctionId(id) => { + let loc = id.lookup(db); + let tree = loc.id.item_tree(db); + let item = &tree[loc.id.value]; + + let enabled_params = + enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into()); + + let module = loc.container.module(db); + let func_data = db.function_data(id); + if func_data.params.is_empty() { + enabled_params + } else { + let mut generic_params = GenericParamsCollector { + type_or_consts: enabled_params.type_or_consts.clone(), + lifetimes: enabled_params.lifetimes.clone(), + where_predicates: enabled_params.where_predicates.clone().into(), + }; + + // Don't create an `Expander` if not needed since this + // could cause a reparse after the `ItemTree` has been created due to the spanmap. 
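The `Lazy::new` call above defers building the `Expander` (and the reparse it can trigger) until a parameter actually needs it. The same defer-until-first-use pattern with std's `LazyCell`, as an illustrative sketch:

// Illustrative sketch of deferring an expensive construction until first use,
// using std::cell::LazyCell (Rust 1.80+); the hunk above uses a `Lazy` wrapper
// for the same purpose.
use std::cell::LazyCell;

fn main() {
    let expander = LazyCell::new(|| {
        println!("building the expensive value");
        vec![0u32; 4]
    });

    let needs_it = true;
    if needs_it {
        // First access runs the closure; later accesses reuse the cached value.
        println!("len = {}", expander.len());
        println!("len again = {}", expander.len());
    }
    // If `needs_it` were false, the closure would never run at all.
}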
+ let mut expander = Lazy::new(|| { + (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) + }); + for param in func_data.params.iter() { + generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); + } + Interned::new(generic_params.finish()) + } + } + GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::ConstId(_) => Interned::new(GenericParams { + type_or_consts: Default::default(), + lifetimes: Default::default(), + where_predicates: Default::default(), + }), + } + } +} + #[derive(Clone, Default)] pub(crate) struct GenericParamsCollector { pub(crate) type_or_consts: Arena, @@ -441,200 +655,3 @@ impl GenericParamsCollector { } } } - -impl GenericParams { - /// Number of Generic parameters (type_or_consts + lifetimes) - #[inline] - pub fn len(&self) -> usize { - self.type_or_consts.len() + self.lifetimes.len() - } - - #[inline] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Iterator of type_or_consts field - #[inline] - pub fn iter_type_or_consts( - &self, - ) -> impl DoubleEndedIterator { - self.type_or_consts.iter() - } - - /// Iterator of lifetimes field - #[inline] - pub fn iter_lt( - &self, - ) -> impl DoubleEndedIterator { - self.lifetimes.iter() - } - - pub(crate) fn generic_params_query( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> Interned { - let _p = tracing::info_span!("generic_params_query").entered(); - - let krate = def.module(db).krate; - let cfg_options = db.crate_graph(); - let cfg_options = &cfg_options[krate].cfg_options; - - // Returns the generic parameters that are enabled under the current `#[cfg]` options - let enabled_params = - |params: &Interned, item_tree: &ItemTree, parent: GenericModItem| { - let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); - let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); - let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); - - // In the common case, no parameters will by disabled by `#[cfg]` attributes. - // Therefore, make a first pass to check if all parameters are enabled and, if so, - // clone the `Interned` instead of recreating an identical copy. 
- let all_type_or_consts_enabled = - params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx))); - let all_lifetimes_enabled = - params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx))); - - if all_type_or_consts_enabled && all_lifetimes_enabled { - params.clone() - } else { - Interned::new(GenericParams { - type_or_consts: all_type_or_consts_enabled - .then(|| params.type_or_consts.clone()) - .unwrap_or_else(|| { - params - .type_or_consts - .iter() - .filter(|&(idx, _)| enabled(attr_owner_ct(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - lifetimes: all_lifetimes_enabled - .then(|| params.lifetimes.clone()) - .unwrap_or_else(|| { - params - .lifetimes - .iter() - .filter(|&(idx, _)| enabled(attr_owner_lt(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - where_predicates: params.where_predicates.clone(), - }) - } - }; - fn id_to_generics( - db: &dyn DefDatabase, - id: impl for<'db> Lookup< - Database<'db> = dyn DefDatabase + 'db, - Data = impl ItemTreeLoc, - >, - enabled_params: impl Fn( - &Interned, - &ItemTree, - GenericModItem, - ) -> Interned, - ) -> Interned - where - FileItemTreeId: Into, - { - let id = id.lookup(db).item_tree_id(); - let tree = id.item_tree(db); - let item = &tree[id.value]; - enabled_params(item.generic_params(), &tree, id.value.into()) - } - - match def { - GenericDefId::FunctionId(id) => { - let loc = id.lookup(db); - let tree = loc.id.item_tree(db); - let item = &tree[loc.id.value]; - - let enabled_params = - enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into()); - - let module = loc.container.module(db); - let func_data = db.function_data(id); - if func_data.params.is_empty() { - enabled_params - } else { - let mut generic_params = GenericParamsCollector { - type_or_consts: enabled_params.type_or_consts.clone(), - lifetimes: enabled_params.lifetimes.clone(), - where_predicates: enabled_params.where_predicates.clone().into(), - }; - - // Don't create an `Expander` if not needed since this - // could cause a reparse after the `ItemTree` has been created due to the spanmap. 
- let mut expander = Lazy::new(|| { - (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) - }); - for param in func_data.params.iter() { - generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); - } - Interned::new(generic_params.finish()) - } - } - GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ConstId(_) => Interned::new(GenericParams { - type_or_consts: Default::default(), - lifetimes: Default::default(), - where_predicates: Default::default(), - }), - } - } - - pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.type_param().is_some() { - Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.const_param().is_some() { - Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn trait_self_param(&self) -> Option { - if self.type_or_consts.is_empty() { - return None; - } - matches!( - self.type_or_consts[SELF_PARAM_ID_IN_SELF], - TypeOrConstParamData::TypeParamData(TypeParamData { - provenance: TypeParamProvenance::TraitSelf, - .. - }) - ) - .then(|| SELF_PARAM_ID_IN_SELF) - } - - pub fn find_lifetime_by_name( - &self, - name: &Name, - parent: GenericDefId, - ) -> Option { - self.lifetimes.iter().find_map(|(id, p)| { - if &p.name == name { - Some(LifetimeParamId { local_id: id, parent }) - } else { - None - } - }) - } -} diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index fd6f4a3d089d..86fd092603ff 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -18,9 +18,9 @@ pub mod type_ref; use std::fmt; use hir_expand::name::Name; -use intern::Interned; +use intern::{Interned, Symbol}; use la_arena::{Idx, RawIdx}; -use smallvec::SmallVec; +use rustc_apfloat::ieee::{Half as f16, Quad as f128}; use syntax::ast; use crate::{ @@ -56,42 +56,51 @@ pub struct Label { } pub type LabelId = Idx
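The new `hir.rs` imports bring in `rustc_apfloat`'s `Half`/`Quad` for float widths that std cannot represent natively. Float literals in HIR are typically kept as bit patterns so the literal enum can stay `Eq`/`Hash`; a standalone sketch of that wrapper pattern for `f64` (the `FloatBits` type below is hypothetical and illustrative, not code from this diff):

// Illustrative sketch: wrapping a float's bit pattern so a literal enum can derive
// Eq and Hash (f64 itself implements neither).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct FloatBits(u64);

impl FloatBits {
    fn new(value: f64) -> Self {
        Self(value.to_bits())
    }
    fn to_f64(self) -> f64 {
        f64::from_bits(self.0)
    }
}

fn main() {
    let a = FloatBits::new(1.5);
    let b = FloatBits::new(1.5);
    // Identical bit patterns compare equal and hash identically.
    assert_eq!(a, b);
    assert_eq!(a.to_f64(), 1.5);
}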