diff --git a/Cargo.lock b/Cargo.lock index cafc623c185a6..3b6d68a70cc86 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1671,6 +1671,7 @@ dependencies = [ "compiler_builtins", "rustc-std-workspace-alloc", "rustc-std-workspace-core", + "serde", ] [[package]] @@ -1880,6 +1881,12 @@ dependencies = [ "syn 2.0.67", ] +[[package]] +name = "id-arena" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" + [[package]] name = "ident_case" version = "1.0.1" @@ -2109,6 +2116,12 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" +[[package]] +name = "lexopt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baff4b617f7df3d896f97fe922b64817f6cd9a756bb81d40f8883f2f66dcb401" + [[package]] name = "libc" version = "0.2.155" @@ -2623,7 +2636,7 @@ dependencies = [ "indexmap", "memchr", "ruzstd 0.5.0", - "wasmparser", + "wasmparser 0.118.2", ] [[package]] @@ -3425,7 +3438,7 @@ dependencies = [ "object 0.34.0", "regex", "similar", - "wasmparser", + "wasmparser 0.118.2", ] [[package]] @@ -3812,7 +3825,7 @@ dependencies = [ "thin-vec", "thorin-dwp", "tracing", - "wasm-encoder", + "wasm-encoder 0.200.0", "windows", ] @@ -5256,6 +5269,15 @@ dependencies = [ "color-eyre", ] +[[package]] +name = "spdx" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47317bbaf63785b53861e1ae2d11b80d6b624211d42cb20efcd210ee6f8a14bc" +dependencies = [ + "smallvec", +] + [[package]] name = "spdx-expression" version = "0.5.2" @@ -6304,6 +6326,28 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +[[package]] +name = "wasm-component-ld" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "314d932d5e84c9678751b85498b1482b2f32f185744e449d3ce0b1d400376dad" +dependencies = [ + "anyhow", + "clap", + "lexopt", + "tempfile", + "wasmparser 0.210.0", + "wat", + "wit-component", +] + +[[package]] +name = "wasm-component-ld-wrapper" +version = "0.1.0" +dependencies = [ + "wasm-component-ld", +] + [[package]] name = "wasm-encoder" version = "0.200.0" @@ -6313,6 +6357,40 @@ dependencies = [ "leb128", ] +[[package]] +name = "wasm-encoder" +version = "0.210.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7e3764d9d6edabd8c9e16195e177be0d20f6ab942ad18af52860f12f82bc59a" +dependencies = [ + "leb128", +] + +[[package]] +name = "wasm-encoder" +version = "0.211.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e7d931a1120ef357f32b74547646b6fa68ea25e377772b72874b131a9ed70d4" +dependencies = [ + "leb128", +] + +[[package]] +name = "wasm-metadata" +version = "0.210.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "012729d1294907fcb0866f08460ab95426a6d0b176a599619b84cac7653452b4" +dependencies = [ + "anyhow", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "wasm-encoder 0.210.0", + "wasmparser 0.210.0", +] + [[package]] name = "wasmparser" version = "0.118.2" @@ -6323,6 +6401,42 @@ dependencies = [ "semver", ] +[[package]] +name = "wasmparser" +version = "0.210.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7bbcd21e7581619d9f6ca00f8c4f08f1cacfe58bf63f83af57cd0476f1026f5" +dependencies = [ + "ahash", + "bitflags 2.5.0", + "hashbrown", + "indexmap", + "semver", + "serde", +] + +[[package]] +name = "wast" +version = "211.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b25506dd82d00da6b14a87436b3d52b1d264083fa79cdb72a0d1b04a8595ccaa" +dependencies = [ + "bumpalo", + "leb128", + "memchr", + "unicode-width", + "wasm-encoder 0.211.1", +] + +[[package]] +name = "wat" +version = "1.211.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb716ca6c86eecac2d82541ffc39860118fc0af9309c4f2670637bea2e1bdd7d" +dependencies = [ + "wast", +] + [[package]] name = "winapi" version = "0.3.9" @@ -6550,6 +6664,43 @@ dependencies = [ "memchr", ] +[[package]] +name = "wit-component" +version = "0.210.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a450bdb5d032acf1fa0865451fa0c6f50e62f2d31eaa8dba967c2e2d068694a4" +dependencies = [ + "anyhow", + "bitflags 2.5.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder 0.210.0", + "wasm-metadata", + "wasmparser 0.210.0", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.210.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a965cbd439af19a4b44a54a97ab8957d86f02d01320efc9e31c1d3605c6710" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser 0.210.0", +] + [[package]] name = "writeable" version = "0.5.5" diff --git a/Cargo.toml b/Cargo.toml index 93c520b0d689d..ce87a8c20b77d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,6 +45,7 @@ members = [ "src/tools/opt-dist", "src/tools/coverage-dump", "src/tools/rustc-perf-wrapper", + "src/tools/wasm-component-ld", ] exclude = [ @@ -104,6 +105,9 @@ rustc-demangle.debug = 0 [profile.release.package.lld-wrapper] debug = 0 strip = true +[profile.release.package.wasm-component-ld-wrapper] +debug = 0 +strip = true [patch.crates-io] # See comments in `library/rustc-std-workspace-core/README.md` for what's going on diff --git a/INSTALL.md b/INSTALL.md index 1c2cecf8ef9b3..ded0b59fc6cd3 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -215,7 +215,7 @@ python x.py build Right now, building Rust only works with some known versions of Visual Studio. If you have a more recent version installed and the build system doesn't -understand, you may need to force rustbuild to use an older version. +understand, you may need to force bootstrap to use an older version. This can be done by manually calling the appropriate vcvars file before running the bootstrap. diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 088ae9ba44102..d2c7b1c0753da 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -202,21 +202,18 @@ impl Attribute { } } - // Named `get_tokens` to distinguish it from the `::tokens` method. 
- pub fn get_tokens(&self) -> TokenStream { - match &self.kind { - AttrKind::Normal(normal) => TokenStream::new( - normal - .tokens - .as_ref() - .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}")) - .to_attr_token_stream() - .to_token_trees(), - ), - &AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone( + pub fn token_trees(&self) -> Vec { + match self.kind { + AttrKind::Normal(ref normal) => normal + .tokens + .as_ref() + .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}")) + .to_attr_token_stream() + .to_token_trees(), + AttrKind::DocComment(comment_kind, data) => vec![TokenTree::token_alone( token::DocComment(comment_kind, self.style, data), self.span, - ), + )], } } } diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index ee068f19332a5..a92ef575777c7 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -16,7 +16,7 @@ use crate::ast::{AttrStyle, StmtKind}; use crate::ast_traits::{HasAttrs, HasTokens}; use crate::token::{self, Delimiter, Nonterminal, Token, TokenKind}; -use crate::AttrVec; +use crate::{AttrVec, Attribute}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::{self, Lrc}; @@ -179,11 +179,10 @@ impl AttrTokenStream { AttrTokenStream(Lrc::new(tokens)) } - /// Converts this `AttrTokenStream` to a plain `Vec`. - /// During conversion, `AttrTokenTree::AttrsTarget` get 'flattened' - /// back to a `TokenStream` of the form `outer_attr attr_target`. - /// If there are inner attributes, they are inserted into the proper - /// place in the attribute target tokens. + /// Converts this `AttrTokenStream` to a plain `Vec`. During + /// conversion, any `AttrTokenTree::AttrsTarget` gets "flattened" back to a + /// `TokenStream`, as described in the comment on + /// `attrs_and_tokens_to_token_trees`. pub fn to_token_trees(&self) -> Vec { let mut res = Vec::with_capacity(self.0.len()); for tree in self.0.iter() { @@ -200,51 +199,7 @@ impl AttrTokenStream { )) } AttrTokenTree::AttrsTarget(target) => { - let idx = target - .attrs - .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer)); - let (outer_attrs, inner_attrs) = target.attrs.split_at(idx); - - let mut target_tokens = target.tokens.to_attr_token_stream().to_token_trees(); - if !inner_attrs.is_empty() { - let mut found = false; - // Check the last two trees (to account for a trailing semi) - for tree in target_tokens.iter_mut().rev().take(2) { - if let TokenTree::Delimited(span, spacing, delim, delim_tokens) = tree { - // Inner attributes are only supported on extern blocks, functions, - // impls, and modules. All of these have their inner attributes - // placed at the beginning of the rightmost outermost braced group: - // e.g. fn foo() { #![my_attr] } - // - // Therefore, we can insert them back into the right location - // without needing to do any extra position tracking. - // - // Note: Outline modules are an exception - they can - // have attributes like `#![my_attr]` at the start of a file. - // Support for custom attributes in this position is not - // properly implemented - we always synthesize fake tokens, - // so we never reach this code. 
- - let mut stream = TokenStream::default(); - for inner_attr in inner_attrs { - stream.push_stream(inner_attr.get_tokens()); - } - stream.push_stream(delim_tokens.clone()); - *tree = TokenTree::Delimited(*span, *spacing, *delim, stream); - found = true; - break; - } - } - - assert!( - found, - "Failed to find trailing delimited group in: {target_tokens:?}" - ); - } - for attr in outer_attrs { - res.extend(attr.get_tokens().0.iter().cloned()); - } - res.extend(target_tokens); + attrs_and_tokens_to_token_trees(&target.attrs, &target.tokens, &mut res); } } } @@ -252,15 +207,76 @@ impl AttrTokenStream { } } +// Converts multiple attributes and the tokens for a target AST node into token trees, and appends +// them to `res`. +// +// Example: if the AST node is "fn f() { blah(); }", then: +// - Simple if no attributes are present, e.g. "fn f() { blah(); }" +// - Simple if only outer attribute are present, e.g. "#[outer1] #[outer2] fn f() { blah(); }" +// - Trickier if inner attributes are present, because they must be moved within the AST node's +// tokens, e.g. "#[outer] fn f() { #![inner] blah() }" +fn attrs_and_tokens_to_token_trees( + attrs: &[Attribute], + target_tokens: &LazyAttrTokenStream, + res: &mut Vec, +) { + let idx = attrs.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer)); + let (outer_attrs, inner_attrs) = attrs.split_at(idx); + + // Add outer attribute tokens. + for attr in outer_attrs { + res.extend(attr.token_trees()); + } + + // Add target AST node tokens. + res.extend(target_tokens.to_attr_token_stream().to_token_trees()); + + // Insert inner attribute tokens. + if !inner_attrs.is_empty() { + let mut found = false; + // Check the last two trees (to account for a trailing semi) + for tree in res.iter_mut().rev().take(2) { + if let TokenTree::Delimited(span, spacing, delim, delim_tokens) = tree { + // Inner attributes are only supported on extern blocks, functions, + // impls, and modules. All of these have their inner attributes + // placed at the beginning of the rightmost outermost braced group: + // e.g. fn foo() { #![my_attr] } + // + // Therefore, we can insert them back into the right location + // without needing to do any extra position tracking. + // + // Note: Outline modules are an exception - they can + // have attributes like `#![my_attr]` at the start of a file. + // Support for custom attributes in this position is not + // properly implemented - we always synthesize fake tokens, + // so we never reach this code. + let mut tts = vec![]; + for inner_attr in inner_attrs { + tts.extend(inner_attr.token_trees()); + } + tts.extend(delim_tokens.0.iter().cloned()); + let stream = TokenStream::new(tts); + *tree = TokenTree::Delimited(*span, *spacing, *delim, stream); + found = true; + break; + } + } + assert!(found, "Failed to find trailing delimited group in: {res:?}"); + } +} + /// Stores the tokens for an attribute target, along /// with its attributes. /// /// This is constructed during parsing when we need to capture -/// tokens. +/// tokens, for `cfg` and `cfg_attr` attributes. /// /// For example, `#[cfg(FALSE)] struct Foo {}` would /// have an `attrs` field containing the `#[cfg(FALSE)]` attr, /// and a `tokens` field storing the (unparsed) tokens `struct Foo {}` +/// +/// The `cfg`/`cfg_attr` processing occurs in +/// `StripUnconfigured::configure_tokens`. #[derive(Clone, Debug, Encodable, Decodable)] pub struct AttrsTarget { /// Attributes, both outer and inner. 
@@ -437,18 +453,10 @@ impl TokenStream { } pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream { - let Some(tokens) = node.tokens() else { - panic!("missing tokens for node: {:?}", node); - }; - let attrs = node.attrs(); - let attr_stream = if attrs.is_empty() { - tokens.to_attr_token_stream() - } else { - let target = - AttrsTarget { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() }; - AttrTokenStream::new(vec![AttrTokenTree::AttrsTarget(target)]) - }; - TokenStream::new(attr_stream.to_token_trees()) + let tokens = node.tokens().unwrap_or_else(|| panic!("missing tokens for node: {:?}", node)); + let mut tts = vec![]; + attrs_and_tokens_to_token_trees(node.attrs(), tokens, &mut tts); + TokenStream::new(tts) } pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream { diff --git a/compiler/rustc_codegen_ssa/src/back/linker.rs b/compiler/rustc_codegen_ssa/src/back/linker.rs index 2bd5dfdce83ee..dd134ebbe6b18 100644 --- a/compiler/rustc_codegen_ssa/src/back/linker.rs +++ b/compiler/rustc_codegen_ssa/src/back/linker.rs @@ -398,7 +398,7 @@ impl<'a> GccLinker<'a> { self.link_arg("-dylib"); // Note that the `osx_rpath_install_name` option here is a hack - // purely to support rustbuild right now, we should get a more + // purely to support bootstrap right now, we should get a more // principled solution at some point to force the compiler to pass // the right `-Wl,-install_name` with an `@rpath` in it. if self.sess.opts.cg.rpath || self.sess.opts.unstable_opts.osx_rpath_install_name { diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 40e16b4511575..9da4aa84db525 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -187,6 +187,7 @@ impl<'a> StripUnconfigured<'a> { .iter() .filter_map(|tree| match tree.clone() { AttrTokenTree::AttrsTarget(mut target) => { + // Expand any `cfg_attr` attributes. target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr)); if self.in_cfg(&target.attrs) { @@ -195,6 +196,8 @@ impl<'a> StripUnconfigured<'a> { ); Some(AttrTokenTree::AttrsTarget(target)) } else { + // Remove the target if there's a `cfg` attribute and + // the condition isn't satisfied. None } } @@ -253,9 +256,9 @@ impl<'a> StripUnconfigured<'a> { /// Gives a compiler warning when the `cfg_attr` contains no attributes and /// is in the original source file. Gives a compiler error if the syntax of /// the attribute is incorrect. - pub(crate) fn expand_cfg_attr(&self, attr: &Attribute, recursive: bool) -> Vec { + pub(crate) fn expand_cfg_attr(&self, cfg_attr: &Attribute, recursive: bool) -> Vec { let Some((cfg_predicate, expanded_attrs)) = - rustc_parse::parse_cfg_attr(attr, &self.sess.psess) + rustc_parse::parse_cfg_attr(cfg_attr, &self.sess.psess) else { return vec![]; }; @@ -264,7 +267,7 @@ impl<'a> StripUnconfigured<'a> { if expanded_attrs.is_empty() { self.sess.psess.buffer_lint( rustc_lint_defs::builtin::UNUSED_ATTRIBUTES, - attr.span, + cfg_attr.span, ast::CRATE_NODE_ID, BuiltinLintDiag::CfgAttrNoAttributes, ); @@ -280,20 +283,21 @@ impl<'a> StripUnconfigured<'a> { // `#[cfg_attr(false, cfg_attr(true, some_attr))]`. 
expanded_attrs .into_iter() - .flat_map(|item| self.process_cfg_attr(&self.expand_cfg_attr_item(attr, item))) + .flat_map(|item| self.process_cfg_attr(&self.expand_cfg_attr_item(cfg_attr, item))) .collect() } else { - expanded_attrs.into_iter().map(|item| self.expand_cfg_attr_item(attr, item)).collect() + expanded_attrs + .into_iter() + .map(|item| self.expand_cfg_attr_item(cfg_attr, item)) + .collect() } } fn expand_cfg_attr_item( &self, - attr: &Attribute, + cfg_attr: &Attribute, (item, item_span): (ast::AttrItem, Span), ) -> Attribute { - let orig_tokens = attr.get_tokens(); - // We are taking an attribute of the form `#[cfg_attr(pred, attr)]` // and producing an attribute of the form `#[attr]`. We // have captured tokens for `attr` itself, but we need to @@ -302,11 +306,11 @@ impl<'a> StripUnconfigured<'a> { // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token // for `attr` when we expand it to `#[attr]` - let mut orig_trees = orig_tokens.trees(); + let mut orig_trees = cfg_attr.token_trees().into_iter(); let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _) = orig_trees.next().unwrap().clone() else { - panic!("Bad tokens for attribute {attr:?}"); + panic!("Bad tokens for attribute {cfg_attr:?}"); }; // We don't really have a good span to use for the synthesized `[]` @@ -320,12 +324,12 @@ impl<'a> StripUnconfigured<'a> { .unwrap_or_else(|| panic!("Missing tokens for {item:?}")) .to_attr_token_stream(), ); - let trees = if attr.style == AttrStyle::Inner { + let trees = if cfg_attr.style == AttrStyle::Inner { // For inner attributes, we do the same thing for the `!` in `#![some_attr]` let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) = orig_trees.next().unwrap().clone() else { - panic!("Bad tokens for attribute {attr:?}"); + panic!("Bad tokens for attribute {cfg_attr:?}"); }; vec![ AttrTokenTree::Token(pound_token, Spacing::Joint), @@ -340,7 +344,7 @@ impl<'a> StripUnconfigured<'a> { &self.sess.psess.attr_id_generator, item, tokens, - attr.style, + cfg_attr.style, item_span, ); if attr.has_name(sym::crate_type) { diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 5522127be83ef..4454747ea0212 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -157,14 +157,14 @@ pub fn fake_token_stream_for_crate(psess: &ParseSess, krate: &ast::Crate) -> Tok } pub fn parse_cfg_attr( - attr: &Attribute, + cfg_attr: &Attribute, psess: &ParseSess, ) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> { const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]"; const CFG_ATTR_NOTE_REF: &str = "for more information, visit \ "; - match attr.get_normal_item().args { + match cfg_attr.get_normal_item().args { ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens }) if !tokens.is_empty() => { @@ -180,7 +180,7 @@ pub fn parse_cfg_attr( } _ => { psess.dcx().emit_err(errors::MalformedCfgAttr { - span: attr.span, + span: cfg_attr.span, sugg: CFG_ATTR_GRAMMAR_HELP, }); } diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index 38f18022e3c58..1123c31f55135 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -103,11 +103,8 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl { // produce an empty `TokenStream` if no calls were made, and omit the // final token otherwise. 
let mut cursor_snapshot = self.cursor_snapshot.clone(); - let tokens = iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1)) - .chain(iter::repeat_with(|| { - let token = cursor_snapshot.next(); - (FlatToken::Token(token.0), token.1) - })) + let tokens = iter::once(FlatToken::Token(self.start_token.clone())) + .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next()))) .take(self.num_calls as usize); if self.replace_ranges.is_empty() { @@ -156,11 +153,8 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl { (range.start as usize)..(range.end as usize), target .into_iter() - .map(|target| (FlatToken::AttrsTarget(target), Spacing::Alone)) - .chain( - iter::repeat((FlatToken::Empty, Spacing::Alone)) - .take(range.len() - target_len), - ), + .map(|target| FlatToken::AttrsTarget(target)) + .chain(iter::repeat(FlatToken::Empty).take(range.len() - target_len)), ); } make_attr_token_stream(tokens.into_iter(), self.break_last_token) @@ -301,21 +295,22 @@ impl<'a> Parser<'a> { let num_calls = end_pos - start_pos; - // If we have no attributes, then we will never need to - // use any replace ranges. - let replace_ranges: Box<[ReplaceRange]> = if ret.attrs().is_empty() && !self.capture_cfg { - Box::new([]) - } else { - // Grab any replace ranges that occur *inside* the current AST node. - // We will perform the actual replacement when we convert the `LazyAttrTokenStream` - // to an `AttrTokenStream`. - self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end] - .iter() - .cloned() - .chain(inner_attr_replace_ranges.iter().cloned()) - .map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data)) - .collect() - }; + // This is hot enough for `deep-vector` that checking the conditions for an empty iterator + // is measurably faster than actually executing the iterator. + let replace_ranges: Box<[ReplaceRange]> = + if replace_ranges_start == replace_ranges_end && inner_attr_replace_ranges.is_empty() { + Box::new([]) + } else { + // Grab any replace ranges that occur *inside* the current AST node. + // We will perform the actual replacement when we convert the `LazyAttrTokenStream` + // to an `AttrTokenStream`. + self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end] + .iter() + .cloned() + .chain(inner_attr_replace_ranges.iter().cloned()) + .map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data)) + .collect() + }; let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl { start_token, @@ -325,12 +320,9 @@ impl<'a> Parser<'a> { replace_ranges, }); - // If we support tokens at all - if let Some(target_tokens) = ret.tokens_mut() { - if target_tokens.is_none() { - // Store our newly captured tokens into the AST node. - *target_tokens = Some(tokens.clone()); - } + // If we support tokens and don't already have them, store the newly captured tokens. + if let Some(target_tokens @ None) = ret.tokens_mut() { + *target_tokens = Some(tokens.clone()); } let final_attrs = ret.attrs(); @@ -352,15 +344,10 @@ impl<'a> Parser<'a> { let target = AttrsTarget { attrs: final_attrs.iter().cloned().collect(), tokens }; self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target))); self.capture_state.replace_ranges.extend(inner_attr_replace_ranges); - } - - // Only clear our `replace_ranges` when we're finished capturing entirely. 
- if matches!(self.capture_state.capturing, Capturing::No) { + } else if matches!(self.capture_state.capturing, Capturing::No) { + // Only clear the ranges once we've finished capturing entirely. self.capture_state.replace_ranges.clear(); - // We don't clear `inner_attr_ranges`, as doing so repeatedly - // had a measurable performance impact. Most inner attributes that - // we insert will get removed - when we drop the parser, we'll free - // up the memory used by any attributes that we didn't remove from the map. + self.capture_state.inner_attr_ranges.clear(); } Ok(ret) } @@ -370,7 +357,7 @@ impl<'a> Parser<'a> { /// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and /// close delims. fn make_attr_token_stream( - mut iter: impl Iterator, + iter: impl Iterator, break_last_token: bool, ) -> AttrTokenStream { #[derive(Debug)] @@ -379,19 +366,19 @@ fn make_attr_token_stream( open_delim_sp: Option<(Delimiter, Span, Spacing)>, inner: Vec, } - let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }]; - let mut token_and_spacing = iter.next(); - while let Some((token, spacing)) = token_and_spacing { - match token { - FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => { - stack - .push(FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] }); + // The stack always has at least one element. Storing it separately makes for shorter code. + let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] }; + let mut stack_rest = vec![]; + for flat_token in iter { + match flat_token { + FlatToken::Token((Token { kind: TokenKind::OpenDelim(delim), span }, spacing)) => { + stack_rest.push(mem::replace( + &mut stack_top, + FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] }, + )); } - FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => { - let frame_data = stack - .pop() - .unwrap_or_else(|| panic!("Token stack was empty for token: {token:?}")); - + FlatToken::Token((Token { kind: TokenKind::CloseDelim(delim), span }, spacing)) => { + let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap()); let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap(); assert_eq!( open_delim, delim, @@ -401,29 +388,20 @@ fn make_attr_token_stream( let dspacing = DelimSpacing::new(open_spacing, spacing); let stream = AttrTokenStream::new(frame_data.inner); let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream); - stack - .last_mut() - .unwrap_or_else(|| panic!("Bottom token frame is missing for token: {token:?}")) - .inner - .push(delimited); + stack_top.inner.push(delimited); + } + FlatToken::Token((token, spacing)) => { + stack_top.inner.push(AttrTokenTree::Token(token, spacing)) + } + FlatToken::AttrsTarget(target) => { + stack_top.inner.push(AttrTokenTree::AttrsTarget(target)) } - FlatToken::Token(token) => stack - .last_mut() - .expect("Bottom token frame is missing!") - .inner - .push(AttrTokenTree::Token(token, spacing)), - FlatToken::AttrsTarget(target) => stack - .last_mut() - .expect("Bottom token frame is missing!") - .inner - .push(AttrTokenTree::AttrsTarget(target)), FlatToken::Empty => {} } - token_and_spacing = iter.next(); } - let mut final_buf = stack.pop().expect("Missing final buf!"); + if break_last_token { - let last_token = final_buf.inner.pop().unwrap(); + let last_token = stack_top.inner.pop().unwrap(); if let AttrTokenTree::Token(last_token, spacing) = last_token { let unglued_first = 
last_token.kind.break_two_token_op().unwrap().0; @@ -431,14 +409,14 @@ fn make_attr_token_stream( let mut first_span = last_token.span.shrink_to_lo(); first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1)); - final_buf + stack_top .inner .push(AttrTokenTree::Token(Token::new(unglued_first, first_span), spacing)); } else { panic!("Unexpected last token {last_token:?}") } } - AttrTokenStream::new(final_buf.inner) + AttrTokenStream::new(stack_top.inner) } // Some types are used a lot. Make sure they don't unintentionally get bigger. diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index ef9b3aabc61ca..b1588357bffec 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -1603,7 +1603,7 @@ pub(crate) fn make_unclosed_delims_error( enum FlatToken { /// A token - this holds both delimiter (e.g. '{' and '}') /// and non-delimiter tokens - Token(Token), + Token((Token, Spacing)), /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted /// directly into the constructed `AttrTokenStream` as an /// `AttrTokenTree::AttrsTarget`. diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 41c99f7edeeff..e748d1ff47b63 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -2620,7 +2620,7 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M // This is the location used by the `rust-src` `rustup` component. let mut candidate = sysroot.join("lib/rustlib/src/rust"); if let Ok(metadata) = candidate.symlink_metadata() { - // Replace the symlink rustbuild creates, with its destination. + // Replace the symlink bootstrap creates, with its destination. // We could try to use `fs::canonicalize` instead, but that might // produce unnecessarily verbose path. if metadata.file_type().is_symlink() { diff --git a/config.example.toml b/config.example.toml index 679abcdc7771b..68c632d91cdec 100644 --- a/config.example.toml +++ b/config.example.toml @@ -1,6 +1,6 @@ # Sample TOML configuration file for building Rust. # -# To configure rustbuild, run `./configure` or `./x.py setup`. +# To configure bootstrap, run `./configure` or `./x.py setup`. # See https://rustc-dev-guide.rust-lang.org/building/how-to-build-and-run.html#create-a-configtoml for more information. # # All options are commented out by default in this file, and they're commented @@ -109,7 +109,7 @@ # increases the size of binaries and consequently the memory required by # each linker process. # If set to 0, linker invocations are treated like any other job and -# controlled by rustbuild's -j parameter. +# controlled by bootstrap's -j parameter. #link-jobs = 0 # Whether to build LLVM as a dynamically linked library (as opposed to statically linked). @@ -371,11 +371,11 @@ # Useful for modifying only the stage2 compiler without having to pass `--keep-stage 0` each time. #local-rebuild = false -# Print out how long each rustbuild step took (mostly intended for CI and +# Print out how long each bootstrap step took (mostly intended for CI and # tracking over time) #print-step-timings = false -# Print out resource usage data for each rustbuild step, as defined by the Unix +# Print out resource usage data for each bootstrap step, as defined by the Unix # struct rusage. 
(Note that this setting is completely unstable: the data it # captures, what platforms it supports, the format of its associated output, and # this setting's very existence, are all subject to change.) diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index 358510b8f77d8..b991b1cf22dd8 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -87,6 +87,10 @@ std_detect_file_io = ["std_detect/std_detect_file_io"] std_detect_dlsym_getauxval = ["std_detect/std_detect_dlsym_getauxval"] std_detect_env_override = ["std_detect/std_detect_env_override"] +# Enable using raw-dylib for Windows imports. +# This will eventually be the default. +windows_raw_dylib = [] + [package.metadata.fortanix-sgx] # Maximum possible number of threads when testing threads = 125 diff --git a/library/std/src/io/buffered/bufwriter.rs b/library/std/src/io/buffered/bufwriter.rs index 2d13230ffbabd..1768bb05ddbcb 100644 --- a/library/std/src/io/buffered/bufwriter.rs +++ b/library/std/src/io/buffered/bufwriter.rs @@ -3,7 +3,7 @@ use crate::fmt; use crate::io::{ self, ErrorKind, IntoInnerError, IoSlice, Seek, SeekFrom, Write, DEFAULT_BUF_SIZE, }; -use crate::mem; +use crate::mem::{self, ManuallyDrop}; use crate::ptr; /// Wraps a writer and buffers its output. @@ -164,13 +164,13 @@ impl BufWriter { /// assert_eq!(&buffered_data.unwrap(), b"ata"); /// ``` #[stable(feature = "bufwriter_into_parts", since = "1.56.0")] - pub fn into_parts(mut self) -> (W, Result, WriterPanicked>) { - let buf = mem::take(&mut self.buf); - let buf = if !self.panicked { Ok(buf) } else { Err(WriterPanicked { buf }) }; + pub fn into_parts(self) -> (W, Result, WriterPanicked>) { + let mut this = ManuallyDrop::new(self); + let buf = mem::take(&mut this.buf); + let buf = if !this.panicked { Ok(buf) } else { Err(WriterPanicked { buf }) }; - // SAFETY: forget(self) prevents double dropping inner - let inner = unsafe { ptr::read(&self.inner) }; - mem::forget(self); + // SAFETY: double-drops are prevented by putting `this` in a ManuallyDrop that is never dropped + let inner = unsafe { ptr::read(&this.inner) }; (inner, buf) } diff --git a/library/std/src/io/buffered/tests.rs b/library/std/src/io/buffered/tests.rs index ee0db30e22c2e..ab66deaf31d22 100644 --- a/library/std/src/io/buffered/tests.rs +++ b/library/std/src/io/buffered/tests.rs @@ -1067,3 +1067,13 @@ fn bufreader_full_initialize() { // But we initialized the whole buffer! assert_eq!(reader.initialized(), reader.capacity()); } + +/// This is a regression test for https://github.com/rust-lang/rust/issues/127584. +#[test] +fn bufwriter_aliasing() { + use crate::io::{BufWriter, Cursor}; + let mut v = vec![0; 1024]; + let c = Cursor::new(&mut v); + let w = BufWriter::new(Box::new(c)); + let _ = w.into_parts(); +} diff --git a/library/std/src/sys/pal/windows/alloc.rs b/library/std/src/sys/pal/windows/alloc.rs index 24c237b5eb03d..e6cbdb6ef7d64 100644 --- a/library/std/src/sys/pal/windows/alloc.rs +++ b/library/std/src/sys/pal/windows/alloc.rs @@ -4,7 +4,7 @@ use crate::alloc::{GlobalAlloc, Layout, System}; use crate::ffi::c_void; use crate::ptr; use crate::sync::atomic::{AtomicPtr, Ordering}; -use crate::sys::c; +use crate::sys::c::{self, windows_targets}; use crate::sys::common::alloc::{realloc_fallback, MIN_ALIGN}; use core::mem::MaybeUninit; @@ -17,74 +17,71 @@ mod tests; // Flag to indicate that the memory returned by `HeapAlloc` should be zeroed. 
const HEAP_ZERO_MEMORY: c::DWORD = 0x00000008; -#[link(name = "kernel32")] -extern "system" { - // Get a handle to the default heap of the current process, or null if the operation fails. - // - // SAFETY: Successful calls to this function within the same process are assumed to - // always return the same handle, which remains valid for the entire lifetime of the process. - // - // See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-getprocessheap - fn GetProcessHeap() -> c::HANDLE; +// Get a handle to the default heap of the current process, or null if the operation fails. +// +// SAFETY: Successful calls to this function within the same process are assumed to +// always return the same handle, which remains valid for the entire lifetime of the process. +// +// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-getprocessheap +windows_targets::link!("kernel32.dll" "system" fn GetProcessHeap() -> c::HANDLE); - // Allocate a block of `dwBytes` bytes of memory from a given heap `hHeap`. - // The allocated memory may be uninitialized, or zeroed if `dwFlags` is - // set to `HEAP_ZERO_MEMORY`. - // - // Returns a pointer to the newly-allocated memory or null if the operation fails. - // The returned pointer will be aligned to at least `MIN_ALIGN`. - // - // SAFETY: - // - `hHeap` must be a non-null handle returned by `GetProcessHeap`. - // - `dwFlags` must be set to either zero or `HEAP_ZERO_MEMORY`. - // - // Note that `dwBytes` is allowed to be zero, contrary to some other allocators. - // - // See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heapalloc - fn HeapAlloc(hHeap: c::HANDLE, dwFlags: c::DWORD, dwBytes: c::SIZE_T) -> c::LPVOID; +// Allocate a block of `dwBytes` bytes of memory from a given heap `hHeap`. +// The allocated memory may be uninitialized, or zeroed if `dwFlags` is +// set to `HEAP_ZERO_MEMORY`. +// +// Returns a pointer to the newly-allocated memory or null if the operation fails. +// The returned pointer will be aligned to at least `MIN_ALIGN`. +// +// SAFETY: +// - `hHeap` must be a non-null handle returned by `GetProcessHeap`. +// - `dwFlags` must be set to either zero or `HEAP_ZERO_MEMORY`. +// +// Note that `dwBytes` is allowed to be zero, contrary to some other allocators. +// +// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heapalloc +windows_targets::link!("kernel32.dll" "system" fn HeapAlloc(hheap: c::HANDLE, dwflags: u32, dwbytes: usize) -> *mut core::ffi::c_void); - // Reallocate a block of memory behind a given pointer `lpMem` from a given heap `hHeap`, - // to a block of at least `dwBytes` bytes, either shrinking the block in place, - // or allocating at a new location, copying memory, and freeing the original location. - // - // Returns a pointer to the reallocated memory or null if the operation fails. - // The returned pointer will be aligned to at least `MIN_ALIGN`. - // If the operation fails the given block will never have been freed. - // - // SAFETY: - // - `hHeap` must be a non-null handle returned by `GetProcessHeap`. - // - `dwFlags` must be set to zero. - // - `lpMem` must be a non-null pointer to an allocated block returned by `HeapAlloc` or - // `HeapReAlloc`, that has not already been freed. - // If the block was successfully reallocated at a new location, pointers pointing to - // the freed memory, such as `lpMem`, must not be dereferenced ever again. - // - // Note that `dwBytes` is allowed to be zero, contrary to some other allocators. 
- // - // See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heaprealloc - fn HeapReAlloc( - hHeap: c::HANDLE, - dwFlags: c::DWORD, - lpMem: c::LPVOID, - dwBytes: c::SIZE_T, - ) -> c::LPVOID; +// Reallocate a block of memory behind a given pointer `lpMem` from a given heap `hHeap`, +// to a block of at least `dwBytes` bytes, either shrinking the block in place, +// or allocating at a new location, copying memory, and freeing the original location. +// +// Returns a pointer to the reallocated memory or null if the operation fails. +// The returned pointer will be aligned to at least `MIN_ALIGN`. +// If the operation fails the given block will never have been freed. +// +// SAFETY: +// - `hHeap` must be a non-null handle returned by `GetProcessHeap`. +// - `dwFlags` must be set to zero. +// - `lpMem` must be a non-null pointer to an allocated block returned by `HeapAlloc` or +// `HeapReAlloc`, that has not already been freed. +// If the block was successfully reallocated at a new location, pointers pointing to +// the freed memory, such as `lpMem`, must not be dereferenced ever again. +// +// Note that `dwBytes` is allowed to be zero, contrary to some other allocators. +// +// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heaprealloc +windows_targets::link!("kernel32.dll" "system" fn HeapReAlloc( + hheap: c::HANDLE, + dwflags : u32, + lpmem: *const core::ffi::c_void, + dwbytes: usize +) -> *mut core::ffi::c_void); - // Free a block of memory behind a given pointer `lpMem` from a given heap `hHeap`. - // Returns a nonzero value if the operation is successful, and zero if the operation fails. - // - // SAFETY: - // - `hHeap` must be a non-null handle returned by `GetProcessHeap`. - // - `dwFlags` must be set to zero. - // - `lpMem` must be a pointer to an allocated block returned by `HeapAlloc` or `HeapReAlloc`, - // that has not already been freed. - // If the block was successfully freed, pointers pointing to the freed memory, such as `lpMem`, - // must not be dereferenced ever again. - // - // Note that `lpMem` is allowed to be null, which will not cause the operation to fail. - // - // See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heapfree - fn HeapFree(hHeap: c::HANDLE, dwFlags: c::DWORD, lpMem: c::LPVOID) -> c::BOOL; -} +// Free a block of memory behind a given pointer `lpMem` from a given heap `hHeap`. +// Returns a nonzero value if the operation is successful, and zero if the operation fails. +// +// SAFETY: +// - `hHeap` must be a non-null handle returned by `GetProcessHeap`. +// - `dwFlags` must be set to zero. +// - `lpMem` must be a pointer to an allocated block returned by `HeapAlloc` or `HeapReAlloc`, +// that has not already been freed. +// If the block was successfully freed, pointers pointing to the freed memory, such as `lpMem`, +// must not be dereferenced ever again. +// +// Note that `lpMem` is allowed to be null, which will not cause the operation to fail. +// +// See https://docs.microsoft.com/windows/win32/api/heapapi/nf-heapapi-heapfree +windows_targets::link!("kernel32.dll" "system" fn HeapFree(hheap: c::HANDLE, dwflags: u32, lpmem: *const core::ffi::c_void) -> c::BOOL); // Cached handle to the default heap of the current process. // Either a non-null handle returned by `GetProcessHeap`, or null when not yet initialized or `GetProcessHeap` failed. 
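As context for the `windows_targets::link!` conversions above: when the new `windows_raw_dylib` feature is enabled, the raw-dylib variant of that macro (added to `windows_targets.rs` further down in this diff) turns each declaration into an extern block with a `raw-dylib` link attribute. The sketch below is illustrative only, based on the macro body shown in this diff; the real expansion uses std-internal types such as `c::HANDLE`, and the exact output may differ.

```rust
// Rough expansion sketch of
//     windows_targets::link!("kernel32.dll" "system" fn GetProcessHeap() -> c::HANDLE);
// under the `windows_raw_dylib` feature (assumption: based on the macro added in this diff).
type HANDLE = *mut core::ffi::c_void; // stand-in for std's internal `c::HANDLE`

#[cfg_attr(
    not(target_arch = "x86"),
    link(name = "kernel32.dll", kind = "raw-dylib", modifiers = "+verbatim")
)]
#[cfg_attr(
    target_arch = "x86",
    link(
        name = "kernel32.dll",
        kind = "raw-dylib",
        modifiers = "+verbatim",
        import_name_type = "undecorated"
    )
)]
extern "system" {
    pub fn GetProcessHeap() -> HANDLE;
}
```

Without the feature, the pre-existing variant of the macro is used instead, so call sites like the ones in `alloc.rs` compile the same either way.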
diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs index 27aa35f69f1bf..7dfda4f714c77 100644 --- a/library/std/src/sys/pal/windows/c.rs +++ b/library/std/src/sys/pal/windows/c.rs @@ -13,7 +13,7 @@ use crate::os::raw::{c_char, c_long, c_longlong, c_uint, c_ulong, c_ushort, c_vo use crate::os::windows::io::{AsRawHandle, BorrowedHandle}; use crate::ptr; -mod windows_targets; +pub(super) mod windows_targets; mod windows_sys; pub use windows_sys::*; diff --git a/library/std/src/sys/pal/windows/c/windows_targets.rs b/library/std/src/sys/pal/windows/c/windows_targets.rs index 56c563462d366..252bceb70942b 100644 --- a/library/std/src/sys/pal/windows/c/windows_targets.rs +++ b/library/std/src/sys/pal/windows/c/windows_targets.rs @@ -3,6 +3,18 @@ //! This is a simple wrapper around an `extern` block with a `#[link]` attribute. //! It's very roughly equivalent to the windows-targets crate. +#[cfg(feature = "windows_raw_dylib")] +pub macro link { + ($library:literal $abi:literal $($link_name:literal)? $(#[$doc:meta])? fn $($function:tt)*) => ( + #[cfg_attr(not(target_arch = "x86"), link(name = $library, kind = "raw-dylib", modifiers = "+verbatim"))] + #[cfg_attr(target_arch = "x86", link(name = $library, kind = "raw-dylib", modifiers = "+verbatim", import_name_type = "undecorated"))] + extern $abi { + $(#[link_name=$link_name])? + pub fn $($function)*; + } + ) +} +#[cfg(not(feature = "windows_raw_dylib"))] pub macro link { ($library:literal $abi:literal $($link_name:literal)? $(#[$doc:meta])? fn $($function:tt)*) => ( // Note: the windows-targets crate uses a pre-built Windows.lib import library which we don't @@ -17,6 +29,7 @@ pub macro link { ) } +#[cfg(not(feature = "windows_raw_dylib"))] #[link(name = "advapi32")] #[link(name = "ntdll")] #[link(name = "userenv")] diff --git a/library/sysroot/Cargo.toml b/library/sysroot/Cargo.toml index 1ddacd92e6b94..169eeeca8c2e8 100644 --- a/library/sysroot/Cargo.toml +++ b/library/sysroot/Cargo.toml @@ -27,3 +27,4 @@ profiler = ["std/profiler"] std_detect_file_io = ["std/std_detect_file_io"] std_detect_dlsym_getauxval = ["std/std_detect_dlsym_getauxval"] std_detect_env_override = ["std/std_detect_env_override"] +windows_raw_dylib = ["std/windows_raw_dylib"] diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index fb3c862704306..0ac58645d2dfc 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -1,7 +1,7 @@ -# rustbuild - Bootstrapping Rust +# Bootstrapping Rust This README is aimed at helping to explain how Rust is bootstrapped, -and some of the technical details of the build system. +and some of the technical details of the bootstrap build system. Note that this README only covers internal information, not how to use the tool. Please check [bootstrapping dev guide][bootstrapping-dev-guide] for further information. @@ -12,17 +12,17 @@ Please check [bootstrapping dev guide][bootstrapping-dev-guide] for further info The build system defers most of the complicated logic of managing invocations of rustc and rustdoc to Cargo itself. However, moving through various stages -and copying artifacts is still necessary for it to do. Each time rustbuild +and copying artifacts is still necessary for it to do. Each time bootstrap is invoked, it will iterate through the list of predefined steps and execute each serially in turn if it matches the paths passed or is a default rule. -For each step, rustbuild relies on the step internally being incremental and -parallel. 
Note, though, that the `-j` parameter to rustbuild gets forwarded +For each step, bootstrap relies on the step internally being incremental and +parallel. Note, though, that the `-j` parameter to bootstrap gets forwarded to appropriate test harnesses and such. ## Build phases -The rustbuild build system goes through a few phases to actually build the -compiler. What actually happens when you invoke rustbuild is: +Bootstrap build system goes through a few phases to actually build the +compiler. What actually happens when you invoke bootstrap is: 1. The entry point script (`x` for unix like systems, `x.ps1` for windows systems, `x.py` cross-platform) is run. This script is responsible for downloading the stage0 @@ -151,9 +151,9 @@ build/ stage3/ ``` -## Extending rustbuild +## Extending bootstrap -When you use the bootstrap system, you'll call it through the entry point script +When you use bootstrap, you'll call it through the entry point script (`x`, `x.ps1`, or `x.py`). However, most of the code lives in `src/bootstrap`. `bootstrap` has a difficult problem: it is written in Rust, but yet it is run before the Rust compiler is built! To work around this, there are two components diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 7e47b373ff9df..4e8e0fd2532f1 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -1038,7 +1038,7 @@ def build_triple(self): def check_vendored_status(self): """Check that vendoring is configured properly""" - # keep this consistent with the equivalent check in rustbuild: + # keep this consistent with the equivalent check in bootstrap: # https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/lib.rs#L399-L405 if 'SUDO_USER' in os.environ and not self.use_vendored_sources: if os.getuid() == 0: diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index cab37e0da4736..f85aefcb268d3 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -20,7 +20,7 @@ all: $(Q)$(BOOTSTRAP) doc --stage 2 $(BOOTSTRAP_ARGS) help: - $(Q)echo 'Welcome to the rustbuild build system!' + $(Q)echo 'Welcome to bootstrap, the Rust build system!' $(Q)echo $(Q)echo This makefile is a thin veneer over the ./x.py script located $(Q)echo in this directory. To get the full power of the build system diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs index 44fb1911fc6d3..a7d21ba6ae127 100644 --- a/src/bootstrap/src/bin/main.rs +++ b/src/bootstrap/src/bin/main.rs @@ -1,4 +1,4 @@ -//! rustbuild, the Rust build system +//! bootstrap, the Rust build system //! //! This is the entry point for the build system used to compile the `rustc` //! compiler. Lots of documentation can be found in the `README.md` file in the diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index ef2af9c287336..3a9e0f3ee1866 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -1,7 +1,7 @@ //! Implementation of compiling various phases of the compiler and standard //! library. //! -//! This module contains some of the real meat in the rustbuild build system +//! This module contains some of the real meat in the bootstrap build system //! which is where Cargo is used to compile the standard library, libtest, and //! the compiler. This module is also responsible for assembling the sysroot as it //! goes along from the output of the previous stage. 
@@ -818,8 +818,8 @@ pub struct Rustc { pub compiler: Compiler, /// Whether to build a subset of crates, rather than the whole compiler. /// - /// This should only be requested by the user, not used within rustbuild itself. - /// Using it within rustbuild can lead to confusing situation where lints are replayed + /// This should only be requested by the user, not used within bootstrap itself. + /// Using it within bootstrap can lead to confusing situation where lints are replayed /// in two different steps. crates: Vec, } @@ -1829,6 +1829,21 @@ impl Step for Assemble { &self_contained_lld_dir.join(exe(name, target_compiler.host)), ); } + + // In addition to `rust-lld` also install `wasm-component-ld` when + // LLD is enabled. This is a relatively small binary that primarily + // delegates to the `rust-lld` binary for linking and then runs + // logic to create the final binary. This is used by the + // `wasm32-wasip2` target of Rust. + let wasm_component_ld_exe = + builder.ensure(crate::core::build_steps::tool::WasmComponentLd { + compiler: build_compiler.with_stage(0), + target: target_compiler.host, + }); + builder.copy_link( + &wasm_component_ld_exe, + &libdir_bin.join(wasm_component_ld_exe.file_name().unwrap()), + ); } if builder.config.llvm_enabled(target_compiler.host) { diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index dc46af6cf4868..633e66afe598d 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -1,4 +1,4 @@ -//! Documentation generation for rustbuilder. +//! Documentation generation for bootstrap. //! //! This module implements generation for all bits and pieces of documentation //! for the Rust project. This notably includes suites like the rust book, the diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 9b4c7c9134976..4d69d6b523111 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -682,6 +682,8 @@ impl Step for CompiletestTest { let mut cargo = tool::prepare_tool_cargo( builder, compiler, + // compiletest uses libtest internals; make it use the in-tree std to make sure it never breaks + // when std sources change. Mode::ToolStd, host, "test", @@ -1321,13 +1323,12 @@ impl Step for CrateRunMakeSupport { /// Runs `cargo test` for run-make-support. fn run(self, builder: &Builder<'_>) { let host = self.host; - let compiler = builder.compiler(builder.top_stage, host); + let compiler = builder.compiler(0, host); - builder.ensure(compile::Std::new(compiler, host)); let mut cargo = tool::prepare_tool_cargo( builder, compiler, - Mode::ToolStd, + Mode::ToolBootstrap, host, "test", "src/tools/run-make-support", @@ -3043,7 +3044,7 @@ impl Step for Bootstrap { // https://github.com/rust-lang/rust/issues/49215 cmd.env("RUSTFLAGS", flags); } - // rustbuild tests are racy on directory creation so just run them one at a time. + // bootstrap tests are racy on directory creation so just run them one at a time. // Since there's not many this shouldn't be a problem. 
run_cargo_test(cmd, &["--test-threads=1"], &[], "bootstrap", None, compiler, host, builder); } diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index ad92a01bce7f3..d62166d8f0472 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -337,6 +337,7 @@ bootstrap_tool!( RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = "test"; CoverageDump, "src/tools/coverage-dump", "coverage-dump"; RustcPerfWrapper, "src/tools/rustc-perf-wrapper", "rustc-perf-wrapper"; + WasmComponentLd, "src/tools/wasm-component-ld", "wasm-component-ld"; ); #[derive(Debug, Clone, Hash, PartialEq, Eq)] diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index b14a0c5f072cd..aeb3474360801 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -1627,11 +1627,11 @@ impl<'a> Builder<'a> { } // This tells Cargo (and in turn, rustc) to output more complete - // dependency information. Most importantly for rustbuild, this + // dependency information. Most importantly for bootstrap, this // includes sysroot artifacts, like libstd, which means that we don't - // need to track those in rustbuild (an error prone process!). This + // need to track those in bootstrap (an error prone process!). This // feature is currently unstable as there may be some bugs and such, but - // it represents a big improvement in rustbuild's reliability on + // it represents a big improvement in bootstrap's reliability on // rebuilds, so we're using it here. // // For some additional context, see #63470 (the PR originally adding @@ -1643,7 +1643,7 @@ impl<'a> Builder<'a> { // Restrict the allowed features so we don't depend on nightly // accidentally. // - // binary-dep-depinfo is used by rustbuild itself for all + // binary-dep-depinfo is used by bootstrap itself for all // compilations. // // Lots of tools depend on proc_macro2 and proc-macro-error. diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs index aa119b8c69912..97c9ece0036ea 100644 --- a/src/bootstrap/src/core/builder/tests.rs +++ b/src/bootstrap/src/core/builder/tests.rs @@ -266,7 +266,7 @@ mod defaults { // rustdoc/rustcc/std here (the user only requested a host=B build, so // there's not really a need for us to build for target A in this case // (since we're producing stage 1 libraries/binaries). But currently - // rustbuild is just a bit buggy here; this should be fixed though. + // bootstrap is just a bit buggy here; this should be fixed though. assert_eq!( first(cache.all::()), &[ diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 11207cf893542..b777167ebe5da 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -658,8 +658,7 @@ impl Merge for TomlConfig { } } -// We are using a decl macro instead of a derive proc macro here to reduce the compile time of -// rustbuild. +// We are using a decl macro instead of a derive proc macro here to reduce the compile time of bootstrap. macro_rules! define_config { ($(#[$attr:meta])* struct $name:ident { $($field:ident: Option<$field_ty:ty> = $field_key:literal,)* @@ -704,7 +703,7 @@ macro_rules! define_config { // The following is a trimmed version of what serde_derive generates. All parts not relevant // for toml deserialization have been removed. 
This reduces the binary size and improves - // compile time of rustbuild. + // compile time of bootstrap. impl<'de> Deserialize<'de> for $name { fn deserialize(deserializer: D) -> Result where diff --git a/src/bootstrap/src/core/config/flags.rs b/src/bootstrap/src/core/config/flags.rs index aeb608a9ea26b..19f752da81c13 100644 --- a/src/bootstrap/src/core/config/flags.rs +++ b/src/bootstrap/src/core/config/flags.rs @@ -1,4 +1,4 @@ -//! Command-line interface of the rustbuild build system. +//! Command-line interface of the bootstrap build system. //! //! This module implements the command-line parsing of the build system which //! has various flags to configure how it's run. diff --git a/src/bootstrap/src/core/sanity.rs b/src/bootstrap/src/core/sanity.rs index 4bdc8ac07958f..2be819d52ea1a 100644 --- a/src/bootstrap/src/core/sanity.rs +++ b/src/bootstrap/src/core/sanity.rs @@ -1,4 +1,4 @@ -//! Sanity checking performed by rustbuild before actually executing anything. +//! Sanity checking performed by bootstrap before actually executing anything. //! //! This module contains the implementation of ensuring that the build //! environment looks reasonable before progressing. This will verify that diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 10ec7d135f0ca..daa916ce0a070 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -1,9 +1,9 @@ -//! Implementation of rustbuild, the Rust build system. +//! Implementation of bootstrap, the Rust build system. //! //! This module, and its descendants, are the implementation of the Rust build //! system. Most of this build system is backed by Cargo but the outer layer //! here serves as the ability to orchestrate calling Cargo, sequencing Cargo -//! builds, building artifacts like LLVM, etc. The goals of rustbuild are: +//! builds, building artifacts like LLVM, etc. The goals of bootstrap are: //! //! * To be an easily understandable, easily extensible, and maintainable build //! system. diff --git a/src/bootstrap/src/utils/helpers.rs b/src/bootstrap/src/utils/helpers.rs index f695b3229fe66..15133e441b491 100644 --- a/src/bootstrap/src/utils/helpers.rs +++ b/src/bootstrap/src/utils/helpers.rs @@ -1,4 +1,4 @@ -//! Various utility functions used throughout rustbuild. +//! Various utility functions used throughout bootstrap. //! //! Simple things like testing the various filesystem operations here and there, //! not a lot of interesting happenings here unfortunately. diff --git a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile index e3cb396b78297..962484593b4be 100644 --- a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile @@ -112,6 +112,7 @@ ENV TARGETS=$TARGETS,wasm32-unknown-unknown ENV TARGETS=$TARGETS,wasm32-wasi ENV TARGETS=$TARGETS,wasm32-wasip1 ENV TARGETS=$TARGETS,wasm32-wasip1-threads +ENV TARGETS=$TARGETS,wasm32-wasip2 ENV TARGETS=$TARGETS,sparcv9-sun-solaris ENV TARGETS=$TARGETS,x86_64-pc-solaris ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32 diff --git a/src/ci/scripts/install-clang.sh b/src/ci/scripts/install-clang.sh index 24b9904d65c25..6103aa61248ae 100755 --- a/src/ci/scripts/install-clang.sh +++ b/src/ci/scripts/install-clang.sh @@ -40,7 +40,7 @@ if isMacOS; then # our own clang can figure out the correct include path on its own. 
ciCommandSetEnv SDKROOT "$(xcrun --sdk macosx --show-sdk-path)" - # Configure `AR` specifically so rustbuild doesn't try to infer it as + # Configure `AR` specifically so bootstrap doesn't try to infer it as # `clang-ar` by accident. ciCommandSetEnv AR "ar" elif isWindows && ! isKnownToBeMingwBuild; then diff --git a/src/doc/rustc/src/platform-support/x86_64-fortanix-unknown-sgx.md b/src/doc/rustc/src/platform-support/x86_64-fortanix-unknown-sgx.md index 97b5827c1443f..33e1c44e6d351 100644 --- a/src/doc/rustc/src/platform-support/x86_64-fortanix-unknown-sgx.md +++ b/src/doc/rustc/src/platform-support/x86_64-fortanix-unknown-sgx.md @@ -46,7 +46,7 @@ on how to setup a development and runtime environment. As a tier 2 target, the target is built by the Rust project. -You can configure rustbuild like so: +You can configure bootstrap like so: ```toml [build] diff --git a/src/tools/build_helper/src/ci.rs b/src/tools/build_helper/src/ci.rs index 09489b0d9b7da..233fed4151c19 100644 --- a/src/tools/build_helper/src/ci.rs +++ b/src/tools/build_helper/src/ci.rs @@ -25,7 +25,7 @@ impl CiEnv { /// If in a CI environment, forces the command to run with colors. pub fn force_coloring_in_ci(self, cmd: &mut Command) { if self != CiEnv::None { - // Due to use of stamp/docker, the output stream of rustbuild is not + // Due to use of stamp/docker, the output stream of bootstrap is not // a TTY in CI, so coloring is by-default turned off. // The explicit `TERM=xterm` environment is needed for // `--color always` to actually work. This env var was lost when diff --git a/src/tools/lint-docs/src/groups.rs b/src/tools/lint-docs/src/groups.rs index f246d71d499b4..9eaa234bfaf30 100644 --- a/src/tools/lint-docs/src/groups.rs +++ b/src/tools/lint-docs/src/groups.rs @@ -37,7 +37,7 @@ impl<'a> LintExtractor<'a> { .map_err(|e| format!("could not read {}: {}", groups_path.display(), e))?; let new_contents = contents.replace("{{groups-table}}", &self.make_groups_table(lints, &groups)?); - // Delete the output because rustbuild uses hard links in its copies. + // Delete the output because bootstrap uses hard links in its copies. let _ = fs::remove_file(&groups_path); fs::write(&groups_path, new_contents) .map_err(|e| format!("could not write to {}: {}", groups_path.display(), e))?; diff --git a/src/tools/lint-docs/src/lib.rs b/src/tools/lint-docs/src/lib.rs index 72d6a495e7e79..48e2dd098769e 100644 --- a/src/tools/lint-docs/src/lib.rs +++ b/src/tools/lint-docs/src/lib.rs @@ -532,7 +532,7 @@ impl<'a> LintExtractor<'a> { } add_rename_redirect(level, &mut result); let out_path = self.out_path.join("listing").join(level.doc_filename()); - // Delete the output because rustbuild uses hard links in its copies. + // Delete the output because bootstrap uses hard links in its copies. let _ = fs::remove_file(&out_path); fs::write(&out_path, result) .map_err(|e| format!("could not write to {}: {}", out_path.display(), e))?; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs index 1e829299e6f3c..2ff967416c006 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/version.rs @@ -15,7 +15,7 @@ pub struct VersionInfo { pub version: &'static str, /// The release channel we were built for (stable/beta/nightly/dev). /// - /// `None` if not built via rustbuild. + /// `None` if not built via bootstrap. 
pub release_channel: Option<&'static str>, /// Information about the Git repository we may have been built from. /// diff --git a/src/tools/rust-installer/src/combiner.rs b/src/tools/rust-installer/src/combiner.rs index c211b34850a07..e7020980dc3d6 100644 --- a/src/tools/rust-installer/src/combiner.rs +++ b/src/tools/rust-installer/src/combiner.rs @@ -109,7 +109,7 @@ impl Combiner { .with_context(|| format!("failed to read components in '{}'", input_tarball))?; for component in pkg_components.split_whitespace() { // All we need to do is copy the component directory. We could - // move it, but rustbuild wants to reuse the unpacked package + // move it, but bootstrap wants to reuse the unpacked package // dir for OS-specific installers on macOS and Windows. let component_dir = package_dir.join(component); create_dir(&component_dir)?; diff --git a/src/tools/rust-installer/test.sh b/src/tools/rust-installer/test.sh index 16b05c66197c8..7b3e745600629 100755 --- a/src/tools/rust-installer/test.sh +++ b/src/tools/rust-installer/test.sh @@ -974,7 +974,7 @@ combined_remains() { --package-name=rust \ --input-tarballs="$OUT_DIR/rustc.tar.gz,$OUT_DIR/cargo.tar.gz,$OUT_DIR/rust-docs.tar.gz" for component in rustc cargo rust-docs; do - # rustbuild wants the original extracted package intact too + # bootstrap wants the original extracted package intact too try test -d "$WORK_DIR/$component/$component" try test -d "$WORK_DIR/rust/$component" done diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index 3c72fae0881e9..f9bf04626f785 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -95,7 +95,12 @@ const EXCEPTIONS: ExceptionList = &[ ("self_cell", "Apache-2.0"), // rustc (fluent translations) ("snap", "BSD-3-Clause"), // rustc ("wasm-encoder", "Apache-2.0 WITH LLVM-exception"), // rustc + ("wasm-metadata", "Apache-2.0 WITH LLVM-exception"), // rustc ("wasmparser", "Apache-2.0 WITH LLVM-exception"), // rustc + ("wast", "Apache-2.0 WITH LLVM-exception"), // rustc + ("wat", "Apache-2.0 WITH LLVM-exception"), // rustc + ("wit-component", "Apache-2.0 WITH LLVM-exception"), // rustc + ("wit-parser", "Apache-2.0 WITH LLVM-exception"), // rustc // tidy-alphabetical-end ]; diff --git a/src/tools/wasm-component-ld/Cargo.toml b/src/tools/wasm-component-ld/Cargo.toml new file mode 100644 index 0000000000000..91ff19ad9fcb0 --- /dev/null +++ b/src/tools/wasm-component-ld/Cargo.toml @@ -0,0 +1,13 @@ +# See the `README.md` in this directory for what this tool is. + +[package] +name = "wasm-component-ld-wrapper" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "wasm-component-ld" +path = "src/main.rs" + +[dependencies] +wasm-component-ld = "0.5.4" diff --git a/src/tools/wasm-component-ld/README.md b/src/tools/wasm-component-ld/README.md new file mode 100644 index 0000000000000..54608a2dea1d4 --- /dev/null +++ b/src/tools/wasm-component-ld/README.md @@ -0,0 +1,62 @@ +# `wasm-component-ld` + +This tool is a wrapper around the [`wasm-component-ld`] crates.io crate. That +crate is itself a thin wrapper around two pieces: + +* `wasm-ld` - the LLVM-based linker distributed as part of LLD and packaged in + Rust as `rust-lld`. +* [`wit-component`] - a Rust crate for creating a [WebAssembly Component] from a + core wasm module. + +This linker is used for Rust's `wasm32-wasip2` target to natively output a +component instead of a core WebAssembly module, unlike other WebAssembly +targets.
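As a rough, editorial sketch of how those two pieces fit together, the flow is: invoke `wasm-ld` to link a core wasm module, then encode that module as a component. The snippet below is an illustration only, not the crate's actual source; the `link_component` helper is hypothetical, and the `wit-component`/`anyhow` calls are assumptions about those crates' APIs.

```rust
// Hypothetical sketch of the two-step flow; not the real `wasm-component-ld` source.
use std::process::Command;

fn link_component(object_files: &[&str], output: &str) -> anyhow::Result<()> {
    // Step 1: let `wasm-ld` (shipped as `rust-lld`) do the hard work of
    // producing a core WebAssembly module.
    let core_path = format!("{output}.core.wasm");
    let status = Command::new("wasm-ld")
        .args(object_files)
        .arg("-o")
        .arg(&core_path)
        .status()?;
    anyhow::ensure!(status.success(), "wasm-ld failed");

    // Step 2: wrap the core module in a component, driven by the component
    // type information embedded in the module's custom sections.
    let core_wasm = std::fs::read(&core_path)?;
    let component = wit_component::ComponentEncoder::default()
        .module(&core_wasm)?
        .validate(true)
        .encode()?;
    std::fs::write(output, component)?;
    Ok(())
}
```

The real crate additionally forwards component-specific flags, manages temporary files, and reports errors more carefully, but the overall shape matches the FAQ below.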
If you're confused about any of this, here's an FAQ-style explanation of +what's going on here: + +* **What's a component?** - It's a proposal to the WebAssembly standard + primarily driven at this time by out-of-browser use cases of WebAssembly. + You can find high-level documentation [here][component docs]. + +* **What's WASIp2?** - Not to be confused with WASIp1, WASIp0, + `wasi_snapshot_preview1`, or `wasi_unstable`, it's a version of WASI. Released + in January 2024, it's the first version of WASI defined in terms of the + component model. + +* **Why does this need its own linker?** - like any target that Rust has, the + `wasm32-wasip2` target needs a linker. What makes this different from other + WebAssembly targets is that WASIp2 is defined at the component level, not the core + WebAssembly level. This means that filesystem functions take a `string` + instead of `i32 i32`, for example. As a result, the raw output of LLVM and + `wasm-ld`, a core WebAssembly module, is not suitable. + +* **Isn't writing a linker really hard?** - Generally, yes, but this linker + works by first asking `wasm-ld` to do all the hard work. It invokes `wasm-ld` + and then uses the output core WebAssembly module to create a component. + +* **How do you create a component from a core module?** - this is the purpose of + the [`wit-component`] crate, notably the `ComponentEncoder` type. This uses + component type information embedded in the core module and a general set of + conventions/guidelines for what the core module imports/exports. A component + is then emitted that codifies all of these conventions. + +* **Why not require users to run `wit-component` themselves?** - while possible, + it adds friction to the use of the `wasm32-wasip2` target. More importantly, though, + the "module only" output of the `wasm32-wasip2` target is not ready right now. + The standard library still imports from `wasi_snapshot_preview1` and it will + take time to migrate all usage to WASIp2. + +* **What exactly does this linker do?** - the `wasm-component-ld` binary has the same + CLI interface and flags as `wasm-ld`, plus some more that are + component-specific. Most of these flags are forwarded to `wasm-ld` to + produce a core wasm module. After the core wasm module is produced, the + `wit-component` crate will read custom sections in the final binary which + contain component type information. After merging all this type information + together, a component is produced which wraps the core module. + +If you've got any other questions about this linker or its operation, don't +hesitate to reach out to the maintainers of the `wasm32-wasip2` target. + +[`wasm-component-ld`]: https://crates.io/crates/wasm-component-ld +[`wit-component`]: https://crates.io/crates/wit-component +[WebAssembly Component]: https://github.com/webassembly/component-model +[component docs]: https://component-model.bytecodealliance.org/ diff --git a/src/tools/wasm-component-ld/src/main.rs b/src/tools/wasm-component-ld/src/main.rs new file mode 100644 index 0000000000000..caaac651c4c7b --- /dev/null +++ b/src/tools/wasm-component-ld/src/main.rs @@ -0,0 +1,9 @@ +// See the `README.md` in this directory for what this tool is. + +// The source for this crate lives at +// https://github.com/bytecodealliance/wasm-component-ld and the binary is +// independently used in other projects such as `wasi-sdk` so the `main` +// function is just reexported here to delegate.
A Cargo dependency is used to +// facilitate version management in the Rust repository and work well with +// vendored/offline builds. +use wasm_component_ld::main; diff --git a/tests/mir-opt/dest-prop/branch.rs b/tests/mir-opt/dest-prop/branch.rs index 481d4130c7b59..0ac79ee4ec293 100644 --- a/tests/mir-opt/dest-prop/branch.rs +++ b/tests/mir-opt/dest-prop/branch.rs @@ -1,4 +1,3 @@ -// skip-filecheck // EMIT_MIR_FOR_EACH_PANIC_STRATEGY //! Tests that assignment in both branches of an `if` are eliminated. //@ test-mir-pass: DestinationPropagation @@ -12,6 +11,10 @@ fn cond() -> bool { // EMIT_MIR branch.foo.DestinationPropagation.diff fn foo() -> i32 { + // CHECK-LABEL: fn foo( + // CHECK: debug y => [[y:_.*]]; + // CHECK: [[y]] = val() + // CHECK-NOT: [[y]] = {{_.*}}; let x = val(); let y = if cond() { diff --git a/tests/mir-opt/dest-prop/copy_propagation_arg.rs b/tests/mir-opt/dest-prop/copy_propagation_arg.rs index db4969924ffb9..084bd0544c175 100644 --- a/tests/mir-opt/dest-prop/copy_propagation_arg.rs +++ b/tests/mir-opt/dest-prop/copy_propagation_arg.rs @@ -1,4 +1,3 @@ -// skip-filecheck // EMIT_MIR_FOR_EACH_PANIC_STRATEGY // Check that DestinationPropagation does not propagate an assignment to a function argument // (doing so can break usages of the original argument value) @@ -9,18 +8,29 @@ fn dummy(x: u8) -> u8 { // EMIT_MIR copy_propagation_arg.foo.DestinationPropagation.diff fn foo(mut x: u8) { + // CHECK-LABEL: fn foo( + // CHECK: debug x => [[x:_.*]]; + // CHECK: dummy(move [[x]]) + // CHECK: [[x]] = move {{_.*}}; // calling `dummy` to make a use of `x` that copyprop cannot eliminate x = dummy(x); // this will assign a local to `x` } // EMIT_MIR copy_propagation_arg.bar.DestinationPropagation.diff fn bar(mut x: u8) { + // CHECK-LABEL: fn bar( + // CHECK: debug x => [[x:_.*]]; + // CHECK: dummy(move [[x]]) + // CHECK: [[x]] = const 5_u8; dummy(x); x = 5; } // EMIT_MIR copy_propagation_arg.baz.DestinationPropagation.diff fn baz(mut x: i32) -> i32 { + // CHECK-LABEL: fn baz( + // CHECK: debug x => [[x:_.*]]; + // CHECK-NOT: [[x]] = // self-assignment to a function argument should be eliminated x = x; x @@ -28,6 +38,12 @@ fn baz(mut x: i32) -> i32 { // EMIT_MIR copy_propagation_arg.arg_src.DestinationPropagation.diff fn arg_src(mut x: i32) -> i32 { + // CHECK-LABEL: fn arg_src( + // CHECK: debug x => [[x:_.*]]; + // CHECK: debug y => [[y:_.*]]; + // CHECK: [[y]] = [[x]] + // CHECK: [[x]] = const 123_i32; + // CHECK-NOT: {{_.*}} = [[y]]; let y = x; x = 123; // Don't propagate this assignment to `y` y diff --git a/tests/mir-opt/dest-prop/cycle.rs b/tests/mir-opt/dest-prop/cycle.rs index e414daf20f2dd..86b9b713fd001 100644 --- a/tests/mir-opt/dest-prop/cycle.rs +++ b/tests/mir-opt/dest-prop/cycle.rs @@ -1,4 +1,3 @@ -// skip-filecheck // EMIT_MIR_FOR_EACH_PANIC_STRATEGY //! Tests that cyclic assignments don't hang DestinationPropagation, and result in reasonable code. 
//@ test-mir-pass: DestinationPropagation @@ -8,6 +7,10 @@ fn val() -> i32 { // EMIT_MIR cycle.main.DestinationPropagation.diff fn main() { + // CHECK-LABEL: main( + // CHECK: debug x => [[x:_.*]]; + // CHECK: [[x]] = val() + // CHECK-NOT: [[x]] = {{_.*}}; let mut x = val(); let y = x; let z = y; diff --git a/tests/mir-opt/dest-prop/dead_stores_79191.rs b/tests/mir-opt/dest-prop/dead_stores_79191.rs index 5c218a328f584..61060e4f8506b 100644 --- a/tests/mir-opt/dest-prop/dead_stores_79191.rs +++ b/tests/mir-opt/dest-prop/dead_stores_79191.rs @@ -1,4 +1,3 @@ -// skip-filecheck // EMIT_MIR_FOR_EACH_PANIC_STRATEGY //@ test-mir-pass: DestinationPropagation @@ -8,6 +7,13 @@ fn id(x: T) -> T { // EMIT_MIR dead_stores_79191.f.DestinationPropagation.after.mir fn f(mut a: usize) -> usize { + // CHECK-LABEL: fn f( + // CHECK: debug a => [[a:_.*]]; + // CHECK: debug b => [[b:_.*]]; + // CHECK: [[b]] = [[a]]; + // CHECK: [[a]] = const 5_usize; + // CHECK: [[a]] = move [[b]]; + // CHECK: id::(move [[a]]) let b = a; a = 5; a = b; diff --git a/tests/mir-opt/dest-prop/dead_stores_better.rs b/tests/mir-opt/dest-prop/dead_stores_better.rs index 06445dc87031e..d2b9fe0571234 100644 --- a/tests/mir-opt/dest-prop/dead_stores_better.rs +++ b/tests/mir-opt/dest-prop/dead_stores_better.rs @@ -1,4 +1,3 @@ -// skip-filecheck // EMIT_MIR_FOR_EACH_PANIC_STRATEGY // This is a copy of the `dead_stores_79191` test, except that we turn on DSE. This demonstrates // that that pass enables this one to do more optimizations. @@ -12,6 +11,13 @@ fn id(x: T) -> T { // EMIT_MIR dead_stores_better.f.DestinationPropagation.after.mir pub fn f(mut a: usize) -> usize { + // CHECK-LABEL: fn f( + // CHECK: debug a => [[a:_.*]]; + // CHECK: debug b => [[b:_.*]]; + // CHECK: [[b]] = [[a]]; + // CHECK: [[a]] = const 5_usize; + // CHECK: [[a]] = move [[b]]; + // CHECK: id::(move [[a]]) let b = a; a = 5; a = b; diff --git a/tests/mir-opt/dest-prop/simple.rs b/tests/mir-opt/dest-prop/simple.rs index 8e5d6340e566f..833d49b8c4665 100644 --- a/tests/mir-opt/dest-prop/simple.rs +++ b/tests/mir-opt/dest-prop/simple.rs @@ -1,9 +1,15 @@ -// skip-filecheck // EMIT_MIR_FOR_EACH_PANIC_STRATEGY //! Copy of `nrvo-simple.rs`, to ensure that full dest-prop handles it too. //@ test-mir-pass: DestinationPropagation // EMIT_MIR simple.nrvo.DestinationPropagation.diff fn nrvo(init: fn(&mut [u8; 1024])) -> [u8; 1024] { + // CHECK-LABEL: fn nrvo( + // CHECK: debug init => [[init:_.*]]; + // CHECK: debug buf => [[buf:_.*]]; + // CHECK: [[buf]] = [const 0_u8; 1024]; + // CHECK-NOT: {{_.*}} = [[init]]; + // CHECK: move [[init]](move {{_.*}}) + // CHECK: {{_.*}} = [[buf]] let mut buf = [0; 1024]; init(&mut buf); buf diff --git a/tests/mir-opt/dest-prop/union.rs b/tests/mir-opt/dest-prop/union.rs index 66fadd8471225..4e6fb71bf75ec 100644 --- a/tests/mir-opt/dest-prop/union.rs +++ b/tests/mir-opt/dest-prop/union.rs @@ -8,6 +8,8 @@ fn val() -> u32 { // EMIT_MIR union.main.DestinationPropagation.diff fn main() { + // CHECK-LABEL: fn args( + // CHECK: {{_.*}} = Un { us: const 1_u32 }; union Un { us: u32, }