From 5014353b8bfa6bac9f66a8e6167e88fc624352ae Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Thu, 27 Feb 2025 16:35:57 +0000 Subject: [PATCH 01/19] Don't attempt to export compiler-builtins symbols from rust dylibs They are marked with hidden visibility to prevent them from getting exported, so we shouldn't ask the linker to export them anyway. The only thing that does it cause a warning on macOS. --- compiler/rustc_codegen_ssa/src/back/linker.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_codegen_ssa/src/back/linker.rs b/compiler/rustc_codegen_ssa/src/back/linker.rs index 818edaf66031b..8e32a8aed81cb 100644 --- a/compiler/rustc_codegen_ssa/src/back/linker.rs +++ b/compiler/rustc_codegen_ssa/src/back/linker.rs @@ -1784,7 +1784,10 @@ fn exported_symbols_for_non_proc_macro(tcx: TyCtxt<'_>, crate_type: CrateType) - let mut symbols = Vec::new(); let export_threshold = symbol_export::crates_export_threshold(&[crate_type]); for_each_exported_symbols_include_dep(tcx, crate_type, |symbol, info, cnum| { - if info.level.is_below_threshold(export_threshold) { + // Do not export mangled symbols from cdylibs and don't attempt to export compiler-builtins + // from any cdylib. The latter doesn't work anyway as we use hidden visibility for + // compiler-builtins. Most linkers silently ignore it, but ld64 gives a warning. + if info.level.is_below_threshold(export_threshold) && !tcx.is_compiler_builtins(cnum) { symbols.push(symbol_export::exporting_symbol_name_for_instance_in_crate( tcx, symbol, cnum, )); From d73479e558f689c399b6e16bc2c5a50f2745cf8a Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Thu, 27 Feb 2025 16:36:20 +0000 Subject: [PATCH 02/19] Fix lint name in unused linker_messages warning --- compiler/rustc_passes/messages.ftl | 4 ++-- compiler/rustc_passes/src/check_attr.rs | 2 +- compiler/rustc_passes/src/errors.rs | 4 ++-- tests/ui/lint/linker-warning.stderr | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index bc43580a7f00d..f394d5ca1d148 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -801,8 +801,8 @@ passes_unused_duplicate = passes_unused_empty_lints_note = attribute `{$name}` with an empty list has no effect -passes_unused_linker_warnings_note = - the `linker_warnings` lint can only be controlled at the root of a crate that needs to be linked +passes_unused_linker_messages_note = + the `linker_messages` lint can only be controlled at the root of a crate that needs to be linked passes_unused_multiple = multiple `{$name}` attributes diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index 5ada289cc2090..e9dfa620de64e 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -2382,7 +2382,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { .iter() .all(|kind| matches!(kind, CrateType::Rlib | CrateType::Staticlib)); if never_needs_link { - errors::UnusedNote::LinkerWarningsBinaryCrateOnly + errors::UnusedNote::LinkerMessagesBinaryCrateOnly } else { return; } diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs index 51b5861ee0ae6..54c3520299116 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -770,8 +770,8 @@ pub(crate) enum UnusedNote { NoLints { name: Symbol }, 
#[note(passes_unused_default_method_body_const_note)] DefaultMethodBodyConst, - #[note(passes_unused_linker_warnings_note)] - LinkerWarningsBinaryCrateOnly, + #[note(passes_unused_linker_messages_note)] + LinkerMessagesBinaryCrateOnly, } #[derive(LintDiagnostic)] diff --git a/tests/ui/lint/linker-warning.stderr b/tests/ui/lint/linker-warning.stderr index 3a2c392fd0312..c678562ab5466 100644 --- a/tests/ui/lint/linker-warning.stderr +++ b/tests/ui/lint/linker-warning.stderr @@ -16,7 +16,7 @@ warning: unused attribute LL | #![allow(linker_messages)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute | - = note: the `linker_warnings` lint can only be controlled at the root of a crate that needs to be linked + = note: the `linker_messages` lint can only be controlled at the root of a crate that needs to be linked warning: 2 warnings emitted From 61e550a5832e95b74f6f9b718e7218ab736ac011 Mon Sep 17 00:00:00 2001 From: Ayush Singh Date: Sat, 22 Feb 2025 19:37:07 +0530 Subject: [PATCH 03/19] uefi: helpers: Add DevicePathNode abstractions - UEFI device path is a series of nodes layed out in a contiguous memory region. So it makes sense to use Iterator abstraction for modeling DevicePaths - This PR has been split off from #135368 for easier review. The allow dead_code will be removed in #135368 Signed-off-by: Ayush Singh --- library/std/src/sys/pal/uefi/helpers.rs | 179 ++++++++++++++++++++++++ 1 file changed, 179 insertions(+) diff --git a/library/std/src/sys/pal/uefi/helpers.rs b/library/std/src/sys/pal/uefi/helpers.rs index dccc137d6f561..fade622022af4 100644 --- a/library/std/src/sys/pal/uefi/helpers.rs +++ b/library/std/src/sys/pal/uefi/helpers.rs @@ -216,6 +216,60 @@ pub(crate) fn device_path_to_text(path: NonNull) -> io::R Err(io::const_error!(io::ErrorKind::NotFound, "No device path to text protocol found")) } +fn device_node_to_text(path: NonNull) -> io::Result { + fn node_to_text( + protocol: NonNull, + path: NonNull, + ) -> io::Result { + let path_ptr: *mut r_efi::efi::Char16 = unsafe { + ((*protocol.as_ptr()).convert_device_node_to_text)( + path.as_ptr(), + // DisplayOnly + r_efi::efi::Boolean::FALSE, + // AllowShortcuts + r_efi::efi::Boolean::FALSE, + ) + }; + + let path = os_string_from_raw(path_ptr) + .ok_or(io::const_error!(io::ErrorKind::InvalidData, "Invalid path"))?; + + if let Some(boot_services) = crate::os::uefi::env::boot_services() { + let boot_services: NonNull = boot_services.cast(); + unsafe { + ((*boot_services.as_ptr()).free_pool)(path_ptr.cast()); + } + } + + Ok(path) + } + + static LAST_VALID_HANDLE: AtomicPtr = + AtomicPtr::new(crate::ptr::null_mut()); + + if let Some(handle) = NonNull::new(LAST_VALID_HANDLE.load(Ordering::Acquire)) { + if let Ok(protocol) = open_protocol::( + handle, + device_path_to_text::PROTOCOL_GUID, + ) { + return node_to_text(protocol, path); + } + } + + let device_path_to_text_handles = locate_handles(device_path_to_text::PROTOCOL_GUID)?; + for handle in device_path_to_text_handles { + if let Ok(protocol) = open_protocol::( + handle, + device_path_to_text::PROTOCOL_GUID, + ) { + LAST_VALID_HANDLE.store(handle.as_ptr(), Ordering::Release); + return node_to_text(protocol, path); + } + } + + Err(io::const_error!(io::ErrorKind::NotFound, "No device path to text protocol found")) +} + /// Gets RuntimeServices. 
pub(crate) fn runtime_services() -> Option> { let system_table: NonNull = @@ -319,6 +373,11 @@ impl<'a> BorrowedDevicePath<'a> { pub(crate) fn to_text(&self) -> io::Result { device_path_to_text(self.protocol) } + + #[expect(dead_code)] + pub(crate) const fn iter(&'a self) -> DevicePathIterator<'a> { + DevicePathIterator::new(DevicePathNode::new(self.protocol)) + } } impl<'a> crate::fmt::Debug for BorrowedDevicePath<'a> { @@ -330,6 +389,126 @@ impl<'a> crate::fmt::Debug for BorrowedDevicePath<'a> { } } +pub(crate) struct DevicePathIterator<'a>(Option>); + +impl<'a> DevicePathIterator<'a> { + const fn new(node: DevicePathNode<'a>) -> Self { + if node.is_end() { Self(None) } else { Self(Some(node)) } + } +} + +impl<'a> Iterator for DevicePathIterator<'a> { + type Item = DevicePathNode<'a>; + + fn next(&mut self) -> Option { + let cur_node = self.0?; + + let next_node = unsafe { cur_node.next_node() }; + self.0 = if next_node.is_end() { None } else { Some(next_node) }; + + Some(cur_node) + } +} + +#[derive(Copy, Clone)] +pub(crate) struct DevicePathNode<'a> { + protocol: NonNull, + phantom: PhantomData<&'a r_efi::protocols::device_path::Protocol>, +} + +impl<'a> DevicePathNode<'a> { + pub(crate) const fn new(protocol: NonNull) -> Self { + Self { protocol, phantom: PhantomData } + } + + pub(crate) const fn length(&self) -> u16 { + let len = unsafe { (*self.protocol.as_ptr()).length }; + u16::from_le_bytes(len) + } + + pub(crate) const fn node_type(&self) -> u8 { + unsafe { (*self.protocol.as_ptr()).r#type } + } + + pub(crate) const fn sub_type(&self) -> u8 { + unsafe { (*self.protocol.as_ptr()).sub_type } + } + + pub(crate) fn data(&self) -> &[u8] { + let length: usize = self.length().into(); + + // Some nodes do not have any special data + if length > 4 { + let raw_ptr: *const u8 = self.protocol.as_ptr().cast(); + let data = unsafe { raw_ptr.add(4) }; + unsafe { crate::slice::from_raw_parts(data, length - 4) } + } else { + &[] + } + } + + pub(crate) const fn is_end(&self) -> bool { + self.node_type() == r_efi::protocols::device_path::TYPE_END + && self.sub_type() == r_efi::protocols::device_path::End::SUBTYPE_ENTIRE + } + + #[expect(dead_code)] + pub(crate) const fn is_end_instance(&self) -> bool { + self.node_type() == r_efi::protocols::device_path::TYPE_END + && self.sub_type() == r_efi::protocols::device_path::End::SUBTYPE_INSTANCE + } + + pub(crate) unsafe fn next_node(&self) -> Self { + let node = unsafe { + self.protocol + .cast::() + .add(self.length().into()) + .cast::() + }; + Self::new(node) + } + + #[expect(dead_code)] + pub(crate) fn to_path(&'a self) -> BorrowedDevicePath<'a> { + BorrowedDevicePath::new(self.protocol) + } + + pub(crate) fn to_text(&self) -> io::Result { + device_node_to_text(self.protocol) + } +} + +impl<'a> PartialEq for DevicePathNode<'a> { + fn eq(&self, other: &Self) -> bool { + let self_len = self.length(); + let other_len = other.length(); + + self_len == other_len + && unsafe { + compiler_builtins::mem::memcmp( + self.protocol.as_ptr().cast(), + other.protocol.as_ptr().cast(), + usize::from(self_len), + ) == 0 + } + } +} + +impl<'a> crate::fmt::Debug for DevicePathNode<'a> { + fn fmt(&self, f: &mut crate::fmt::Formatter<'_>) -> crate::fmt::Result { + match self.to_text() { + Ok(p) => p.fmt(f), + Err(_) => f + .debug_struct("DevicePathNode") + .field("type", &self.node_type()) + .field("sub_type", &self.sub_type()) + .field("length", &self.length()) + .field("specific_device_path_data", &self.data()) + .finish(), + } + } +} + pub(crate) struct OwnedProtocol 
{ guid: r_efi::efi::Guid, handle: NonNull, From 6f214c5bb3959dc8a646fac0dafdcaeaf7e025f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 13:14:14 +0100 Subject: [PATCH 04/19] Fix pluralization of tests --- src/ci/citool/src/merge_report.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs index 17e42d49286fe..2dbe383b91222 100644 --- a/src/ci/citool/src/merge_report.rs +++ b/src/ci/citool/src/merge_report.rs @@ -242,7 +242,7 @@ fn report_test_changes(mut diffs: Vec) { println!(" - {}: {}", test.name, format_diff(&outcome_diff)); } if extra_tests > 0 { - println!(" - (and {extra_tests} additional {})", pluralize("tests", extra_tests)); + println!(" - (and {extra_tests} additional {})", pluralize("test", extra_tests)); } } From 208cef45fc963528a8959e3b153d3b238ca1e187 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 13:15:38 +0100 Subject: [PATCH 05/19] Add note about the experimental status --- .github/workflows/post-merge.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/post-merge.yml b/.github/workflows/post-merge.yml index 31e075f45d646..da566a91b469e 100644 --- a/.github/workflows/post-merge.yml +++ b/.github/workflows/post-merge.yml @@ -35,7 +35,7 @@ jobs: cd src/ci/citool - echo "Post-merge analysis result" > output.log + printf "*This is an experimental post-merge analysis result. You can ignore it.*\n\n" > output.log cargo run --release post-merge-report ${PARENT_COMMIT} ${{ github.sha }} >> output.log cat output.log From 2192d5c4e218aa137f7d38bc6c93e36af6149bf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 13:18:47 +0100 Subject: [PATCH 06/19] Print the compared SHAs --- src/ci/citool/src/merge_report.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs index 2dbe383b91222..21b77299cdb34 100644 --- a/src/ci/citool/src/merge_report.rs +++ b/src/ci/citool/src/merge_report.rs @@ -14,6 +14,8 @@ type JobName = String; pub fn post_merge_report(job_db: JobDatabase, parent: Sha, current: Sha) -> anyhow::Result<()> { let jobs = download_all_metrics(&job_db, &parent, ¤t)?; let diffs = aggregate_test_diffs(&jobs)?; + + println!("Comparing {parent} (base) -> {current} (this PR)\n"); report_test_changes(diffs); Ok(()) From 7ed913be3196ac43f6938ba5beb01e72464cc1cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 13:26:45 +0100 Subject: [PATCH 07/19] Add cache for downloading job metrics To make it easier to experiment locally. --- src/ci/citool/src/merge_report.rs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs index 21b77299cdb34..76b7d779abc41 100644 --- a/src/ci/citool/src/merge_report.rs +++ b/src/ci/citool/src/merge_report.rs @@ -1,5 +1,6 @@ use std::cmp::Reverse; use std::collections::HashMap; +use std::path::PathBuf; use anyhow::Context; use build_helper::metrics::{JsonRoot, TestOutcome}; @@ -56,7 +57,16 @@ Maybe it was newly added?"#, Ok(jobs) } +/// Downloads job metrics of the given job for the given commit. +/// Caches the result on the local disk. 
fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result { + let cache_path = PathBuf::from(".citool-cache").join(sha).join(job_name).join("metrics.json"); + if let Some(cache_entry) = + std::fs::read_to_string(&cache_path).ok().and_then(|data| serde_json::from_str(&data).ok()) + { + return Ok(cache_entry); + } + let url = get_metrics_url(job_name, sha); let mut response = ureq::get(&url).call()?; if !response.status().is_success() { @@ -70,6 +80,13 @@ fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result { .body_mut() .read_json() .with_context(|| anyhow::anyhow!("cannot deserialize metrics from {url}"))?; + + // Ignore errors if cache cannot be created + if std::fs::create_dir_all(cache_path.parent().unwrap()).is_ok() { + if let Ok(serialized) = serde_json::to_string(&data) { + let _ = std::fs::write(&cache_path, &serialized); + } + } Ok(data) } From 5a7f227351dc12fe3217394c9ecb7e9643c6d67e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 13:36:34 +0100 Subject: [PATCH 08/19] Collapse report in `
` --- .github/workflows/post-merge.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/post-merge.yml b/.github/workflows/post-merge.yml index da566a91b469e..de31c28cc90e0 100644 --- a/.github/workflows/post-merge.yml +++ b/.github/workflows/post-merge.yml @@ -35,8 +35,13 @@ jobs: cd src/ci/citool - printf "*This is an experimental post-merge analysis result. You can ignore it.*\n\n" > output.log + printf "*This is an experimental post-merge analysis report. You can ignore it.*\n\n" > output.log + printf "
\nPost-merge report\n\n" >> output.log + cargo run --release post-merge-report ${PARENT_COMMIT} ${{ github.sha }} >> output.log + + printf "
\n" >> output.log + cat output.log gh pr comment ${HEAD_PR} -F output.log From d5d633d2460e45df25c679933ea678367b4d94a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 13:55:03 +0100 Subject: [PATCH 09/19] Group diffs by tests, rather than job groups --- src/ci/citool/src/merge_report.rs | 162 ++++++++++++++++-------------- 1 file changed, 86 insertions(+), 76 deletions(-) diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs index 76b7d779abc41..918e9b92f1e60 100644 --- a/src/ci/citool/src/merge_report.rs +++ b/src/ci/citool/src/merge_report.rs @@ -1,5 +1,4 @@ -use std::cmp::Reverse; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::path::PathBuf; use anyhow::Context; @@ -14,10 +13,10 @@ type JobName = String; /// Computes a post merge CI analysis report between the `parent` and `current` commits. pub fn post_merge_report(job_db: JobDatabase, parent: Sha, current: Sha) -> anyhow::Result<()> { let jobs = download_all_metrics(&job_db, &parent, ¤t)?; - let diffs = aggregate_test_diffs(&jobs)?; + let aggregated_test_diffs = aggregate_test_diffs(&jobs)?; println!("Comparing {parent} (base) -> {current} (this PR)\n"); - report_test_changes(diffs); + report_test_diffs(aggregated_test_diffs); Ok(()) } @@ -95,81 +94,80 @@ fn get_metrics_url(job_name: &str, sha: &str) -> String { format!("https://ci-artifacts.rust-lang.org/rustc-builds{suffix}/{sha}/metrics-{job_name}.json") } +/// Represents a difference in the outcome of tests between a base and a current commit. +/// Maps test diffs to jobs that contained them. +#[derive(Debug)] +struct AggregatedTestDiffs { + diffs: HashMap>, +} + fn aggregate_test_diffs( jobs: &HashMap, -) -> anyhow::Result> { - let mut job_diffs = vec![]; +) -> anyhow::Result { + let mut diffs: HashMap> = HashMap::new(); // Aggregate test suites for (name, metrics) in jobs { if let Some(parent) = &metrics.parent { let tests_parent = aggregate_tests(parent); let tests_current = aggregate_tests(&metrics.current); - let test_diffs = calculate_test_diffs(tests_parent, tests_current); - if !test_diffs.is_empty() { - job_diffs.push((name.clone(), test_diffs)); + for diff in calculate_test_diffs(tests_parent, tests_current) { + diffs.entry(diff).or_default().push(name.to_string()); } } } - // Aggregate jobs with the same diff, as often the same diff will appear in many jobs - let job_diffs: HashMap, Vec> = - job_diffs.into_iter().fold(HashMap::new(), |mut acc, (job, diffs)| { - acc.entry(diffs).or_default().push(job); - acc - }); - - Ok(job_diffs - .into_iter() - .map(|(test_diffs, jobs)| AggregatedTestDiffs { jobs, test_diffs }) - .collect()) + Ok(AggregatedTestDiffs { diffs }) +} + +#[derive(Eq, PartialEq, Hash, Debug)] +enum TestOutcomeDiff { + ChangeOutcome { before: TestOutcome, after: TestOutcome }, + Missing { before: TestOutcome }, + Added(TestOutcome), +} + +#[derive(Eq, PartialEq, Hash, Debug)] +struct TestDiff { + test: Test, + diff: TestOutcomeDiff, } -fn calculate_test_diffs( - reference: TestSuiteData, - current: TestSuiteData, -) -> Vec<(Test, TestOutcomeDiff)> { - let mut diffs = vec![]; +fn calculate_test_diffs(parent: TestSuiteData, current: TestSuiteData) -> HashSet { + let mut diffs = HashSet::new(); for (test, outcome) in ¤t.tests { - match reference.tests.get(test) { + match parent.tests.get(test) { Some(before) => { if before != outcome { - diffs.push(( - test.clone(), - TestOutcomeDiff::ChangeOutcome { + diffs.insert(TestDiff { + test: test.clone(), + diff: 
TestOutcomeDiff::ChangeOutcome { before: before.clone(), after: outcome.clone(), }, - )); + }); } } - None => diffs.push((test.clone(), TestOutcomeDiff::Added(outcome.clone()))), + None => { + diffs.insert(TestDiff { + test: test.clone(), + diff: TestOutcomeDiff::Added(outcome.clone()), + }); + } } } - for (test, outcome) in &reference.tests { + for (test, outcome) in &parent.tests { if !current.tests.contains_key(test) { - diffs.push((test.clone(), TestOutcomeDiff::Missing { before: outcome.clone() })); + diffs.insert(TestDiff { + test: test.clone(), + diff: TestOutcomeDiff::Missing { before: outcome.clone() }, + }); } } diffs } -/// Represents a difference in the outcome of tests between a base and a current commit. -#[derive(Debug)] -struct AggregatedTestDiffs { - /// All jobs that had the exact same test diffs. - jobs: Vec, - test_diffs: Vec<(Test, TestOutcomeDiff)>, -} - -#[derive(Eq, PartialEq, Hash, Debug)] -enum TestOutcomeDiff { - ChangeOutcome { before: TestOutcome, after: TestOutcome }, - Missing { before: TestOutcome }, - Added(TestOutcome), -} - /// Aggregates test suite executions from all bootstrap invocations in a given CI job. #[derive(Default)] struct TestSuiteData { @@ -200,16 +198,13 @@ fn normalize_test_name(name: &str) -> String { } /// Prints test changes in Markdown format to stdout. -fn report_test_changes(mut diffs: Vec) { +fn report_test_diffs(mut diff: AggregatedTestDiffs) { println!("## Test differences"); - if diffs.is_empty() { + if diff.diffs.is_empty() { println!("No test diffs found"); return; } - // Sort diffs in decreasing order by diff count - diffs.sort_by_key(|entry| Reverse(entry.test_diffs.len())); - fn format_outcome(outcome: &TestOutcome) -> String { match outcome { TestOutcome::Passed => "pass".to_string(), @@ -238,36 +233,51 @@ fn report_test_changes(mut diffs: Vec) { } } - let max_diff_count = 10; - let max_job_count = 5; - let max_test_count = 10; - - for diff in diffs.iter().take(max_diff_count) { - let mut jobs = diff.jobs.clone(); + // It would be quite noisy to repeat the jobs that contained the test changes after/next to + // every test diff. At the same time, grouping the test diffs by + // [unique set of jobs that contained them] also doesn't work well, because the test diffs + // would have to be duplicated several times. + // Instead, we create a set of unique job groups, and then print a job group after each test. + // We then print the job groups at the end, as a sort of index. 
+ let mut grouped_diffs: Vec<(&TestDiff, u64)> = vec![]; + let mut job_list_to_group: HashMap<&[JobName], u64> = HashMap::new(); + let mut job_index: Vec<&[JobName]> = vec![]; + + for (_, jobs) in diff.diffs.iter_mut() { jobs.sort(); + } - let jobs = jobs.iter().take(max_job_count).map(|j| format!("`{j}`")).collect::>(); - - let extra_jobs = diff.jobs.len().saturating_sub(max_job_count); - let suffix = if extra_jobs > 0 { - format!(" (and {extra_jobs} {})", pluralize("other", extra_jobs)) - } else { - String::new() + let max_diff_count = 100; + for (diff, jobs) in diff.diffs.iter().take(max_diff_count) { + let jobs = &*jobs; + let job_group = match job_list_to_group.get(jobs.as_slice()) { + Some(id) => *id, + None => { + let id = job_index.len() as u64; + job_index.push(jobs); + job_list_to_group.insert(jobs, id); + id + } }; - println!("- {}{suffix}", jobs.join(",")); + grouped_diffs.push((diff, job_group)); + } - let extra_tests = diff.test_diffs.len().saturating_sub(max_test_count); - for (test, outcome_diff) in diff.test_diffs.iter().take(max_test_count) { - println!(" - {}: {}", test.name, format_diff(&outcome_diff)); - } - if extra_tests > 0 { - println!(" - (and {extra_tests} additional {})", pluralize("test", extra_tests)); - } + // Sort diffs by job group and test name + grouped_diffs.sort_by(|(d1, g1), (d2, g2)| g1.cmp(&g2).then(d1.test.name.cmp(&d2.test.name))); + + for (diff, job_group) in grouped_diffs { + println!("- `{}`: {} (*J{job_group}*)", diff.test.name, format_diff(&diff.diff)); } - let extra_diffs = diffs.len().saturating_sub(max_diff_count); + let extra_diffs = diff.diffs.len().saturating_sub(max_diff_count); if extra_diffs > 0 { - println!("\n(and {extra_diffs} additional {})", pluralize("diff", extra_diffs)); + println!("\n(and {extra_diffs} additional {})", pluralize("test diff", extra_diffs)); + } + + // Now print the job index + println!("\n**Job index**\n"); + for (group, jobs) in job_index.into_iter().enumerate() { + println!("- J{group}: `{}`", jobs.join(", ")); } } From f981a0a0cd266e88c4c504a7d3218d045cc8104e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 14:31:23 +0100 Subject: [PATCH 10/19] Do not print doctest diffs in the report When they are moved around in code, their name changes, which produces too noisy diffs. --- src/ci/citool/src/merge_report.rs | 56 ++++++++++++++++++++++++------- 1 file changed, 44 insertions(+), 12 deletions(-) diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs index 918e9b92f1e60..be7246cfa5e2a 100644 --- a/src/ci/citool/src/merge_report.rs +++ b/src/ci/citool/src/merge_report.rs @@ -2,7 +2,7 @@ use std::collections::{HashMap, HashSet}; use std::path::PathBuf; use anyhow::Context; -use build_helper::metrics::{JsonRoot, TestOutcome}; +use build_helper::metrics::{JsonRoot, TestOutcome, TestSuiteMetadata}; use crate::jobs::JobDatabase; use crate::metrics::get_test_suites; @@ -177,6 +177,7 @@ struct TestSuiteData { #[derive(Hash, PartialEq, Eq, Debug, Clone)] struct Test { name: String, + is_doctest: bool, } /// Extracts all tests from the passed metrics and map them to their outcomes. 
@@ -185,7 +186,10 @@ fn aggregate_tests(metrics: &JsonRoot) -> TestSuiteData { let test_suites = get_test_suites(&metrics); for suite in test_suites { for test in &suite.tests { - let test_entry = Test { name: normalize_test_name(&test.name) }; + // Poor man's detection of doctests based on the "(line XYZ)" suffix + let is_doctest = matches!(suite.metadata, TestSuiteMetadata::CargoPackage { .. }) + && test.name.contains("(line"); + let test_entry = Test { name: normalize_test_name(&test.name), is_doctest }; tests.insert(test_entry, test.outcome.clone()); } } @@ -198,7 +202,7 @@ fn normalize_test_name(name: &str) -> String { } /// Prints test changes in Markdown format to stdout. -fn report_test_diffs(mut diff: AggregatedTestDiffs) { +fn report_test_diffs(diff: AggregatedTestDiffs) { println!("## Test differences"); if diff.diffs.is_empty() { println!("No test diffs found"); @@ -233,6 +237,10 @@ fn report_test_diffs(mut diff: AggregatedTestDiffs) { } } + fn format_job_group(group: u64) -> String { + format!("**J{group}**") + } + // It would be quite noisy to repeat the jobs that contained the test changes after/next to // every test diff. At the same time, grouping the test diffs by // [unique set of jobs that contained them] also doesn't work well, because the test diffs @@ -243,12 +251,20 @@ fn report_test_diffs(mut diff: AggregatedTestDiffs) { let mut job_list_to_group: HashMap<&[JobName], u64> = HashMap::new(); let mut job_index: Vec<&[JobName]> = vec![]; - for (_, jobs) in diff.diffs.iter_mut() { - jobs.sort(); - } + let original_diff_count = diff.diffs.len(); + let diffs = diff + .diffs + .into_iter() + .filter(|(diff, _)| !diff.test.is_doctest) + .map(|(diff, mut jobs)| { + jobs.sort(); + (diff, jobs) + }) + .collect::>(); + let doctest_count = original_diff_count.saturating_sub(diffs.len()); let max_diff_count = 100; - for (diff, jobs) in diff.diffs.iter().take(max_diff_count) { + for (diff, jobs) in diffs.iter().take(max_diff_count) { let jobs = &*jobs; let job_group = match job_list_to_group.get(jobs.as_slice()) { Some(id) => *id, @@ -266,18 +282,34 @@ fn report_test_diffs(mut diff: AggregatedTestDiffs) { grouped_diffs.sort_by(|(d1, g1), (d2, g2)| g1.cmp(&g2).then(d1.test.name.cmp(&d2.test.name))); for (diff, job_group) in grouped_diffs { - println!("- `{}`: {} (*J{job_group}*)", diff.test.name, format_diff(&diff.diff)); + println!( + "- `{}`: {} ({})", + diff.test.name, + format_diff(&diff.diff), + format_job_group(job_group) + ); } - let extra_diffs = diff.diffs.len().saturating_sub(max_diff_count); + let extra_diffs = diffs.len().saturating_sub(max_diff_count); if extra_diffs > 0 { println!("\n(and {extra_diffs} additional {})", pluralize("test diff", extra_diffs)); } - // Now print the job index - println!("\n**Job index**\n"); + if doctest_count > 0 { + println!( + "\nAdditionally, {doctest_count} doctest {} were found.", + pluralize("diff", doctest_count) + ); + } + + // Now print the job group index + println!("\n**Job group index**\n"); for (group, jobs) in job_index.into_iter().enumerate() { - println!("- J{group}: `{}`", jobs.join(", ")); + println!( + "- {}: {}", + format_job_group(group as u64), + jobs.iter().map(|j| format!("`{j}`")).collect::>().join(", ") + ); } } From c4371786d623a56072d048b9dea64abccbf4a113 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Wed, 12 Mar 2025 13:14:11 +0100 Subject: [PATCH 11/19] Create libgccjit.so.0 alias also for CI-downloaded GCC --- src/bootstrap/src/core/build_steps/gcc.rs | 20 ++++++++++++++------ 1 file 
changed, 14 insertions(+), 6 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/gcc.rs b/src/bootstrap/src/core/build_steps/gcc.rs index 5a4bc9bdbcba9..ea5a312a7161f 100644 --- a/src/bootstrap/src/core/build_steps/gcc.rs +++ b/src/bootstrap/src/core/build_steps/gcc.rs @@ -63,11 +63,7 @@ impl Step for Gcc { } build_gcc(&metadata, builder, target); - - let lib_alias = metadata.install_dir.join("lib/libgccjit.so.0"); - if !lib_alias.exists() { - t!(builder.symlink_file(&libgccjit_path, lib_alias)); - } + create_lib_alias(builder, &libgccjit_path); t!(metadata.stamp.write()); @@ -75,6 +71,15 @@ impl Step for Gcc { } } +/// Creates a libgccjit.so.0 alias next to libgccjit.so if it does not +/// already exist +fn create_lib_alias(builder: &Builder<'_>, libgccjit: &PathBuf) { + let lib_alias = libgccjit.parent().unwrap().join("libgccjit.so.0"); + if !lib_alias.exists() { + t!(builder.symlink_file(libgccjit, lib_alias)); + } +} + pub struct Meta { stamp: BuildStamp, out_dir: PathBuf, @@ -109,8 +114,11 @@ fn try_download_gcc(builder: &Builder<'_>, target: TargetSelection) -> Option Date: Wed, 12 Mar 2025 17:24:51 +0100 Subject: [PATCH 12/19] Change GCC build flags --- src/bootstrap/src/core/build_steps/gcc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bootstrap/src/core/build_steps/gcc.rs b/src/bootstrap/src/core/build_steps/gcc.rs index ea5a312a7161f..0a3208b2363a4 100644 --- a/src/bootstrap/src/core/build_steps/gcc.rs +++ b/src/bootstrap/src/core/build_steps/gcc.rs @@ -218,7 +218,7 @@ fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) { .env("CXXFLAGS", "-Wno-everything -g -O2") .env("CFLAGS", "-Wno-everything -g -O2") .arg("--enable-host-shared") - .arg("--enable-languages=jit") + .arg("--enable-languages=c,jit,lto") .arg("--enable-checking=release") .arg("--disable-bootstrap") .arg("--disable-multilib") From 3fc7ca0fee9806983423dcd4aab06e42c856cd78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 11:31:08 +0100 Subject: [PATCH 13/19] Use GCC for building GCC --- src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile index f54ecef1e308a..ae5bf8946dd94 100644 --- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile @@ -101,7 +101,9 @@ ENV SCRIPT python3 ../x.py build --set rust.debug=true opt-dist && \ ./build/$HOSTS/stage0-tools-bin/opt-dist linux-ci -- python3 ../x.py dist \ --host $HOSTS --target $HOSTS \ --include-default-paths \ - build-manifest bootstrap gcc + build-manifest bootstrap && \ + # Use GCC for building GCC, as it seems to behave badly when built with Clang + CC=/rustroot/bin/cc CXX=/rustroot/bin/c++ python3 ../x.py dist gcc ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=clang # This is the only builder which will create source tarballs From 34272a54689e1e9b9eac97e0f5e126cd4149ae39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 16:27:41 +0100 Subject: [PATCH 14/19] Do not overwrite original `config.toml` in `opt-dist` So that follow-up CI commands can proceed normally. It will also avoid overwriting `config.toml` when running opt-dist tests locally. 
--- src/tools/opt-dist/src/tests.rs | 81 +++++++++++++++++++++------------ 1 file changed, 52 insertions(+), 29 deletions(-) diff --git a/src/tools/opt-dist/src/tests.rs b/src/tools/opt-dist/src/tests.rs index 8b3bd77141ba8..c8759bb6ff62a 100644 --- a/src/tools/opt-dist/src/tests.rs +++ b/src/tools/opt-dist/src/tests.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use anyhow::Context; use camino::{Utf8Path, Utf8PathBuf}; @@ -86,36 +88,57 @@ llvm-config = "{llvm_config}" log::info!("Using following `config.toml` for running tests:\n{config_content}"); // Simulate a stage 0 compiler with the extracted optimized dist artifacts. - std::fs::write("config.toml", config_content)?; - - let x_py = env.checkout_path().join("x.py"); - let mut args = vec![ - env.python_binary(), - x_py.as_str(), - "test", - "--build", - env.host_tuple(), - "--stage", - "0", - "tests/assembly", - "tests/codegen", - "tests/codegen-units", - "tests/incremental", - "tests/mir-opt", - "tests/pretty", - "tests/run-make/glibc-symbols-x86_64-unknown-linux-gnu", - "tests/ui", - "tests/crashes", - ]; - for test_path in env.skipped_tests() { - args.extend(["--skip", test_path]); + with_backed_up_file(Path::new("config.toml"), &config_content, || { + let x_py = env.checkout_path().join("x.py"); + let mut args = vec![ + env.python_binary(), + x_py.as_str(), + "test", + "--build", + env.host_tuple(), + "--stage", + "0", + "tests/assembly", + "tests/codegen", + "tests/codegen-units", + "tests/incremental", + "tests/mir-opt", + "tests/pretty", + "tests/run-make/glibc-symbols-x86_64-unknown-linux-gnu", + "tests/ui", + "tests/crashes", + ]; + for test_path in env.skipped_tests() { + args.extend(["--skip", test_path]); + } + cmd(&args) + .env("COMPILETEST_FORCE_STAGE0", "1") + // Also run dist-only tests + .env("COMPILETEST_ENABLE_DIST_TESTS", "1") + .run() + .context("Cannot execute tests") + }) +} + +/// Backup `path` (if it exists), then write `contents` into it, and then restore the original +/// contents of the file. +fn with_backed_up_file(path: &Path, contents: &str, func: F) -> anyhow::Result<()> +where + F: FnOnce() -> anyhow::Result<()>, +{ + let original_contents = + if path.is_file() { Some(std::fs::read_to_string(path)?) } else { None }; + + // Overwrite it with new contents + std::fs::write(path, contents)?; + + let ret = func(); + + if let Some(original_contents) = original_contents { + std::fs::write(path, original_contents)?; } - cmd(&args) - .env("COMPILETEST_FORCE_STAGE0", "1") - // Also run dist-only tests - .env("COMPILETEST_ENABLE_DIST_TESTS", "1") - .run() - .context("Cannot execute tests") + + ret } /// Tries to find the version of the dist artifacts (either nightly, beta, or 1.XY.Z). 
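
For illustration, the backup-and-restore pattern used by the opt-dist change above can be sketched as a standalone program. This is a minimal sketch, not the patch's exact code: it uses `std::io::Result` instead of `anyhow`, and a hypothetical scratch file name.

```rust
use std::path::Path;

// Back up `path` (if it exists), write `contents`, run `func`, then restore the
// original contents. If the file did not exist beforehand, the new contents are
// left in place, matching the helper above.
fn with_backed_up_file<F>(path: &Path, contents: &str, func: F) -> std::io::Result<()>
where
    F: FnOnce() -> std::io::Result<()>,
{
    let original = if path.is_file() { Some(std::fs::read_to_string(path)?) } else { None };
    std::fs::write(path, contents)?;
    let ret = func();
    if let Some(original) = original {
        // Restore even when `func` fails, so follow-up steps see the original file.
        std::fs::write(path, original)?;
    }
    ret
}

fn main() -> std::io::Result<()> {
    // Hypothetical scratch file used only for this illustration.
    let path = Path::new("example-config.toml");
    std::fs::write(path, "original = true\n")?;
    with_backed_up_file(path, "temporary = true\n", || {
        assert_eq!(std::fs::read_to_string(path)?, "temporary = true\n");
        Ok(())
    })?;
    assert_eq!(std::fs::read_to_string(path)?, "original = true\n");
    std::fs::remove_file(path)
}
```
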
From 38fc11601ea95a4ba6b11f1aa0ad36fb9d8a53d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 13 Mar 2025 21:32:23 +0100 Subject: [PATCH 15/19] Store libgccjit.so in a lib directory in the GCC CI tarball --- src/bootstrap/src/core/build_steps/dist.rs | 2 +- src/bootstrap/src/core/build_steps/gcc.rs | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index c393eb55c6244..2354fe1ebafbe 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -2481,7 +2481,7 @@ impl Step for Gcc { fn run(self, builder: &Builder<'_>) -> Self::Output { let tarball = Tarball::new(builder, "gcc", &self.target.triple); let output = builder.ensure(super::gcc::Gcc { target: self.target }); - tarball.add_file(output.libgccjit, ".", 0o644); + tarball.add_file(output.libgccjit, "lib", 0o644); tarball.generate() } } diff --git a/src/bootstrap/src/core/build_steps/gcc.rs b/src/bootstrap/src/core/build_steps/gcc.rs index 0a3208b2363a4..19525d4ebed1c 100644 --- a/src/bootstrap/src/core/build_steps/gcc.rs +++ b/src/bootstrap/src/core/build_steps/gcc.rs @@ -115,8 +115,7 @@ fn try_download_gcc(builder: &Builder<'_>, target: TargetSelection) -> Option Date: Fri, 14 Mar 2025 03:07:36 +0000 Subject: [PATCH 16/19] Deny impls for BikeshedGuaranteedNoDrop --- library/core/src/marker.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index e234f105b0b76..a4ab4674f4a36 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -465,9 +465,13 @@ impl Copy for &T {} /// Notably, this doesn't include all trivially-destructible types for semver /// reasons. /// -/// Bikeshed name for now. +/// Bikeshed name for now. This trait does not do anything other than reflect the +/// set of types that are allowed within unions for field validity. #[unstable(feature = "bikeshed_guaranteed_no_drop", issue = "none")] #[lang = "bikeshed_guaranteed_no_drop"] +#[rustc_deny_explicit_impl] +#[rustc_do_not_implement_via_object] +#[doc(hidden)] pub trait BikeshedGuaranteedNoDrop {} /// Types for which it is safe to share references between threads. 
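
The `#[rustc_deny_explicit_impl]` attribute added above means user code can no longer write its own impls of `BikeshedGuaranteedNoDrop`; only the compiler-provided impls apply, so the trait purely reflects which types are valid union fields. A rough, nightly-only compile-fail sketch of what gets rejected is shown below; the error wording is approximate, and it assumes the trait remains nameable through `core::marker` behind the `bikeshed_guaranteed_no_drop` feature gate.

```rust
// Compile-fail sketch (nightly only): explicit impls of this trait are denied.
#![feature(bikeshed_guaranteed_no_drop)]

use core::marker::BikeshedGuaranteedNoDrop;

struct MyType;

// With `#[rustc_deny_explicit_impl]` on the trait, rustc rejects this impl,
// reporting roughly that explicit impls of `BikeshedGuaranteedNoDrop` are not
// permitted; the built-in impls are the only source of implementations.
impl BikeshedGuaranteedNoDrop for MyType {}

fn main() {}
```
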
From 90bf2b159a86d19c37122334ebf4d4a0461dd77b Mon Sep 17 00:00:00 2001 From: malezjaa Date: Tue, 24 Dec 2024 10:41:38 +0100 Subject: [PATCH 17/19] Show valid crate types when the user passes unknown `--crate-type` value Co-authored-by: Jieyou Xu --- compiler/rustc_session/src/config.rs | 7 ++++++- tests/ui/crate_type_flag.rs | 4 ++++ tests/ui/crate_type_flag.stderr | 2 ++ .../crate-type-flag.empty_crate_type.stderr | 2 +- .../crate-type-flag.proc_underscore_macro.stderr | 2 +- tests/ui/invalid-compile-flags/crate-type-flag.rs | 6 +++--- .../invalid-compile-flags/crate-type-flag.unknown.stderr | 2 +- 7 files changed, 18 insertions(+), 7 deletions(-) create mode 100644 tests/ui/crate_type_flag.rs create mode 100644 tests/ui/crate_type_flag.stderr diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index dcdb7fa9c1054..34755249b60b1 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -2709,7 +2709,12 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result CrateType::Cdylib, "bin" => CrateType::Executable, "proc-macro" => CrateType::ProcMacro, - _ => return Err(format!("unknown crate type: `{part}`")), + _ => { + return Err(format!( + "unknown crate type: `{part}`, expected one of: \ + `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro`", + )); + } }; if !crate_types.contains(&new_part) { crate_types.push(new_part) diff --git a/tests/ui/crate_type_flag.rs b/tests/ui/crate_type_flag.rs new file mode 100644 index 0000000000000..4f3cfbae45d68 --- /dev/null +++ b/tests/ui/crate_type_flag.rs @@ -0,0 +1,4 @@ +//@ compile-flags: --crate-type dynlib +//@ error-pattern: unknown crate type: `dynlib`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro` + +fn main() {} diff --git a/tests/ui/crate_type_flag.stderr b/tests/ui/crate_type_flag.stderr new file mode 100644 index 0000000000000..26a3e1fbd6829 --- /dev/null +++ b/tests/ui/crate_type_flag.stderr @@ -0,0 +1,2 @@ +error: unknown crate type: `dynlib`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro` + diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr b/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr index 0f8772024dfab..4ab816d30f6c9 100644 --- a/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr +++ b/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr @@ -1,2 +1,2 @@ -error: unknown crate type: `` +error: unknown crate type: ``, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro` diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr b/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr index a4a9741699643..91cc66801f102 100644 --- a/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr +++ b/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr @@ -1,2 +1,2 @@ -error: unknown crate type: `proc_macro` +error: unknown crate type: `proc_macro`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro` diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.rs b/tests/ui/invalid-compile-flags/crate-type-flag.rs index 07d853b330716..0101430714974 100644 --- a/tests/ui/invalid-compile-flags/crate-type-flag.rs +++ b/tests/ui/invalid-compile-flags/crate-type-flag.rs @@ -45,16 +45,16 @@ // `proc-macro` is accepted, but `proc_macro` is 
not. //@ revisions: proc_underscore_macro //@[proc_underscore_macro] compile-flags: --crate-type=proc_macro -//@[proc_underscore_macro] error-pattern: "unknown crate type: `proc_macro`" +//@[proc_underscore_macro] error-pattern: unknown crate type: `proc_macro` // Empty `--crate-type` not accepted. //@ revisions: empty_crate_type //@[empty_crate_type] compile-flags: --crate-type= -//@[empty_crate_type] error-pattern: "unknown crate type: ``" +//@[empty_crate_type] error-pattern: unknown crate type: `` // Random unknown crate type. Also check that we can handle non-ASCII. //@ revisions: unknown //@[unknown] compile-flags: --crate-type=🤡 -//@[unknown] error-pattern: "unknown crate type: `🤡`" +//@[unknown] error-pattern: unknown crate type: `🤡` fn main() {} diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr b/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr index 7fb0f09a1afbd..ec202e171f058 100644 --- a/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr +++ b/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr @@ -1,2 +1,2 @@ -error: unknown crate type: `🤡` +error: unknown crate type: `🤡`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro` From 6ef465ba126013fa6add41813cd8299072507df6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Fri, 14 Mar 2025 09:09:26 +0100 Subject: [PATCH 18/19] Add clarification about doctests --- src/ci/citool/src/merge_report.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs index be7246cfa5e2a..62daa2e68530a 100644 --- a/src/ci/citool/src/merge_report.rs +++ b/src/ci/citool/src/merge_report.rs @@ -297,7 +297,7 @@ fn report_test_diffs(diff: AggregatedTestDiffs) { if doctest_count > 0 { println!( - "\nAdditionally, {doctest_count} doctest {} were found.", + "\nAdditionally, {doctest_count} doctest {} were found. These are ignored, as they are noisy.", pluralize("diff", doctest_count) ); } From bf095f6ecee956b3942625d9480e3a2f13666a67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Fri, 14 Mar 2025 09:16:06 +0100 Subject: [PATCH 19/19] Ensure that GCC is not built using Clang, as it misbehaves --- src/bootstrap/src/core/build_steps/gcc.rs | 15 +++++++++------ src/bootstrap/src/lib.rs | 11 +++++++++++ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/gcc.rs b/src/bootstrap/src/core/build_steps/gcc.rs index 19525d4ebed1c..0aa2a3325313f 100644 --- a/src/bootstrap/src/core/build_steps/gcc.rs +++ b/src/bootstrap/src/core/build_steps/gcc.rs @@ -184,6 +184,14 @@ fn libgccjit_built_path(install_dir: &Path) -> PathBuf { } fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) { + if builder.build.cc_tool(target).is_like_clang() + || builder.build.cxx_tool(target).is_like_clang() + { + panic!( + "Attempting to build GCC using Clang, which is known to misbehave. Please use GCC as the host C/C++ compiler. " + ); + } + let Meta { stamp: _, out_dir, install_dir, root } = metadata; t!(fs::create_dir_all(out_dir)); @@ -210,18 +218,13 @@ fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) { let mut configure_cmd = command(src_dir.join("configure")); configure_cmd .current_dir(out_dir) - // On CI, we compile GCC with Clang. - // The -Wno-everything flag is needed to make GCC compile with Clang 19. - // `-g -O2` are the default flags that are otherwise used by Make. 
- // FIXME(kobzol): change the flags once we have [gcc] configuration in config.toml. - .env("CXXFLAGS", "-Wno-everything -g -O2") - .env("CFLAGS", "-Wno-everything -g -O2") .arg("--enable-host-shared") .arg("--enable-languages=c,jit,lto") .arg("--enable-checking=release") .arg("--disable-bootstrap") .arg("--disable-multilib") .arg(format!("--prefix={}", install_dir.display())); + let cc = builder.build.cc(target).display().to_string(); let cc = builder .build diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index fc4084378389e..2693e85ca4871 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -27,6 +27,7 @@ use std::{env, fs, io, str}; use build_helper::ci::gha; use build_helper::exit; +use cc::Tool; use termcolor::{ColorChoice, StandardStream, WriteColor}; use utils::build_stamp::BuildStamp; use utils::channel::GitInfo; @@ -1218,6 +1219,16 @@ Executed at: {executed_at}"#, self.cc.borrow()[&target].path().into() } + /// Returns the internal `cc::Tool` for the C compiler. + fn cc_tool(&self, target: TargetSelection) -> Tool { + self.cc.borrow()[&target].clone() + } + + /// Returns the internal `cc::Tool` for the C++ compiler. + fn cxx_tool(&self, target: TargetSelection) -> Tool { + self.cxx.borrow()[&target].clone() + } + /// Returns C flags that `cc-rs` thinks should be enabled for the /// specified target by default. fn cc_handled_clags(&self, target: TargetSelection, c: CLang) -> Vec {
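
For context on the final patch: bootstrap's C/C++ compilers come from the `cc` crate, whose `Tool` type exposes compiler-family predicates such as `is_like_clang()`. A minimal standalone sketch of the same detection follows; it assumes the standalone `cc` crate exposes the same `get_compiler()`/`is_like_clang()` API as the copy vendored by bootstrap, is run outside a cargo build script (hence the explicit host/target/opt-level), and uses an illustrative target triple.

```rust
// Probe the configured C compiler and refuse to proceed if it looks like Clang,
// mirroring the check added to `build_gcc` above.
fn main() {
    let tool = cc::Build::new()
        .host("x86_64-unknown-linux-gnu")
        .target("x86_64-unknown-linux-gnu")
        .opt_level(0)
        .get_compiler();

    if tool.is_like_clang() {
        eprintln!(
            "C compiler `{}` looks like Clang; GCC is known to misbehave when built with it",
            tool.path().display()
        );
        std::process::exit(1);
    }
    println!("building with C compiler `{}`", tool.path().display());
}
```
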