|  | use std::env; | 
|  | use std::error::Error; | 
|  | use std::ffi::OsString; | 
|  | use std::fs::{self, File}; | 
|  | use std::io::{self, BufWriter, Write}; | 
|  | use std::path::{Path, PathBuf}; | 
|  |  | 
|  | use ar_archive_writer::{ | 
|  | ArchiveKind, COFFShortExport, MachineTypes, NewArchiveMember, write_archive_to_stream, | 
|  | }; | 
|  | pub use ar_archive_writer::{DEFAULT_OBJECT_READER, ObjectReader}; | 
|  | use object::read::archive::ArchiveFile; | 
|  | use object::read::macho::FatArch; | 
|  | use rustc_data_structures::fx::FxIndexSet; | 
|  | use rustc_data_structures::memmap::Mmap; | 
|  | use rustc_fs_util::TempDirBuilder; | 
|  | use rustc_metadata::EncodedMetadata; | 
|  | use rustc_session::Session; | 
|  | use rustc_span::Symbol; | 
|  | use tracing::trace; | 
|  |  | 
|  | use super::metadata::{create_compressed_metadata_file, search_for_section}; | 
|  | use crate::common; | 
|  | // Re-exporting for rustc_codegen_llvm::back::archive | 
|  | pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind}; | 
|  | use crate::errors::{ | 
|  | DlltoolFailImportLibrary, ErrorCallingDllTool, ErrorCreatingImportLibrary, ErrorWritingDEFFile, | 
|  | }; | 
|  |  | 
/// An item to be included in an import library.
/// This is a slimmed down version of `COFFShortExport` from `ar-archive-writer`.
pub struct ImportLibraryItem {
    /// The name to be exported.
    pub name: String,
    /// The ordinal to be exported, if any.
    pub ordinal: Option<u16>,
    /// The original, decorated name if `name` is not decorated.
    pub symbol_name: Option<String>,
    /// True if this is a data export, false if it is a function export.
    pub is_data: bool,
}
|  |  | 
|  | impl From<ImportLibraryItem> for COFFShortExport { | 
|  | fn from(item: ImportLibraryItem) -> Self { | 
|  | COFFShortExport { | 
|  | name: item.name, | 
|  | ext_name: None, | 
|  | symbol_name: item.symbol_name, | 
|  | alias_target: None, | 
|  | ordinal: item.ordinal.unwrap_or(0), | 
|  | noname: item.ordinal.is_some(), | 
|  | data: item.is_data, | 
|  | private: false, | 
|  | constant: false, | 
|  | } | 
|  | } | 
|  | } | 
|  |  | 
/// Factory for [`ArchiveBuilder`]s, plus a few archive-related helpers that do
/// not need a builder instance.
pub trait ArchiveBuilderBuilder {
    /// Creates a new, empty archive builder tied to the given session.
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a>;

    /// Returns the bytes of an object file that embeds the compressed crate
    /// metadata under `symbol_name` (delegates to `create_compressed_metadata_file`).
    fn create_dylib_metadata_wrapper(
        &self,
        sess: &Session,
        metadata: &EncodedMetadata,
        symbol_name: &str,
    ) -> Vec<u8> {
        create_compressed_metadata_file(sess, metadata, symbol_name)
    }

    /// Creates a DLL Import Library <https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-creation#creating-an-import-library>.
    /// and returns the path on disk to that import library.
    /// This functions doesn't take `self` so that it can be called from
    /// `linker_with_args`, which is specialized on `ArchiveBuilder` but
    /// doesn't take or create an instance of that type.
    fn create_dll_import_lib(
        &self,
        sess: &Session,
        lib_name: &str,
        items: Vec<ImportLibraryItem>,
        output_path: &Path,
    ) {
        if common::is_mingw_gnu_toolchain(&sess.target) {
            // The binutils linker used on -windows-gnu targets cannot read the import
            // libraries generated by LLVM: in our attempts, the linker produced an .EXE
            // that loaded but crashed with an AV upon calling one of the imported
            // functions. Therefore, use binutils to create the import library instead,
            // by writing a .DEF file to the temp dir and calling binutils's dlltool.
            create_mingw_dll_import_lib(sess, lib_name, items, output_path);
        } else {
            trace!("creating import library");
            trace!("  dll_name {:#?}", lib_name);
            trace!("  output_path {}", output_path.display());
            trace!(
                "  import names: {}",
                items
                    .iter()
                    .map(|ImportLibraryItem { name, .. }| name.clone())
                    .collect::<Vec<_>>()
                    .join(", "),
            );

            // All import names are Rust identifiers and therefore cannot contain \0 characters.
            // FIXME: when support for #[link_name] is implemented, ensure that the import names
            // still don't contain any \0 characters. Also need to check that the names don't
            // contain substrings like " @" or "NONAME" that are keywords or otherwise reserved
            // in definition files.

            // `create_new` fails if the file already exists; that failure is reported
            // as a fatal error rather than silently overwriting.
            let mut file = match fs::File::create_new(&output_path) {
                Ok(file) => file,
                Err(error) => sess
                    .dcx()
                    .emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() }),
            };

            let exports = items.into_iter().map(Into::into).collect::<Vec<_>>();
            // Map the rustc target architecture string to the COFF machine type.
            let machine = match &*sess.target.arch {
                "x86_64" => MachineTypes::AMD64,
                "x86" => MachineTypes::I386,
                "aarch64" => MachineTypes::ARM64,
                "arm64ec" => MachineTypes::ARM64EC,
                "arm" => MachineTypes::ARMNT,
                cpu => panic!("unsupported cpu type {cpu}"),
            };

            if let Err(error) = ar_archive_writer::write_import_library(
                &mut file,
                lib_name,
                &exports,
                machine,
                !sess.target.is_like_msvc,
                // Enable compatibility with MSVC's `/WHOLEARCHIVE` flag.
                // Without this flag a duplicate symbol error would be emitted
                // when linking a rust staticlib using `/WHOLEARCHIVE`.
                // See #129020
                true,
            ) {
                sess.dcx()
                    .emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() });
            }
        }
    }

    /// Extracts the bundled native libraries (the archive members whose names
    /// appear in `bundled_lib_file_names`) from `rlib` into `outdir`.
    fn extract_bundled_libs<'a>(
        &'a self,
        rlib: &'a Path,
        outdir: &Path,
        bundled_lib_file_names: &FxIndexSet<Symbol>,
    ) -> Result<(), ExtractBundledLibsError<'a>> {
        // SAFETY(review): assumes the rlib is not modified while mapped — TODO
        // confirm; this matches the other `Mmap::map` call sites in this file.
        let archive_map = unsafe {
            Mmap::map(
                File::open(rlib)
                    .map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
            )
            .map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
        };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;

        for entry in archive.members() {
            let entry = entry
                .map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
            let data = entry
                .data(&*archive_map)
                .map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
            let name = std::str::from_utf8(entry.name())
                .map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
            if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
                continue; // We need to extract only native libraries.
            }
            // The library bytes live in a `.bundled_lib` section of the member.
            let data = search_for_section(rlib, data, ".bundled_lib").map_err(|e| {
                ExtractBundledLibsError::ExtractSection { rlib, error: Box::<dyn Error>::from(e) }
            })?;
            std::fs::write(&outdir.join(&name), data)
                .map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
        }
        Ok(())
    }
}
|  |  | 
/// Creates an import library for `lib_name` at `output_path` by writing a
/// module-definition (`.def`) file next to the output and invoking binutils'
/// `dlltool` on it. Used for mingw-gnu toolchains, where LLVM-generated import
/// libraries are not usable (see the caller, `create_dll_import_lib`).
fn create_mingw_dll_import_lib(
    sess: &Session,
    lib_name: &str,
    items: Vec<ImportLibraryItem>,
    output_path: &Path,
) {
    let def_file_path = output_path.with_extension("def");

    // .def syntax: one exported name per line under `EXPORTS`, with an
    // explicit `@ordinal NONAME` suffix when an ordinal was requested.
    let def_file_content = format!(
        "EXPORTS\n{}",
        items
            .into_iter()
            .map(|ImportLibraryItem { name, ordinal, .. }| {
                match ordinal {
                    Some(n) => format!("{name} @{n} NONAME"),
                    None => name,
                }
            })
            .collect::<Vec<String>>()
            .join("\n")
    );

    match std::fs::write(&def_file_path, def_file_content) {
        Ok(_) => {}
        Err(e) => {
            sess.dcx().emit_fatal(ErrorWritingDEFFile { error: e });
        }
    };

    // --no-leading-underscore: For the `import_name_type` feature to work, we need to be
    // able to control the *exact* spelling of each of the symbols that are being imported:
    // hence we don't want `dlltool` adding leading underscores automatically.
    let dlltool = find_binutils_dlltool(sess);
    // Have dlltool place its temporary files in the output's directory,
    // prefixed with the library name.
    let temp_prefix = {
        let mut path = PathBuf::from(&output_path);
        path.pop();
        path.push(lib_name);
        path
    };
    // dlltool target architecture args from:
    // https://github.com/llvm/llvm-project-release-prs/blob/llvmorg-15.0.6/llvm/lib/ToolDrivers/llvm-dlltool/DlltoolDriver.cpp#L69
    let (dlltool_target_arch, dlltool_target_bitness) = match sess.target.arch.as_ref() {
        "x86_64" => ("i386:x86-64", "--64"),
        "x86" => ("i386", "--32"),
        "aarch64" => ("arm64", "--64"),
        "arm" => ("arm", "--32"),
        _ => panic!("unsupported arch {}", sess.target.arch),
    };
    let mut dlltool_cmd = std::process::Command::new(&dlltool);
    dlltool_cmd
        .arg("-d")
        .arg(def_file_path)
        .arg("-D")
        .arg(lib_name)
        .arg("-l")
        .arg(&output_path)
        .arg("-m")
        .arg(dlltool_target_arch)
        .arg("-f")
        .arg(dlltool_target_bitness)
        .arg("--no-leading-underscore")
        .arg("--temp-prefix")
        .arg(temp_prefix);

    match dlltool_cmd.output() {
        Err(e) => {
            // The process could not even be spawned (e.g. dlltool not found).
            sess.dcx().emit_fatal(ErrorCallingDllTool {
                dlltool_path: dlltool.to_string_lossy(),
                error: e,
            });
        }
        // dlltool returns '0' on failure, so check for error output instead.
        Ok(output) if !output.stderr.is_empty() => {
            sess.dcx().emit_fatal(DlltoolFailImportLibrary {
                dlltool_path: dlltool.to_string_lossy(),
                dlltool_args: dlltool_cmd
                    .get_args()
                    .map(|arg| arg.to_string_lossy())
                    .collect::<Vec<_>>()
                    .join(" "),
                stdout: String::from_utf8_lossy(&output.stdout),
                stderr: String::from_utf8_lossy(&output.stderr),
            })
        }
        _ => {}
    }
}
|  |  | 
|  | fn find_binutils_dlltool(sess: &Session) -> OsString { | 
|  | assert!(sess.target.options.is_like_windows && !sess.target.options.is_like_msvc); | 
|  | if let Some(dlltool_path) = &sess.opts.cg.dlltool { | 
|  | return dlltool_path.clone().into_os_string(); | 
|  | } | 
|  |  | 
|  | let tool_name: OsString = if sess.host.options.is_like_windows { | 
|  | // If we're compiling on Windows, always use "dlltool.exe". | 
|  | "dlltool.exe" | 
|  | } else { | 
|  | // On other platforms, use the architecture-specific name. | 
|  | match sess.target.arch.as_ref() { | 
|  | "x86_64" => "x86_64-w64-mingw32-dlltool", | 
|  | "x86" => "i686-w64-mingw32-dlltool", | 
|  | "aarch64" => "aarch64-w64-mingw32-dlltool", | 
|  |  | 
|  | // For non-standard architectures (e.g., aarch32) fallback to "dlltool". | 
|  | _ => "dlltool", | 
|  | } | 
|  | } | 
|  | .into(); | 
|  |  | 
|  | // NOTE: it's not clear how useful it is to explicitly search PATH. | 
|  | for dir in env::split_paths(&env::var_os("PATH").unwrap_or_default()) { | 
|  | let full_path = dir.join(&tool_name); | 
|  | if full_path.is_file() { | 
|  | return full_path.into_os_string(); | 
|  | } | 
|  | } | 
|  |  | 
|  | // The user didn't specify the location of the dlltool binary, and we weren't able | 
|  | // to find the appropriate one on the PATH. Just return the name of the tool | 
|  | // and let the invocation fail with a hopefully useful error message. | 
|  | tool_name | 
|  | } | 
|  |  | 
/// Incrementally assembles a static archive from individual files and from
/// members of other archives.
pub trait ArchiveBuilder {
    /// Queues a single file for inclusion in the archive.
    fn add_file(&mut self, path: &Path);

    /// Queues the members of `archive` for inclusion, except those for which
    /// `skip` returns `true` (it is called with each member's file name).
    fn add_archive(
        &mut self,
        archive: &Path,
        skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()>;

    /// Writes the archive to `output`, consuming the builder.
    /// Returns whether the archive ended up with any members.
    fn build(self: Box<Self>, output: &Path) -> bool;
}
|  |  | 
/// [`ArchiveBuilderBuilder`] producing the generic [`ArArchiveBuilder`],
/// configured with the default object reader from `ar-archive-writer`.
pub struct ArArchiveBuilderBuilder;

impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a> {
        Box::new(ArArchiveBuilder::new(sess, &DEFAULT_OBJECT_READER))
    }
}
|  |  | 
#[must_use = "must call build() to finish building the archive"]
pub struct ArArchiveBuilder<'a> {
    sess: &'a Session,
    /// Handed to `ar-archive-writer` for every member written (see `build_inner`).
    object_reader: &'static ObjectReader,

    /// Source archives that members are copied out of, each paired with its
    /// memory mapping. Indexed by `ArchiveEntry::FromArchive::archive_index`.
    src_archives: Vec<(PathBuf, Mmap)>,
    // Don't use an `HashMap` here, as the order is important. `lib.rmeta` needs
    // to be at the end of an archive in some cases for linkers to not get confused.
    /// Queued members as `(member name bytes, data source)` pairs.
    entries: Vec<(Vec<u8>, ArchiveEntry)>,
}
|  |  | 
/// Where a queued archive member's bytes come from.
#[derive(Debug)]
enum ArchiveEntry {
    /// A byte range `(offset, size)` inside `src_archives[archive_index]`.
    FromArchive { archive_index: usize, file_range: (u64, u64) },
    /// A standalone file on disk, read when the archive is built.
    File(PathBuf),
}
|  |  | 
impl<'a> ArArchiveBuilder<'a> {
    /// Creates an empty builder; members are queued through the
    /// [`ArchiveBuilder`] impl and written out by `build`.
    pub fn new(sess: &'a Session, object_reader: &'static ObjectReader) -> ArArchiveBuilder<'a> {
        ArArchiveBuilder { sess, object_reader, src_archives: vec![], entries: vec![] }
    }
}
|  |  | 
|  | fn try_filter_fat_archs( | 
|  | archs: &[impl FatArch], | 
|  | target_arch: object::Architecture, | 
|  | archive_path: &Path, | 
|  | archive_map_data: &[u8], | 
|  | ) -> io::Result<Option<PathBuf>> { | 
|  | let desired = match archs.iter().find(|a| a.architecture() == target_arch) { | 
|  | Some(a) => a, | 
|  | None => return Ok(None), | 
|  | }; | 
|  |  | 
|  | let (mut new_f, extracted_path) = tempfile::Builder::new() | 
|  | .suffix(archive_path.file_name().unwrap()) | 
|  | .tempfile()? | 
|  | .keep() | 
|  | .unwrap(); | 
|  |  | 
|  | new_f.write_all( | 
|  | desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?, | 
|  | )?; | 
|  |  | 
|  | Ok(Some(extracted_path)) | 
|  | } | 
|  |  | 
/// If `archive_path` is a Mach-O fat (universal) file, extracts the slice for
/// the session's target architecture into a temporary file and returns its
/// path. Returns `Ok(None)` when the file is not a fat file, no matching slice
/// exists, or the target arch is neither `aarch64` nor `x86_64`.
pub fn try_extract_macho_fat_archive(
    sess: &Session,
    archive_path: &Path,
) -> io::Result<Option<PathBuf>> {
    // SAFETY(review): assumes the archive file is not modified while mapped —
    // TODO confirm; same assumption as the other `Mmap::map` uses in this file.
    let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
    let target_arch = match sess.target.arch.as_ref() {
        "aarch64" => object::Architecture::Aarch64,
        "x86_64" => object::Architecture::X86_64,
        _ => return Ok(None),
    };

    // Fat headers come in 32-bit and 64-bit flavors; try both parsers.
    if let Ok(h) = object::read::macho::MachOFatFile32::parse(&*archive_map) {
        let archs = h.arches();
        try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
    } else if let Ok(h) = object::read::macho::MachOFatFile64::parse(&*archive_map) {
        let archs = h.arches();
        try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
    } else {
        // Not a FatHeader at all, just return None.
        Ok(None)
    }
}
|  |  | 
impl<'a> ArchiveBuilder for ArArchiveBuilder<'a> {
    /// Queues every member of `archive_path` not rejected by `skip`.
    ///
    /// On `-apple-macosx` targets a fat archive is first narrowed to the slice
    /// for the current architecture. An archive whose (possibly rewritten)
    /// path was already added is skipped entirely.
    fn add_archive(
        &mut self,
        archive_path: &Path,
        mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()> {
        let mut archive_path = archive_path.to_path_buf();
        if self.sess.target.llvm_target.contains("-apple-macosx")
            && let Some(new_archive_path) = try_extract_macho_fat_archive(self.sess, &archive_path)?
        {
            archive_path = new_archive_path
        }

        // Ignore archives that have already been added by path.
        if self.src_archives.iter().any(|archive| archive.0 == archive_path) {
            return Ok(());
        }

        // SAFETY(review): assumes the archive is not modified while mapped —
        // TODO confirm; same assumption as the other `Mmap::map` uses here.
        let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        let archive_index = self.src_archives.len();

        for entry in archive.members() {
            let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            let file_name = String::from_utf8(entry.name().to_vec())
                .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            if !skip(&file_name) {
                if entry.is_thin() {
                    // Thin archives only reference their members; resolve the
                    // member path relative to the archive's own directory.
                    let member_path = archive_path.parent().unwrap().join(Path::new(&file_name));
                    self.entries.push((file_name.into_bytes(), ArchiveEntry::File(member_path)));
                } else {
                    self.entries.push((
                        file_name.into_bytes(),
                        ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
                    ));
                }
            }
        }

        // Keep the mapping alive: `FromArchive` entries slice into it later.
        self.src_archives.push((archive_path, archive_map));
        Ok(())
    }

    /// Adds an arbitrary file to this archive
    fn add_file(&mut self, file: &Path) {
        self.entries.push((
            file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
            ArchiveEntry::File(file.to_owned()),
        ));
    }

    /// Combine the provided files, rlibs, and native libraries into a single
    /// `Archive`.
    fn build(self: Box<Self>, output: &Path) -> bool {
        let sess = self.sess;
        match self.build_inner(output) {
            Ok(any_members) => any_members,
            Err(error) => {
                sess.dcx().emit_fatal(ArchiveBuildFailure { path: output.to_owned(), error })
            }
        }
    }
}
|  |  | 
impl<'a> ArArchiveBuilder<'a> {
    /// Writes all queued entries into an archive at `output`, going through a
    /// temporary file that is renamed into place at the end.
    /// Returns `Ok(true)` if the archive contains at least one member.
    fn build_inner(self, output: &Path) -> io::Result<bool> {
        // The on-disk archive flavor is dictated by the target spec.
        let archive_kind = match &*self.sess.target.archive_format {
            "gnu" => ArchiveKind::Gnu,
            "bsd" => ArchiveKind::Bsd,
            "darwin" => ArchiveKind::Darwin,
            "coff" => ArchiveKind::Coff,
            "aix_big" => ArchiveKind::AixBig,
            kind => {
                self.sess.dcx().emit_fatal(UnknownArchiveKind { kind });
            }
        };

        let mut entries = Vec::new();

        // Materialize each entry's bytes: either a slice of an already-mapped
        // source archive, or a fresh mapping of a standalone file.
        for (entry_name, entry) in self.entries {
            let data =
                match entry {
                    ArchiveEntry::FromArchive { archive_index, file_range } => {
                        let src_archive = &self.src_archives[archive_index];

                        // `file_range` is (offset, size) within the source archive.
                        let data = &src_archive.1
                            [file_range.0 as usize..file_range.0 as usize + file_range.1 as usize];

                        Box::new(data) as Box<dyn AsRef<[u8]>>
                    }
                    // SAFETY(review): assumes the object file is not modified while
                    // mapped — TODO confirm; same assumption as the other mmap uses.
                    ArchiveEntry::File(file) => unsafe {
                        Box::new(
                            Mmap::map(File::open(file).map_err(|err| {
                                io_error_context("failed to open object file", err)
                            })?)
                            .map_err(|err| io_error_context("failed to map object file", err))?,
                        ) as Box<dyn AsRef<[u8]>>
                    },
                };

            entries.push(NewArchiveMember {
                buf: data,
                object_reader: self.object_reader,
                member_name: String::from_utf8(entry_name).unwrap(),
                // Deterministic metadata: zeroed timestamps/ids, default perms.
                mtime: 0,
                uid: 0,
                gid: 0,
                perms: 0o644,
            })
        }

        // Write to a temporary file first before atomically renaming to the final name.
        // This prevents programs (including rustc) from attempting to read a partial archive.
        // It also enables writing an archive with the same filename as a dependency on Windows as
        // required by a test.
        // The tempfile crate currently uses 0o600 as mode for the temporary files and directories
        // it creates. We need it to be the default mode for back compat reasons however. (See
        // #107495) To handle this we are telling tempfile to create a temporary directory instead
        // and then inside this directory create a file using File::create.
        let archive_tmpdir = TempDirBuilder::new()
            .suffix(".temp-archive")
            .tempdir_in(output.parent().unwrap_or_else(|| Path::new("")))
            .map_err(|err| {
                io_error_context("couldn't create a directory for the temp file", err)
            })?;
        let archive_tmpfile_path = archive_tmpdir.path().join("tmp.a");
        let archive_tmpfile = File::create_new(&archive_tmpfile_path)
            .map_err(|err| io_error_context("couldn't create the temp file", err))?;

        let mut archive_tmpfile = BufWriter::new(archive_tmpfile);
        write_archive_to_stream(
            &mut archive_tmpfile,
            &entries,
            archive_kind,
            false,
            /* is_ec = */ self.sess.target.arch == "arm64ec",
        )?;
        // Flush explicitly: `BufWriter`'s `Drop` would swallow write errors.
        archive_tmpfile.flush()?;
        drop(archive_tmpfile);

        let any_entries = !entries.is_empty();
        drop(entries);
        // Drop src_archives to unmap all input archives, which is necessary if we want to write the
        // output archive to the same location as an input archive on Windows.
        drop(self.src_archives);

        fs::rename(archive_tmpfile_path, output)
            .map_err(|err| io_error_context("failed to rename archive file", err))?;
        archive_tmpdir
            .close()
            .map_err(|err| io_error_context("failed to remove temporary directory", err))?;

        Ok(any_entries)
    }
}
|  |  | 
/// Wraps an I/O error with a human-readable `context` prefix, producing an
/// error of kind `Other` whose message is `"{context}: {err}"`.
fn io_error_context(context: &str, err: io::Error) -> io::Error {
    // `io::Error::other` (stable since 1.74) is the idiomatic shorthand for
    // `io::Error::new(io::ErrorKind::Other, ...)`.
    io::Error::other(format!("{context}: {err}"))
}