Rewrite LLVM's archive writer in Rust
This allows it to be used by other codegen backends
This commit is contained in:
parent c3a1c023c0
commit be6708428f
24 changed files with 445 additions and 527 deletions
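
The diff below moves the archive writer into shared code so that backends other than rustc_codegen_llvm can reuse it. As a hedged illustration of that claim (not code from this commit), a backend could wire the new builder in roughly as follows. It assumes the ArchiveBuilderBuilder, ArchiveBuilder, ArArchiveBuilder, and get_native_object_symbols items shown in the diff are reachable under rustc_codegen_ssa::back::archive; MyArchiveBuilderBuilder is a hypothetical name, and a real backend would also have to supply any other methods the full trait requires beyond the one shown here.

// Hypothetical backend-side glue; only the new_archive_builder method from the
// diff below is shown, and the import path is an assumption.
use rustc_codegen_ssa::back::archive::{
    get_native_object_symbols, ArArchiveBuilder, ArchiveBuilder, ArchiveBuilderBuilder,
};
use rustc_session::Session;

struct MyArchiveBuilderBuilder;

impl ArchiveBuilderBuilder for MyArchiveBuilderBuilder {
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
        // Reuse the symbol reader re-exported from ar_archive_writer.
        Box::new(ArArchiveBuilder::new(sess, get_native_object_symbols))
    }
}
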
@@ -6,14 +6,18 @@ use rustc_span::symbol::Symbol;

use super::metadata::search_for_section;

pub use ar_archive_writer::get_native_object_symbols;
use ar_archive_writer::{write_archive_to_stream, ArchiveKind, NewArchiveMember};
use object::read::archive::ArchiveFile;
use object::read::macho::FatArch;

use std::error::Error;
use std::fs::File;
use std::io::{self, Write};
use std::path::{Path, PathBuf};

// Re-exporting for rustc_codegen_llvm::back::archive
pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};

pub trait ArchiveBuilderBuilder {
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;

@@ -80,3 +84,202 @@ pub trait ArchiveBuilder<'a> {
    fn build(self: Box<Self>, output: &Path) -> bool;
}

#[must_use = "must call build() to finish building the archive"]
pub struct ArArchiveBuilder<'a> {
    sess: &'a Session,
    get_object_symbols:
        fn(buf: &[u8], f: &mut dyn FnMut(&[u8]) -> io::Result<()>) -> io::Result<bool>,

    src_archives: Vec<(PathBuf, Mmap)>,
    // Don't use a `HashMap` here, as the order is important. `lib.rmeta` needs
    // to be at the end of an archive in some cases for linkers to not get confused.
    entries: Vec<(Vec<u8>, ArchiveEntry)>,
}

#[derive(Debug)]
enum ArchiveEntry {
    FromArchive { archive_index: usize, file_range: (u64, u64) },
    File(PathBuf),
}

impl<'a> ArArchiveBuilder<'a> {
    pub fn new(
        sess: &'a Session,
        get_object_symbols: fn(
            buf: &[u8],
            f: &mut dyn FnMut(&[u8]) -> io::Result<()>,
        ) -> io::Result<bool>,
    ) -> ArArchiveBuilder<'a> {
        ArArchiveBuilder { sess, get_object_symbols, src_archives: vec![], entries: vec![] }
    }
}

fn try_filter_fat_archs(
    archs: object::read::Result<&[impl FatArch]>,
    target_arch: object::Architecture,
    archive_path: &Path,
    archive_map_data: &[u8],
) -> io::Result<Option<PathBuf>> {
    let archs = archs.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    let desired = match archs.iter().filter(|a| a.architecture() == target_arch).next() {
        Some(a) => a,
        None => return Ok(None),
    };

    let (mut new_f, extracted_path) = tempfile::Builder::new()
        .suffix(archive_path.file_name().unwrap())
        .tempfile()?
        .keep()
        .unwrap();

    new_f.write_all(
        desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
    )?;

    Ok(Some(extracted_path))
}

pub fn try_extract_macho_fat_archive(
    sess: &Session,
    archive_path: &Path,
) -> io::Result<Option<PathBuf>> {
    let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
    let target_arch = match sess.target.arch.as_ref() {
        "aarch64" => object::Architecture::Aarch64,
        "x86_64" => object::Architecture::X86_64,
        _ => return Ok(None),
    };

    match object::macho::FatHeader::parse(&*archive_map) {
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
            let archs = object::macho::FatHeader::parse_arch32(&*archive_map);
            try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
        }
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
            let archs = object::macho::FatHeader::parse_arch64(&*archive_map);
            try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
        }
        // Not a FatHeader at all, just return None.
        _ => Ok(None),
    }
}
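
The two helpers above select the slice matching the target architecture from a universal (fat) Mach-O archive before its members are merged. As a hedged, standalone sketch of that inspection step (the file path is a placeholder, not from this commit), the same object crate APIs can list the slices of a fat file:

// Hedged sketch: enumerate the architecture slices of a Mach-O fat archive with
// the object crate. "libfoo_universal.a" is a placeholder path.
use object::macho::{FatHeader, FAT_MAGIC, FAT_MAGIC_64};
use object::read::macho::FatArch;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = std::fs::read("libfoo_universal.a")?;
    let header = FatHeader::parse(&*data)?;
    match header.magic.get(object::endian::BigEndian) {
        FAT_MAGIC => {
            for arch in FatHeader::parse_arch32(&*data)? {
                println!("{:?}: {} bytes", arch.architecture(), arch.size());
            }
        }
        FAT_MAGIC_64 => {
            for arch in FatHeader::parse_arch64(&*data)? {
                println!("{:?}: {} bytes", arch.architecture(), arch.size());
            }
        }
        _ => println!("not a fat archive"),
    }
    Ok(())
}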

impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
    fn add_archive(
        &mut self,
        archive_path: &Path,
        mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()> {
        let mut archive_path = archive_path.to_path_buf();
        if self.sess.target.llvm_target.contains("-apple-macosx") {
            if let Some(new_archive_path) =
                try_extract_macho_fat_archive(&self.sess, &archive_path)?
            {
                archive_path = new_archive_path
            }
        }

        if self.src_archives.iter().any(|archive| archive.0 == archive_path) {
            return Ok(());
        }

        let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        let archive_index = self.src_archives.len();

        for entry in archive.members() {
            let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            let file_name = String::from_utf8(entry.name().to_vec())
                .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            if !skip(&file_name) {
                self.entries.push((
                    file_name.into_bytes(),
                    ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
                ));
            }
        }

        self.src_archives.push((archive_path.to_owned(), archive_map));
        Ok(())
    }

    /// Adds an arbitrary file to this archive
    fn add_file(&mut self, file: &Path) {
        self.entries.push((
            file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
            ArchiveEntry::File(file.to_owned()),
        ));
    }

    /// Combine the provided files, rlibs, and native libraries into a single
    /// `Archive`.
    fn build(self: Box<Self>, output: &Path) -> bool {
        let sess = self.sess;
        match self.build_inner(output) {
            Ok(any_members) => any_members,
            Err(e) => sess.emit_fatal(ArchiveBuildFailure { error: e }),
        }
    }
}

impl<'a> ArArchiveBuilder<'a> {
    fn build_inner(self, output: &Path) -> io::Result<bool> {
        let archive_kind = match &*self.sess.target.archive_format {
            "gnu" => ArchiveKind::Gnu,
            "bsd" => ArchiveKind::Bsd,
            "darwin" => ArchiveKind::Darwin,
            "coff" => ArchiveKind::Coff,
            kind => {
                self.sess.emit_fatal(UnknownArchiveKind { kind });
            }
        };

        let mut entries = Vec::new();

        for (entry_name, entry) in self.entries {
            let data =
                match entry {
                    ArchiveEntry::FromArchive { archive_index, file_range } => {
                        let src_archive = &self.src_archives[archive_index];

                        let data = &src_archive.1
                            [file_range.0 as usize..file_range.0 as usize + file_range.1 as usize];

                        Box::new(data) as Box<dyn AsRef<[u8]>>
                    }
                    ArchiveEntry::File(file) => unsafe {
                        Box::new(
                            Mmap::map(File::open(file).map_err(|err| {
                                io_error_context("failed to open object file", err)
                            })?)
                            .map_err(|err| io_error_context("failed to map object file", err))?,
                        ) as Box<dyn AsRef<[u8]>>
                    },
                };

            entries.push(NewArchiveMember {
                buf: data,
                get_symbols: self.get_object_symbols,
                member_name: String::from_utf8(entry_name).unwrap(),
                mtime: 0,
                uid: 0,
                gid: 0,
                perms: 0o644,
            })
        }

        let mut w = File::create(output)
            .map_err(|err| io_error_context("failed to create archive file", err))?;

        write_archive_to_stream(&mut w, &entries, true, archive_kind, true, false)?;

        Ok(!entries.is_empty())
    }
}

fn io_error_context(context: &str, err: io::Error) -> io::Error {
    io::Error::new(io::ErrorKind::Other, format!("{context}: {err}"))
}
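
Putting the new pieces together, a call site might drive the builder as in the hedged sketch below. It assumes the items defined above are in scope; sess, the "libfoo.rlib" and "foo.o" paths, the literal "lib.rmeta" member name, and the bundle function itself are illustrative stand-ins rather than code from this commit.

// Hypothetical usage of ArArchiveBuilder through the ArchiveBuilder trait.
fn bundle<'a>(sess: &'a Session) -> io::Result<bool> {
    let mut ab: Box<dyn ArchiveBuilder<'a> + 'a> =
        Box::new(ArArchiveBuilder::new(sess, get_native_object_symbols));

    // Merge every member of an existing archive except the ones the skip
    // closure rejects (here, a metadata member named "lib.rmeta").
    ab.add_archive(Path::new("libfoo.rlib"), Box::new(|name| name == "lib.rmeta"))?;

    // Add a single object file as a new member.
    ab.add_file(Path::new("foo.o"));

    // build() consumes the builder, writes the archive, and returns whether any
    // members were written; I/O failures become a fatal diagnostic via `sess`.
    Ok(ab.build(Path::new("libbundle.a")))
}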