Support extracting any partition from payload.bin

Author:    topjohnwu
Date:      2023-05-25 23:45:38 -07:00
Committer: John Wu
Parent:    ec31cab5a7
Commit:    659b9c6fee

14 changed files with 203 additions and 116 deletions


@@ -180,7 +180,9 @@ abstract class MagiskInstallImpl protected constructor(
     }

     @Throws(IOException::class)
-    private fun processZip(input: InputStream) {
+    private fun processZip(input: InputStream): ExtendedFile {
+        val boot = installDir.getChildFile("boot.img")
+        val initBoot = installDir.getChildFile("init_boot.img")
         ZipInputStream(input).use { zipIn ->
             lateinit var entry: ZipEntry
             while (zipIn.nextEntry?.also { entry = it } != null) {
@@ -190,27 +192,36 @@ abstract class MagiskInstallImpl protected constructor(
                         console.add("- Extracting payload")
                         val dest = File(installDir, "payload.bin")
                         FileOutputStream(dest).use { zipIn.copyTo(it) }
-                        processPayload(Uri.fromFile(dest))
-                        break
+                        try {
+                            return processPayload(Uri.fromFile(dest))
+                        } catch (e: IOException) {
+                            // No boot image in payload.bin, continue to find boot images
+                        }
                     }
                     "init_boot.img" -> {
                         console.add("- Extracting init_boot image")
-                        FileOutputStream("$installDir/boot.img").use { zipIn.copyTo(it) }
-                        break
+                        initBoot.newOutputStream().use { zipIn.copyTo(it) }
+                        return initBoot
                     }
                     "boot.img" -> {
                         console.add("- Extracting boot image")
-                        FileOutputStream("$installDir/boot.img").use { zipIn.copyTo(it) }
+                        boot.newOutputStream().use { zipIn.copyTo(it) }
                         // no break here since there might be an init_boot.img
                     }
                 }
             }
         }
+        if (boot.exists()) {
+            return boot
+        } else {
+            console.add("! No boot image found")
+            throw IOException()
+        }
     }

     @Throws(IOException::class)
     @Synchronized
-    private fun processPayload(input: Uri) {
+    private fun processPayload(input: Uri): ExtendedFile {
         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
             throw IOException("Payload is only supported on Android Oreo or above")
         }
@@ -226,8 +237,7 @@ abstract class MagiskInstallImpl protected constructor(
                 .command(
                     "$installDir/magiskboot",
                     "extract",
-                    "-",
-                    "$installDir/boot.img"
+                    "-"
                 )
                 .start()
             if (process.waitFor() != 0) {
@@ -241,6 +251,16 @@ abstract class MagiskInstallImpl protected constructor(
                     Os.dup2(bk.fileDescriptor, 0)
                 }
             }
+            val boot = installDir.getChildFile("boot.img")
+            val initBoot = installDir.getChildFile("init_boot.img")
+            return when {
+                initBoot.exists() -> initBoot
+                boot.exists() -> boot
+                else -> {
+                    console.add("! No boot image found")
+                    throw IOException()
+                }
+            }
         } catch (e: ErrnoException) {
             throw IOException(e)
         }
@@ -356,14 +376,15 @@ abstract class MagiskInstallImpl protected constructor(
                 outFile = MediaStoreUtils.getFile("$filename.tar", true)
                 processTar(src, outFile!!.uri.outputStream())
             } else {
-                srcBoot = installDir.getChildFile("boot.img")
-                if (headMagic.contentEquals("CrAU".toByteArray())) {
+                srcBoot = if (headMagic.contentEquals("CrAU".toByteArray())) {
                     processPayload(uri)
                 } else if (headMagic.contentEquals("PK\u0003\u0004".toByteArray())) {
                     processZip(src)
                 } else {
+                    val boot = installDir.getChildFile("boot.img")
                     console.add("- Copying image to cache")
-                    src.cleanPump(srcBoot.newOutputStream())
+                    src.cleanPump(boot.newOutputStream())
+                    boot
                 }
                 // raw image
                 outFile = MediaStoreUtils.getFile("$filename.img", true)

native/src/Cargo.lock (generated)

@@ -31,6 +31,7 @@ dependencies = [
  "cxx",
  "cxx-gen",
  "libc",
+ "thiserror",
 ]

 [[package]]
@@ -213,6 +214,7 @@ dependencies = [
 name = "magiskboot"
 version = "0.0.0"
 dependencies = [
+ "anyhow",
  "base",
  "byteorder",
  "cxx",


@@ -10,6 +10,9 @@ cfg-if = "1.0"
 anyhow = "1.0"
 num-traits = "0.2"
 num-derive = "0.3"
+thiserror = "1.0"
+protobuf = "3.2.0"
+byteorder = "1"

 [profile.dev]
 opt-level = "z"


@@ -13,3 +13,4 @@ cxx-gen = { workspace = true }
 cxx = { workspace = true }
 libc = { workspace = true }
 cfg-if = { workspace = true }
+thiserror = { workspace = true }


@@ -1,4 +1,5 @@
 #![feature(format_args_nl)]
+#![feature(io_error_more)]

 pub use libc;


@@ -157,32 +157,14 @@ macro_rules! debug {
 }

 pub trait ResultExt {
-    fn ok_or_log(&self);
-    fn ok_or_msg(&self, args: Arguments);
-    fn log_on_error(&self) -> &Self;
-    fn msg_on_error(&self, args: Arguments) -> &Self;
+    fn log(self) -> Self;
 }

-impl<R, E: Display> ResultExt for Result<R, E> {
-    fn ok_or_log(&self) {
-        if let Err(e) = self {
-            error!("{}", e);
-        }
-    }
-
-    fn ok_or_msg(&self, args: Arguments) {
-        if let Err(e) = self {
-            error!("{}: {}", args, e);
-        }
-    }
-
-    fn log_on_error(&self) -> &Self {
-        self.ok_or_log();
-        self
-    }
-
-    fn msg_on_error(&self, args: Arguments) -> &Self {
-        self.ok_or_msg(args);
+impl<T, E: Display> ResultExt for Result<T, E> {
+    fn log(self) -> Self {
+        if let Err(e) = &self {
+            error!("{:#}", e);
+        }
         self
     }
 }
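
For illustration, the slimmed-down trait is meant to be chained at call sites, as the later hunks in this commit do with .log().ok() and .log().is_ok(). A minimal standalone Rust sketch of that pattern, with the trait inlined so it compiles on its own (the file path below is a placeholder and eprintln! stands in for the error! macro):

    use std::fmt::Display;

    trait ResultExt {
        fn log(self) -> Self;
    }

    impl<T, E: Display> ResultExt for Result<T, E> {
        fn log(self) -> Self {
            if let Err(e) = &self {
                // The real implementation routes this through error!("{:#}", e).
                eprintln!("{:#}", e);
            }
            self
        }
    }

    fn main() {
        // Log the full error and keep going, replacing the removed
        // ok_or_log / log_on_error / msg_on_error combinators.
        std::fs::read_to_string("/nonexistent/config").log().ok();
    }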


@@ -1,8 +1,11 @@
 use std::cmp::min;
 use std::ffi::CStr;
-use std::fmt::Arguments;
+use std::fmt::{Arguments, Debug};
+use std::str::Utf8Error;
 use std::{fmt, slice};

+use thiserror::Error;
+
 pub fn copy_str(dest: &mut [u8], src: &[u8]) -> usize {
     let len = min(src.len(), dest.len() - 1);
     dest[..len].copy_from_slice(&src[..len]);
@@ -78,6 +81,24 @@ macro_rules! raw_cstr {
     }};
 }

+#[derive(Debug, Error)]
+pub enum StrErr {
+    #[error(transparent)]
+    Invalid(#[from] Utf8Error),
+    #[error("argument is null")]
+    NullPointer,
+}
+
+pub fn ptr_to_str_result<'a, T>(ptr: *const T) -> Result<&'a str, StrErr> {
+    if ptr.is_null() {
+        Err(StrErr::NullPointer)
+    } else {
+        unsafe { CStr::from_ptr(ptr.cast()) }
+            .to_str()
+            .map_err(|e| StrErr::from(e))
+    }
+}
+
 pub fn ptr_to_str<'a, T>(ptr: *const T) -> &'a str {
     if ptr.is_null() {
         "(null)"


@@ -14,5 +14,6 @@ cxx-gen = { workspace = true }
 [dependencies]
 base = { path = "../base" }
 cxx = { path = "../external/cxx-rs" }
-protobuf = "3.2.0"
-byteorder = "1"
+protobuf = { workspace = true }
+byteorder = { workspace = true }
+anyhow = { workspace = true }


@@ -718,16 +718,16 @@ void compress(const char *method, const char *infile, const char *outfile) {
     unlink(infile);
 }

-bool decompress(const unsigned char *in, uint64_t in_size, int fd) {
-    format_t type = check_fmt(in, in_size);
+bool decompress(rust::Slice<const uint8_t> buf, int fd) {
+    format_t type = check_fmt(buf.data(), buf.length());
     if (!COMPRESSED(type)) {
-        LOGE("Input file is not a supported compressed type!\n");
+        LOGE("Input file is not a supported compression format!\n");
         return false;
     }

     auto strm = get_decoder(type, make_unique<fd_channel>(fd));
-    if (!strm->write(in, in_size)) {
+    if (!strm->write(buf.data(), buf.length())) {
         return false;
     }

     return true;


@@ -1,5 +1,6 @@
 #pragma once

+#include <cxx.h>
 #include <stream.hpp>

 #include "format.hpp"
@@ -8,4 +9,4 @@ out_strm_ptr get_encoder(format_t type, out_strm_ptr &&base);
 out_strm_ptr get_decoder(format_t type, out_strm_ptr &&base);
 void compress(const char *method, const char *infile, const char *outfile);
 void decompress(char *infile, const char *outfile);
-bool decompress(const unsigned char *in, uint64_t in_size, int fd);
+bool decompress(rust::Slice<const uint8_t> buf, int fd);


@@ -1,5 +1,7 @@
 #![feature(format_args_nl)]

+extern crate core;
+
 pub use base;
 pub use payload::*;
@@ -8,14 +10,15 @@ mod update_metadata;

 #[cxx::bridge]
 pub mod ffi {
-    extern "C++" {
+    unsafe extern "C++" {
         include!("compress.hpp");
-        pub unsafe fn decompress(in_: *const u8, in_size: u64, fd: i32) -> bool;
+        fn decompress(buf: &[u8], fd: i32) -> bool;
     }

     #[namespace = "rust"]
     extern "Rust" {
         unsafe fn extract_boot_from_payload(
+            partition: *const c_char,
             in_path: *const c_char,
             out_path: *const c_char,
         ) -> bool;


@@ -24,8 +24,7 @@ Supported actions:
     a file with its corresponding file name in the current directory.
     Supported components: kernel, kernel_dtb, ramdisk.cpio, second,
     dtb, extra, and recovery_dtbo.
-    By default, each component will be automatically decompressed
-    on-the-fly before writing to the output file.
+    By default, each component will be decompressed on-the-fly.
     If '-n' is provided, all decompression operations will be skipped;
     each component will remain untouched, dumped in its original format.
     If '-h' is provided, the boot image header information will be
@@ -46,8 +45,13 @@ Supported actions:
     If env variable PATCHVBMETAFLAG is set to true, all disable flags in
     the boot image's vbmeta header will be set.

-  extract <payload.bin> <outbootimg>
-    Extract the boot image from payload.bin to <outbootimg>.
+  extract <payload.bin> [partition] [outfile]
+    Extract [partition] from <payload.bin> to [outfile].
+    If [outfile] is not specified, then output to '[partition].img'.
+    If [partition] is not specified, then attempt to extract either
+    'init_boot' or 'boot'. Which partition was chosen can be determined
+    by whichever 'init_boot.img' or 'boot.img' exists.
+    <payload.bin>/[outfile] can be '-' to be STDIN/STDOUT.

   hexpatch <file> <hexpattern1> <hexpattern2>
     Search <hexpattern1> in <file>, and replace it with <hexpattern2>
@@ -204,8 +208,12 @@ int main(int argc, char *argv[]) {
     } else if (argc > 3 && action == "dtb") {
         if (dtb_commands(argc - 2, argv + 2))
             usage(argv[0]);
-    } else if (argc > 3 && action == "extract") {
-        return rust::extract_boot_from_payload(argv[2], argv[3]) ? 0 : 1;
+    } else if (argc > 2 && action == "extract") {
+        return rust::extract_boot_from_payload(
+            argv[2],
+            argc > 3 ? argv[3] : nullptr,
+            argc > 4 ? argv[4] : nullptr
+        ) ? 0 : 1;
     } else {
         usage(argv[0]);
     }
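
To make the new argument forms concrete, here is a hedged Rust sketch that drives the extract action through std::process::Command, mirroring how the app-side installer above shells out to magiskboot (the binary path, payload file, and partition names are placeholders):

    use std::process::Command;

    fn main() -> std::io::Result<()> {
        // No partition given: magiskboot tries init_boot first, then boot,
        // and writes init_boot.img or boot.img to the current directory.
        Command::new("./magiskboot")
            .args(["extract", "payload.bin"])
            .status()?;

        // Explicit partition, default output file (vendor_boot.img).
        Command::new("./magiskboot")
            .args(["extract", "payload.bin", "vendor_boot"])
            .status()?;

        // Explicit partition and output path; '-' would mean stdout instead.
        Command::new("./magiskboot")
            .args(["extract", "payload.bin", "boot", "out/boot.img"])
            .status()?;

        Ok(())
    }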


@@ -1,148 +1,171 @@
 use std::fs::File;
-use std::io;
-use std::io::{BufReader, ErrorKind, Read, Seek, SeekFrom, Write};
+use std::io::{BufReader, Read, Seek, SeekFrom, Write};
 use std::os::fd::{AsRawFd, FromRawFd};

+use anyhow::{anyhow, Context};
 use byteorder::{BigEndian, ReadBytesExt};
 use protobuf::{EnumFull, Message};

 use base::libc::c_char;
-use base::ptr_to_str;
+use base::{ptr_to_str_result, StrErr};
 use base::{ResultExt, WriteExt};

 use crate::ffi;
 use crate::update_metadata::install_operation::Type;
 use crate::update_metadata::DeltaArchiveManifest;

-macro_rules! data_err {
-    ($fmt:expr) => { io::Error::new(ErrorKind::InvalidData, format!($fmt)) };
-    ($fmt:expr, $($args:tt)*) => {
-        io::Error::new(ErrorKind::InvalidData, format!($fmt, $($args)*))
+macro_rules! bad_payload {
+    ($msg:literal) => {
+        anyhow!(concat!("invalid payload: ", $msg))
+    };
+    ($($args:tt)*) => {
+        anyhow!("invalid payload: {}", format_args!($($args)*))
     };
 }

-static PAYLOAD_MAGIC: &str = "CrAU";
+const PAYLOAD_MAGIC: &str = "CrAU";

-fn do_extract_boot_from_payload(in_path: &str, out_path: &str) -> io::Result<()> {
+fn do_extract_boot_from_payload(
+    in_path: &str,
+    partition: Option<&str>,
+    out_path: Option<&str>,
+) -> anyhow::Result<()> {
     let mut reader = BufReader::new(if in_path == "-" {
         unsafe { File::from_raw_fd(0) }
     } else {
-        File::open(in_path)?
+        File::open(in_path).with_context(|| format!("cannot open '{in_path}'"))?
     });

     let buf = &mut [0u8; 4];
     reader.read_exact(buf)?;

     if buf != PAYLOAD_MAGIC.as_bytes() {
-        return Err(data_err!("invalid payload magic"));
+        return Err(bad_payload!("invalid magic"));
     }

     let version = reader.read_u64::<BigEndian>()?;
     if version != 2 {
-        return Err(data_err!("unsupported version {}", version));
+        return Err(bad_payload!("unsupported version: {}", version));
     }

-    let manifest_len = reader.read_u64::<BigEndian>()?;
+    let manifest_len = reader.read_u64::<BigEndian>()? as usize;
     if manifest_len == 0 {
-        return Err(data_err!("manifest length is zero"));
+        return Err(bad_payload!("manifest length is zero"));
     }

     let manifest_sig_len = reader.read_u32::<BigEndian>()?;
     if manifest_sig_len == 0 {
-        return Err(data_err!("manifest signature length is zero"));
+        return Err(bad_payload!("manifest signature length is zero"));
     }

-    let mut buf = vec![0; manifest_len as usize];
-    reader.read_exact(&mut buf)?;
-
-    let manifest = DeltaArchiveManifest::parse_from_bytes(&buf)?;
+    let mut buf = Vec::new();
+    buf.resize(manifest_len, 0u8);
+
+    let manifest = {
+        let manifest = &mut buf[..manifest_len];
+        reader.read_exact(manifest)?;
+        DeltaArchiveManifest::parse_from_bytes(&manifest)?
+    };
     if !manifest.has_minor_version() || manifest.minor_version() != 0 {
-        return Err(data_err!(
+        return Err(bad_payload!(
             "delta payloads are not supported, please use a full payload file"
         ));
     }

-    if !manifest.has_block_size() {
-        return Err(data_err!("block size not found"));
-    }
-
-    let boot = manifest.partitions.iter().find(|partition| {
-        partition.has_partition_name() && partition.partition_name() == "init_boot"
-    });
-    let boot = match boot {
-        Some(boot) => Some(boot),
-        None => manifest.partitions.iter().find(|partition| {
-            partition.has_partition_name() && partition.partition_name() == "boot"
-        }),
-    };
-    let boot = boot.ok_or(data_err!("boot partition not found"))?;
-
-    let base_offset = reader.stream_position()? + manifest_sig_len as u64;
-
-    let block_size = manifest
-        .block_size
-        .ok_or(data_err!("block size not found"))? as u64;
+    let block_size = manifest.block_size() as u64;
+
+    let part = match partition {
+        None => {
+            let boot = manifest
+                .partitions
+                .iter()
+                .find(|partition| partition.partition_name() == "init_boot");
+            let boot = match boot {
+                Some(boot) => Some(boot),
+                None => manifest
+                    .partitions
+                    .iter()
+                    .find(|partition| partition.partition_name() == "boot"),
+            };
+            boot.ok_or(anyhow!("boot partition not found"))?
+        }
+        Some(partition) => manifest
+            .partitions
+            .iter()
+            .find(|p| p.partition_name() == partition)
+            .ok_or(anyhow!("partition '{partition}' not found"))?,
+    };
+
+    let out_str: String;
+    let out_path = match out_path {
+        None => {
+            out_str = format!("{}.img", part.partition_name());
+            out_str.as_str()
+        }
+        Some(p) => p,
+    };

     let mut out_file = if out_path == "-" {
         unsafe { File::from_raw_fd(1) }
     } else {
-        File::create(out_path)?
+        File::create(out_path).with_context(|| format!("cannot write to '{out_path}'"))?
     };

-    for operation in boot.operations.iter() {
+    let base_offset = reader.stream_position()? + manifest_sig_len as u64;
+
+    for operation in part.operations.iter() {
         let data_len = operation
             .data_length
-            .ok_or(data_err!("data length not found"))?;
+            .ok_or(bad_payload!("data length not found"))? as usize;

         let data_offset = operation
             .data_offset
-            .ok_or(data_err!("data offset not found"))?;
+            .ok_or(bad_payload!("data offset not found"))?;

         let data_type = operation
             .type_
-            .ok_or(data_err!("operation type not found"))?;
-        let data_type = data_type
+            .ok_or(bad_payload!("operation type not found"))?
             .enum_value()
-            .map_err(|_| data_err!("operation type not valid"))?;
+            .map_err(|_| bad_payload!("operation type not valid"))?;

-        let mut buf = vec![0; data_len as usize];
+        buf.resize(data_len, 0u8);
+        let data = &mut buf[..data_len];

         reader.seek(SeekFrom::Start(base_offset + data_offset))?;
-        reader.read_exact(&mut buf)?;
+        reader.read_exact(data)?;

         let out_offset = operation
             .dst_extents
             .get(0)
-            .ok_or(data_err!("dst extents not found"))?
+            .ok_or(bad_payload!("dst extents not found"))?
             .start_block
-            .ok_or(data_err!("start block not found"))?
+            .ok_or(bad_payload!("start block not found"))?
             * block_size;

         match data_type {
             Type::REPLACE => {
                 out_file.seek(SeekFrom::Start(out_offset))?;
-                out_file.write_all(&buf)?;
+                out_file.write_all(&data)?;
             }
             Type::ZERO => {
                 for ext in operation.dst_extents.iter() {
-                    let out_seek =
-                        ext.start_block.ok_or(data_err!("start block not found"))? * block_size;
-                    let num_blocks = ext.num_blocks.ok_or(data_err!("num blocks not found"))?;
+                    let out_seek = ext
+                        .start_block
+                        .ok_or(bad_payload!("start block not found"))?
+                        * block_size;
+                    let num_blocks = ext.num_blocks.ok_or(bad_payload!("num blocks not found"))?;

                     out_file.seek(SeekFrom::Start(out_seek))?;
                     out_file.write_zeros(num_blocks as usize)?;
                 }
             }
             Type::REPLACE_BZ | Type::REPLACE_XZ => {
                 out_file.seek(SeekFrom::Start(out_offset))?;
-                unsafe {
-                    if !ffi::decompress(buf.as_ptr(), buf.len() as u64, out_file.as_raw_fd()) {
-                        return Err(data_err!("decompression failed"));
-                    }
+                if !ffi::decompress(data, out_file.as_raw_fd()) {
+                    return Err(bad_payload!("decompression failed"));
                 }
             }
             _ => {
-                return Err(data_err!(
+                return Err(bad_payload!(
                     "unsupported operation type: {}",
                     data_type.descriptor().name()
                 ));
@@ -153,10 +176,30 @@ fn do_extract_boot_from_payload(in_path: &str, out_path: &str) -> io::Result<()>
     Ok(())
 }

-pub fn extract_boot_from_payload(in_path: *const c_char, out_path: *const c_char) -> bool {
-    let in_path = ptr_to_str(in_path);
-    let out_path = ptr_to_str(out_path);
-    do_extract_boot_from_payload(in_path, out_path)
-        .msg_on_error(format_args!("Failed to extract boot from payload"))
-        .is_ok()
+pub fn extract_boot_from_payload(
+    in_path: *const c_char,
+    partition: *const c_char,
+    out_path: *const c_char,
+) -> bool {
+    fn inner(
+        in_path: *const c_char,
+        partition: *const c_char,
+        out_path: *const c_char,
+    ) -> anyhow::Result<()> {
+        let in_path = ptr_to_str_result(in_path)?;
+        let partition = match ptr_to_str_result(partition) {
+            Ok(s) => Some(s),
+            Err(StrErr::NullPointer) => None,
+            Err(e) => Err(e)?,
+        };
+        let out_path = match ptr_to_str_result(out_path) {
+            Ok(s) => Some(s),
+            Err(StrErr::NullPointer) => None,
+            Err(e) => Err(e)?,
+        };
+        do_extract_boot_from_payload(in_path, partition, out_path)
+            .context("Failed to extract from payload")?;
+        Ok(())
+    }
+    inner(in_path, partition, out_path).log().is_ok()
 }
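
For orientation, the full-OTA payload.bin container parsed above starts with a fixed big-endian header, followed by the DeltaArchiveManifest protobuf, the manifest signature, and finally the raw data blobs referenced by each install operation (their offsets are relative to base_offset in the code above). A minimal standalone Rust sketch of just the header read, under the same assumptions as this file (struct and field names are descriptive, not from the source; byteorder is already a dependency of this crate):

    use std::io::{self, Read};

    use byteorder::{BigEndian, ReadBytesExt};

    /// Fixed-size fields at the start of a full OTA payload ("CrAU" container).
    struct PayloadHeader {
        version: u64,          // file format major version; only 2 is accepted above
        manifest_len: usize,   // size of the serialized DeltaArchiveManifest
        manifest_sig_len: u32, // size of the manifest signature that follows it
    }

    fn read_payload_header(mut r: impl Read) -> io::Result<PayloadHeader> {
        let mut magic = [0u8; 4];
        r.read_exact(&mut magic)?;
        if &magic != b"CrAU" {
            return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid magic"));
        }
        Ok(PayloadHeader {
            version: r.read_u64::<BigEndian>()?,
            manifest_len: r.read_u64::<BigEndian>()? as usize,
            manifest_sig_len: r.read_u32::<BigEndian>()?,
        })
    }

    fn main() -> io::Result<()> {
        // Usage sketch against a local file; the path is a placeholder.
        let header = read_payload_header(std::fs::File::open("payload.bin")?)?;
        println!(
            "version {} / manifest {} bytes / signature {} bytes",
            header.version, header.manifest_len, header.manifest_sig_len
        );
        Ok(())
    }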


@@ -44,7 +44,7 @@ pub fn load_rule_file(sepol: Pin<&mut sepolicy>, filename: &[u8]) {
         load_rules_from_reader(sepol, &mut reader);
         Ok(())
     }
-    inner(sepol, filename).ok_or_log();
+    inner(sepol, filename).log().ok();
 }

 pub fn load_rules(sepol: Pin<&mut sepolicy>, rules: &[u8]) {