Compare commits

...

26 Commits
v30.4 ... v30.5

Author SHA1 Message Date
topjohnwu
363566d0d5 Release Magisk v30.5
[skip ci]
2025-12-01 01:52:46 -08:00
topjohnwu
d9dc459bf4 Update system_properties
Fix #9408
2025-12-01 00:25:39 -08:00
topjohnwu
5d6b703622 Move occupy and unoccupy out of base crate 2025-11-29 00:13:41 -08:00
topjohnwu
f7ce9c38e1 Run through clippy and rustfmt 2025-11-29 00:13:41 -08:00
LoveSy
bdbfb40383 Use rootfs for magisktmp if possible 2025-11-29 00:13:41 -08:00
topjohnwu
283fc0f46f Update cargo dependencies 2025-11-28 21:00:48 -08:00
topjohnwu
2c24a41bf2 Update gradle dependencies 2025-11-27 03:36:49 -08:00
topjohnwu
97c93a1f4d Smaller release binary size 2025-11-27 02:00:54 -08:00
topjohnwu
8d534e6de8 Update cxx-rs 2025-11-25 02:29:45 -08:00
topjohnwu
3a60ef2039 Update to ONDK r29.3 2025-11-21 13:28:46 -08:00
Wang Han
52d7eff03f Fix splice direction for ptmx out stream 2025-11-19 15:14:59 -08:00
topjohnwu
020e23ea13 Disable help triggers on subcommands 2025-11-03 16:16:49 -08:00
topjohnwu
1599bfc2c5 Update dependencies 2025-11-02 13:52:32 -08:00
Wang Han
c8d51b38ba Enhance fdt_header validation for empty dtb 2025-11-02 02:42:48 -08:00
Wang Han
f741a4aeb8 Free regex resources in plt_hook_commit
Free regex resources for registered and ignored hooks before clearing the lists.
2025-11-02 01:59:03 -08:00
topjohnwu
4ee2235961 Update dependencies 2025-10-20 10:30:09 -07:00
topjohnwu
536e50c6e0 Support optional trailing positional arguments 2025-10-19 17:15:30 -07:00
topjohnwu
57d9fc6099 Support short only options and switches 2025-10-19 17:15:30 -07:00
topjohnwu
52d8910bdd Cleanup code for EarlyExit during help triggers 2025-10-19 17:15:30 -07:00
topjohnwu
c94bd49a89 Update default help triggers 2025-10-19 17:15:30 -07:00
topjohnwu
b72ba6759e Vendor argh sources
Further customization will come in future commits
2025-10-19 17:15:30 -07:00
topjohnwu
5bcb55b7fc Format Rust imports with rustfmt 2025-10-19 17:15:30 -07:00
topjohnwu
0dc8231585 Make all dependencies workspace = true 2025-10-19 17:15:30 -07:00
Wang Han
470acc93c9 Remove clickable attribute from item_module_md2.xml 2025-10-19 14:02:02 -07:00
Wang Han
0edb80b10f Set module card as non clickable
It's too easy to mis-click the card.
2025-10-19 14:02:02 -07:00
topjohnwu
bcc6296d94 Build debug without debug-info 2025-10-03 00:16:17 -07:00
61 changed files with 3583 additions and 526 deletions
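Note on the largest functional change below: the "Vendor argh sources" commit and its follow-ups drop the crates.io argh/argh_derive dependency (removed from Cargo.toml and Cargo.lock) in favor of an in-tree copy at native/src/base/argh.rs plus a base/derive proc-macro crate, which appears to be what the v30.5 changelog entry "Improve commandline argument parsing logic" refers to. As rough orientation only, here is a minimal sketch of the upstream argh derive API that the vendored code starts from; the struct and field names are hypothetical, and since the commit notes that "further customization will come in future commits", Magisk's vendored attribute grammar may differ:

use argh::FromArgs;

/// Hypothetical example command, mirroring the upstream argh documentation.
#[derive(FromArgs)]
struct Example {
    /// enable verbose output
    #[argh(switch, short = 'v')]
    verbose: bool,

    /// number of retries (defaults to 3)
    #[argh(option, default = "3")]
    retries: u32,

    /// input file
    #[argh(positional)]
    input: String,
}

fn main() {
    // Parses std::env::args(), printing usage and exiting on error or --help.
    let args: Example = argh::from_env();
    println!("{} (retries = {})", args.input, args.retries);
}

The "Support short only options and switches" and "Support optional trailing positional arguments" commits appear to extend exactly this attribute grammar in the vendored copy.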

View File

@@ -27,10 +27,8 @@
isEnabled="@{!item.removed && item.enabled && !item.showNotice}" isEnabled="@{!item.removed && item.enabled && !item.showNotice}"
android:layout_width="match_parent" android:layout_width="match_parent"
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:clickable="@{!item.removed && item.enabled && !item.showNotice}"
android:focusable="@{!item.removed && item.enabled && !item.showNotice}" android:focusable="@{!item.removed && item.enabled && !item.showNotice}"
android:nextFocusRight="@id/module_indicator" android:nextFocusRight="@id/module_indicator"
android:onClick="@{() -> item.setEnabled(!item.enabled)}"
app:cardBackgroundColor="@color/color_card_background_color_selector" app:cardBackgroundColor="@color/color_card_background_color_selector"
tools:isEnabled="false" tools:isEnabled="false"
tools:layout_gravity="center" tools:layout_gravity="center"

View File

@@ -55,7 +55,7 @@ fun Project.setupCommon() {
     compileSdkVersion(36)
     buildToolsVersion = "36.0.0"
     ndkPath = "$sdkDirectory/ndk/magisk"
-    ndkVersion = "29.0.13846066"
+    ndkVersion = "29.0.14206865"

     defaultConfig {
         minSdk = 23

View File

@@ -19,7 +19,7 @@ abstract class SuLogDatabase : RoomDatabase() {
     companion object {
         val MIGRATION_1_2 = object : Migration(1, 2) {
-            override fun migrate(database: SupportSQLiteDatabase) = with(database) {
+            override fun migrate(db: SupportSQLiteDatabase) = with(db) {
                 execSQL("ALTER TABLE logs ADD COLUMN target INTEGER NOT NULL DEFAULT -1")
                 execSQL("ALTER TABLE logs ADD COLUMN context TEXT NOT NULL DEFAULT ''")
                 execSQL("ALTER TABLE logs ADD COLUMN gids TEXT NOT NULL DEFAULT ''")

View File

@@ -44,7 +44,7 @@ object ServiceLocator {
     private fun createSuLogDatabase(context: Context) =
         Room.databaseBuilder(context, SuLogDatabase::class.java, "sulogs.db")
             .addMigrations(SuLogDatabase.MIGRATION_1_2)
-            .fallbackToDestructiveMigration()
+            .fallbackToDestructiveMigration(true)
             .build()

     private fun createMarkwon(context: Context) =

View File

@@ -34,7 +34,7 @@ data class ModuleJson(
 @JsonClass(generateAdapter = true)
 data class ReleaseAssets(
     val name: String,
-    @Json(name = "browser_download_url") val url: String,
+    @param:Json(name = "browser_download_url") val url: String,
 )

 class DateTimeAdapter {
@@ -51,12 +51,12 @@ class DateTimeAdapter {
 @JsonClass(generateAdapter = true)
 data class Release(
-    @Json(name = "tag_name") val tag: String,
+    @param:Json(name = "tag_name") val tag: String,
     val name: String,
     val prerelease: Boolean,
     val assets: List<ReleaseAssets>,
     val body: String,
-    @Json(name = "created_at") val createdTime: Instant,
+    @param:Json(name = "created_at") val createdTime: Instant,
 ) {
     val versionCode: Int get() {
         return if (tag[0] == 'v') {

View File

@@ -30,4 +30,4 @@ android.nonFinalResIds=false
 # Magisk
 magisk.stubVersion=40
-magisk.versionCode=30400
+magisk.versionCode=30500

View File

@@ -1,16 +1,16 @@
 [versions]
-kotlin = "2.2.20"
+kotlin = "2.2.21"
-android = "8.13.0"
+android = "8.13.1"
-ksp = "2.2.20-2.0.2"
+ksp = "2.3.3"
 rikka = "1.3.0"
-navigation = "2.9.4"
+navigation = "2.9.6"
 libsu = "6.0.0"
-okhttp = "5.1.0"
+okhttp = "5.3.2"
 retrofit = "3.0.0"
-room = "2.8.0"
+room = "2.8.4"

 [libraries]
-bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version = "1.82" }
+bcpkix = { module = "org.bouncycastle:bcpkix-jdk18on", version = "1.83" }
 commons-compress = { module = "org.apache.commons:commons-compress", version = "1.28.0" }
 retrofit = { module = "com.squareup.retrofit2:retrofit", version.ref = "retrofit" }
 retrofit-moshi = { module = "com.squareup.retrofit2:converter-moshi", version.ref = "retrofit" }
@@ -23,10 +23,10 @@ timber = { module = "com.jakewharton.timber:timber", version = "5.0.1" }
 jgit = { module = "org.eclipse.jgit:org.eclipse.jgit", version = "7.1.0.202411261347-r" }

 # AndroidX
-activity = { module = "androidx.activity:activity", version = "1.11.0" }
+activity = { module = "androidx.activity:activity", version = "1.12.0" }
 appcompat = { module = "androidx.appcompat:appcompat", version = "1.7.1" }
 core-ktx = { module = "androidx.core:core-ktx", version = "1.17.0" }
-core-splashscreen = { module = "androidx.core:core-splashscreen", version = "1.0.1" }
+core-splashscreen = { module = "androidx.core:core-splashscreen", version = "1.2.0" }
 constraintlayout = { module = "androidx.constraintlayout:constraintlayout", version = "2.2.1" }
 fragment-ktx = { module = "androidx.fragment:fragment-ktx", version = "1.8.9" }
 navigation-fragment-ktx = { module = "androidx.navigation:navigation-fragment-ktx", version.ref = "navigation" }
@@ -62,6 +62,6 @@ android-gradle-plugin = { module = "com.android.tools.build:gradle", version.ref
 ksp-plugin = { module = "com.google.devtools.ksp:com.google.devtools.ksp.gradle.plugin", version.ref = "ksp" }
 navigation-safe-args-plugin = { module = "androidx.navigation:navigation-safe-args-gradle-plugin", version.ref = "navigation" }
 lsparanoid-plugin = { module = "org.lsposed.lsparanoid:gradle-plugin", version = "0.6.0" }
-moshi-plugin = { module = "dev.zacsweers.moshix:dev.zacsweers.moshix.gradle.plugin", version = "0.32.0" }
+moshi-plugin = { module = "dev.zacsweers.moshix:dev.zacsweers.moshix.gradle.plugin", version = "0.34.1" }

 [plugins]

View File

@@ -80,7 +80,7 @@ support_targets = {"magisk", "magiskinit", "magiskboot", "magiskpolicy", "resetp
 default_targets = support_targets - {"resetprop"}
 rust_targets = default_targets.copy()
 clean_targets = {"native", "cpp", "rust", "app"}
-ondk_version = "r29.2"
+ondk_version = "r29.3"

 # Global vars
 config = {}

View File

@@ -1,5 +1,10 @@
 # Magisk Changelog

+### v30.5 (2025.12.1)
+
+- [General] Improve commandline argument parsing logic
+- [resetprop] Properly support Android versions with property overrides
+
 ### v30.4 (2025.10.2)

 - [MagiskSU] Fix several implementation bugs

View File

@@ -10,3 +10,6 @@ target-dir = "../out/rust"
build-std = ["std", "panic_abort"] build-std = ["std", "panic_abort"]
build-std-features = ["panic_immediate_abort", "optimize_for_size"] build-std-features = ["panic_immediate_abort", "optimize_for_size"]
profile-rustflags = true profile-rustflags = true
[profile.release]
rustflags = ["-Z", "location-detail=none", "-Z", "fmt-debug=none"]

native/src/Cargo.lock (generated, 413 changed lines)
View File

@@ -10,38 +10,9 @@ checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]] [[package]]
name = "anstyle" name = "anstyle"
version = "1.0.11" version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
name = "argh"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ff18325c8a36b82f992e533ece1ec9f9a9db446bd1c14d4f936bac88fcd240"
dependencies = [
"argh_derive",
"argh_shared",
"rust-fuzzy-search",
]
[[package]]
name = "argh_derive"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb7b2b83a50d329d5d8ccc620f5c7064028828538bdf5646acd60dc1f767803"
dependencies = [
"argh_shared",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "argh_shared"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a464143cc82dedcdc3928737445362466b7674b5db4e2eb8e869846d6d84f4f6"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
@@ -53,13 +24,13 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
name = "base" name = "base"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"argh",
"bitflags", "bitflags",
"bytemuck", "bytemuck",
"cfg-if", "cfg-if",
"const_format", "const_format",
"cxx", "cxx",
"cxx-gen", "cxx-gen",
"derive",
"libc", "libc",
"nix", "nix",
"num-derive", "num-derive",
@@ -96,9 +67,9 @@ checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
[[package]] [[package]]
name = "bitflags" name = "bitflags"
version = "2.9.4" version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
[[package]] [[package]]
name = "block-buffer" name = "block-buffer"
@@ -111,9 +82,9 @@ dependencies = [
[[package]] [[package]]
name = "block-buffer" name = "block-buffer"
version = "0.11.0-rc.5" version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9ef36a6fcdb072aa548f3da057640ec10859eb4e91ddf526ee648d50c76a949" checksum = "96eb4cdd6cf1b31d671e9efe75c5d1ec614776856cefbe109ca373554a6d514f"
dependencies = [ dependencies = [
"hybrid-array", "hybrid-array",
] ]
@@ -126,18 +97,18 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
[[package]] [[package]]
name = "bytemuck" name = "bytemuck"
version = "1.23.2" version = "1.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4"
dependencies = [ dependencies = [
"bytemuck_derive", "bytemuck_derive",
] ]
[[package]] [[package]]
name = "bytemuck_derive" name = "bytemuck_derive"
version = "1.10.1" version = "1.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f154e572231cb6ba2bd1176980827e3d5dc04cc183a75dea38109fbdd672d29" checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -152,18 +123,18 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]] [[package]]
name = "bzip2" name = "bzip2"
version = "0.6.0" version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bea8dcd42434048e4f7a304411d9273a411f647446c1234a65ce0554923f4cff" checksum = "f3a53fac24f34a81bc9954b5d6cfce0c21e18ec6959f44f56e8e90e4bb7c346c"
dependencies = [ dependencies = [
"libbz2-rs-sys", "libbz2-rs-sys",
] ]
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.2.39" version = "1.2.48"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1354349954c6fc9cb0deab020f27f783cf0b604e8bb754dc4658ecf0d29c35f" checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a"
dependencies = [ dependencies = [
"find-msvc-tools", "find-msvc-tools",
"shlex", "shlex",
@@ -171,9 +142,9 @@ dependencies = [
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
version = "1.0.3" version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]] [[package]]
name = "cfg_aliases" name = "cfg_aliases"
@@ -183,18 +154,18 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]] [[package]]
name = "clap" name = "clap"
version = "4.5.48" version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [ dependencies = [
"clap_builder", "clap_builder",
] ]
[[package]] [[package]]
name = "clap_builder" name = "clap_builder"
version = "4.5.48" version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"clap_lex", "clap_lex",
@@ -203,15 +174,15 @@ dependencies = [
[[package]] [[package]]
name = "clap_lex" name = "clap_lex"
version = "0.7.5" version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
[[package]] [[package]]
name = "codespan-reporting" name = "codespan-reporting"
version = "0.12.0" version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81" checksum = "af491d569909a7e4dee0ad7db7f5341fef5c614d5b8ec8cf765732aba3cff681"
dependencies = [ dependencies = [
"serde", "serde",
"termcolor", "termcolor",
@@ -226,9 +197,9 @@ checksum = "0dabb6555f92fb9ee4140454eb5dcd14c7960e1225c6d1a6cc361f032947713e"
[[package]] [[package]]
name = "const_format" name = "const_format"
version = "0.2.34" version = "0.2.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad"
dependencies = [ dependencies = [
"const_format_proc_macros", "const_format_proc_macros",
] ]
@@ -255,9 +226,9 @@ dependencies = [
[[package]] [[package]]
name = "crc" name = "crc"
version = "3.3.0" version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
dependencies = [ dependencies = [
"crc-catalog", "crc-catalog",
] ]
@@ -279,9 +250,9 @@ dependencies = [
[[package]] [[package]]
name = "crypto-bigint" name = "crypto-bigint"
version = "0.7.0-rc.7" version = "0.7.0-rc.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0bfcfbe68dec4e49b4e93da8f091ce39556549554905fcb07308f6eeefae46c" checksum = "6715836b4946e8585016e80b79c7561476aff3b22f7b756778e7b109d86086c6"
dependencies = [ dependencies = [
"hybrid-array", "hybrid-array",
"num-traits", "num-traits",
@@ -293,9 +264,9 @@ dependencies = [
[[package]] [[package]]
name = "crypto-common" name = "crypto-common"
version = "0.1.6" version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [ dependencies = [
"generic-array", "generic-array",
"typenum", "typenum",
@@ -303,18 +274,18 @@ dependencies = [
[[package]] [[package]]
name = "crypto-common" name = "crypto-common"
version = "0.2.0-rc.4" version = "0.2.0-rc.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8235645834fbc6832939736ce2f2d08192652269e11010a6240f61b908a1c6" checksum = "919bd05924682a5480aec713596b9e2aabed3a0a6022fab6847f85a99e5f190a"
dependencies = [ dependencies = [
"hybrid-array", "hybrid-array",
] ]
[[package]] [[package]]
name = "crypto-primes" name = "crypto-primes"
version = "0.7.0-pre.3" version = "0.7.0-pre.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25f2523fbb68811c8710829417ad488086720a6349e337c38d12fa81e09e50bf" checksum = "fdd9b2855017318a49714c07ee8895b89d3510d54fa6d86be5835de74c389609"
dependencies = [ dependencies = [
"crypto-bigint", "crypto-bigint",
"libm", "libm",
@@ -323,18 +294,32 @@ dependencies = [
[[package]] [[package]]
name = "cxx" name = "cxx"
version = "1.0.170" version = "1.0.189"
dependencies = [ dependencies = [
"cc", "cc",
"cxx-build",
"cxxbridge-cmd", "cxxbridge-cmd",
"cxxbridge-flags", "cxxbridge-flags",
"cxxbridge-macro", "cxxbridge-macro",
"foldhash", "foldhash",
] ]
[[package]]
name = "cxx-build"
version = "1.0.189"
dependencies = [
"cc",
"codespan-reporting",
"indexmap",
"proc-macro2",
"quote",
"scratch",
"syn",
]
[[package]] [[package]]
name = "cxx-gen" name = "cxx-gen"
version = "0.7.170" version = "0.7.189"
dependencies = [ dependencies = [
"codespan-reporting", "codespan-reporting",
"indexmap", "indexmap",
@@ -345,7 +330,7 @@ dependencies = [
[[package]] [[package]]
name = "cxxbridge-cmd" name = "cxxbridge-cmd"
version = "1.0.170" version = "1.0.189"
dependencies = [ dependencies = [
"clap", "clap",
"codespan-reporting", "codespan-reporting",
@@ -357,24 +342,23 @@ dependencies = [
[[package]] [[package]]
name = "cxxbridge-flags" name = "cxxbridge-flags"
version = "1.0.170" version = "1.0.189"
[[package]] [[package]]
name = "cxxbridge-macro" name = "cxxbridge-macro"
version = "1.0.170" version = "1.0.189"
dependencies = [ dependencies = [
"indexmap", "indexmap",
"proc-macro2", "proc-macro2",
"quote", "quote",
"rustversion",
"syn", "syn",
] ]
[[package]] [[package]]
name = "der" name = "der"
version = "0.8.0-rc.9" version = "0.8.0-rc.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9d8dd2f26c86b27a2a8ea2767ec7f9df7a89516e4794e54ac01ee618dda3aa4" checksum = "02c1d73e9668ea6b6a28172aa55f3ebec38507131ce179051c8033b5c6037653"
dependencies = [ dependencies = [
"const-oid", "const-oid",
"der_derive", "der_derive",
@@ -410,29 +394,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [ dependencies = [
"block-buffer 0.10.4", "block-buffer 0.10.4",
"crypto-common 0.1.6", "crypto-common 0.1.7",
] ]
[[package]] [[package]]
name = "digest" name = "digest"
version = "0.11.0-rc.2" version = "0.11.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6749b668519cd7149ee3d11286a442a8a8bdc3a9d529605f579777bfccc5a4bc" checksum = "ea390c940e465846d64775e55e3115d5dc934acb953de6f6e6360bc232fe2bf7"
dependencies = [ dependencies = [
"block-buffer 0.11.0-rc.5", "block-buffer 0.11.0",
"const-oid", "const-oid",
"crypto-common 0.2.0-rc.4", "crypto-common 0.2.0-rc.5",
"subtle", "subtle",
] ]
[[package]] [[package]]
name = "ecdsa" name = "ecdsa"
version = "0.17.0-rc.7" version = "0.17.0-rc.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ab355ec063f7a110eb627471058093aba00eb7f4e70afbd15e696b79d1077b" checksum = "e914ecb8e11a02f42cc05f6b43675d1e5aa4d446cd207f9f818903a1ab34f19f"
dependencies = [ dependencies = [
"der", "der",
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
"elliptic-curve", "elliptic-curve",
"rfc6979", "rfc6979",
"signature", "signature",
@@ -442,19 +426,21 @@ dependencies = [
[[package]] [[package]]
name = "elliptic-curve" name = "elliptic-curve"
version = "0.14.0-rc.14" version = "0.14.0-rc.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ae7ba52b8bca06caab3e74b7cf8858a2934e6e75d80b03dbe48d2d394a4489c" checksum = "39ecd2903524729de5d0cba7589121744513feadd56d71980cb480c48caceb11"
dependencies = [ dependencies = [
"base16ct", "base16ct",
"crypto-bigint", "crypto-bigint",
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
"ff", "getrandom",
"group",
"hybrid-array", "hybrid-array",
"once_cell",
"pem-rfc7468", "pem-rfc7468",
"pkcs8", "pkcs8",
"rand_core", "rand_core",
"rustcrypto-ff",
"rustcrypto-group",
"sec1", "sec1",
"subtle", "subtle",
"zeroize", "zeroize",
@@ -472,16 +458,6 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784a4df722dc6267a04af36895398f59d21d07dce47232adf31ec0ff2fa45e67" checksum = "784a4df722dc6267a04af36895398f59d21d07dce47232adf31ec0ff2fa45e67"
[[package]]
name = "ff"
version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d42dd26f5790eda47c1a2158ea4120e32c35ddc9a7743c98a292accc01b54ef3"
dependencies = [
"rand_core",
"subtle",
]
[[package]] [[package]]
name = "fiat-crypto" name = "fiat-crypto"
version = "0.3.0" version = "0.3.0"
@@ -490,9 +466,9 @@ checksum = "64cd1e32ddd350061ae6edb1b082d7c54915b5c672c389143b9a63403a109f24"
[[package]] [[package]]
name = "find-msvc-tools" name = "find-msvc-tools"
version = "0.1.2" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
[[package]] [[package]]
name = "flagset" name = "flagset"
@@ -502,9 +478,9 @@ checksum = "b7ac824320a75a52197e8f2d787f6a38b6718bb6897a35142d749af3c0e8f4fe"
[[package]] [[package]]
name = "flate2" name = "flate2"
version = "1.1.2" version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb"
dependencies = [ dependencies = [
"crc32fast", "crc32fast",
"libz-rs-sys", "libz-rs-sys",
@@ -529,57 +505,47 @@ dependencies = [
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.3.3" version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"r-efi", "r-efi",
"wasi", "wasip2",
]
[[package]]
name = "group"
version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ff6a0b2dd4b981b1ae9e3e6830ab146771f3660d31d57bafd9018805a91b0f1"
dependencies = [
"ff",
"rand_core",
"subtle",
] ]
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.16.0" version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]] [[package]]
name = "hmac" name = "hmac"
version = "0.13.0-rc.2" version = "0.13.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3fd4dc94c318c1ede8a2a48341c250d6ddecd3ba793da2820301a9f92417ad9" checksum = "f1c597ac7d6cc8143e30e83ef70915e7f883b18d8bec2e2b2bce47f5bbb06d57"
dependencies = [ dependencies = [
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
] ]
[[package]] [[package]]
name = "hybrid-array" name = "hybrid-array"
version = "0.4.4" version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bad028b20a90afcdb5e28a53392562f1db2bdfa238aa1a978b911461bfffb92" checksum = "f471e0a81b2f90ffc0cb2f951ae04da57de8baa46fa99112b062a5173a5088d0"
dependencies = [ dependencies = [
"subtle",
"typenum", "typenum",
"zeroize", "zeroize",
] ]
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "2.11.4" version = "2.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
dependencies = [ dependencies = [
"equivalent", "equivalent",
"hashbrown", "hashbrown",
@@ -593,9 +559,9 @@ checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.176" version = "0.2.177"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
[[package]] [[package]]
name = "libm" name = "libm"
@@ -636,9 +602,9 @@ dependencies = [
[[package]] [[package]]
name = "lzma-rust2" name = "lzma-rust2"
version = "0.14.2" version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96d3176655c152883186e161a1031192759660878ac04b2f776290b4f4e770a9" checksum = "96d43a6fec3e2f1176fd435ff6f0e337dab57361918f0f51bbc75995151e2ca0"
dependencies = [ dependencies = [
"crc", "crc",
"sha2 0.10.9", "sha2 0.10.9",
@@ -648,14 +614,12 @@ dependencies = [
name = "magisk" name = "magisk"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"argh",
"base", "base",
"bit-set", "bit-set",
"bitflags", "bitflags",
"bytemuck", "bytemuck",
"cxx", "cxx",
"cxx-gen", "cxx-gen",
"derive",
"nix", "nix",
"num-derive", "num-derive",
"num-traits", "num-traits",
@@ -668,7 +632,6 @@ dependencies = [
name = "magiskboot" name = "magiskboot"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"argh",
"base", "base",
"bytemuck", "bytemuck",
"byteorder", "byteorder",
@@ -676,7 +639,7 @@ dependencies = [
"cxx", "cxx",
"cxx-gen", "cxx-gen",
"der", "der",
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
"fdt", "fdt",
"flate2", "flate2",
"lz4", "lz4",
@@ -689,7 +652,7 @@ dependencies = [
"quick-protobuf", "quick-protobuf",
"rsa", "rsa",
"sha1", "sha1",
"sha2 0.11.0-rc.2", "sha2 0.11.0-rc.3",
"size", "size",
"x509-cert", "x509-cert",
"zopfli", "zopfli",
@@ -710,7 +673,6 @@ dependencies = [
name = "magiskpolicy" name = "magiskpolicy"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"argh",
"base", "base",
"cxx", "cxx",
"cxx-gen", "cxx-gen",
@@ -735,6 +697,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [ dependencies = [
"adler2", "adler2",
"simd-adler32",
] ]
[[package]] [[package]]
@@ -780,51 +743,56 @@ dependencies = [
] ]
[[package]] [[package]]
name = "p256" name = "once_cell"
version = "0.14.0-pre.11" version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81b374901df34ee468167a58e2a49e468cb059868479cafebeb804f6b855423d" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "p256"
version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdbe8d6ac92e515ca2179ac331c1e4def09db2217d394683e73dace705c2f0c5"
dependencies = [ dependencies = [
"ecdsa", "ecdsa",
"elliptic-curve", "elliptic-curve",
"primefield", "primefield",
"primeorder", "primeorder",
"sha2 0.11.0-rc.2", "sha2 0.11.0-rc.3",
] ]
[[package]] [[package]]
name = "p384" name = "p384"
version = "0.14.0-pre.11" version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "701032b3730df6b882496d6cee8221de0ce4bc11ddc64e6d89784aa5b8a6de30" checksum = "29c729847b7cf17b9c96f9e6504400f64ae90cb1cdf23610cc1a51f18538ff95"
dependencies = [ dependencies = [
"ecdsa", "ecdsa",
"elliptic-curve", "elliptic-curve",
"fiat-crypto", "fiat-crypto",
"primefield", "primefield",
"primeorder", "primeorder",
"sha2 0.11.0-rc.2", "sha2 0.11.0-rc.3",
] ]
[[package]] [[package]]
name = "p521" name = "p521"
version = "0.14.0-pre.11" version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40ba29c2906eb5c89a8c411c4f11243ee4e5517ee7d71d9a13fedc877a6057b1" checksum = "75296e7cb5d53c8a5083ff26b5707177962cd5851af961a56316e863f1ea757c"
dependencies = [ dependencies = [
"base16ct", "base16ct",
"ecdsa", "ecdsa",
"elliptic-curve", "elliptic-curve",
"primefield", "primefield",
"primeorder", "primeorder",
"rand_core", "sha2 0.11.0-rc.3",
"sha2 0.11.0-rc.2",
] ]
[[package]] [[package]]
name = "pb-rs" name = "pb-rs"
version = "0.10.0" version = "0.10.0"
source = "git+https://github.com/tafia/quick-protobuf.git#54e7d6c5d981c6f7cec2e9a2167c10ed0f9392b4" source = "git+https://github.com/topjohnwu/quick-protobuf.git#980b0fb0ff81f59c0faa6e6db490fb8ecf59c633"
dependencies = [ dependencies = [
"log", "log",
"nom", "nom",
@@ -832,9 +800,9 @@ dependencies = [
[[package]] [[package]]
name = "pem-rfc7468" name = "pem-rfc7468"
version = "1.0.0-rc.3" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8e58fab693c712c0d4e88f8eb3087b6521d060bcaf76aeb20cb192d809115ba" checksum = "a6305423e0e7738146434843d1694d621cce767262b2a86910beab705e4493d9"
dependencies = [ dependencies = [
"base64ct", "base64ct",
] ]
@@ -851,9 +819,9 @@ dependencies = [
[[package]] [[package]]
name = "pkcs8" name = "pkcs8"
version = "0.11.0-rc.7" version = "0.11.0-rc.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93eac55f10aceed84769df670ea4a32d2ffad7399400d41ee1c13b1cd8e1b478" checksum = "77089aec8290d0b7bb01b671b091095cf1937670725af4fd73d47249f03b12c0"
dependencies = [ dependencies = [
"der", "der",
"spki", "spki",
@@ -861,31 +829,31 @@ dependencies = [
[[package]] [[package]]
name = "primefield" name = "primefield"
version = "0.14.0-pre.6" version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7fcd4a163053332fd93f39b81c133e96a98567660981654579c90a99062fbf5" checksum = "1c3ad342f52c70a953d95acb09a55450fdc07c2214283b81536c3f83f714568e"
dependencies = [ dependencies = [
"crypto-bigint", "crypto-bigint",
"ff",
"rand_core", "rand_core",
"rustcrypto-ff",
"subtle", "subtle",
"zeroize", "zeroize",
] ]
[[package]] [[package]]
name = "primeorder" name = "primeorder"
version = "0.14.0-pre.9" version = "0.14.0-rc.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c36e8766fcd270fa9c665b9dc364f570695f5a59240949441b077a397f15b74" checksum = "f5e84a5f07d7a7c85f299e17753a98d8a09f10799894a637c9ce08d834b6ca02"
dependencies = [ dependencies = [
"elliptic-curve", "elliptic-curve",
] ]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.101" version = "1.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
@@ -893,16 +861,16 @@ dependencies = [
[[package]] [[package]]
name = "quick-protobuf" name = "quick-protobuf"
version = "0.8.1" version = "0.8.1"
source = "git+https://github.com/tafia/quick-protobuf.git#54e7d6c5d981c6f7cec2e9a2167c10ed0f9392b4" source = "git+https://github.com/topjohnwu/quick-protobuf.git#980b0fb0ff81f59c0faa6e6db490fb8ecf59c633"
dependencies = [ dependencies = [
"byteorder", "byteorder",
] ]
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.40" version = "1.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
] ]
@@ -915,18 +883,15 @@ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]] [[package]]
name = "rand_core" name = "rand_core"
version = "0.9.3" version = "0.10.0-rc-2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" checksum = "104a23e4e8b77312a823b6b5613edbac78397e2f34320bc7ac4277013ec4478e"
dependencies = [
"getrandom",
]
[[package]] [[package]]
name = "rfc6979" name = "rfc6979"
version = "0.5.0-rc.1" version = "0.5.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d369f9c4f79388704648e7bcb92749c0d6cf4397039293a9b747694fa4fb4bae" checksum = "63b8e2323084c987a72875b2fd682b7307d5cf14d47e3875bb5e89948e8809d4"
dependencies = [ dependencies = [
"hmac", "hmac",
"subtle", "subtle",
@@ -934,18 +899,18 @@ dependencies = [
[[package]] [[package]]
name = "rsa" name = "rsa"
version = "0.10.0-rc.8" version = "0.10.0-rc.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd8c26d4f6d0d2689c1cc822ac369edb64b4a090bc53141ae563bfa19c797300" checksum = "e499c52862d75a86c0024cc99dcb6d7127d15af3beae7b03573d62fab7ade08a"
dependencies = [ dependencies = [
"const-oid", "const-oid",
"crypto-bigint", "crypto-bigint",
"crypto-primes", "crypto-primes",
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
"pkcs1", "pkcs1",
"pkcs8", "pkcs8",
"rand_core", "rand_core",
"sha2 0.11.0-rc.2", "sha2 0.11.0-rc.3",
"signature", "signature",
"spki", "spki",
"subtle", "subtle",
@@ -953,16 +918,31 @@ dependencies = [
] ]
[[package]] [[package]]
name = "rust-fuzzy-search" name = "rustcrypto-ff"
version = "0.1.1" version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a157657054ffe556d8858504af8a672a054a6e0bd9e8ee531059100c0fa11bb2" checksum = "aa9cd37111549306f79b09aa2618e15b1e8241b7178c286821e3dd71579db4db"
dependencies = [
"rand_core",
"subtle",
]
[[package]] [[package]]
name = "rustversion" name = "rustcrypto-group"
version = "1.0.22" version = "0.14.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" checksum = "e394cd734b5f97dfc3484fa42aad7acd912961c2bcd96c99aa05b3d6cab7cafd"
dependencies = [
"rand_core",
"rustcrypto-ff",
"subtle",
]
[[package]]
name = "scratch"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d68f2ec51b097e4c1a75b681a8bec621909b5e91f15bb7b840c4f2f7b01148b2"
[[package]] [[package]]
name = "sec1" name = "sec1"
@@ -979,9 +959,9 @@ dependencies = [
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.227" version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80ece43fc6fbed4eb5392ab50c07334d3e577cbf40997ee896fe7af40bba4245" checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [ dependencies = [
"serde_core", "serde_core",
"serde_derive", "serde_derive",
@@ -989,18 +969,18 @@ dependencies = [
[[package]] [[package]]
name = "serde_core" name = "serde_core"
version = "1.0.227" version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a576275b607a2c86ea29e410193df32bc680303c82f31e275bbfcafe8b33be5" checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.227" version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51e694923b8824cf0e9b382adf0f60d4e05f348f357b38833a3fa5ed7c2ede04" checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1019,13 +999,13 @@ dependencies = [
[[package]] [[package]]
name = "sha1" name = "sha1"
version = "0.11.0-rc.2" version = "0.11.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5e046edf639aa2e7afb285589e5405de2ef7e61d4b0ac1e30256e3eab911af9" checksum = "aa1ae819b9870cadc959a052363de870944a1646932d274a4e270f64bf79e5ef"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"cpufeatures", "cpufeatures",
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
] ]
[[package]] [[package]]
@@ -1041,13 +1021,13 @@ dependencies = [
[[package]] [[package]]
name = "sha2" name = "sha2"
version = "0.11.0-rc.2" version = "0.11.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1e3878ab0f98e35b2df35fe53201d088299b41a6bb63e3e34dada2ac4abd924" checksum = "19d43dc0354d88b791216bb5c1bfbb60c0814460cc653ae0ebd71f286d0bd927"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"cpufeatures", "cpufeatures",
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
] ]
[[package]] [[package]]
@@ -1058,11 +1038,11 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]] [[package]]
name = "signature" name = "signature"
version = "3.0.0-rc.4" version = "3.0.0-rc.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc280a6ff65c79fbd6622f64d7127f32b85563bca8c53cd2e9141d6744a9056d" checksum = "2a0251c9d6468f4ba853b6352b190fb7c1e405087779917c238445eb03993826"
dependencies = [ dependencies = [
"digest 0.11.0-rc.2", "digest 0.11.0-rc.4",
"rand_core", "rand_core",
] ]
@@ -1102,9 +1082,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.106" version = "2.0.111"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1122,18 +1102,18 @@ dependencies = [
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.16" version = "2.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
dependencies = [ dependencies = [
"thiserror-impl", "thiserror-impl",
] ]
[[package]] [[package]]
name = "thiserror-impl" name = "thiserror-impl"
version = "2.0.16" version = "2.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1163,21 +1143,21 @@ dependencies = [
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.18.0" version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
[[package]] [[package]]
name = "unicode-ident" name = "unicode-ident"
version = "1.0.19" version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
[[package]] [[package]]
name = "unicode-width" name = "unicode-width"
version = "0.2.1" version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
@@ -1191,15 +1171,6 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "wasi"
version = "0.14.7+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
dependencies = [
"wasip2",
]
[[package]] [[package]]
name = "wasip2" name = "wasip2"
version = "1.0.1+wasi-0.2.4" version = "1.0.1+wasi-0.2.4"
@@ -1220,15 +1191,15 @@ dependencies = [
[[package]] [[package]]
name = "windows-link" name = "windows-link"
version = "0.2.0" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.61.1" version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f109e41dd4a3c848907eb83d5a42ea98b3769495597450cf6d153507b166f0f" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [ dependencies = [
"windows-link", "windows-link",
] ]
@@ -1253,9 +1224,9 @@ dependencies = [
[[package]] [[package]]
name = "zeroize" name = "zeroize"
version = "1.8.1" version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
dependencies = [ dependencies = [
"zeroize_derive", "zeroize_derive",
] ]
@@ -1279,9 +1250,9 @@ checksum = "2f06ae92f42f5e5c42443fd094f245eb656abf56dd7cce9b8b263236565e00f2"
[[package]] [[package]]
name = "zopfli" name = "zopfli"
version = "0.8.2" version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7" checksum = "f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249"
dependencies = [ dependencies = [
"bumpalo", "bumpalo",
"crc32fast", "crc32fast",

View File

@@ -1,6 +1,6 @@
 [workspace]
 exclude = ["external"]
-members = ["base", "boot", "core", "core/derive", "init", "sepolicy"]
+members = ["base", "base/derive", "boot", "core", "init", "sepolicy"]
 resolver = "2"

 [workspace.package]
@@ -8,53 +8,56 @@ version = "0.0.0"
edition = "2024" edition = "2024"
[workspace.dependencies] [workspace.dependencies]
base = { path = "base" }
derive = { path = "base/derive" }
magiskpolicy = { path = "sepolicy" }
cxx = { path = "external/cxx-rs" } cxx = { path = "external/cxx-rs" }
cxx-gen = { path = "external/cxx-rs/gen/lib" } cxx-gen = { path = "external/cxx-rs/gen/lib" }
libc = "0.2.176" libc = "0.2.177"
cfg-if = "1.0.3" cfg-if = "1.0.4"
num-traits = "0.2.19" num-traits = "0.2.19"
num-derive = "0.4.2" num-derive = "0.4.2"
thiserror = "2.0.16" thiserror = "2.0.17"
byteorder = "1.5.0" byteorder = "1.5.0"
size = "0.5.0" size = "0.5.0"
bytemuck = "1.23.2" bytemuck = "1.24.0"
fdt = "0.1.5" fdt = "0.1.5"
const_format = "0.2.34" const_format = "0.2.35"
bit-set = "0.8.0" bit-set = "0.8.0"
syn = "2.0.106" syn = "2.0.111"
quote = "1.0.40" quote = "1.0.42"
proc-macro2 = "1.0.101" proc-macro2 = "1.0.103"
argh = { version = "0.1.13", default-features = false }
pb-rs = { version = "0.10.0", default-features = false } pb-rs = { version = "0.10.0", default-features = false }
quick-protobuf = "0.8.1" quick-protobuf = "0.8.1"
flate2 = { version = "1.1.2", default-features = false } flate2 = { version = "1.1.5", default-features = false }
bzip2 = "0.6.0" bzip2 = "0.6.1"
zopfli = "0.8.2" zopfli = "0.8.3"
lz4 = "1.28.1" lz4 = "1.28.1"
lzma-rust2 = { version = "0.14.2", default-features = false } lzma-rust2 = { version = "0.15.2", default-features = false }
nix = "0.30.1" nix = "0.30.1"
bitflags = "2.9.4" bitflags = "2.10.0"
# Rust crypto crates are tied together # Rust crypto crates are tied together
sha1 = "0.11.0-rc.2" sha1 = "0.11.0-rc.3"
sha2 = "0.11.0-rc.2" sha2 = "0.11.0-rc.3"
digest = "0.11.0-rc.2" digest = "0.11.0-rc.4"
p256 = "0.14.0-pre.11" p256 = "0.14.0-rc.1"
p384 = "0.14.0-pre.11" p384 = "0.14.0-rc.1"
p521 = "0.14.0-pre.11" p521 = "0.14.0-rc.1"
rsa = "0.10.0-rc.8" rsa = "0.10.0-rc.10"
x509-cert = "0.3.0-rc.2" x509-cert = "0.3.0-rc.2"
der = "0.8.0-rc.9" der = "0.8.0-rc.10"
[patch.crates-io] [patch.crates-io]
pb-rs = { git = "https://github.com/tafia/quick-protobuf.git" } pb-rs = { git = "https://github.com/topjohnwu/quick-protobuf.git" }
quick-protobuf = { git = "https://github.com/tafia/quick-protobuf.git" } quick-protobuf = { git = "https://github.com/topjohnwu/quick-protobuf.git" }
lz4-sys = { path = "external/lz4-sys" } lz4-sys = { path = "external/lz4-sys" }
[profile.dev] [profile.dev]
opt-level = "z" opt-level = "z"
lto = "thin" lto = "thin"
panic = "abort" panic = "abort"
debug = "none"
[profile.release] [profile.release]
opt-level = "z" opt-level = "z"

View File

@@ -13,11 +13,11 @@ selinux = []
 cxx-gen = { workspace = true }

 [dependencies]
+derive = { workspace = true }
 cxx = { workspace = true }
 libc = { workspace = true }
 cfg-if = { workspace = true }
 thiserror = { workspace = true }
-argh = { workspace = true }
 bytemuck = { workspace = true }
 num-traits = { workspace = true }
 num-derive = { workspace = true }

native/src/base/argh.rs (new file, 1226 lines)

File diff suppressed because it is too large.

View File

@@ -0,0 +1,185 @@
// Copyright (c) 2020 Google LLC All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use proc_macro2::{Span, TokenStream};
use quote::ToTokens;
use std::cell::RefCell;
/// A type for collecting procedural macro errors.
#[derive(Default)]
pub struct Errors {
errors: RefCell<Vec<syn::Error>>,
}
/// Produce functions to expect particular literals in `syn::Expr`
macro_rules! expect_lit_fn {
($(($fn_name:ident, $syn_type:ident, $variant:ident, $lit_name:literal),)*) => {
$(
pub fn $fn_name<'a>(&self, e: &'a syn::Expr) -> Option<&'a syn::$syn_type> {
if let syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::$variant(inner), .. }) = e {
Some(inner)
} else {
self.unexpected_lit($lit_name, e);
None
}
}
)*
}
}
/// Produce functions to expect particular variants of `syn::Meta`
macro_rules! expect_meta_fn {
($(($fn_name:ident, $syn_type:ident, $variant:ident, $meta_name:literal),)*) => {
$(
pub fn $fn_name<'a>(&self, meta: &'a syn::Meta) -> Option<&'a syn::$syn_type> {
if let syn::Meta::$variant(inner) = meta {
Some(inner)
} else {
self.unexpected_meta($meta_name, meta);
None
}
}
)*
}
}
impl Errors {
/// Issue an error like:
///
/// Duplicate foo attribute
/// First foo attribute here
pub fn duplicate_attrs(
&self,
attr_kind: &str,
first: &impl syn::spanned::Spanned,
second: &impl syn::spanned::Spanned,
) {
self.duplicate_attrs_inner(attr_kind, first.span(), second.span())
}
fn duplicate_attrs_inner(&self, attr_kind: &str, first: Span, second: Span) {
self.err_span(second, &["Duplicate ", attr_kind, " attribute"].concat());
self.err_span(first, &["First ", attr_kind, " attribute here"].concat());
}
expect_lit_fn![
(expect_lit_str, LitStr, Str, "string"),
(expect_lit_char, LitChar, Char, "character"),
(expect_lit_int, LitInt, Int, "integer"),
];
expect_meta_fn![
(expect_meta_word, Path, Path, "path"),
(expect_meta_list, MetaList, List, "list"),
(
expect_meta_name_value,
MetaNameValue,
NameValue,
"name-value pair"
),
];
fn unexpected_lit(&self, expected: &str, found: &syn::Expr) {
fn lit_kind(lit: &syn::Lit) -> &'static str {
use syn::Lit::{Bool, Byte, ByteStr, Char, Float, Int, Str, Verbatim};
match lit {
Str(_) => "string",
ByteStr(_) => "bytestring",
Byte(_) => "byte",
Char(_) => "character",
Int(_) => "integer",
Float(_) => "float",
Bool(_) => "boolean",
Verbatim(_) => "unknown (possibly extra-large integer)",
_ => "unknown literal kind",
}
}
if let syn::Expr::Lit(syn::ExprLit { lit, .. }) = found {
self.err(
found,
&[
"Expected ",
expected,
" literal, found ",
lit_kind(lit),
" literal",
]
.concat(),
)
} else {
self.err(
found,
&[
"Expected ",
expected,
" literal, found non-literal expression.",
]
.concat(),
)
}
}
fn unexpected_meta(&self, expected: &str, found: &syn::Meta) {
fn meta_kind(meta: &syn::Meta) -> &'static str {
use syn::Meta::{List, NameValue, Path};
match meta {
Path(_) => "path",
List(_) => "list",
NameValue(_) => "name-value pair",
}
}
self.err(
found,
&[
"Expected ",
expected,
" attribute, found ",
meta_kind(found),
" attribute",
]
.concat(),
)
}
/// Issue an error relating to a particular `Spanned` structure.
pub fn err(&self, spanned: &impl syn::spanned::Spanned, msg: &str) {
self.err_span(spanned.span(), msg);
}
/// Issue an error relating to a particular `Span`.
pub fn err_span(&self, span: Span, msg: &str) {
self.push(syn::Error::new(span, msg));
}
/// Issue an error spanning over the given syntax tree node.
pub fn err_span_tokens<T: ToTokens>(&self, tokens: T, msg: &str) {
self.push(syn::Error::new_spanned(tokens, msg));
}
/// Push a `syn::Error` onto the list of errors to issue.
pub fn push(&self, err: syn::Error) {
self.errors.borrow_mut().push(err);
}
/// Convert a `syn::Result` to an `Option`, logging the error if present.
pub fn ok<T>(&self, r: syn::Result<T>) -> Option<T> {
match r {
Ok(v) => Some(v),
Err(e) => {
self.push(e);
None
}
}
}
}
impl ToTokens for Errors {
/// Convert the errors into tokens that, when emitted, will cause
/// the user of the macro to receive compiler errors.
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.extend(self.errors.borrow().iter().map(|e| e.to_compile_error()));
}
}
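The collector above is intended to be drained once at the end of expansion; the following is a minimal sketch (not part of the vendored file; the helper names `reject_unnamed_fields` and `finish` are made up) of how a derive typically records several diagnostics and then emits them all as `compile_error!` invocations:

// Sketch only: assumes it sits next to the `Errors` type defined above.
use proc_macro2::TokenStream;
use quote::ToTokens;

fn reject_unnamed_fields(errors: &Errors, fields: &syn::Fields) {
    for field in fields.iter() {
        if field.ident.is_none() {
            // Recording an error does not abort expansion; it is just queued.
            errors.err(field, "expected a named field");
        }
    }
}

fn finish(errors: &Errors, mut generated: TokenStream) -> TokenStream {
    // Appends one `compile_error!(..)` invocation per recorded error.
    errors.to_tokens(&mut generated);
    generated
}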

View File

@@ -0,0 +1,912 @@
// Copyright (c) 2020 Google LLC All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use syn::ext::IdentExt as _;
/// Implementation of the `FromArgs` and `argh(...)` derive attributes.
///
/// For more thorough documentation, see the `argh` crate itself.
extern crate proc_macro;
use errors::Errors;
use parse_attrs::{FieldAttrs, FieldKind, TypeAttrs, check_long_name};
use proc_macro2::{Span, TokenStream};
use quote::{ToTokens, quote, quote_spanned};
use std::collections::HashMap;
use std::str::FromStr;
use syn::spanned::Spanned;
use syn::{GenericArgument, LitStr, PathArguments, Type};
mod errors;
mod parse_attrs;
/// Transform the input into a token stream containing any generated implementations,
/// as well as all errors that occurred.
pub(crate) fn impl_from_args(input: &syn::DeriveInput) -> TokenStream {
let errors = &Errors::default();
let type_attrs = &TypeAttrs::parse(errors, input);
let mut output_tokens = match &input.data {
syn::Data::Struct(ds) => {
impl_from_args_struct(errors, &input.ident, type_attrs, &input.generics, ds)
}
syn::Data::Enum(de) => {
impl_from_args_enum(errors, &input.ident, type_attrs, &input.generics, de)
}
syn::Data::Union(_) => {
errors.err(input, "`#[derive(FromArgs)]` cannot be applied to unions");
TokenStream::new()
}
};
errors.to_tokens(&mut output_tokens);
output_tokens
}
/// The kind of optionality a parameter has.
enum Optionality {
None,
Defaulted(TokenStream),
Optional,
Repeating,
DefaultedRepeating(TokenStream),
}
impl PartialEq<Optionality> for Optionality {
fn eq(&self, other: &Optionality) -> bool {
use Optionality::*;
// NB: (Defaulted, Defaulted) can't contain the same token streams
matches!((self, other), (Optional, Optional) | (Repeating, Repeating))
}
}
impl Optionality {
/// Whether or not this is `Optionality::None`
fn is_required(&self) -> bool {
matches!(self, Optionality::None)
}
}
/// A field of a `#![derive(FromArgs)]` struct with attributes and some other
/// notable metadata appended.
struct StructField<'a> {
/// The original parsed field
field: &'a syn::Field,
/// The parsed attributes of the field
attrs: FieldAttrs,
/// The field name. This is contained optionally inside `field`,
/// but is duplicated non-optionally here to indicate that all field that
/// have reached this point must have a field name, and it no longer
/// needs to be unwrapped.
name: &'a syn::Ident,
/// Similar to `name` above, this is contained optionally inside `FieldAttrs`,
/// but here is fully present to indicate that we only have to consider fields
/// with a valid `kind` at this point.
kind: FieldKind,
// If `field.ty` is `Vec<T>` or `Option<T>`, this is `T`, otherwise it's `&field.ty`.
// This is used to enable consistent parsing code between optional and non-optional
// keyed and subcommand fields.
ty_without_wrapper: &'a syn::Type,
// Whether the field represents an optional value, such as an `Option` subcommand field
// or an `Option` or `Vec` keyed argument, or if it has a `default`.
optionality: Optionality,
// The `--`-prefixed name of the option, if one exists.
long_name: Option<String>,
}
impl<'a> StructField<'a> {
/// Attempts to parse a field of a `#[derive(FromArgs)]` struct, pulling out the
/// fields required for code generation.
fn new(errors: &Errors, field: &'a syn::Field, attrs: FieldAttrs) -> Option<Self> {
let name = field.ident.as_ref().expect("missing ident for named field");
// Ensure that one "kind" is present (switch, option, subcommand, positional)
let kind = if let Some(field_type) = &attrs.field_type {
field_type.kind
} else {
errors.err(
field,
concat!(
"Missing `argh` field kind attribute.\n",
"Expected one of: `switch`, `option`, `remaining`, `subcommand`, `positional`",
),
);
return None;
};
// Parse out whether a field is optional (`Option` or `Vec`).
let optionality;
let ty_without_wrapper;
match kind {
FieldKind::Switch => {
if !ty_expect_switch(errors, &field.ty) {
return None;
}
optionality = Optionality::Optional;
ty_without_wrapper = &field.ty;
}
FieldKind::Option | FieldKind::Positional => {
if let Some(default) = &attrs.default {
let tokens = match TokenStream::from_str(&default.value()) {
Ok(tokens) => tokens,
Err(_) => {
errors.err(&default, "Invalid tokens: unable to lex `default` value");
return None;
}
};
// Set the span of the generated tokens to the string literal
let tokens: TokenStream = tokens
.into_iter()
.map(|mut tree| {
tree.set_span(default.span());
tree
})
.collect();
let inner = if let Some(x) = ty_inner(&["Vec"], &field.ty) {
optionality = Optionality::DefaultedRepeating(tokens);
x
} else {
optionality = Optionality::Defaulted(tokens);
&field.ty
};
ty_without_wrapper = inner;
} else {
let mut inner = None;
optionality = if let Some(x) = ty_inner(&["Option"], &field.ty) {
inner = Some(x);
Optionality::Optional
} else if let Some(x) = ty_inner(&["Vec"], &field.ty) {
inner = Some(x);
Optionality::Repeating
} else {
Optionality::None
};
ty_without_wrapper = inner.unwrap_or(&field.ty);
}
}
FieldKind::SubCommand => {
let inner = ty_inner(&["Option"], &field.ty);
optionality = if inner.is_some() {
Optionality::Optional
} else {
Optionality::None
};
ty_without_wrapper = inner.unwrap_or(&field.ty);
}
}
// Determine the "long" name of options and switches.
// Defaults to the kebab-cased field name if `#[argh(long = "...")]` is omitted.
// If `#[argh(long = none)]` is explicitly set, no long name will be set.
let long_name = match kind {
FieldKind::Switch | FieldKind::Option => {
let long_name = match &attrs.long {
None => {
let kebab_name = to_kebab_case(&name.unraw().to_string());
check_long_name(errors, name, &kebab_name);
Some(kebab_name)
}
Some(None) => None,
Some(Some(long)) => Some(long.value()),
}
.map(|long_name| {
if long_name == "help" {
errors.err(field, "Custom `--help` flags are not supported.");
}
format!("--{}", long_name)
});
if let (None, None) = (&attrs.short, &long_name) {
errors.err(field, "At least one of `short` or `long` has to be set.")
};
long_name
}
FieldKind::SubCommand | FieldKind::Positional => None,
};
Some(StructField {
field,
attrs,
kind,
optionality,
ty_without_wrapper,
name,
long_name,
})
}
pub(crate) fn positional_arg_name(&self) -> String {
self.attrs
.arg_name
.as_ref()
.map(LitStr::value)
.unwrap_or_else(|| self.name.to_string().trim_matches('_').to_owned())
}
fn option_arg_name(&self) -> String {
match (&self.attrs.short, &self.long_name) {
(None, None) => unreachable!("short and long cannot both be None"),
(Some(short), None) => format!("-{}", short.value()),
(None, Some(long)) => long.clone(),
(Some(short), Some(long)) => format!("-{},{long}", short.value()),
}
}
}
fn to_kebab_case(s: &str) -> String {
let words = s.split('_').filter(|word| !word.is_empty());
let mut res = String::with_capacity(s.len());
for word in words {
if !res.is_empty() {
res.push('-')
}
res.push_str(word)
}
res
}
/// Implements `FromArgs` and `TopLevelCommand` or `SubCommand` for a `#[derive(FromArgs)]` struct.
fn impl_from_args_struct(
errors: &Errors,
name: &syn::Ident,
type_attrs: &TypeAttrs,
generic_args: &syn::Generics,
ds: &syn::DataStruct,
) -> TokenStream {
let fields = match &ds.fields {
syn::Fields::Named(fields) => fields,
syn::Fields::Unnamed(_) => {
errors.err(
&ds.struct_token,
"`#![derive(FromArgs)]` is not currently supported on tuple structs",
);
return TokenStream::new();
}
syn::Fields::Unit => {
errors.err(
&ds.struct_token,
"#![derive(FromArgs)]` cannot be applied to unit structs",
);
return TokenStream::new();
}
};
let fields: Vec<_> = fields
.named
.iter()
.filter_map(|field| {
let attrs = FieldAttrs::parse(errors, field);
StructField::new(errors, field, attrs)
})
.collect();
ensure_unique_names(errors, &fields);
ensure_only_trailing_positionals_are_optional(errors, &fields);
let impl_span = Span::call_site();
let from_args_method = impl_from_args_struct_from_args(errors, type_attrs, &fields);
let top_or_sub_cmd_impl = top_or_sub_cmd_impl(errors, name, type_attrs, generic_args);
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
let trait_impl = quote_spanned! { impl_span =>
#[automatically_derived]
impl #impl_generics argh::FromArgs for #name #ty_generics #where_clause {
#from_args_method
}
#top_or_sub_cmd_impl
};
trait_impl
}
fn impl_from_args_struct_from_args<'a>(
errors: &Errors,
type_attrs: &TypeAttrs,
fields: &'a [StructField<'a>],
) -> TokenStream {
let init_fields = declare_local_storage_for_from_args_fields(fields);
let unwrap_fields = unwrap_from_args_fields(fields);
let positional_fields: Vec<&StructField<'_>> = fields
.iter()
.filter(|field| field.kind == FieldKind::Positional)
.collect();
let positional_field_idents = positional_fields.iter().map(|field| &field.field.ident);
let positional_field_names = positional_fields.iter().map(|field| field.name.to_string());
let last_positional_is_repeating = positional_fields
.last()
.map(|field| field.optionality == Optionality::Repeating)
.unwrap_or(false);
let last_positional_is_greedy = positional_fields
.last()
.map(|field| field.kind == FieldKind::Positional && field.attrs.greedy.is_some())
.unwrap_or(false);
let flag_output_table = fields.iter().filter_map(|field| {
let field_name = &field.field.ident;
match field.kind {
FieldKind::Option => Some(quote! { argh::ParseStructOption::Value(&mut #field_name) }),
FieldKind::Switch => Some(quote! { argh::ParseStructOption::Flag(&mut #field_name) }),
FieldKind::SubCommand | FieldKind::Positional => None,
}
});
let flag_str_to_output_table_map = flag_str_to_output_table_map_entries(fields);
let mut subcommands_iter = fields
.iter()
.filter(|field| field.kind == FieldKind::SubCommand)
.fuse();
let subcommand: Option<&StructField<'_>> = subcommands_iter.next();
for dup_subcommand in subcommands_iter {
errors.duplicate_attrs(
"subcommand",
subcommand.unwrap().field,
dup_subcommand.field,
);
}
let impl_span = Span::call_site();
let missing_requirements_ident = syn::Ident::new("__missing_requirements", impl_span);
let append_missing_requirements =
append_missing_requirements(&missing_requirements_ident, fields);
let parse_subcommands = if let Some(subcommand) = subcommand {
let name = subcommand.name;
let ty = subcommand.ty_without_wrapper;
quote_spanned! { impl_span =>
Some(argh::ParseStructSubCommand {
subcommands: <#ty as argh::SubCommands>::COMMANDS,
dynamic_subcommands: &<#ty as argh::SubCommands>::dynamic_commands(),
parse_func: &mut |__command, __remaining_args| {
#name = Some(<#ty as argh::FromArgs>::from_args(__command, __remaining_args)?);
Ok(())
},
})
}
} else {
quote_spanned! { impl_span => None }
};
let help_triggers = get_help_triggers(type_attrs);
let method_impl = quote_spanned! { impl_span =>
fn from_args(__cmd_name: &[&str], __args: &[&str])
-> std::result::Result<Self, argh::EarlyExit>
{
#![allow(clippy::unwrap_in_result)]
#( #init_fields )*
argh::parse_struct_args(
__cmd_name,
__args,
argh::ParseStructOptions {
arg_to_slot: &[ #( #flag_str_to_output_table_map ,)* ],
slots: &mut [ #( #flag_output_table, )* ],
help_triggers: &[ #( #help_triggers ),* ],
},
argh::ParseStructPositionals {
positionals: &mut [
#(
argh::ParseStructPositional {
name: #positional_field_names,
slot: &mut #positional_field_idents as &mut dyn argh::ParseValueSlot,
},
)*
],
last_is_repeating: #last_positional_is_repeating,
last_is_greedy: #last_positional_is_greedy,
},
#parse_subcommands,
)?;
let mut #missing_requirements_ident = argh::MissingRequirements::default();
#(
#append_missing_requirements
)*
#missing_requirements_ident.err_on_any()?;
Ok(Self {
#( #unwrap_fields, )*
})
}
};
method_impl
}
/// Get the help triggers from `type_attrs.help_triggers` as a [`Vec<String>`].
///
/// Defaults to `vec!["-h", "--help"]` if `type_attrs.help_triggers` is `None`.
fn get_help_triggers(type_attrs: &TypeAttrs) -> Vec<String> {
if type_attrs.is_subcommand.is_some() {
// Subcommands should never have any help triggers
Vec::new()
} else {
type_attrs.help_triggers.as_ref().map_or_else(
|| vec!["-h".to_string(), "--help".to_string()],
|s| {
s.iter()
.filter_map(|s| {
let trigger = s.value();
let trigger_trimmed = trigger.trim().to_owned();
if trigger_trimmed.is_empty() {
None
} else {
Some(trigger_trimmed)
}
})
.collect::<Vec<_>>()
},
)
}
}
/// Ensures that only trailing positional args are non-required.
fn ensure_only_trailing_positionals_are_optional(errors: &Errors, fields: &[StructField<'_>]) {
let mut first_non_required_span = None;
for field in fields {
if field.kind == FieldKind::Positional {
if let Some(first) = first_non_required_span
&& field.optionality.is_required()
{
errors.err_span(
first,
"Only trailing positional arguments may be `Option`, `Vec`, or defaulted.",
);
errors.err(
&field.field,
"Later non-optional positional argument declared here.",
);
return;
}
if !field.optionality.is_required() {
first_non_required_span = Some(field.field.span());
}
}
}
}
/// Ensures that only one short or long name is used.
fn ensure_unique_names(errors: &Errors, fields: &[StructField<'_>]) {
let mut seen_short_names = HashMap::new();
let mut seen_long_names = HashMap::new();
for field in fields {
if let Some(short_name) = &field.attrs.short {
let short_name = short_name.value();
if let Some(first_use_field) = seen_short_names.get(&short_name) {
errors.err_span_tokens(
first_use_field,
&format!(
"The short name of \"-{}\" was already used here.",
short_name
),
);
errors.err_span_tokens(field.field, "Later usage here.");
}
seen_short_names.insert(short_name, &field.field);
}
if let Some(long_name) = &field.long_name {
if let Some(first_use_field) = seen_long_names.get(&long_name) {
errors.err_span_tokens(
*first_use_field,
&format!("The long name of \"{}\" was already used here.", long_name),
);
errors.err_span_tokens(field.field, "Later usage here.");
}
seen_long_names.insert(long_name, field.field);
}
}
}
/// Implement `argh::TopLevelCommand` or `argh::SubCommand` as appropriate.
fn top_or_sub_cmd_impl(
errors: &Errors,
name: &syn::Ident,
type_attrs: &TypeAttrs,
generic_args: &syn::Generics,
) -> TokenStream {
let description = String::new();
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
if type_attrs.is_subcommand.is_none() {
// Not a subcommand
quote! {
#[automatically_derived]
impl #impl_generics argh::TopLevelCommand for #name #ty_generics #where_clause {}
}
} else {
let empty_str = syn::LitStr::new("", Span::call_site());
let subcommand_name = type_attrs.name.as_ref().unwrap_or_else(|| {
errors.err(
name,
"`#[argh(name = \"...\")]` attribute is required for subcommands",
);
&empty_str
});
quote! {
#[automatically_derived]
impl #impl_generics argh::SubCommand for #name #ty_generics #where_clause {
const COMMAND: &'static argh::CommandInfo = &argh::CommandInfo {
name: #subcommand_name,
description: #description,
};
}
}
}
}
/// Declare local slots to store each field in during parsing.
///
/// Most fields are stored in `Option<FieldType>` locals.
/// `argh(option)` fields are stored in a `ParseValueSlotTy` along with a
/// function that knows how to decode the appropriate value.
fn declare_local_storage_for_from_args_fields<'a>(
fields: &'a [StructField<'a>],
) -> impl Iterator<Item = TokenStream> + 'a {
fields.iter().map(|field| {
let field_name = &field.field.ident;
let field_type = &field.ty_without_wrapper;
// Wrap field types in `Option` if they aren't already `Option` or `Vec`-wrapped.
let field_slot_type = match field.optionality {
Optionality::Optional | Optionality::Repeating => (&field.field.ty).into_token_stream(),
Optionality::None | Optionality::Defaulted(_) => {
quote! { std::option::Option<#field_type> }
}
Optionality::DefaultedRepeating(_) => {
quote! { std::option::Option<std::vec::Vec<#field_type>> }
}
};
match field.kind {
FieldKind::Option | FieldKind::Positional => {
let from_str_fn = match &field.attrs.from_str_fn {
Some(from_str_fn) => from_str_fn.into_token_stream(),
None => {
quote! {
<#field_type as argh::FromArgValue>::from_arg_value
}
}
};
quote! {
let mut #field_name: argh::ParseValueSlotTy<#field_slot_type, #field_type>
= argh::ParseValueSlotTy {
slot: std::default::Default::default(),
parse_func: |_, value| { #from_str_fn(value) },
};
}
}
FieldKind::SubCommand => {
quote! { let mut #field_name: #field_slot_type = None; }
}
FieldKind::Switch => {
quote! { let mut #field_name: #field_slot_type = argh::Flag::default(); }
}
}
})
}
/// Unwrap non-optional fields and take options out of their tuple slots.
fn unwrap_from_args_fields<'a>(
fields: &'a [StructField<'a>],
) -> impl Iterator<Item = TokenStream> + 'a {
fields.iter().map(|field| {
let field_name = field.name;
match field.kind {
FieldKind::Option | FieldKind::Positional => match &field.optionality {
Optionality::None => quote! {
#field_name: #field_name.slot.unwrap()
},
Optionality::Optional | Optionality::Repeating => {
quote! { #field_name: #field_name.slot }
}
Optionality::Defaulted(tokens) | Optionality::DefaultedRepeating(tokens) => {
quote! {
#field_name: #field_name.slot.unwrap_or_else(|| #tokens)
}
}
},
FieldKind::Switch => field_name.into_token_stream(),
FieldKind::SubCommand => match field.optionality {
Optionality::None => quote! { #field_name: #field_name.unwrap() },
Optionality::Optional | Optionality::Repeating => field_name.into_token_stream(),
Optionality::Defaulted(_) | Optionality::DefaultedRepeating(_) => unreachable!(),
},
}
})
}
/// Entries of tokens like `("--some-flag-key", 5)` that map from a flag key string
/// to an index in the output table.
fn flag_str_to_output_table_map_entries<'a>(fields: &'a [StructField<'a>]) -> Vec<TokenStream> {
let mut flag_str_to_output_table_map = vec![];
for (i, field) in fields.iter().enumerate() {
if let Some(short) = &field.attrs.short {
let short = format!("-{}", short.value());
flag_str_to_output_table_map.push(quote! { (#short, #i) });
}
if let Some(long) = &field.long_name {
flag_str_to_output_table_map.push(quote! { (#long, #i) });
}
}
flag_str_to_output_table_map
}
/// For each non-optional field, add an entry to the `argh::MissingRequirements`.
fn append_missing_requirements<'a>(
// missing_requirements_ident
mri: &syn::Ident,
fields: &'a [StructField<'a>],
) -> impl Iterator<Item = TokenStream> + 'a {
let mri = mri.clone();
fields
.iter()
.filter(|f| f.optionality.is_required())
.map(move |field| {
let field_name = field.name;
match field.kind {
FieldKind::Switch => unreachable!("switches are always optional"),
FieldKind::Positional => {
let name = field.positional_arg_name();
quote! {
if #field_name.slot.is_none() {
#mri.missing_positional_arg(#name)
}
}
}
FieldKind::Option => {
let name = field.option_arg_name();
quote! {
if #field_name.slot.is_none() {
#mri.missing_option(#name)
}
}
}
FieldKind::SubCommand => {
let ty = field.ty_without_wrapper;
quote! {
if #field_name.is_none() {
#mri.missing_subcommands(
<#ty as argh::SubCommands>::COMMANDS
.iter()
.cloned()
.chain(
<#ty as argh::SubCommands>::dynamic_commands()
.iter()
.copied()
),
)
}
}
}
}
})
}
/// Require that a type can be a `switch`.
/// Throws an error for all types except booleans and integers
fn ty_expect_switch(errors: &Errors, ty: &syn::Type) -> bool {
fn ty_can_be_switch(ty: &syn::Type) -> bool {
if let syn::Type::Path(path) = ty {
if path.qself.is_some() {
return false;
}
if path.path.segments.len() != 1 {
return false;
}
let ident = &path.path.segments[0].ident;
// `Option<bool>` can be used as a `switch`.
if ident == "Option"
&& let PathArguments::AngleBracketed(args) = &path.path.segments[0].arguments
&& let GenericArgument::Type(Type::Path(p)) = &args.args[0]
&& p.path.segments[0].ident == "bool"
{
return true;
}
[
"bool", "u8", "u16", "u32", "u64", "u128", "i8", "i16", "i32", "i64", "i128",
]
.iter()
.any(|path| ident == path)
} else {
false
}
}
let res = ty_can_be_switch(ty);
if !res {
errors.err(
ty,
"switches must be of type `bool`, `Option<bool>`, or integer type",
);
}
res
}
/// Returns `Some(T)` if a type is `wrapper_name<T>` for any `wrapper_name` in `wrapper_names`.
fn ty_inner<'a>(wrapper_names: &[&str], ty: &'a syn::Type) -> Option<&'a syn::Type> {
if let syn::Type::Path(path) = ty {
if path.qself.is_some() {
return None;
}
// Since we only check the last path segment, it isn't necessarily the case that
// we're referring to `std::vec::Vec` or `std::option::Option`, but there isn't
// a fool proof way to check these since name resolution happens after macro expansion,
// so this is likely "good enough" (so long as people don't have their own types called
// `Option` or `Vec` that take one generic parameter they're looking to parse).
let last_segment = path.path.segments.last()?;
if !wrapper_names.iter().any(|name| last_segment.ident == *name) {
return None;
}
if let syn::PathArguments::AngleBracketed(gen_args) = &last_segment.arguments {
let generic_arg = gen_args.args.first()?;
if let syn::GenericArgument::Type(ty) = &generic_arg {
return Some(ty);
}
}
}
None
}
/// Implements `FromArgs` and `SubCommands` for a `#![derive(FromArgs)]` enum.
fn impl_from_args_enum(
errors: &Errors,
name: &syn::Ident,
type_attrs: &TypeAttrs,
generic_args: &syn::Generics,
de: &syn::DataEnum,
) -> TokenStream {
parse_attrs::check_enum_type_attrs(errors, type_attrs, &de.enum_token.span);
// An enum variant like `<name>(<ty>)`
struct SubCommandVariant<'a> {
name: &'a syn::Ident,
ty: &'a syn::Type,
}
let mut dynamic_type_and_variant = None;
let variants: Vec<SubCommandVariant<'_>> = de
.variants
.iter()
.filter_map(|variant| {
let name = &variant.ident;
let ty = enum_only_single_field_unnamed_variants(errors, &variant.fields)?;
if parse_attrs::VariantAttrs::parse(errors, variant)
.is_dynamic
.is_some()
{
if dynamic_type_and_variant.is_some() {
errors.err(variant, "Only one variant can have the `dynamic` attribute");
}
dynamic_type_and_variant = Some((ty, name));
None
} else {
Some(SubCommandVariant { name, ty })
}
})
.collect();
let name_repeating = std::iter::repeat(name.clone());
let variant_ty = variants.iter().map(|x| x.ty).collect::<Vec<_>>();
let variant_names = variants.iter().map(|x| x.name).collect::<Vec<_>>();
let dynamic_from_args =
dynamic_type_and_variant
.as_ref()
.map(|(dynamic_type, dynamic_variant)| {
quote! {
if let Some(result) = <#dynamic_type as argh::DynamicSubCommand>::try_from_args(
command_name, args) {
return result.map(#name::#dynamic_variant);
}
}
});
let dynamic_commands = dynamic_type_and_variant.as_ref().map(|(dynamic_type, _)| {
quote! {
fn dynamic_commands() -> &'static [&'static argh::CommandInfo] {
<#dynamic_type as argh::DynamicSubCommand>::commands()
}
}
});
let (impl_generics, ty_generics, where_clause) = generic_args.split_for_impl();
quote! {
impl #impl_generics argh::FromArgs for #name #ty_generics #where_clause {
fn from_args(command_name: &[&str], args: &[&str])
-> std::result::Result<Self, argh::EarlyExit>
{
let subcommand_name = if let Some(subcommand_name) = command_name.last() {
*subcommand_name
} else {
return Err(argh::EarlyExit::from("no subcommand name".to_owned()));
};
#(
if subcommand_name == <#variant_ty as argh::SubCommand>::COMMAND.name {
return Ok(#name_repeating::#variant_names(
<#variant_ty as argh::FromArgs>::from_args(command_name, args)?
));
}
)*
#dynamic_from_args
Err(argh::EarlyExit::from("no subcommand matched".to_owned()))
}
}
impl #impl_generics argh::SubCommands for #name #ty_generics #where_clause {
const COMMANDS: &'static [&'static argh::CommandInfo] = &[#(
<#variant_ty as argh::SubCommand>::COMMAND,
)*];
#dynamic_commands
}
}
}
/// Returns `Some(Bar)` if the variant is a single-field unnamed variant like `Foo(Bar)`.
/// Otherwise, generates an error.
fn enum_only_single_field_unnamed_variants<'a>(
errors: &Errors,
variant_fields: &'a syn::Fields,
) -> Option<&'a syn::Type> {
macro_rules! with_enum_suggestion {
($help_text:literal) => {
concat!(
$help_text,
"\nInstead, use a variant with a single unnamed field for each subcommand:\n",
" enum MyCommandEnum {\n",
" SubCommandOne(SubCommandOne),\n",
" SubCommandTwo(SubCommandTwo),\n",
" }",
)
};
}
match variant_fields {
syn::Fields::Named(fields) => {
errors.err(
fields,
with_enum_suggestion!(
"`#![derive(FromArgs)]` `enum`s do not support variants with named fields."
),
);
None
}
syn::Fields::Unit => {
errors.err(
variant_fields,
with_enum_suggestion!(
"`#![derive(FromArgs)]` does not support `enum`s with no variants."
),
);
None
}
syn::Fields::Unnamed(fields) => {
if fields.unnamed.len() != 1 {
errors.err(
fields,
with_enum_suggestion!(
"`#![derive(FromArgs)]` `enum` variants must only contain one field."
),
);
None
} else {
// `unwrap` is okay because of the length check above.
let first_field = fields.unnamed.first().unwrap();
Some(&first_field.ty)
}
}
}
}
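For reference, the generated `from_args` is exercised by declarations like the sketch below, which mirrors the `Repack` subcommand later in this diff but uses plain `String` fields so it stands alone (in the Magisk tree the `argh` path is the module re-exported from `base`):

use argh::FromArgs;

#[derive(FromArgs)]
#[argh(subcommand, name = "repack")]
struct Repack {
    // Short-only switch: `long = none` suppresses the kebab-cased `--no-compress`.
    #[argh(switch, short = 'n', long = none)]
    no_compress: bool,
    // Required positional argument.
    #[argh(positional)]
    img: String,
    // Optional because it is the trailing positional; only trailing positionals
    // may be `Option`, `Vec`, or defaulted.
    #[argh(positional)]
    out: Option<String>,
}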

View File

@@ -0,0 +1,688 @@
// Copyright (c) 2020 Google LLC All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use super::errors::Errors;
use proc_macro2::Span;
use std::collections::hash_map::{Entry, HashMap};
/// Attributes applied to a field of a `#![derive(FromArgs)]` struct.
#[derive(Default)]
pub struct FieldAttrs {
pub default: Option<syn::LitStr>,
pub description: Option<Description>,
pub from_str_fn: Option<syn::ExprPath>,
pub field_type: Option<FieldType>,
pub long: Option<Option<syn::LitStr>>,
pub short: Option<syn::LitChar>,
pub arg_name: Option<syn::LitStr>,
pub greedy: Option<syn::Path>,
pub hidden_help: bool,
}
/// The purpose of a particular field on a `#![derive(FromArgs)]` struct.
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum FieldKind {
/// Switches are booleans that are set to "true" by passing the flag.
Switch,
/// Options are `--key value`. They may be optional (using `Option`),
/// or repeating (using `Vec`), or required (neither `Option` nor `Vec`)
Option,
/// Subcommand fields (of which there can be at most one) refer to enums
/// containing one of several potential subcommands. They may be optional
/// (using `Option`) or required (no `Option`).
SubCommand,
/// Positional arguments are parsed literally if the input
/// does not begin with `-` or `--` and is not a subcommand.
/// They are parsed in declaration order, and only the last positional
/// argument in a type may be an `Option`, `Vec`, or have a default value.
Positional,
}
/// The type of a field on a `#![derive(FromArgs)]` struct.
///
/// This is a simple wrapper around `FieldKind` which includes the `syn::Ident`
/// of the attribute containing the field kind.
pub struct FieldType {
pub kind: FieldKind,
pub ident: syn::Ident,
}
/// A description of a `#![derive(FromArgs)]` struct.
///
/// Defaults to the docstring if one is present, or `#[argh(description = "...")]`
/// if one is provided.
pub struct Description {
/// Whether the description was an explicit annotation or whether it was a doc string.
pub explicit: bool,
pub content: syn::LitStr,
}
impl FieldAttrs {
pub fn parse(errors: &Errors, field: &syn::Field) -> Self {
let mut this = Self::default();
for attr in &field.attrs {
if is_doc_attr(attr) {
parse_attr_doc(errors, attr, &mut this.description);
continue;
}
let ml = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
ml
} else {
continue;
};
for meta in ml {
let name = meta.path();
if name.is_ident("arg_name") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_arg_name(errors, m);
}
} else if name.is_ident("default") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_default(errors, m);
}
} else if name.is_ident("description") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
parse_attr_description(errors, m, &mut this.description);
}
} else if name.is_ident("from_str_fn") {
if let Some(m) = errors.expect_meta_list(&meta) {
this.parse_attr_from_str_fn(errors, m);
}
} else if name.is_ident("long") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_long(errors, m);
}
} else if name.is_ident("option") {
parse_attr_field_type(errors, &meta, FieldKind::Option, &mut this.field_type);
} else if name.is_ident("short") {
if let Some(m) = errors.expect_meta_name_value(&meta) {
this.parse_attr_short(errors, m);
}
} else if name.is_ident("subcommand") {
parse_attr_field_type(
errors,
&meta,
FieldKind::SubCommand,
&mut this.field_type,
);
} else if name.is_ident("switch") {
parse_attr_field_type(errors, &meta, FieldKind::Switch, &mut this.field_type);
} else if name.is_ident("positional") {
parse_attr_field_type(
errors,
&meta,
FieldKind::Positional,
&mut this.field_type,
);
} else if name.is_ident("greedy") {
this.greedy = Some(name.clone());
} else if name.is_ident("hidden_help") {
this.hidden_help = true;
} else {
errors.err(
&meta,
concat!(
"Invalid field-level `argh` attribute\n",
"Expected one of: `arg_name`, `default`, `description`, `from_str_fn`, `greedy`, ",
"`long`, `option`, `short`, `subcommand`, `switch`, `hidden_help`",
),
);
}
}
}
if let (Some(default), Some(field_type)) = (&this.default, &this.field_type) {
match field_type.kind {
FieldKind::Option | FieldKind::Positional => {}
FieldKind::SubCommand | FieldKind::Switch => errors.err(
default,
"`default` may only be specified on `#[argh(option)]` \
or `#[argh(positional)]` fields",
),
}
}
match (&this.greedy, this.field_type.as_ref().map(|f| f.kind)) {
(Some(_), Some(FieldKind::Positional)) => {}
(Some(greedy), Some(_)) => errors.err(
&greedy,
"`greedy` may only be specified on `#[argh(positional)]` \
fields",
),
_ => {}
}
if let Some(d) = &this.description {
check_option_description(errors, d.content.value().trim(), d.content.span());
}
this
}
fn parse_attr_from_str_fn(&mut self, errors: &Errors, m: &syn::MetaList) {
parse_attr_fn_name(errors, m, "from_str_fn", &mut self.from_str_fn)
}
fn parse_attr_default(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_single_string(errors, m, "default", &mut self.default);
}
fn parse_attr_arg_name(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_single_string(errors, m, "arg_name", &mut self.arg_name);
}
fn parse_attr_long(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
if let Some(first) = &self.long {
errors.duplicate_attrs("long", first, m);
} else if let syn::Expr::Path(syn::ExprPath { path, .. }) = &m.value
&& let Some(ident) = path.get_ident()
&& ident.to_string().eq_ignore_ascii_case("none")
{
self.long = Some(None);
} else if let Some(lit_str) = errors.expect_lit_str(&m.value) {
self.long = Some(Some(lit_str.clone()));
}
if let Some(Some(long)) = &self.long {
let value = long.value();
check_long_name(errors, long, &value);
}
}
fn parse_attr_short(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
if let Some(first) = &self.short {
errors.duplicate_attrs("short", first, m);
} else if let Some(lit_char) = errors.expect_lit_char(&m.value) {
self.short = Some(lit_char.clone());
if !lit_char.value().is_ascii() {
errors.err(lit_char, "Short names must be ASCII");
}
}
}
}
pub(crate) fn check_long_name(errors: &Errors, spanned: &impl syn::spanned::Spanned, value: &str) {
if !value.is_ascii() {
errors.err(spanned, "Long names must be ASCII");
}
if !value
.chars()
.all(|c| c.is_lowercase() || c == '-' || c.is_ascii_digit())
{
errors.err(
spanned,
"Long names may only contain lowercase letters, digits, and dashes",
);
}
}
fn parse_attr_fn_name(
errors: &Errors,
m: &syn::MetaList,
attr_name: &str,
slot: &mut Option<syn::ExprPath>,
) {
if let Some(first) = slot {
errors.duplicate_attrs(attr_name, first, m);
}
*slot = errors.ok(m.parse_args());
}
fn parse_attr_field_type(
errors: &Errors,
meta: &syn::Meta,
kind: FieldKind,
slot: &mut Option<FieldType>,
) {
if let Some(path) = errors.expect_meta_word(meta) {
if let Some(first) = slot {
errors.duplicate_attrs("field kind", &first.ident, path);
} else if let Some(word) = path.get_ident() {
*slot = Some(FieldType {
kind,
ident: word.clone(),
});
}
}
}
// Whether the attribute is one like `#[<name> ...]`
fn is_matching_attr(name: &str, attr: &syn::Attribute) -> bool {
attr.path().segments.len() == 1 && attr.path().segments[0].ident == name
}
/// Checks for `#[doc ...]`, which is generated by doc comments.
fn is_doc_attr(attr: &syn::Attribute) -> bool {
is_matching_attr("doc", attr)
}
/// Checks for `#[argh ...]`
fn is_argh_attr(attr: &syn::Attribute) -> bool {
is_matching_attr("argh", attr)
}
/// Filters out non-`#[argh(...)]` attributes and converts to a sequence of `syn::Meta`.
fn argh_attr_to_meta_list(
errors: &Errors,
attr: &syn::Attribute,
) -> Option<impl IntoIterator<Item = syn::Meta>> {
if !is_argh_attr(attr) {
return None;
}
let ml = errors.expect_meta_list(&attr.meta)?;
errors.ok(ml.parse_args_with(
syn::punctuated::Punctuated::<syn::Meta, syn::Token![,]>::parse_terminated,
))
}
/// Represents a `#[derive(FromArgs)]` type's top-level attributes.
#[derive(Default)]
pub struct TypeAttrs {
pub is_subcommand: Option<syn::Ident>,
pub name: Option<syn::LitStr>,
pub description: Option<Description>,
pub examples: Vec<syn::LitStr>,
pub notes: Vec<syn::LitStr>,
pub error_codes: Vec<(syn::LitInt, syn::LitStr)>,
/// Arguments that trigger printing of the help message
pub help_triggers: Option<Vec<syn::LitStr>>,
}
impl TypeAttrs {
/// Parse top-level `#[argh(...)]` attributes
pub fn parse(errors: &Errors, derive_input: &syn::DeriveInput) -> Self {
let mut this = TypeAttrs::default();
for attr in &derive_input.attrs {
if is_doc_attr(attr) {
parse_attr_doc(errors, attr, &mut this.description);
continue;
}
let ml: Vec<syn::Meta> = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
ml.into_iter().collect()
} else {
continue;
};
for meta in ml.iter() {
let name = meta.path();
if name.is_ident("description") {
if let Some(m) = errors.expect_meta_name_value(meta) {
parse_attr_description(errors, m, &mut this.description);
}
} else if name.is_ident("error_code") {
if let Some(m) = errors.expect_meta_list(meta) {
this.parse_attr_error_code(errors, m);
}
} else if name.is_ident("example") {
if let Some(m) = errors.expect_meta_name_value(meta) {
this.parse_attr_example(errors, m);
}
} else if name.is_ident("name") {
if let Some(m) = errors.expect_meta_name_value(meta) {
this.parse_attr_name(errors, m);
}
} else if name.is_ident("note") {
if let Some(m) = errors.expect_meta_name_value(meta) {
this.parse_attr_note(errors, m);
}
} else if name.is_ident("subcommand") {
if let Some(ident) = errors.expect_meta_word(meta).and_then(|p| p.get_ident()) {
this.parse_attr_subcommand(errors, ident);
}
} else if name.is_ident("help_triggers") {
if let Some(m) = errors.expect_meta_list(meta) {
Self::parse_help_triggers(m, errors, &mut this);
}
} else {
errors.err(
meta,
concat!(
"Invalid type-level `argh` attribute\n",
"Expected one of: `description`, `error_code`, `example`, `name`, ",
"`note`, `subcommand`, `help_triggers`",
),
);
}
}
if this.is_subcommand.is_some() && this.help_triggers.is_some() {
let help_meta = ml
.iter()
.find(|meta| meta.path().is_ident("help_triggers"))
.unwrap();
errors.err(help_meta, "Cannot use `help_triggers` on a subcommand");
}
}
this.check_error_codes(errors);
this
}
/// Checks that error codes are within range for `i32` and that they are
/// never duplicated.
fn check_error_codes(&self, errors: &Errors) {
// map from error code to index
let mut map: HashMap<u64, usize> = HashMap::new();
for (index, (lit_int, _lit_str)) in self.error_codes.iter().enumerate() {
let value = match lit_int.base10_parse::<u64>() {
Ok(v) => v,
Err(e) => {
errors.push(e);
continue;
}
};
if value > (i32::MAX as u64) {
errors.err(lit_int, "Error code out of range for `i32`");
}
match map.entry(value) {
Entry::Occupied(previous) => {
let previous_index = *previous.get();
let (previous_lit_int, _previous_lit_str) = &self.error_codes[previous_index];
errors.err(lit_int, &format!("Duplicate error code {}", value));
errors.err(
previous_lit_int,
&format!("Error code {} previously defined here", value),
);
}
Entry::Vacant(slot) => {
slot.insert(index);
}
}
}
}
fn parse_attr_error_code(&mut self, errors: &Errors, ml: &syn::MetaList) {
errors.ok(ml.parse_args_with(|input: syn::parse::ParseStream| {
let err_code = input.parse()?;
input.parse::<syn::Token![,]>()?;
let err_msg = input.parse()?;
if let (Some(err_code), Some(err_msg)) = (
errors.expect_lit_int(&err_code),
errors.expect_lit_str(&err_msg),
) {
self.error_codes.push((err_code.clone(), err_msg.clone()));
}
Ok(())
}));
}
fn parse_attr_example(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_multi_string(errors, m, &mut self.examples)
}
fn parse_attr_name(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_single_string(errors, m, "name", &mut self.name);
if let Some(name) = &self.name
&& name.value() == "help"
{
errors.err(name, "Custom `help` commands are not supported.");
}
}
fn parse_attr_note(&mut self, errors: &Errors, m: &syn::MetaNameValue) {
parse_attr_multi_string(errors, m, &mut self.notes)
}
fn parse_attr_subcommand(&mut self, errors: &Errors, ident: &syn::Ident) {
if let Some(first) = &self.is_subcommand {
errors.duplicate_attrs("subcommand", first, ident);
} else {
self.is_subcommand = Some(ident.clone());
}
}
// Get the list of arguments that trigger printing of the help message, e.g. `help_triggers("-h", "--help", "help")`
fn parse_help_triggers(m: &syn::MetaList, errors: &Errors, this: &mut TypeAttrs) {
let parser = Punctuated::<syn::Expr, syn::Token![,]>::parse_terminated;
match parser.parse(m.tokens.clone().into()) {
Ok(args) => {
let mut triggers = Vec::new();
for arg in args {
if let syn::Expr::Lit(syn::ExprLit {
lit: syn::Lit::Str(lit_str),
..
}) = arg
{
triggers.push(lit_str);
}
}
this.help_triggers = Some(triggers);
}
Err(err) => errors.push(err),
}
}
}
/// Represents an enum variant's attributes.
#[derive(Default)]
pub struct VariantAttrs {
pub is_dynamic: Option<syn::Path>,
}
impl VariantAttrs {
/// Parse enum variant `#[argh(...)]` attributes
pub fn parse(errors: &Errors, variant: &syn::Variant) -> Self {
let mut this = VariantAttrs::default();
let fields = match &variant.fields {
syn::Fields::Named(fields) => Some(&fields.named),
syn::Fields::Unnamed(fields) => Some(&fields.unnamed),
syn::Fields::Unit => None,
};
for field in fields.into_iter().flatten() {
for attr in &field.attrs {
if is_argh_attr(attr) {
err_unused_enum_attr(errors, attr);
}
}
}
for attr in &variant.attrs {
let ml = if let Some(ml) = argh_attr_to_meta_list(errors, attr) {
ml
} else {
continue;
};
for meta in ml {
let name = meta.path();
if name.is_ident("dynamic") {
if let Some(prev) = this.is_dynamic.as_ref() {
errors.duplicate_attrs("dynamic", prev, &meta);
} else {
this.is_dynamic = errors.expect_meta_word(&meta).cloned();
}
} else {
errors.err(
&meta,
"Invalid variant-level `argh` attribute\n\
Variants can only have the #[argh(dynamic)] attribute.",
);
}
}
}
this
}
}
fn check_option_description(errors: &Errors, desc: &str, span: Span) {
let chars = &mut desc.trim().chars();
match (chars.next(), chars.next()) {
(Some(x), _) if x.is_lowercase() => {}
// If both the first and second letter are not lowercase,
// this is likely an initialism which should be allowed.
(Some(x), Some(y)) if !x.is_lowercase() && (y.is_alphanumeric() && !y.is_lowercase()) => {}
_ => {
errors.err_span(span, "Descriptions must begin with a lowercase letter");
}
}
}
fn parse_attr_single_string(
errors: &Errors,
m: &syn::MetaNameValue,
name: &str,
slot: &mut Option<syn::LitStr>,
) {
if let Some(first) = slot {
errors.duplicate_attrs(name, first, m);
} else if let Some(lit_str) = errors.expect_lit_str(&m.value) {
*slot = Some(lit_str.clone());
}
}
fn parse_attr_multi_string(errors: &Errors, m: &syn::MetaNameValue, list: &mut Vec<syn::LitStr>) {
if let Some(lit_str) = errors.expect_lit_str(&m.value) {
list.push(lit_str.clone());
}
}
fn parse_attr_doc(errors: &Errors, attr: &syn::Attribute, slot: &mut Option<Description>) {
let nv = if let Some(nv) = errors.expect_meta_name_value(&attr.meta) {
nv
} else {
return;
};
// Don't replace an existing explicit description.
if slot.as_ref().map(|d| d.explicit).unwrap_or(false) {
return;
}
if let Some(lit_str) = errors.expect_lit_str(&nv.value) {
let lit_str = if let Some(previous) = slot {
let previous = &previous.content;
let previous_span = previous.span();
syn::LitStr::new(
&(previous.value() + &unescape_doc(lit_str.value())),
previous_span,
)
} else {
syn::LitStr::new(&unescape_doc(lit_str.value()), lit_str.span())
};
*slot = Some(Description {
explicit: false,
content: lit_str,
});
}
}
/// Replaces escape sequences in doc-comments with the characters they represent.
///
/// Rustdoc understands CommonMark escape sequences consisting of a backslash followed by an ASCII
/// punctuation character. Any other backslash is treated as a literal backslash.
fn unescape_doc(s: String) -> String {
let mut result = String::with_capacity(s.len());
let mut characters = s.chars().peekable();
while let Some(mut character) = characters.next() {
if character == '\\'
&& let Some(next_character) = characters.peek()
&& next_character.is_ascii_punctuation()
{
character = *next_character;
characters.next();
}
// Braces must be escaped as this string will be used as a format string
if character == '{' || character == '}' {
result.push(character);
}
result.push(character);
}
result
}
fn parse_attr_description(errors: &Errors, m: &syn::MetaNameValue, slot: &mut Option<Description>) {
let lit_str = if let Some(lit_str) = errors.expect_lit_str(&m.value) {
lit_str
} else {
return;
};
// Don't allow multiple explicit (non doc-comment) descriptions
if let Some(description) = slot
&& description.explicit
{
errors.duplicate_attrs("description", &description.content, lit_str);
}
*slot = Some(Description {
explicit: true,
content: lit_str.clone(),
});
}
/// Checks that a `#![derive(FromArgs)]` enum has an `#[argh(subcommand)]`
/// attribute and that it does not have any other type-level `#[argh(...)]` attributes.
pub fn check_enum_type_attrs(errors: &Errors, type_attrs: &TypeAttrs, type_span: &Span) {
let TypeAttrs {
is_subcommand,
name,
description,
examples,
notes,
error_codes,
help_triggers,
} = type_attrs;
// Ensure that `#[argh(subcommand)]` is present.
if is_subcommand.is_none() {
errors.err_span(
*type_span,
concat!(
"`#![derive(FromArgs)]` on `enum`s can only be used to enumerate subcommands.\n",
"Consider adding `#[argh(subcommand)]` to the `enum` declaration.",
),
);
}
// Error on all other type-level attributes.
if let Some(name) = name {
err_unused_enum_attr(errors, name);
}
if let Some(description) = description
&& description.explicit
{
err_unused_enum_attr(errors, &description.content);
}
if let Some(example) = examples.first() {
err_unused_enum_attr(errors, example);
}
if let Some(note) = notes.first() {
err_unused_enum_attr(errors, note);
}
if let Some(err_code) = error_codes.first() {
err_unused_enum_attr(errors, &err_code.0);
}
if let Some(triggers) = help_triggers
&& let Some(trigger) = triggers.first()
{
err_unused_enum_attr(errors, trigger);
}
}
fn err_unused_enum_attr(errors: &Errors, location: &impl syn::spanned::Spanned) {
errors.err(
location,
concat!(
"Unused `argh` attribute on `#![derive(FromArgs)]` enum. ",
"Such `enum`s can only be used to dispatch to subcommands, ",
"and should only contain the #[argh(subcommand)] attribute.",
),
);
}
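Beyond the field-kind markers, the attributes parsed above can be combined as in this illustrative declaration (the `Demo` type and its fields are made up, not taken from the Magisk sources):

use argh::FromArgs;

/// demo command (the type-level description comes from this doc comment)
#[derive(FromArgs)]
#[argh(help_triggers("-h", "--help", "help"))]
struct Demo {
    /// number of retries; the `default` string is lexed as a Rust expression
    #[argh(option, default = "3")]
    retries: u32,
    /// shown in usage as <source> instead of the field name
    #[argh(positional, arg_name = "source")]
    src: String,
}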

View File

@@ -0,0 +1,19 @@
#![recursion_limit = "256"]
use proc_macro::TokenStream;
mod argh;
mod decodable;
#[proc_macro_derive(Decodable)]
pub fn derive_decodable(input: TokenStream) -> TokenStream {
decodable::derive_decodable(input)
}
/// Entrypoint for `#[derive(FromArgs)]`.
#[proc_macro_derive(FromArgs, attributes(argh))]
pub fn argh_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse_macro_input!(input as syn::DeriveInput);
let token = argh::impl_from_args(&ast);
token.into()
}
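Putting the two entry points together with the runtime half vendored into `base::argh`, usage looks roughly like the sketch below; the `Action`/`Unpack` types are illustrative, and the generated enum dispatch selects a variant by the last element of `command_name`:

use argh::FromArgs;

#[derive(FromArgs)]
#[argh(subcommand)]
enum Action {
    Unpack(Unpack),
}

#[derive(FromArgs)]
#[argh(subcommand, name = "unpack")]
struct Unpack {
    #[argh(positional)]
    img: String,
}

fn main() {
    // `command_name.last()` picks the subcommand; the remaining args go to the struct.
    match Action::from_args(&["magiskboot", "unpack"], &["boot.img"]) {
        Ok(Action::Unpack(u)) => println!("unpacking {}", u.img),
        Err(_) => eprintln!("argument parsing requested an early exit"),
    }
}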

View File

@@ -4,7 +4,10 @@ use crate::{
     fd_path, fd_set_attr,
 };
 use libc::{dirent, mode_t};
-use nix::{errno::Errno, fcntl::AtFlags, fcntl::OFlag, sys::stat::Mode, unistd::UnlinkatFlags};
+use nix::errno::Errno;
+use nix::fcntl::{AtFlags, OFlag};
+use nix::sys::stat::Mode;
+use nix::unistd::UnlinkatFlags;
 use std::fs::File;
 use std::ops::Deref;
 use std::os::fd::{AsFd, AsRawFd, BorrowedFd, IntoRawFd, OwnedFd, RawFd};

View File

@@ -4,13 +4,10 @@ use crate::{
 };
 use bytemuck::{Pod, bytes_of, bytes_of_mut};
 use libc::{c_uint, makedev, mode_t};
-use nix::{
-    errno::Errno,
-    fcntl::{AT_FDCWD, OFlag},
-    sys::stat::{FchmodatFlags, Mode},
-    unistd::AccessFlags,
-    unistd::{Gid, Uid},
-};
+use nix::errno::Errno;
+use nix::fcntl::{AT_FDCWD, OFlag};
+use nix::sys::stat::{FchmodatFlags, Mode};
+use nix::unistd::{AccessFlags, Gid, Uid};
 use num_traits::AsPrimitive;
 use std::cmp::min;
 use std::ffi::CStr;

View File

@@ -1,14 +1,13 @@
 #![feature(vec_into_raw_parts)]
 #![allow(clippy::missing_safety_doc)]
 
-pub use const_format;
-pub use libc;
-pub use nix;
+pub use {const_format, libc, nix};
 
 pub use cstr::{
     FsPathFollow, StrErr, Utf8CStr, Utf8CStrBuf, Utf8CStrBufArr, Utf8CStrBufRef, Utf8CString,
 };
 use cxx_extern::*;
+pub use derive;
 pub use dir::*;
 pub use ffi::{Utf8CStrRef, fork_dont_care, set_nice_name};
 pub use files::*;
@@ -16,6 +15,7 @@ pub use logging::*;
 pub use misc::*;
 pub use result::*;
 
+pub mod argh;
 pub mod cstr;
 mod cxx_extern;
 mod dir;

View File

@@ -1,16 +1,13 @@
+use super::argh::{EarlyExit, MissingRequirements};
 use crate::{Utf8CStr, Utf8CString, cstr, ffi};
-use argh::{EarlyExit, MissingRequirements};
 use libc::c_char;
-use std::{
-    fmt,
-    fmt::Arguments,
-    io::Write,
-    mem::ManuallyDrop,
-    process::exit,
-    slice, str,
-    sync::Arc,
-    sync::atomic::{AtomicPtr, Ordering},
-};
+use std::fmt::Arguments;
+use std::io::Write;
+use std::mem::ManuallyDrop;
+use std::process::exit;
+use std::sync::Arc;
+use std::sync::atomic::{AtomicPtr, Ordering};
+use std::{fmt, slice, str};
 
 pub fn errno() -> &'static mut i32 {
     unsafe { &mut *libc::__errno() }
@@ -87,17 +84,16 @@ impl<T> EarlyExitExt<T> for Result<T, EarlyExit> {
     fn on_early_exit<F: FnOnce()>(self, print_help_msg: F) -> T {
         match self {
             Ok(t) => t,
-            Err(EarlyExit { output, status }) => match status {
-                Ok(_) => {
+            Err(EarlyExit { output, is_help }) => {
+                if is_help {
                     print_help_msg();
                     exit(0)
-                }
-                Err(_) => {
+                } else {
                     eprintln!("{output}");
                     print_help_msg();
                     exit(1)
                 }
-            },
+            }
         }
     }
 }

View File

@@ -12,12 +12,11 @@ cxx-gen = { workspace = true }
 pb-rs = { workspace = true }
 
 [dependencies]
-base = { path = "../base" }
+base = { workspace = true }
 cxx = { workspace = true }
 byteorder = { workspace = true }
 size = { workspace = true }
 quick-protobuf = { workspace = true }
-argh = { workspace = true }
 sha1 = { workspace = true }
 sha2 = { workspace = true }
 digest = { workspace = true }

View File

@@ -279,9 +279,10 @@ static int find_dtb_offset(const uint8_t *buf, unsigned sz) {
         auto fdt_hdr = reinterpret_cast<const fdt_header *>(curr);
 
-        // Check that fdt_header.totalsize does not overflow kernel image size
+        // Check that fdt_header.totalsize does not overflow kernel image size or is empty dtb
+        // https://github.com/torvalds/linux/commit/7b937cc243e5b1df8780a0aa743ce800df6c68d1
         uint32_t totalsize = fdt_hdr->totalsize;
-        if (totalsize > end - curr)
+        if (totalsize > end - curr || totalsize <= 0x48)
             continue;
 
         // Check that fdt_header.off_dt_struct does not overflow kernel image size

View File

@@ -1,4 +1,5 @@
-use pb_rs::{ConfigBuilder, types::FileDescriptor};
+use pb_rs::ConfigBuilder;
+use pb_rs::types::FileDescriptor;
 
 use crate::codegen::gen_cxx_binding;

View File

@@ -6,9 +6,11 @@ use crate::patch::hexpatch;
 use crate::payload::extract_boot_from_payload;
 use crate::sign::{sha1_hash, sign_boot_image};
 use argh::{CommandInfo, EarlyExit, FromArgs, SubCommand};
+use base::libc::umask;
+use base::nix::fcntl::OFlag;
 use base::{
     CmdArgs, EarlyExitExt, LoggedResult, MappedFile, PositionalArgParser, ResultExt, Utf8CStr,
-    Utf8CString, WriteExt, cmdline_logging, cstr, libc::umask, log_err, nix::fcntl::OFlag,
+    Utf8CString, WriteExt, argh, cmdline_logging, cstr, log_err,
 };
 use std::ffi::c_char;
 use std::io::{Seek, SeekFrom, Write};
@@ -41,9 +43,9 @@ enum Action {
 #[derive(FromArgs)]
 #[argh(subcommand, name = "unpack")]
 struct Unpack {
-    #[argh(switch, short = 'n')]
+    #[argh(switch, short = 'n', long = none)]
     no_decompress: bool,
-    #[argh(switch, short = 'h')]
+    #[argh(switch, short = 'h', long = none)]
     dump_header: bool,
     #[argh(positional)]
     img: Utf8CString,
@@ -52,12 +54,12 @@ struct Unpack {
 #[derive(FromArgs)]
 #[argh(subcommand, name = "repack")]
 struct Repack {
-    #[argh(switch, short = 'n')]
+    #[argh(switch, short = 'n', long = none)]
     no_compress: bool,
     #[argh(positional)]
     img: Utf8CString,
-    #[argh(positional, default = r#"Utf8CString::from("new-boot.img")"#)]
-    out: Utf8CString,
+    #[argh(positional)]
+    out: Option<Utf8CString>,
 }
 
 #[derive(FromArgs)]
@@ -75,33 +77,24 @@ struct Sign {
     #[argh(positional)]
     img: Utf8CString,
     #[argh(positional)]
-    args: Vec<Utf8CString>,
+    name: Option<Utf8CString>,
+    #[argh(positional)]
+    cert: Option<Utf8CString>,
+    #[argh(positional)]
+    key: Option<Utf8CString>,
 }
 
+#[derive(FromArgs)]
+#[argh(subcommand, name = "extract")]
 struct Extract {
+    #[argh(positional)]
     payload: Utf8CString,
+    #[argh(positional)]
     partition: Option<Utf8CString>,
+    #[argh(positional)]
     outfile: Option<Utf8CString>,
 }
-
-impl FromArgs for Extract {
-    fn from_args(_command_name: &[&str], args: &[&str]) -> Result<Self, EarlyExit> {
-        let mut parse = PositionalArgParser(args.iter());
-        Ok(Extract {
-            payload: parse.required("payload.bin")?,
-            partition: parse.optional(),
-            outfile: parse.last_optional()?,
-        })
-    }
-}
-
-impl SubCommand for Extract {
-    const COMMAND: &'static CommandInfo = &CommandInfo {
-        name: "extract",
-        description: "",
-    };
-}
 
 #[derive(FromArgs)]
 #[argh(subcommand, name = "hexpatch")]
 struct HexPatch {
@@ -134,7 +127,7 @@ struct Dtb {
 #[derive(FromArgs)]
 #[argh(subcommand, name = "split")]
 struct Split {
-    #[argh(switch, short = 'n')]
+    #[argh(switch, short = 'n', long = none)]
     no_decompress: bool,
     #[argh(positional)]
     file: Utf8CString,
@@ -184,28 +177,15 @@ impl SubCommand for Compress {
     };
 }
 
+#[derive(FromArgs)]
+#[argh(subcommand, name = "decompress")]
 struct Decompress {
+    #[argh(positional)]
     file: Utf8CString,
+    #[argh(positional)]
     out: Option<Utf8CString>,
 }
-
-impl FromArgs for Decompress {
-    fn from_args(_command_name: &[&str], args: &[&str]) -> Result<Self, EarlyExit> {
-        let mut iter = PositionalArgParser(args.iter());
-        Ok(Decompress {
-            file: iter.required("infile")?,
-            out: iter.last_optional()?,
-        })
-    }
-}
-
-impl SubCommand for Decompress {
-    const COMMAND: &'static CommandInfo = &CommandInfo {
-        name: "decompress",
-        description: "",
-    };
-}
 
 fn print_usage(cmd: &str) {
     eprintln!(
         r#"MagiskBoot - Boot Image Modification Tool
@@ -384,21 +364,24 @@ fn boot_main(cmds: CmdArgs) -> LoggedResult<i32> {
             img,
             out,
         }) => {
-            repack(&img, &out, no_compress);
+            repack(
+                &img,
+                out.as_deref().unwrap_or(cstr!("new-boot.img")),
+                no_compress,
+            );
         }
         Action::Verify(Verify { img, cert }) => {
             if !verify_cmd(&img, cert.as_deref()) {
                 return log_err!();
             }
         }
-        Action::Sign(Sign { img, args }) => {
-            let mut iter = args.iter();
-            sign_cmd(
-                &img,
-                iter.next().map(AsRef::as_ref),
-                iter.next().map(AsRef::as_ref),
-                iter.next().map(AsRef::as_ref),
-            )?;
+        Action::Sign(Sign {
+            img,
+            name,
+            cert,
+            key,
+        }) => {
+            sign_cmd(&img, name.as_deref(), cert.as_deref(), key.as_deref())?;
         }
         Action::Extract(Extract {
             payload,

View File

@@ -1,13 +1,19 @@
 use crate::ffi::{FileFormat, check_fmt};
+use base::nix::fcntl::OFlag;
 use base::{
     Chunker, FileOrStd, LoggedResult, ReadExt, ResultExt, Utf8CStr, Utf8CString, WriteExt, log_err,
-    nix::fcntl::OFlag,
 };
-use bzip2::{Compression as BzCompression, read::BzDecoder, write::BzEncoder};
-use flate2::{Compression as GzCompression, read::MultiGzDecoder, write::GzEncoder};
+use bzip2::Compression as BzCompression;
+use bzip2::read::BzDecoder;
+use bzip2::write::BzEncoder;
+use flate2::Compression as GzCompression;
+use flate2::read::MultiGzDecoder;
+use flate2::write::GzEncoder;
+use lz4::block::CompressionMode;
+use lz4::liblz4::BlockChecksum;
 use lz4::{
     BlockMode, BlockSize, ContentChecksum, Decoder as LZ4FrameDecoder, Encoder as LZ4FrameEncoder,
-    EncoderBuilder as LZ4FrameEncoderBuilder, block::CompressionMode, liblz4::BlockChecksum,
+    EncoderBuilder as LZ4FrameEncoderBuilder,
 };
 use lzma_rust2::{CheckType, LzmaOptions, LzmaReader, LzmaWriter, XzOptions, XzReader, XzWriter};
 use std::cmp::min;

View File

@@ -1,5 +1,10 @@
 #![allow(clippy::useless_conversion)]
 
+use argh::FromArgs;
+use base::argh;
+use bytemuck::{Pod, Zeroable, from_bytes};
+use num_traits::cast::AsPrimitive;
+use size::{Base, Size, Style};
 use std::cmp::Ordering;
 use std::collections::{BTreeMap, HashMap};
 use std::fmt::{Display, Formatter};
@@ -9,11 +14,6 @@ use std::mem::size_of;
 use std::process::exit;
 use std::str;
-
-use argh::FromArgs;
-use bytemuck::{Pod, Zeroable, from_bytes};
-use num_traits::cast::AsPrimitive;
-use size::{Base, Size, Style};
 
 use crate::check_env;
 use crate::compress::{get_decoder, get_encoder};
 use crate::ffi::FileFormat;
@@ -23,9 +23,10 @@ use base::libc::{
S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR, dev_t, gid_t, major, makedev, minor, mknod, S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR, dev_t, gid_t, major, makedev, minor, mknod,
mode_t, uid_t, mode_t, uid_t,
}; };
use base::nix::fcntl::OFlag;
use base::{ use base::{
BytesExt, EarlyExitExt, LoggedResult, MappedFile, OptionExt, ResultExt, Utf8CStr, Utf8CStrBuf, BytesExt, EarlyExitExt, LoggedResult, MappedFile, OptionExt, ResultExt, Utf8CStr, Utf8CStrBuf,
WriteExt, cstr, log_err, nix::fcntl::OFlag, WriteExt, cstr, log_err,
}; };
#[derive(FromArgs)] #[derive(FromArgs)]

View File

@@ -1,14 +1,11 @@
+use argh::FromArgs;
+use base::{LoggedResult, MappedFile, Utf8CStr, argh};
+use fdt::node::{FdtNode, NodeProperty};
+use fdt::{Fdt, FdtError};
 use std::cell::UnsafeCell;
-use argh::FromArgs;
-use fdt::{
-    Fdt, FdtError,
-    node::{FdtNode, NodeProperty},
-};
-use base::{LoggedResult, MappedFile, Utf8CStr};
-use crate::{check_env, patch::patch_verity};
+use crate::check_env;
+use crate::patch::patch_verity;
 #[derive(FromArgs)]
 #[argh(subcommand)]
@@ -21,7 +18,7 @@ pub(crate) enum DtbAction {
 #[derive(FromArgs)]
 #[argh(subcommand, name = "print")]
 pub(crate) struct Print {
-    #[argh(switch, short = 'f')]
+    #[argh(switch, short = 'f', long = none)]
     fstab: bool,
 }

View File

@@ -1,5 +1,4 @@
 #![feature(format_args_nl)]
-#![feature(btree_extract_if)]
 #![feature(iter_intersperse)]
 #![feature(try_blocks)]

View File

@@ -1,15 +1,13 @@
 use crate::compress::get_decoder;
 use crate::ffi::check_fmt;
-use crate::proto::update_metadata::{DeltaArchiveManifest, mod_InstallOperation::Type};
+use crate::proto::update_metadata::DeltaArchiveManifest;
+use crate::proto::update_metadata::mod_InstallOperation::Type;
 use base::{LoggedError, LoggedResult, ReadSeekExt, ResultExt, WriteExt, error};
 use byteorder::{BigEndian, ReadBytesExt};
 use quick_protobuf::{BytesReader, MessageRead};
-use std::io::Cursor;
-use std::{
-    fs::File,
-    io::{BufReader, Read, Seek, SeekFrom, Write},
-    os::fd::FromRawFd,
-};
+use std::fs::File;
+use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write};
+use std::os::fd::FromRawFd;
 macro_rules! bad_payload {
     ($msg:literal) => {{

View File

@@ -20,8 +20,7 @@ cxx-gen = { workspace = true }
 pb-rs = { workspace = true }
 [dependencies]
-base = { path = "../base", features = ["selinux"] }
-derive = { path = "derive" }
+base = { workspace = true, features = ["selinux"] }
 cxx = { workspace = true }
 num-traits = { workspace = true }
 num-derive = { workspace = true }
@@ -29,6 +28,5 @@ quick-protobuf = { workspace = true }
 bytemuck = { workspace = true, features = ["derive"] }
 thiserror = { workspace = true }
 bit-set = { workspace = true }
-argh = { workspace = true }
 nix = { workspace = true, features = ["fs", "mount", "poll", "signal", "term", "user", "zerocopy"] }
 bitflags = { workspace = true }

View File

@@ -1,4 +1,5 @@
-use pb_rs::{ConfigBuilder, types::FileDescriptor};
+use pb_rs::ConfigBuilder;
+use pb_rs::types::FileDescriptor;
 use crate::codegen::gen_cxx_binding;

View File

@@ -21,12 +21,10 @@ use base::{
     AtomicArc, BufReadExt, FileAttr, FsPathBuilder, LoggedResult, ReadExt, ResultExt, Utf8CStr,
     Utf8CStrBuf, WriteExt, cstr, fork_dont_care, info, libc, log_err, set_nice_name,
 };
-use nix::{
-    fcntl::OFlag,
-    mount::MsFlags,
-    sys::signal::SigSet,
-    unistd::{dup2_stderr, dup2_stdin, dup2_stdout, getpid, getuid, setsid},
-};
+use nix::fcntl::OFlag;
+use nix::mount::MsFlags;
+use nix::sys::signal::SigSet;
+use nix::unistd::{dup2_stderr, dup2_stdin, dup2_stdout, getpid, getuid, setsid};
 use num_traits::AsPrimitive;
 use std::fmt::Write as _;
 use std::io::{BufReader, Write};

View File

@@ -1,8 +0,0 @@
-use proc_macro::TokenStream;
-mod decodable;
-#[proc_macro_derive(Decodable)]
-pub fn derive_decodable(input: TokenStream) -> TokenStream {
-    decodable::derive_decodable(input)
-}

View File

@@ -1,5 +1,4 @@
 #![feature(try_blocks)]
-#![feature(let_chains)]
 #![feature(fn_traits)]
 #![feature(unix_socket_ancillary_data)]
 #![feature(unix_socket_peek)]
@@ -9,8 +8,8 @@
 use crate::ffi::SuRequest;
 use crate::socket::Encodable;
+use base::derive::Decodable;
 use daemon::{MagiskD, connect_daemon_for_cxx};
-use derive::Decodable;
 use logging::{android_logging, zygisk_close_logd, zygisk_get_logd, zygisk_logging};
 use magisk::magisk_main;
 use mount::revert_unmount;

View File

@@ -1,17 +1,16 @@
 use crate::consts::{LOG_PIPE, LOGFILE};
 use crate::ffi::get_magisk_tmp;
 use crate::logging::LogFile::{Actual, Buffer};
+use base::const_format::concatcp;
 use base::{
     FsPathBuilder, LogLevel, LoggedResult, ReadExt, ResultExt, Utf8CStr, Utf8CStrBuf, WriteExt,
-    const_format::concatcp, cstr, libc, new_daemon_thread, raw_cstr, update_logger,
+    cstr, libc, new_daemon_thread, raw_cstr, update_logger,
 };
 use bytemuck::{Pod, Zeroable, bytes_of, write_zeroes};
 use libc::{PIPE_BUF, c_char, localtime_r, sigtimedwait, time_t, timespec, tm};
-use nix::{
-    fcntl::OFlag,
-    sys::signal::{SigSet, SigmaskHow, Signal},
-    unistd::{Gid, Uid, chown, getpid, gettid},
-};
+use nix::fcntl::OFlag;
+use nix::sys::signal::{SigSet, SigmaskHow, Signal};
+use nix::unistd::{Gid, Uid, chown, getpid, gettid};
 use num_derive::{FromPrimitive, ToPrimitive};
 use num_traits::FromPrimitive;
 use std::cmp::min;

View File

@@ -5,7 +5,7 @@ use crate::mount::find_preinit_device;
 use crate::selinux::restorecon;
 use crate::socket::{Decodable, Encodable};
 use argh::FromArgs;
-use base::{CmdArgs, EarlyExitExt, LoggedResult, Utf8CString, clone_attr};
+use base::{CmdArgs, EarlyExitExt, LoggedResult, Utf8CString, argh, clone_attr};
 use nix::poll::{PollFd, PollFlags, PollTimeout};
 use std::ffi::c_char;
 use std::os::fd::AsFd;

View File

@@ -8,7 +8,9 @@ use base::{
     Utf8CStrBuf, Utf8CString, WalkResult, clone_attr, cstr, debug, error, info, libc, raw_cstr,
     warn,
 };
-use nix::{fcntl::OFlag, mount::MsFlags, unistd::UnlinkatFlags};
+use nix::fcntl::OFlag;
+use nix::mount::MsFlags;
+use nix::unistd::UnlinkatFlags;
 use std::collections::BTreeMap;
 use std::os::fd::IntoRawFd;
 use std::path::{Component, Path};

View File

@@ -6,12 +6,11 @@ use base::{
     debug, info, libc, parse_mount_info, warn,
 };
 use libc::{c_uint, dev_t};
-use nix::{
-    mount::MsFlags,
-    sys::stat::{Mode, SFlag, mknod},
-};
+use nix::mount::MsFlags;
+use nix::sys::stat::{Mode, SFlag, mknod};
 use num_traits::AsPrimitive;
-use std::{cmp::Ordering::Greater, cmp::Ordering::Less, path::Path, path::PathBuf};
+use std::cmp::Ordering::{Greater, Less};
+use std::path::{Path, PathBuf};
 pub fn setup_preinit_dir() {
     let magisk_tmp = get_magisk_tmp();
@@ -222,7 +221,10 @@ pub fn revert_unmount(pid: i32) {
     // Unmount Magisk tmpfs and mounts from module files
     for info in parse_mount_info("self") {
-        if info.source == "magisk" || info.root.starts_with("/adb/modules") {
+        if info.source == "magisk"
+            || info.root.starts_with("/adb/modules")
+            || (info.fs_type == "rootfs" && info.root.starts_with("/magisk"))
+        {
             targets.push(info.target);
         }
     }

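The widened condition makes revert_unmount also detach mounts whose filesystem type is rootfs and whose root lives under /magisk, i.e. the new rootfs-backed magisktmp. As a rough, std-only illustration of the matching rules (this is not Magisk's parse_mount_info, and the field handling is simplified):

use std::fs;

// Collect the mount targets that the hunk above would unmount for a given pid.
fn targets_to_unmount(pid: u32) -> Vec<String> {
    let mut targets = Vec::new();
    let Ok(data) = fs::read_to_string(format!("/proc/{pid}/mountinfo")) else {
        return targets;
    };
    for line in data.lines() {
        // mountinfo: "<id> <parent> <maj:min> <root> <target> ... - <fstype> <source> <opts>"
        let Some((left, right)) = line.split_once(" - ") else { continue };
        let left: Vec<&str> = left.split(' ').collect();
        let mut right = right.split(' ');
        let (Some(&root), Some(&target)) = (left.get(3), left.get(4)) else { continue };
        let fs_type = right.next().unwrap_or("");
        let source = right.next().unwrap_or("");
        if source == "magisk"
            || root.starts_with("/adb/modules")
            || (fs_type == "rootfs" && root.starts_with("/magisk"))
        {
            targets.push(target.to_string());
        }
    }
    targets
}
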
View File

@@ -1,12 +1,12 @@
-use super::{
-    PropInfo, PropReader, SYS_PROP,
-    persist::{persist_delete_prop, persist_get_all_props, persist_get_prop, persist_set_prop},
+use super::persist::{
+    persist_delete_prop, persist_get_all_props, persist_get_prop, persist_set_prop,
 };
+use super::{PropInfo, PropReader, SYS_PROP};
 use argh::{EarlyExit, FromArgs, MissingRequirements};
 use base::libc::PROP_VALUE_MAX;
 use base::{
     BufReadExt, CmdArgs, EarlyExitExt, LogLevel, LoggedResult, ResultExt, Utf8CStr, Utf8CStrBuf,
-    Utf8CString, cstr, debug, log_err, set_log_level_state,
+    Utf8CString, argh, cstr, debug, log_err, set_log_level_state,
 };
 use nix::fcntl::OFlag;
 use std::collections::BTreeMap;
@@ -17,21 +17,21 @@ use std::io::BufReader;
 struct ResetProp {
     #[argh(switch, short = 'v')]
     verbose: bool,
-    #[argh(switch, short = 'w')]
+    #[argh(switch, short = 'w', long = none)]
     wait_mode: bool,
-    #[argh(switch, short = 'p')]
+    #[argh(switch, short = 'p', long = none)]
     persist: bool,
-    #[argh(switch, short = 'P')]
+    #[argh(switch, short = 'P', long = none)]
     persist_only: bool,
-    #[argh(switch, short = 'Z')]
+    #[argh(switch, short = 'Z', long = none)]
     context: bool,
-    #[argh(switch, short = 'n')]
+    #[argh(switch, short = 'n', long = none)]
     skip_svc: bool,
     #[argh(option, short = 'f')]
     file: Option<Utf8CString>,
-    #[argh(option, long = "delete", short = 'd')]
+    #[argh(option, short = 'd', long = "delete")]
     delete_key: Option<Utf8CString>,
-    #[argh(positional)]
+    #[argh(positional, greedy = true)]
     args: Vec<Utf8CString>,
 }
@@ -57,7 +57,7 @@ Wait mode arguments (toggled with -w):
 General flags:
    -h,--help          show this message
-   -v                 print verbose output to stderr
+   -v,--verbose       print verbose output to stderr
    -w                 switch to wait mode
 Read mode flags:

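Two attributes in this struct are extensions that ship with the argh copy vendored into base and are not accepted by upstream argh: long = none suppresses the automatically derived long flag, keeping switches such as -w short-only (matching the help text, which advertises -w but no --wait-mode), and greedy = true makes the trailing positional list absorb every remaining token so later arguments are never re-interpreted as options. A sketch of the declaration shape only; this will not compile against crates.io argh:

use argh::FromArgs; // the vendored copy, re-exported as base::argh

/// resetprop-style command line (illustrative, not the project's struct)
#[derive(FromArgs)]
struct Cli {
    /// print verbose output to stderr
    #[argh(switch, short = 'v')]
    verbose: bool,
    /// switch to wait mode (short-only: no --wait-mode is generated)
    #[argh(switch, short = 'w', long = none)]
    wait_mode: bool,
    /// remaining arguments, taken verbatim
    #[argh(positional, greedy = true)]
    args: Vec<String>,
}
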
View File

@@ -1,20 +1,17 @@
 use nix::fcntl::OFlag;
 use quick_protobuf::{BytesReader, MessageRead, MessageWrite, Writer};
-use std::io::Read;
-use std::{
-    fs::File,
-    io::{BufWriter, Write},
-    os::fd::FromRawFd,
-};
+use std::fs::File;
+use std::io::{BufWriter, Read, Write};
+use std::os::fd::FromRawFd;
 use crate::resetprop::PropReader;
-use crate::resetprop::proto::persistent_properties::{
-    PersistentProperties, mod_PersistentProperties::PersistentPropertyRecord,
-};
+use crate::resetprop::proto::persistent_properties::PersistentProperties;
+use crate::resetprop::proto::persistent_properties::mod_PersistentProperties::PersistentPropertyRecord;
 use base::const_format::concatcp;
+use base::libc::mkstemp;
 use base::{
     Directory, FsPathBuilder, LibcReturn, LoggedResult, MappedFile, SilentLogExt, Utf8CStr,
-    Utf8CStrBuf, WalkResult, clone_attr, cstr, debug, libc::mkstemp, log_err,
+    Utf8CStrBuf, WalkResult, clone_attr, cstr, debug, log_err,
 };
 const PERSIST_PROP_DIR: &str = "/data/property";

View File

@@ -8,14 +8,14 @@ use ExtraVal::{Bool, Int, IntList, Str};
 use base::{
     BytesExt, FileAttr, LibcReturn, LoggedResult, ResultExt, Utf8CStrBuf, cstr, fork_dont_care,
 };
-use nix::{
-    fcntl::OFlag,
-    poll::{PollFd, PollFlags, PollTimeout},
-};
+use nix::fcntl::OFlag;
+use nix::poll::{PollFd, PollFlags, PollTimeout};
 use num_traits::AsPrimitive;
+use std::fmt::Write;
+use std::fs::File;
 use std::os::fd::AsFd;
 use std::os::unix::net::UCred;
-use std::{fmt::Write, fs::File, process::Command, process::exit};
+use std::process::{Command, exit};
 struct Extra<'a> {
     key: &'static str,

View File

@@ -1,13 +1,11 @@
 use base::{FileOrStd, LibcReturn, LoggedResult, OsResult, ResultExt, libc, warn};
 use libc::{STDIN_FILENO, TIOCGWINSZ, TIOCSWINSZ, c_int, winsize};
-use nix::{
-    fcntl::{OFlag, SpliceFFlags},
-    poll::{PollFd, PollFlags, PollTimeout, poll},
-    sys::signal::{SigSet, Signal, raise},
-    sys::signalfd::{SfdFlags, SignalFd},
-    sys::termios::{SetArg, Termios, cfmakeraw, tcgetattr, tcsetattr},
-    unistd::pipe2,
-};
+use nix::fcntl::{OFlag, SpliceFFlags};
+use nix::poll::{PollFd, PollFlags, PollTimeout, poll};
+use nix::sys::signal::{SigSet, Signal, raise};
+use nix::sys::signalfd::{SfdFlags, SignalFd};
+use nix::sys::termios::{SetArg, Termios, cfmakeraw, tcgetattr, tcsetattr};
+use nix::unistd::pipe2;
 use std::fs::File;
 use std::io::{Read, Write};
 use std::mem::MaybeUninit;
@@ -143,7 +141,7 @@ fn pump_tty_impl(ptmx: File, pump_stdin: bool) -> LoggedResult<()> {
         if raw_fd == STDIN_FILENO {
             pump_via_splice(FileOrStd::StdIn.as_file(), &ptmx, &pipe_fd)?;
         } else if raw_fd == raw_ptmx {
-            pump_via_splice(&ptmx, FileOrStd::StdIn.as_file(), &pipe_fd)?;
+            pump_via_splice(&ptmx, FileOrStd::StdOut.as_file(), &pipe_fd)?;
         } else if raw_fd == raw_sig {
             sync_winsize(raw_ptmx);
             signal_fd.as_ref().unwrap().read_signal()?;

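The one-word change above (StdIn to StdOut) fixes the direction of the output pump: bytes read from the pty master have to land on the process's stdout rather than being written back to stdin. A hedged sketch of the underlying splice plumbing, assuming a recent nix (0.27 or later); pump_via_splice itself is Magisk-internal and not shown:

use nix::fcntl::{SpliceFFlags, splice};
use std::os::fd::{AsFd, OwnedFd};

// Move one chunk from `src` to `dst` through an intermediate pipe with splice(2),
// avoiding a userspace copy. For the ptmx output stream, `dst` must be stdout.
fn pump_once(src: impl AsFd, dst: impl AsFd, pipe: &(OwnedFd, OwnedFd)) -> nix::Result<()> {
    let (pipe_r, pipe_w) = pipe;
    let mut pending = splice(&src, None, pipe_w, None, 64 * 1024, SpliceFFlags::empty())?;
    while pending > 0 {
        pending -= splice(pipe_r, None, &dst, None, pending, SpliceFFlags::empty())?;
    }
    Ok(())
}
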
View File

@@ -1,5 +1,6 @@
 use base::{ResultExt, new_daemon_thread};
-use nix::{sys::signal::SigSet, unistd::getpid, unistd::gettid};
+use nix::sys::signal::SigSet;
+use nix::unistd::{getpid, gettid};
 use std::sync::{Condvar, LazyLock, Mutex, WaitTimeoutResult};
 use std::time::Duration;

View File

@@ -208,6 +208,12 @@ bool ZygiskContext::plt_hook_commit() {
     {
         mutex_guard lock(hook_info_lock);
         plt_hook_process_regex();
+        for (auto& reg: register_info) {
+            regfree(&reg.regex);
+        }
+        for (auto& ign: ignore_info) {
+            regfree(&ign.regex);
+        }
         register_info.clear();
         ignore_info.clear();
     }

View File

@@ -11,7 +11,7 @@ path = "lib.rs"
 cxx-gen = { workspace = true }
 [dependencies]
-base = { path = "../base" }
-magiskpolicy = { path = "../sepolicy", default-features = false }
+base = { workspace = true }
+magiskpolicy = { workspace = true, features = ["no-main"] }
 cxx = { workspace = true }
 num-traits = { workspace = true }

View File

@@ -1,19 +1,12 @@
-use crate::ffi::backup_init;
-use crate::mount::is_rootfs;
+use crate::ffi::{BootConfig, MagiskInit, backup_init, magisk_proxy_main};
+use crate::logging::setup_klog;
+use crate::mount::{is_rootfs, occupy, unoccupy};
 use crate::twostage::hexpatch_init_for_second_stage;
-use crate::{
-    ffi::{BootConfig, MagiskInit, magisk_proxy_main},
-    logging::setup_klog,
-};
-use base::{
-    LibcReturn, LoggedResult, ResultExt, cstr, info,
-    libc::{basename, getpid, mount, umask},
-    raw_cstr,
-};
-use std::{
-    ffi::{CStr, c_char},
-    ptr::null,
-};
+use base::libc::{basename, getpid, mount, umask};
+use base::nix::mount::MsFlags;
+use base::{LibcReturn, LoggedResult, ResultExt, Utf8CStr, cstr, info, nix, raw_cstr};
+use std::ffi::{CStr, c_char};
+use std::ptr::null;
 impl MagiskInit {
     fn new(argv: *mut *mut c_char) -> Self {
@@ -39,7 +32,7 @@ impl MagiskInit {
     fn first_stage(&self) {
         info!("First Stage Init");
-        self.prepare_data();
+        let rootfs_magisktmp = self.prepare_data(true);
         if !cstr!("/sdcard").exists() && !cstr!("/first_stage_ramdisk/sdcard").exists() {
             self.hijack_init_with_switch_root();
@@ -49,11 +42,28 @@ impl MagiskInit {
             // Fallback to hexpatch if /sdcard exists
             hexpatch_init_for_second_stage(true);
         }
+        if rootfs_magisktmp {
+            info!("Occupy /data");
+            occupy(cstr!("/data"));
+        }
     }
     fn second_stage(&mut self) {
         info!("Second Stage Init");
+        if unoccupy(cstr!("/data")) {
+            nix::mount::mount(
+                None::<&Utf8CStr>,
+                cstr!("/data"),
+                None::<&Utf8CStr>,
+                MsFlags::MS_REMOUNT,
+                Some(cstr!("size=100%")),
+            )
+            .check_os_err("mount", Some("/data"), Some("tmpfs"))
+            .log_ok();
+        }
         cstr!("/init").unmount().ok();
         cstr!("/system/bin/init").unmount().ok(); // just in case
         cstr!("/data/init").remove().ok();
@@ -79,7 +89,7 @@ impl MagiskInit {
     fn legacy_system_as_root(&mut self) {
         info!("Legacy SAR Init");
-        self.prepare_data();
+        self.prepare_data(false);
         let is_two_stage = self.mount_system_root();
         if is_two_stage {
             hexpatch_init_for_second_stage(false);
@@ -90,7 +100,7 @@ impl MagiskInit {
     fn rootfs(&mut self) {
         info!("RootFS Init");
-        self.prepare_data();
+        self.prepare_data(false);
         self.restore_ramdisk_init();
         self.patch_rw_root();
     }

View File

@@ -1,20 +1,18 @@
 use crate::ffi::MagiskInit;
+use base::WalkResult::{Continue, Skip};
+use base::nix::mount::{MntFlags, mount, umount2};
 use base::{
     Directory, FsPathBuilder, LibcReturn, LoggedResult, ResultExt, Utf8CStr, cstr, debug, libc,
     nix, parse_mount_info, raw_cstr,
 };
 use cxx::CxxString;
-use nix::{
-    mount::MsFlags,
-    sys::statfs::{FsType, TMPFS_MAGIC, statfs},
-    unistd::{chdir, chroot},
-};
+use nix::mount::MsFlags;
+use nix::sys::statfs::{FsType, TMPFS_MAGIC, statfs};
+use nix::unistd::{chdir, chroot};
 use num_traits::AsPrimitive;
-use std::{
-    collections::BTreeSet,
-    ops::Bound::{Excluded, Unbounded},
-    pin::Pin,
-};
+use std::collections::BTreeSet;
+use std::ops::Bound::{Excluded, Unbounded};
+use std::pin::Pin;
 unsafe extern "C" {
     static environ: *const *mut libc::c_char;
@@ -66,6 +64,54 @@ pub(crate) fn is_device_mounted(dev: u64, target: Pin<&mut CxxString>) -> bool {
     false
 }
+pub(crate) fn occupy(path: &Utf8CStr) {
+    Directory::open(path)
+        .map(|mut dir| {
+            dir.pre_order_walk(|entry| {
+                let mut path = cstr::buf::default();
+                entry.resolve_path(&mut path)?;
+                let path = path.as_utf8_cstr();
+                mount(
+                    Some(path),
+                    path,
+                    None::<&Utf8CStr>,
+                    MsFlags::MS_BIND | MsFlags::MS_RDONLY,
+                    None::<&Utf8CStr>,
+                )
+                .check_os_err("occupy", Some(path), None)?;
+                Ok(Continue)
+            })
+            .log_ok();
+        })
+        .log_ok();
+}
+pub(crate) fn unoccupy(path: &Utf8CStr) -> bool {
+    let mut ok = false;
+    Directory::open(path)
+        .map(|mut dir| {
+            ok = dir
+                .pre_order_walk(|entry| {
+                    let mut path = cstr::buf::default();
+                    entry.resolve_path(&mut path)?;
+                    let path = path.as_utf8_cstr();
+                    umount2(path, MntFlags::MNT_DETACH).check_os_err(
+                        "unoccupy",
+                        Some(path),
+                        None,
+                    )?;
+                    if entry.is_dir() {
+                        Ok(Skip)
+                    } else {
+                        Ok(Continue)
+                    }
+                })
+                .is_ok();
+        })
+        .log_ok();
+    ok
+}
 const RAMFS_MAGIC: u32 = 0x858458f6;
 pub(crate) fn is_rootfs() -> bool {
@@ -77,22 +123,44 @@ pub(crate) fn is_rootfs() -> bool {
 }
 impl MagiskInit {
-    pub(crate) fn prepare_data(&self) {
+    pub(crate) fn prepare_data(&self, use_rootfs: bool) -> bool {
         debug!("Setup data tmp");
         cstr!("/data").mkdir(0o755).log_ok();
-        nix::mount::mount(
-            Some(cstr!("magisk")),
-            cstr!("/data"),
-            Some(cstr!("tmpfs")),
-            MsFlags::empty(),
-            Some(cstr!("mode=755")),
-        )
-        .check_os_err("mount", Some("/data"), Some("tmpfs"))
-        .log_ok();
-        cstr!("/init").copy_to(cstr!("/data/magiskinit")).ok();
-        cstr!("/.backup").copy_to(cstr!("/data/.backup")).ok();
-        cstr!("/overlay.d").copy_to(cstr!("/data/overlay.d")).ok();
+        let mut rootfs_magisktmp = false;
+        if use_rootfs {
+            cstr!("/magisk").mkdir(0o755).log_ok();
+            rootfs_magisktmp = cstr!("/magisk")
+                .bind_mount_to(cstr!("/data"), false)
+                .is_ok();
+        }
+        if rootfs_magisktmp {
+            cstr!("/init")
+                .rename_to(cstr!("/magisk/magiskinit"))
+                .log_ok();
+            cstr!("/.backup").copy_to(cstr!("/magisk/.backup")).ok();
+            cstr!("/overlay.d")
+                .rename_to(cstr!("/magisk/overlay.d"))
+                .ok();
+        } else {
+            nix::mount::mount(
+                Some(cstr!("magisk")),
+                cstr!("/data"),
+                Some(cstr!("tmpfs")),
+                MsFlags::empty(),
+                Some(cstr!("mode=755")),
+            )
+            .check_os_err("mount", Some("/data"), Some("tmpfs"))
+            .log_ok();
+            cstr!("/init").copy_to(cstr!("/data/magiskinit")).ok();
+            cstr!("/.backup").copy_to(cstr!("/data/.backup")).ok();
+            cstr!("/overlay.d").copy_to(cstr!("/data/overlay.d")).ok();
+        }
+        rootfs_magisktmp
     }
     pub(crate) fn exec_init(&mut self) {

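The new occupy/unoccupy pair implements the rootfs magisktmp hand-off: during first stage every entry under /data is bind-mounted onto itself (with MS_RDONLY, as in the hunk above) so the stock first-stage init cannot remove the files Magisk staged there, and second stage lazily detaches those mounts again. The core primitive, reduced to plain nix calls as a hedged sketch (not the project's Directory walk):

use nix::mount::{MntFlags, MsFlags, mount, umount2};

// Pin a path by bind-mounting it onto itself; the mount keeps the path busy,
// so cleanup code cannot simply unlink or truncate it.
fn occupy_path(path: &str) -> nix::Result<()> {
    mount(
        Some(path),
        path,
        None::<&str>,
        MsFlags::MS_BIND | MsFlags::MS_RDONLY,
        None::<&str>,
    )
}

// Release it again with a lazy detach.
fn unoccupy_path(path: &str) -> nix::Result<()> {
    umount2(path, MntFlags::MNT_DETACH)
}
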
View File

@@ -5,13 +5,10 @@ use base::{
     BufReadExt, Directory, FsPathBuilder, LoggedResult, ResultExt, Utf8CStr, Utf8CString,
     clone_attr, cstr, debug,
 };
-use std::io::BufReader;
-use std::{
-    fs::File,
-    io::Write,
-    mem,
-    os::fd::{FromRawFd, RawFd},
-};
+use std::fs::File;
+use std::io::{BufReader, Write};
+use std::mem;
+use std::os::fd::{FromRawFd, RawFd};
 pub fn inject_magisk_rc(fd: RawFd, tmp_dir: &Utf8CStr) {
     debug!("Injecting magisk rc");

View File

@@ -84,20 +84,14 @@ impl MagiskInit {
             .log_ok();
         debug!("Symlink /storage/self/primary -> /system/system/bin/init");
         }
-        cstr!("/init").rename_to(cstr!("/sdcard")).log_ok();
-        // First try to mount magiskinit from rootfs to workaround Samsung RKP
-        if cstr!("/sdcard")
-            .bind_mount_to(cstr!("/sdcard"), false)
-            .is_ok()
-        {
-            debug!("Bind mount /sdcard -> /sdcard");
-        } else {
-            // Binding mounting from rootfs is not supported before Linux 3.12
-            cstr!("/data/magiskinit")
-                .bind_mount_to(cstr!("/sdcard"), false)
-                .log_ok();
-            debug!("Bind mount /data/magiskinit -> /sdcard");
-        }
+        // Binding mounting from rootfs is not supported before Linux 3.12
+        cstr!("/sdcard")
+            .create(OFlag::O_RDONLY | OFlag::O_CLOEXEC, 0)
+            .log_ok();
+        cstr!("/data/magiskinit")
+            .bind_mount_to(cstr!("/sdcard"), false)
+            .log_ok();
+        debug!("Bind mount /data/magiskinit -> /sdcard");
     }
 }

native/src/rustfmt.toml Normal file
View File

@@ -0,0 +1,2 @@
unstable_features = true
imports_granularity = "Module"

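This new two-line rustfmt.toml explains the bulk of the import-only hunks in this release: imports_granularity is still an unstable rustfmt option (hence unstable_features = true), and setting it to "Module" makes rustfmt split nested use trees so that each use statement names items from exactly one module. An illustrative before/after, not taken from the tree:

// Before: one nested use tree
use nix::{
    fcntl::OFlag,
    mount::MsFlags,
    unistd::{getpid, gettid},
};

// After imports_granularity = "Module": one `use` per module; items from the
// same module stay grouped
use nix::fcntl::OFlag;
use nix::mount::MsFlags;
use nix::unistd::{getpid, gettid};
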
View File

@@ -11,10 +11,8 @@ path = "lib.rs"
 cxx-gen = { workspace = true }
 [features]
-default = ["main"]
-main = []
+no-main = []
 [dependencies]
-base = { path = "../base" }
+base = { workspace = true }
 cxx = { workspace = true }
-argh = { workspace = true }

View File

@@ -1,9 +1,10 @@
 use crate::ffi::SePolicy;
 use crate::statement::format_statement_help;
 use argh::FromArgs;
+use base::libc::umask;
 use base::{
-    CmdArgs, EarlyExitExt, FmtAdaptor, LoggedResult, Utf8CString, cmdline_logging, cstr,
-    libc::umask, log_err,
+    CmdArgs, EarlyExitExt, FmtAdaptor, LoggedResult, Utf8CString, argh, cmdline_logging, cstr,
+    log_err,
 };
 use std::ffi::c_char;
 use std::io::stderr;

View File

@@ -8,7 +8,7 @@ use crate::ffi::SePolicy;
 #[path = "../include/consts.rs"]
 mod consts;
-#[cfg(feature = "main")]
+#[cfg(not(feature = "no-main"))]
 mod cli;
 mod rules;
 mod statement;

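Together with the Cargo.toml hunk above, the default-on "main" feature is replaced by an additive "no-main" feature: magiskinit now opts out explicitly with features = ["no-main"], and the CLI module is compiled only when that flag is absent, presumably to avoid relying on default-features = false taking effect across the whole workspace build. An illustrative sketch of the gating, with hypothetical item names:

// Library side (Cargo.toml):
//     [features]
//     no-main = []
//
// Dependent that only needs the policy engine:
//     magiskpolicy = { workspace = true, features = ["no-main"] }

// Library code: anything gated like this disappears from builds that enable `no-main`.
#[cfg(not(feature = "no-main"))]
pub fn cli_entry() {
    println!("full CLI build");
}

#[cfg(feature = "no-main")]
pub fn cli_entry() {
    // slim build: no command-line front end
}
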
View File

@@ -1,5 +1,6 @@
+use crate::SePolicy;
 use crate::consts::{SEPOL_FILE_TYPE, SEPOL_LOG_TYPE, SEPOL_PROC_DOMAIN};
-use crate::{SePolicy, ffi::Xperm};
+use crate::ffi::Xperm;
 use base::{LogLevel, set_log_level_state};
 macro_rules! rules {
@@ -109,7 +110,7 @@ impl SePolicy {
         allow(["kernel"], ["rootfs", "tmpfs"], ["chr_file"], ["write"]);
         // Allow magiskinit daemon to handle mock selinuxfs
-        allow(["kernel"], ["tmpfs"], ["fifo_file"], ["open", "read", "write"]);
+        allow(["kernel"], ["rootfs", "tmpfs"], ["fifo_file"], ["open", "read", "write"]);
         // For relabelling files
         allow(["rootfs"], ["labeledfs", "tmpfs"], ["filesystem"], ["associate"]);

View File

@@ -1,10 +1,12 @@
 use std::fmt::{Display, Formatter, Write};
 use std::io::{BufRead, BufReader, Cursor};
-use std::{iter::Peekable, vec::IntoIter};
+use std::iter::Peekable;
+use std::vec::IntoIter;
 use crate::SePolicy;
 use crate::ffi::Xperm;
-use base::{BufReadExt, LoggedResult, Utf8CStr, error, nix::fcntl::OFlag, warn};
+use base::nix::fcntl::OFlag;
+use base::{BufReadExt, LoggedResult, Utf8CStr, error, warn};
 pub enum Token<'a> {
     AL,