From 9cc50b20d8e328b8b63b696c6ef92db6b986da07 Mon Sep 17 00:00:00 2001
From: LoveSy
Date: Wed, 24 Jul 2024 17:45:08 +0800
Subject: [PATCH] Correctly handle comments in sepolicy.rule

---
 native/src/sepolicy/lib.rs       | 4 ----
 native/src/sepolicy/statement.rs | 8 ++++----
 2 files changed, 4 insertions(+), 8 deletions(-)

diff --git a/native/src/sepolicy/lib.rs b/native/src/sepolicy/lib.rs
index 317ee6405..a2680c705 100644
--- a/native/src/sepolicy/lib.rs
+++ b/native/src/sepolicy/lib.rs
@@ -118,10 +118,6 @@ impl SepolicyExt for sepolicy {
 
     fn load_rules_from_reader<T: BufRead>(mut self: Pin<&mut sepolicy>, reader: &mut T) {
         reader.foreach_lines(|line| {
-            let line = line.trim();
-            if line.is_empty() {
-                return true;
-            }
             parse_statement(self.as_mut(), line);
             true
         });
diff --git a/native/src/sepolicy/statement.rs b/native/src/sepolicy/statement.rs
index 014e94124..f5108867d 100644
--- a/native/src/sepolicy/statement.rs
+++ b/native/src/sepolicy/statement.rs
@@ -3,7 +3,6 @@ use std::io::stderr;
 use std::{iter::Peekable, pin::Pin, vec::IntoIter};
 
 use base::{error, warn, FmtAdaptor};
-
 use crate::ffi::Xperm;
 use crate::sepolicy;
 
@@ -436,15 +435,16 @@ fn extract_token<'a>(s: &'a str, tokens: &mut Vec<Token<'a>>) {
 fn tokenize_statement(statement: &str) -> Vec<Token> {
     let mut tokens = Vec::new();
     for s in statement.split_whitespace() {
-        if s.starts_with('#') {
-            break;
-        }
         extract_token(s, &mut tokens);
     }
     tokens
 }
 
 pub fn parse_statement(sepolicy: Pin<&mut sepolicy>, statement: &str) {
+    let statement = statement.trim();
+    if statement.is_empty() || statement.starts_with('#') {
+        return;
+    }
     let mut tokens = tokenize_statement(statement).into_iter().peekable();
     let result = exec_statement(sepolicy, &mut tokens);
     if let Err(e) = result {
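
Below is a minimal, self-contained Rust sketch of the comment handling this patch converges on: each line is trimmed, and blank lines and whole-line '#' comments are skipped by the statement parser itself, while the tokenizer no longer does any comment handling of its own. Like the patch, the sketch only treats whole-line comments specially. The names Statement, tokenize, and parse_line are hypothetical stand-ins so the example compiles on its own; they are not the Magisk API.

// Hedged sketch only: Statement and tokenize are illustrative stand-ins,
// not Magisk's real types or functions.
#[derive(Debug)]
struct Statement(Vec<String>);

// Whitespace-only tokenizer with no comment logic, mirroring the removal of
// the '#' check from tokenize_statement in the patch.
fn tokenize(statement: &str) -> Vec<String> {
    statement.split_whitespace().map(|s| s.to_string()).collect()
}

// Mirrors the new shape of parse_statement: trim and filter first, tokenize second.
fn parse_line(line: &str) -> Option<Statement> {
    let line = line.trim();
    if line.is_empty() || line.starts_with('#') {
        return None; // blank line or whole-line comment: nothing to parse
    }
    Some(Statement(tokenize(line)))
}

fn main() {
    // Sample rule lines; the statement contents are illustrative only.
    let rules = "\
# grant magisk extra permissions
allow magisk * * *

   # indented comments are skipped too, thanks to trim()
type magisk domain";

    for line in rules.lines() {
        match parse_line(line) {
            Some(stmt) => println!("parsed:  {:?}", stmt),
            None => println!("skipped: {:?}", line),
        }
    }
}

With this shape, callers such as load_rules_from_reader no longer need to pre-trim or pre-filter lines before handing them to the parser, which is why the trim/empty check is dropped from lib.rs in this patch.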