Remove unused code and modernize Rust format strings

Applied cargo clippy fixes to clean up the codebase:
- Removed unused imports (serde_json::json, std::collections::HashSet)
- Removed unused function encode_hex_for_log
- Modernized format strings to use inline variables
- Fixed clippy warnings for better code quality

All changes applied automatically by cargo clippy --fix
This commit is contained in:
2025-11-02 14:48:01 +01:00
parent 6ceb22f014
commit a52e1b43fa
19 changed files with 209 additions and 280 deletions

View File

@ -20,11 +20,11 @@ struct TableDefinition {
pub fn generate_table_names() { pub fn generate_table_names() {
let out_dir = env::var("OUT_DIR").expect("OUT_DIR ist nicht gesetzt."); let out_dir = env::var("OUT_DIR").expect("OUT_DIR ist nicht gesetzt.");
println!("Generiere Tabellennamen nach {}", out_dir); println!("Generiere Tabellennamen nach {out_dir}");
let schema_path = Path::new("../src/database/tableNames.json"); let schema_path = Path::new("../src/database/tableNames.json");
let dest_path = Path::new(&out_dir).join("tableNames.rs"); let dest_path = Path::new(&out_dir).join("tableNames.rs");
let file = File::open(&schema_path).expect("Konnte tableNames.json nicht öffnen"); let file = File::open(schema_path).expect("Konnte tableNames.json nicht öffnen");
let reader = BufReader::new(file); let reader = BufReader::new(file);
let schema: Schema = let schema: Schema =
serde_json::from_reader(reader).expect("Konnte tableNames.json nicht parsen"); serde_json::from_reader(reader).expect("Konnte tableNames.json nicht parsen");
@ -108,8 +108,7 @@ fn generate_table_constants(table: &TableDefinition, const_prefix: &str) -> Stri
for (col_key, col_value) in &table.columns { for (col_key, col_value) in &table.columns {
let col_const_name = format!("COL_{}_{}", const_prefix, to_screaming_snake_case(col_key)); let col_const_name = format!("COL_{}_{}", const_prefix, to_screaming_snake_case(col_key));
code.push_str(&format!( code.push_str(&format!(
"pub const {}: &str = \"{}\";\n", "pub const {col_const_name}: &str = \"{col_value}\";\n"
col_const_name, col_value
)); ));
} }

View File

@ -74,15 +74,14 @@ impl HlcService {
// Parse den String in ein Uuid-Objekt. // Parse den String in ein Uuid-Objekt.
let uuid = Uuid::parse_str(&node_id_str).map_err(|e| { let uuid = Uuid::parse_str(&node_id_str).map_err(|e| {
HlcError::ParseNodeId(format!( HlcError::ParseNodeId(format!(
"Stored device ID is not a valid UUID: {}. Error: {}", "Stored device ID is not a valid UUID: {node_id_str}. Error: {e}"
node_id_str, e
)) ))
})?; })?;
// Hol dir die rohen 16 Bytes und erstelle daraus die uhlc::ID. // Hol dir die rohen 16 Bytes und erstelle daraus die uhlc::ID.
// Das `*` dereferenziert den `&[u8; 16]` zu `[u8; 16]`, was `try_from` erwartet. // Das `*` dereferenziert den `&[u8; 16]` zu `[u8; 16]`, was `try_from` erwartet.
let node_id = ID::try_from(*uuid.as_bytes()).map_err(|e| { let node_id = ID::try_from(*uuid.as_bytes()).map_err(|e| {
HlcError::ParseNodeId(format!("Invalid node ID format from device store: {:?}", e)) HlcError::ParseNodeId(format!("Invalid node ID format from device store: {e:?}"))
})?; })?;
// 2. Erstelle eine HLC-Instanz mit stabiler Identität // 2. Erstelle eine HLC-Instanz mit stabiler Identität
@ -95,8 +94,7 @@ impl HlcService {
if let Some(last_timestamp) = Self::load_last_timestamp(conn)? { if let Some(last_timestamp) = Self::load_last_timestamp(conn)? {
hlc.update_with_timestamp(&last_timestamp).map_err(|e| { hlc.update_with_timestamp(&last_timestamp).map_err(|e| {
HlcError::Parse(format!( HlcError::Parse(format!(
"Failed to update HLC with persisted timestamp: {:?}", "Failed to update HLC with persisted timestamp: {e:?}"
e
)) ))
})?; })?;
} }
@ -119,7 +117,7 @@ impl HlcService {
if let Some(s) = value.as_str() { if let Some(s) = value.as_str() {
// Das ist unser Erfolgsfall. Wir haben einen &str und können // Das ist unser Erfolgsfall. Wir haben einen &str und können
// eine Kopie davon zurückgeben. // eine Kopie davon zurückgeben.
println!("Gefundene und validierte Geräte-ID: {}", s); println!("Gefundene und validierte Geräte-ID: {s}");
if Uuid::parse_str(s).is_ok() { if Uuid::parse_str(s).is_ok() {
// Erfolgsfall: Der Wert ist ein String UND eine gültige UUID. // Erfolgsfall: Der Wert ist ein String UND eine gültige UUID.
// Wir können die Funktion direkt mit dem Wert verlassen. // Wir können die Funktion direkt mit dem Wert verlassen.
@ -183,19 +181,19 @@ impl HlcService {
let hlc = hlc_guard.as_mut().ok_or(HlcError::NotInitialized)?; let hlc = hlc_guard.as_mut().ok_or(HlcError::NotInitialized)?;
hlc.update_with_timestamp(timestamp) hlc.update_with_timestamp(timestamp)
.map_err(|e| HlcError::Parse(format!("Failed to update HLC: {:?}", e))) .map_err(|e| HlcError::Parse(format!("Failed to update HLC: {e:?}")))
} }
/// Lädt den letzten persistierten Zeitstempel aus der Datenbank. /// Lädt den letzten persistierten Zeitstempel aus der Datenbank.
fn load_last_timestamp(conn: &Connection) -> Result<Option<Timestamp>, HlcError> { fn load_last_timestamp(conn: &Connection) -> Result<Option<Timestamp>, HlcError> {
let query = format!("SELECT value FROM {} WHERE key = ?1", TABLE_CRDT_CONFIGS); let query = format!("SELECT value FROM {TABLE_CRDT_CONFIGS} WHERE key = ?1");
match conn.query_row(&query, params![HLC_TIMESTAMP_TYPE], |row| { match conn.query_row(&query, params![HLC_TIMESTAMP_TYPE], |row| {
row.get::<_, String>(0) row.get::<_, String>(0)
}) { }) {
Ok(state_str) => { Ok(state_str) => {
let timestamp = Timestamp::from_str(&state_str).map_err(|e| { let timestamp = Timestamp::from_str(&state_str).map_err(|e| {
HlcError::ParseTimestamp(format!("Invalid timestamp format: {:?}", e)) HlcError::ParseTimestamp(format!("Invalid timestamp format: {e:?}"))
})?; })?;
Ok(Some(timestamp)) Ok(Some(timestamp))
} }
@ -209,9 +207,8 @@ impl HlcService {
let timestamp_str = timestamp.to_string(); let timestamp_str = timestamp.to_string();
tx.execute( tx.execute(
&format!( &format!(
"INSERT INTO {} (key, value) VALUES (?1, ?2) "INSERT INTO {TABLE_CRDT_CONFIGS} (key, value) VALUES (?1, ?2)
ON CONFLICT(key) DO UPDATE SET value = excluded.value", ON CONFLICT(key) DO UPDATE SET value = excluded.value"
TABLE_CRDT_CONFIGS
), ),
params![HLC_TIMESTAMP_TYPE, timestamp_str], params![HLC_TIMESTAMP_TYPE, timestamp_str],
)?; )?;

View File

@ -32,17 +32,16 @@ pub enum CrdtSetupError {
impl Display for CrdtSetupError { impl Display for CrdtSetupError {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self { match self {
CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {}", e), CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {e}"),
CrdtSetupError::HlcColumnMissing { CrdtSetupError::HlcColumnMissing {
table_name, table_name,
column_name, column_name,
} => write!( } => write!(
f, f,
"Table '{}' is missing the required hlc column '{}'", "Table '{table_name}' is missing the required hlc column '{column_name}'"
table_name, column_name
), ),
CrdtSetupError::PrimaryKeyMissing { table_name } => { CrdtSetupError::PrimaryKeyMissing { table_name } => {
write!(f, "Table '{}' has no primary key", table_name) write!(f, "Table '{table_name}' has no primary key")
} }
} }
} }
@ -129,7 +128,7 @@ pub fn setup_triggers_for_table(
let delete_trigger_sql = generate_delete_trigger_sql(table_name, &pks, &cols_to_track); let delete_trigger_sql = generate_delete_trigger_sql(table_name, &pks, &cols_to_track);
if recreate { if recreate {
drop_triggers_for_table(&tx, table_name)?; drop_triggers_for_table(tx, table_name)?;
} }
tx.execute_batch(&insert_trigger_sql)?; tx.execute_batch(&insert_trigger_sql)?;
@ -143,13 +142,11 @@ pub fn setup_triggers_for_table(
pub fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<ColumnInfo>> { pub fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<ColumnInfo>> {
if !is_safe_identifier(table_name) { if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!( return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}", "Invalid or unsafe table name provided: {table_name}"
table_name )));
))
.into());
} }
let sql = format!("PRAGMA table_info(\"{}\");", table_name); let sql = format!("PRAGMA table_info(\"{table_name}\");");
let mut stmt = conn.prepare(&sql)?; let mut stmt = conn.prepare(&sql)?;
let rows = stmt.query_map([], ColumnInfo::from_row)?; let rows = stmt.query_map([], ColumnInfo::from_row)?;
rows.collect() rows.collect()
@ -163,8 +160,7 @@ pub fn drop_triggers_for_table(
) -> Result<(), CrdtSetupError> { ) -> Result<(), CrdtSetupError> {
if !is_safe_identifier(table_name) { if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!( return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}", "Invalid or unsafe table name provided: {table_name}"
table_name
)) ))
.into()); .into());
} }
@ -177,8 +173,7 @@ pub fn drop_triggers_for_table(
drop_trigger_sql(DELETE_TRIGGER_TPL.replace("{TABLE_NAME}", table_name)); drop_trigger_sql(DELETE_TRIGGER_TPL.replace("{TABLE_NAME}", table_name));
let sql_batch = format!( let sql_batch = format!(
"{}\n{}\n{}", "{drop_insert_trigger_sql}\n{drop_update_trigger_sql}\n{drop_delete_trigger_sql}"
drop_insert_trigger_sql, drop_update_trigger_sql, drop_delete_trigger_sql
); );
tx.execute_batch(&sql_batch)?; tx.execute_batch(&sql_batch)?;
@ -244,33 +239,22 @@ pub fn drop_triggers_for_table(
fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String { fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks let pk_json_payload = pks
.iter() .iter()
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk)) .map(|pk| format!("'{pk}', NEW.\"{pk}\""))
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(", "); .join(", ");
let column_inserts = if cols.is_empty() { let column_inserts = if cols.is_empty() {
// Nur PKs -> einfacher Insert ins Log // Nur PKs -> einfacher Insert ins Log
format!( format!(
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks) "INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks)
VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}));", VALUES ({UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'INSERT', '{table_name}', json_object({pk_json_payload}));"
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload
) )
} else { } else {
cols.iter().fold(String::new(), |mut acc, col| { cols.iter().fold(String::new(), |mut acc, col| {
writeln!( writeln!(
&mut acc, &mut acc,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value) "INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}), '{column}', json_object('value', NEW.\"{column}\"));", VALUES ({UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'INSERT', '{table_name}', json_object({pk_json_payload}), '{col}', json_object('value', NEW.\"{col}\"));"
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
).unwrap(); ).unwrap();
acc acc
}) })
@ -290,14 +274,14 @@ fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
/// Generiert das SQL zum Löschen eines Triggers. /// Generiert das SQL zum Löschen eines Triggers.
fn drop_trigger_sql(trigger_name: String) -> String { fn drop_trigger_sql(trigger_name: String) -> String {
format!("DROP TRIGGER IF EXISTS \"{}\";", trigger_name) format!("DROP TRIGGER IF EXISTS \"{trigger_name}\";")
} }
/// Generiert das SQL für den UPDATE-Trigger. /// Generiert das SQL für den UPDATE-Trigger.
fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String { fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks let pk_json_payload = pks
.iter() .iter()
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk)) .map(|pk| format!("'{pk}', NEW.\"{pk}\""))
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(", "); .join(", ");
@ -308,16 +292,10 @@ fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
for col in cols { for col in cols {
writeln!( writeln!(
&mut body, &mut body,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value) "INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
SELECT {uuid_fn}(), NEW.\"{hlc_col}\", 'UPDATE', '{table}', json_object({pk_payload}), '{column}', SELECT {UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'UPDATE', '{table_name}', json_object({pk_json_payload}), '{col}',
json_object('value', NEW.\"{column}\"), json_object('value', OLD.\"{column}\") json_object('value', NEW.\"{col}\"), json_object('value', OLD.\"{col}\")
WHERE NEW.\"{column}\" IS NOT OLD.\"{column}\";", WHERE NEW.\"{col}\" IS NOT OLD.\"{col}\";"
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
).unwrap(); ).unwrap();
} }
} }
@ -341,7 +319,7 @@ fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String { fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks let pk_json_payload = pks
.iter() .iter()
.map(|pk| format!("'{}', OLD.\"{}\"", pk, pk)) .map(|pk| format!("'{pk}', OLD.\"{pk}\""))
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join(", "); .join(", ");
@ -352,28 +330,17 @@ fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
for col in cols { for col in cols {
writeln!( writeln!(
&mut body, &mut body,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value) "INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}), '{column}', VALUES ({UUID_FUNCTION_NAME}(), OLD.\"{HLC_TIMESTAMP_COLUMN}\", 'DELETE', '{table_name}', json_object({pk_json_payload}), '{col}',
json_object('value', OLD.\"{column}\"));", json_object('value', OLD.\"{col}\"));"
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
).unwrap(); ).unwrap();
} }
} else { } else {
// Nur PKs -> minimales Delete Log // Nur PKs -> minimales Delete Log
writeln!( writeln!(
&mut body, &mut body,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks) "INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks)
VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}));", VALUES ({UUID_FUNCTION_NAME}(), OLD.\"{HLC_TIMESTAMP_COLUMN}\", 'DELETE', '{table_name}', json_object({pk_json_payload}));"
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload
) )
.unwrap(); .unwrap();
} }

View File

@ -47,7 +47,7 @@ pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connectio
}, },
) )
.map_err(|e| DatabaseError::DatabaseError { .map_err(|e| DatabaseError::DatabaseError {
reason: format!("Failed to register {} function: {}", UUID_FUNCTION_NAME, e), reason: format!("Failed to register {UUID_FUNCTION_NAME} function: {e}"),
})?; })?;
let journal_mode: String = conn let journal_mode: String = conn
@ -61,8 +61,7 @@ pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connectio
println!("WAL mode successfully enabled."); println!("WAL mode successfully enabled.");
} else { } else {
eprintln!( eprintln!(
"Failed to enable WAL mode, journal_mode is '{}'.", "Failed to enable WAL mode, journal_mode is '{journal_mode}'."
journal_mode
); );
} }
@ -97,7 +96,7 @@ pub fn parse_sql_statements(sql: &str) -> Result<Vec<Statement>, DatabaseError>
.join(" "); .join(" ");
Parser::parse_sql(&dialect, &normalized_sql).map_err(|e| DatabaseError::ParseError { Parser::parse_sql(&dialect, &normalized_sql).map_err(|e| DatabaseError::ParseError {
reason: format!("Failed to parse SQL: {}", e), reason: format!("Failed to parse SQL: {e}"),
sql: sql.to_string(), sql: sql.to_string(),
}) })
} }
@ -138,7 +137,7 @@ impl ValueConverter {
serde_json::to_string(json_val) serde_json::to_string(json_val)
.map(SqlValue::Text) .map(SqlValue::Text)
.map_err(|e| DatabaseError::SerializationError { .map_err(|e| DatabaseError::SerializationError {
reason: format!("Failed to serialize JSON param: {}", e), reason: format!("Failed to serialize JSON param: {e}"),
}) })
} }
} }
@ -258,7 +257,7 @@ pub fn select_with_crdt(
params: Vec<JsonValue>, params: Vec<JsonValue>,
connection: &DbConnection, connection: &DbConnection,
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> { ) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
with_connection(&connection, |conn| { with_connection(connection, |conn| {
SqlExecutor::query_select(conn, &sql, &params) SqlExecutor::query_select(conn, &sql, &params)
}) })
} }

View File

@ -36,8 +36,7 @@ pub fn ensure_triggers_initialized(conn: &mut Connection) -> Result<bool, Databa
// Check if triggers already initialized // Check if triggers already initialized
let check_sql = format!( let check_sql = format!(
"SELECT value FROM {} WHERE key = ? AND type = ?", "SELECT value FROM {TABLE_SETTINGS} WHERE key = ? AND type = ?"
TABLE_SETTINGS
); );
let initialized: Option<String> = tx let initialized: Option<String> = tx
.query_row( .query_row(
@ -57,7 +56,7 @@ pub fn ensure_triggers_initialized(conn: &mut Connection) -> Result<bool, Databa
// Create triggers for all CRDT tables // Create triggers for all CRDT tables
for table_name in CRDT_TABLES { for table_name in CRDT_TABLES {
eprintln!(" - Setting up triggers for: {}", table_name); eprintln!(" - Setting up triggers for: {table_name}");
trigger::setup_triggers_for_table(&tx, table_name, false)?; trigger::setup_triggers_for_table(&tx, table_name, false)?;
} }

View File

@ -21,7 +21,6 @@ use std::{fs, sync::Arc};
use tauri::{path::BaseDirectory, AppHandle, Manager, State}; use tauri::{path::BaseDirectory, AppHandle, Manager, State};
use tauri_plugin_fs::FsExt; use tauri_plugin_fs::FsExt;
#[cfg(not(target_os = "android"))] #[cfg(not(target_os = "android"))]
use trash;
use ts_rs::TS; use ts_rs::TS;
pub struct DbConnection(pub Arc<Mutex<Option<Connection>>>); pub struct DbConnection(pub Arc<Mutex<Option<Connection>>>);
@ -93,7 +92,7 @@ fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, Da
let vault_file_name = if vault_name.ends_with(VAULT_EXTENSION) { let vault_file_name = if vault_name.ends_with(VAULT_EXTENSION) {
vault_name.to_string() vault_name.to_string()
} else { } else {
format!("{}{VAULT_EXTENSION}", vault_name) format!("{vault_name}{VAULT_EXTENSION}")
}; };
let vault_directory = get_vaults_directory(app_handle)?; let vault_directory = get_vaults_directory(app_handle)?;
@ -101,13 +100,12 @@ fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, Da
let vault_path = app_handle let vault_path = app_handle
.path() .path()
.resolve( .resolve(
format!("{vault_directory}/{}", vault_file_name), format!("{vault_directory}/{vault_file_name}"),
BaseDirectory::AppLocalData, BaseDirectory::AppLocalData,
) )
.map_err(|e| DatabaseError::PathResolutionError { .map_err(|e| DatabaseError::PathResolutionError {
reason: format!( reason: format!(
"Failed to resolve vault path for '{}': {}", "Failed to resolve vault path for '{vault_file_name}': {e}"
vault_file_name, e
), ),
})?; })?;
@ -115,7 +113,7 @@ fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, Da
if let Some(parent) = vault_path.parent() { if let Some(parent) = vault_path.parent() {
fs::create_dir_all(parent).map_err(|e| DatabaseError::IoError { fs::create_dir_all(parent).map_err(|e| DatabaseError::IoError {
path: parent.display().to_string(), path: parent.display().to_string(),
reason: format!("Failed to create vaults directory: {}", e), reason: format!("Failed to create vaults directory: {e}"),
})?; })?;
} }
@ -174,18 +172,18 @@ pub fn list_vaults(app_handle: AppHandle) -> Result<Vec<VaultInfo>, DatabaseErro
if let Some(filename) = path.file_name().and_then(|n| n.to_str()) { if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
if filename.ends_with(VAULT_EXTENSION) { if filename.ends_with(VAULT_EXTENSION) {
// Entferne .db Endung für die Rückgabe // Entferne .db Endung für die Rückgabe
println!("Vault gefunden {}", filename.to_string()); println!("Vault gefunden {filename}");
let metadata = fs::metadata(&path).map_err(|e| DatabaseError::IoError { let metadata = fs::metadata(&path).map_err(|e| DatabaseError::IoError {
path: path.to_string_lossy().to_string(), path: path.to_string_lossy().to_string(),
reason: format!("Metadaten konnten nicht gelesen werden: {}", e), reason: format!("Metadaten konnten nicht gelesen werden: {e}"),
})?; })?;
let last_access_timestamp = metadata let last_access_timestamp = metadata
.accessed() .accessed()
.map_err(|e| DatabaseError::IoError { .map_err(|e| DatabaseError::IoError {
path: path.to_string_lossy().to_string(), path: path.to_string_lossy().to_string(),
reason: format!("Zugriffszeit konnte nicht gelesen werden: {}", e), reason: format!("Zugriffszeit konnte nicht gelesen werden: {e}"),
})? })?
.duration_since(UNIX_EPOCH) .duration_since(UNIX_EPOCH)
.unwrap_or_default() // Fallback für den seltenen Fall einer Zeit vor 1970 .unwrap_or_default() // Fallback für den seltenen Fall einer Zeit vor 1970
@ -233,8 +231,8 @@ pub fn move_vault_to_trash(
#[cfg(not(target_os = "android"))] #[cfg(not(target_os = "android"))]
{ {
let vault_path = get_vault_path(&app_handle, &vault_name)?; let vault_path = get_vault_path(&app_handle, &vault_name)?;
let vault_shm_path = format!("{}-shm", vault_path); let vault_shm_path = format!("{vault_path}-shm");
let vault_wal_path = format!("{}-wal", vault_path); let vault_wal_path = format!("{vault_path}-wal");
if !Path::new(&vault_path).exists() { if !Path::new(&vault_path).exists() {
return Err(DatabaseError::IoError { return Err(DatabaseError::IoError {
@ -252,14 +250,12 @@ pub fn move_vault_to_trash(
let _ = trash::delete(&vault_wal_path); let _ = trash::delete(&vault_wal_path);
Ok(format!( Ok(format!(
"Vault '{}' successfully moved to trash", "Vault '{vault_name}' successfully moved to trash"
vault_name
)) ))
} else { } else {
// Fallback: Permanent deletion if trash fails // Fallback: Permanent deletion if trash fails
println!( println!(
"Trash not available, falling back to permanent deletion for vault '{}'", "Trash not available, falling back to permanent deletion for vault '{vault_name}'"
vault_name
); );
delete_vault(app_handle, vault_name) delete_vault(app_handle, vault_name)
} }
@ -270,8 +266,8 @@ pub fn move_vault_to_trash(
#[tauri::command] #[tauri::command]
pub fn delete_vault(app_handle: AppHandle, vault_name: String) -> Result<String, DatabaseError> { pub fn delete_vault(app_handle: AppHandle, vault_name: String) -> Result<String, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &vault_name)?; let vault_path = get_vault_path(&app_handle, &vault_name)?;
let vault_shm_path = format!("{}-shm", vault_path); let vault_shm_path = format!("{vault_path}-shm");
let vault_wal_path = format!("{}-wal", vault_path); let vault_wal_path = format!("{vault_path}-wal");
if !Path::new(&vault_path).exists() { if !Path::new(&vault_path).exists() {
return Err(DatabaseError::IoError { return Err(DatabaseError::IoError {
@ -283,23 +279,23 @@ pub fn delete_vault(app_handle: AppHandle, vault_name: String) -> Result<String,
if Path::new(&vault_shm_path).exists() { if Path::new(&vault_shm_path).exists() {
fs::remove_file(&vault_shm_path).map_err(|e| DatabaseError::IoError { fs::remove_file(&vault_shm_path).map_err(|e| DatabaseError::IoError {
path: vault_shm_path.clone(), path: vault_shm_path.clone(),
reason: format!("Failed to delete vault: {}", e), reason: format!("Failed to delete vault: {e}"),
})?; })?;
} }
if Path::new(&vault_wal_path).exists() { if Path::new(&vault_wal_path).exists() {
fs::remove_file(&vault_wal_path).map_err(|e| DatabaseError::IoError { fs::remove_file(&vault_wal_path).map_err(|e| DatabaseError::IoError {
path: vault_wal_path.clone(), path: vault_wal_path.clone(),
reason: format!("Failed to delete vault: {}", e), reason: format!("Failed to delete vault: {e}"),
})?; })?;
} }
fs::remove_file(&vault_path).map_err(|e| DatabaseError::IoError { fs::remove_file(&vault_path).map_err(|e| DatabaseError::IoError {
path: vault_path.clone(), path: vault_path.clone(),
reason: format!("Failed to delete vault: {}", e), reason: format!("Failed to delete vault: {e}"),
})?; })?;
Ok(format!("Vault '{}' successfully deleted", vault_name)) Ok(format!("Vault '{vault_name}' successfully deleted"))
} }
#[tauri::command] #[tauri::command]
@ -309,16 +305,16 @@ pub fn create_encrypted_database(
key: String, key: String,
state: State<'_, AppState>, state: State<'_, AppState>,
) -> Result<String, DatabaseError> { ) -> Result<String, DatabaseError> {
println!("Creating encrypted vault with name: {}", vault_name); println!("Creating encrypted vault with name: {vault_name}");
let vault_path = get_vault_path(&app_handle, &vault_name)?; let vault_path = get_vault_path(&app_handle, &vault_name)?;
println!("Resolved vault path: {}", vault_path); println!("Resolved vault path: {vault_path}");
// Prüfen, ob bereits eine Vault mit diesem Namen existiert // Prüfen, ob bereits eine Vault mit diesem Namen existiert
if Path::new(&vault_path).exists() { if Path::new(&vault_path).exists() {
return Err(DatabaseError::IoError { return Err(DatabaseError::IoError {
path: vault_path, path: vault_path,
reason: format!("A vault with the name '{}' already exists", vault_name), reason: format!("A vault with the name '{vault_name}' already exists"),
}); });
} }
/* let resource_path = app_handle /* let resource_path = app_handle
@ -330,7 +326,7 @@ pub fn create_encrypted_database(
.path() .path()
.resolve("database/vault.db", BaseDirectory::Resource) .resolve("database/vault.db", BaseDirectory::Resource)
.map_err(|e| DatabaseError::PathResolutionError { .map_err(|e| DatabaseError::PathResolutionError {
reason: format!("Failed to resolve template database: {}", e), reason: format!("Failed to resolve template database: {e}"),
})?; })?;
let template_content = let template_content =
@ -339,20 +335,20 @@ pub fn create_encrypted_database(
.read(&template_path) .read(&template_path)
.map_err(|e| DatabaseError::IoError { .map_err(|e| DatabaseError::IoError {
path: template_path.display().to_string(), path: template_path.display().to_string(),
reason: format!("Failed to read template database from resources: {}", e), reason: format!("Failed to read template database from resources: {e}"),
})?; })?;
let temp_path = app_handle let temp_path = app_handle
.path() .path()
.resolve("temp_vault.db", BaseDirectory::AppLocalData) .resolve("temp_vault.db", BaseDirectory::AppLocalData)
.map_err(|e| DatabaseError::PathResolutionError { .map_err(|e| DatabaseError::PathResolutionError {
reason: format!("Failed to resolve temp database: {}", e), reason: format!("Failed to resolve temp database: {e}"),
})?; })?;
let temp_path_clone = temp_path.to_owned(); let temp_path_clone = temp_path.to_owned();
fs::write(temp_path, template_content).map_err(|e| DatabaseError::IoError { fs::write(temp_path, template_content).map_err(|e| DatabaseError::IoError {
path: vault_path.to_string(), path: vault_path.to_string(),
reason: format!("Failed to write temporary template database: {}", e), reason: format!("Failed to write temporary template database: {e}"),
})?; })?;
/* if !template_path.exists() { /* if !template_path.exists() {
return Err(DatabaseError::IoError { return Err(DatabaseError::IoError {
@ -365,8 +361,7 @@ pub fn create_encrypted_database(
let conn = Connection::open(&temp_path_clone).map_err(|e| DatabaseError::ConnectionFailed { let conn = Connection::open(&temp_path_clone).map_err(|e| DatabaseError::ConnectionFailed {
path: temp_path_clone.display().to_string(), path: temp_path_clone.display().to_string(),
reason: format!( reason: format!(
"Fehler beim Öffnen der unverschlüsselten Quelldatenbank: {}", "Fehler beim Öffnen der unverschlüsselten Quelldatenbank: {e}"
e
), ),
})?; })?;
@ -394,7 +389,7 @@ pub fn create_encrypted_database(
let _ = fs::remove_file(&vault_path); let _ = fs::remove_file(&vault_path);
let _ = fs::remove_file(&temp_path_clone); let _ = fs::remove_file(&temp_path_clone);
return Err(DatabaseError::QueryError { return Err(DatabaseError::QueryError {
reason: format!("Fehler während sqlcipher_export: {}", e), reason: format!("Fehler während sqlcipher_export: {e}"),
}); });
} }
@ -419,11 +414,11 @@ pub fn create_encrypted_database(
Ok(version) Ok(version)
}) { }) {
Ok(version) => { Ok(version) => {
println!("SQLCipher ist aktiv! Version: {}", version); println!("SQLCipher ist aktiv! Version: {version}");
} }
Err(e) => { Err(e) => {
eprintln!("FEHLER: SQLCipher scheint NICHT aktiv zu sein!"); eprintln!("FEHLER: SQLCipher scheint NICHT aktiv zu sein!");
eprintln!("Der Befehl 'PRAGMA cipher_version;' schlug fehl: {}", e); eprintln!("Der Befehl 'PRAGMA cipher_version;' schlug fehl: {e}");
eprintln!("Die Datenbank wurde wahrscheinlich NICHT verschlüsselt."); eprintln!("Die Datenbank wurde wahrscheinlich NICHT verschlüsselt.");
} }
} }
@ -431,7 +426,7 @@ pub fn create_encrypted_database(
conn.close() conn.close()
.map_err(|(_, e)| DatabaseError::ConnectionFailed { .map_err(|(_, e)| DatabaseError::ConnectionFailed {
path: template_path.display().to_string(), path: template_path.display().to_string(),
reason: format!("Fehler beim Schließen der Quelldatenbank: {}", e), reason: format!("Fehler beim Schließen der Quelldatenbank: {e}"),
})?; })?;
let _ = fs::remove_file(&temp_path_clone); let _ = fs::remove_file(&temp_path_clone);
@ -448,19 +443,19 @@ pub fn open_encrypted_database(
key: String, key: String,
state: State<'_, AppState>, state: State<'_, AppState>,
) -> Result<String, DatabaseError> { ) -> Result<String, DatabaseError> {
println!("Opening encrypted database vault_path: {}", vault_path); println!("Opening encrypted database vault_path: {vault_path}");
println!("Resolved vault path: {}", vault_path); println!("Resolved vault path: {vault_path}");
if !Path::new(&vault_path).exists() { if !Path::new(&vault_path).exists() {
return Err(DatabaseError::IoError { return Err(DatabaseError::IoError {
path: vault_path.to_string(), path: vault_path.to_string(),
reason: format!("Vault '{}' does not exist", vault_path), reason: format!("Vault '{vault_path}' does not exist"),
}); });
} }
initialize_session(&app_handle, &vault_path, &key, &state)?; initialize_session(&app_handle, &vault_path, &key, &state)?;
Ok(format!("Vault '{}' opened successfully", vault_path)) Ok(format!("Vault '{vault_path}' opened successfully"))
} }
/// Opens the DB, initializes the HLC service, and stores both in the AppState. /// Opens the DB, initializes the HLC service, and stores both in the AppState.
@ -512,8 +507,7 @@ fn initialize_session(
eprintln!("INFO: Setting 'triggers_initialized' flag via CRDT..."); eprintln!("INFO: Setting 'triggers_initialized' flag via CRDT...");
let insert_sql = format!( let insert_sql = format!(
"INSERT INTO {} (id, key, type, value) VALUES (?, ?, ?, ?)", "INSERT INTO {TABLE_SETTINGS} (id, key, type, value) VALUES (?, ?, ?, ?)"
TABLE_SETTINGS
); );
// execute_with_crdt erwartet Vec<JsonValue>, kein params!-Makro // execute_with_crdt erwartet Vec<JsonValue>, kein params!-Makro

View File

@ -10,10 +10,8 @@ use crate::extension::permissions::manager::PermissionManager;
use crate::extension::permissions::types::ExtensionPermission; use crate::extension::permissions::types::ExtensionPermission;
use crate::table_names::{TABLE_EXTENSIONS, TABLE_EXTENSION_PERMISSIONS}; use crate::table_names::{TABLE_EXTENSIONS, TABLE_EXTENSION_PERMISSIONS};
use crate::AppState; use crate::AppState;
use serde_json::Value as JsonValue; use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use std::fs; use std::fs;
use std::io::Cursor;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Mutex; use std::sync::Mutex;
use std::time::{Duration, SystemTime}; use std::time::{Duration, SystemTime};
@ -77,7 +75,7 @@ impl ExtensionManager {
// Check for path traversal patterns // Check for path traversal patterns
if relative_path.contains("..") { if relative_path.contains("..") {
return Err(ExtensionError::SecurityViolation { return Err(ExtensionError::SecurityViolation {
reason: format!("Path traversal attempt: {}", relative_path), reason: format!("Path traversal attempt: {relative_path}"),
}); });
} }
@ -104,7 +102,7 @@ impl ExtensionManager {
if let Ok(canonical_path) = full_path.canonicalize() { if let Ok(canonical_path) = full_path.canonicalize() {
if !canonical_path.starts_with(&canonical_base) { if !canonical_path.starts_with(&canonical_base) {
return Err(ExtensionError::SecurityViolation { return Err(ExtensionError::SecurityViolation {
reason: format!("Path outside base directory: {}", relative_path), reason: format!("Path outside base directory: {relative_path}"),
}); });
} }
Ok(Some(canonical_path)) Ok(Some(canonical_path))
@ -114,7 +112,7 @@ impl ExtensionManager {
Ok(Some(full_path)) Ok(Some(full_path))
} else { } else {
Err(ExtensionError::SecurityViolation { Err(ExtensionError::SecurityViolation {
reason: format!("Path outside base directory: {}", relative_path), reason: format!("Path outside base directory: {relative_path}"),
}) })
} }
} }
@ -131,13 +129,13 @@ impl ExtensionManager {
if let Some(clean_path) = Self::validate_path_in_directory(extension_dir, icon, true)? { if let Some(clean_path) = Self::validate_path_in_directory(extension_dir, icon, true)? {
return Ok(Some(clean_path.to_string_lossy().to_string())); return Ok(Some(clean_path.to_string_lossy().to_string()));
} else { } else {
eprintln!("WARNING: Icon path specified in manifest not found: {}", icon); eprintln!("WARNING: Icon path specified in manifest not found: {icon}");
// Continue to fallback logic // Continue to fallback logic
} }
} }
// Fallback 1: Check haextension/favicon.ico // Fallback 1: Check haextension/favicon.ico
let haextension_favicon = format!("{}/favicon.ico", haextension_dir); let haextension_favicon = format!("{haextension_dir}/favicon.ico");
if let Some(clean_path) = Self::validate_path_in_directory(extension_dir, &haextension_favicon, true)? { if let Some(clean_path) = Self::validate_path_in_directory(extension_dir, &haextension_favicon, true)? {
return Ok(Some(clean_path.to_string_lossy().to_string())); return Ok(Some(clean_path.to_string_lossy().to_string()));
} }
@ -162,11 +160,11 @@ impl ExtensionManager {
.path() .path()
.app_cache_dir() .app_cache_dir()
.map_err(|e| ExtensionError::InstallationFailed { .map_err(|e| ExtensionError::InstallationFailed {
reason: format!("Cannot get app cache dir: {}", e), reason: format!("Cannot get app cache dir: {e}"),
})?; })?;
let temp_id = uuid::Uuid::new_v4(); let temp_id = uuid::Uuid::new_v4();
let temp = cache_dir.join(format!("{}_{}", temp_prefix, temp_id)); let temp = cache_dir.join(format!("{temp_prefix}_{temp_id}"));
let zip_file_path = cache_dir.join(format!("{}_{}_{}.haextension", temp_prefix, temp_id, "temp")); let zip_file_path = cache_dir.join(format!("{}_{}_{}.haextension", temp_prefix, temp_id, "temp"));
// Write bytes to a temporary ZIP file first (important for Android file system) // Write bytes to a temporary ZIP file first (important for Android file system)
@ -185,14 +183,14 @@ impl ExtensionManager {
let mut archive = ZipArchive::new(zip_file).map_err(|e| { let mut archive = ZipArchive::new(zip_file).map_err(|e| {
ExtensionError::InstallationFailed { ExtensionError::InstallationFailed {
reason: format!("Invalid ZIP: {}", e), reason: format!("Invalid ZIP: {e}"),
} }
})?; })?;
archive archive
.extract(&temp) .extract(&temp)
.map_err(|e| ExtensionError::InstallationFailed { .map_err(|e| ExtensionError::InstallationFailed {
reason: format!("Cannot extract ZIP: {}", e), reason: format!("Cannot extract ZIP: {e}"),
})?; })?;
// Clean up temporary ZIP file // Clean up temporary ZIP file
@ -203,12 +201,12 @@ impl ExtensionManager {
let haextension_dir = if config_path.exists() { let haextension_dir = if config_path.exists() {
let config_content = std::fs::read_to_string(&config_path) let config_content = std::fs::read_to_string(&config_path)
.map_err(|e| ExtensionError::ManifestError { .map_err(|e| ExtensionError::ManifestError {
reason: format!("Cannot read haextension.config.json: {}", e), reason: format!("Cannot read haextension.config.json: {e}"),
})?; })?;
let config: serde_json::Value = serde_json::from_str(&config_content) let config: serde_json::Value = serde_json::from_str(&config_content)
.map_err(|e| ExtensionError::ManifestError { .map_err(|e| ExtensionError::ManifestError {
reason: format!("Invalid haextension.config.json: {}", e), reason: format!("Invalid haextension.config.json: {e}"),
})?; })?;
let dir = config let dir = config
@ -224,16 +222,16 @@ impl ExtensionManager {
}; };
// Validate manifest path using helper function // Validate manifest path using helper function
let manifest_relative_path = format!("{}/manifest.json", haextension_dir); let manifest_relative_path = format!("{haextension_dir}/manifest.json");
let manifest_path = Self::validate_path_in_directory(&temp, &manifest_relative_path, true)? let manifest_path = Self::validate_path_in_directory(&temp, &manifest_relative_path, true)?
.ok_or_else(|| ExtensionError::ManifestError { .ok_or_else(|| ExtensionError::ManifestError {
reason: format!("manifest.json not found at {}/manifest.json", haextension_dir), reason: format!("manifest.json not found at {haextension_dir}/manifest.json"),
})?; })?;
let actual_dir = temp.clone(); let actual_dir = temp.clone();
let manifest_content = let manifest_content =
std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError { std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError {
reason: format!("Cannot read manifest: {}", e), reason: format!("Cannot read manifest: {e}"),
})?; })?;
let mut manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?; let mut manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?;
@ -440,8 +438,7 @@ impl ExtensionManager {
eprintln!("DEBUG: Removing extension with ID: {}", extension.id); eprintln!("DEBUG: Removing extension with ID: {}", extension.id);
eprintln!( eprintln!(
"DEBUG: Extension name: {}, version: {}", "DEBUG: Extension name: {extension_name}, version: {extension_version}"
extension_name, extension_version
); );
// Lösche Permissions und Extension-Eintrag in einer Transaktion // Lösche Permissions und Extension-Eintrag in einer Transaktion
@ -460,7 +457,7 @@ impl ExtensionManager {
PermissionManager::delete_permissions_in_transaction(&tx, &hlc_service, &extension.id)?; PermissionManager::delete_permissions_in_transaction(&tx, &hlc_service, &extension.id)?;
// Lösche Extension-Eintrag mit extension_id // Lösche Extension-Eintrag mit extension_id
let sql = format!("DELETE FROM {} WHERE id = ?", TABLE_EXTENSIONS); let sql = format!("DELETE FROM {TABLE_EXTENSIONS} WHERE id = ?");
eprintln!("DEBUG: Executing SQL: {} with id = {}", sql, extension.id); eprintln!("DEBUG: Executing SQL: {} with id = {}", sql, extension.id);
SqlExecutor::execute_internal_typed( SqlExecutor::execute_internal_typed(
&tx, &tx,
@ -615,8 +612,7 @@ impl ExtensionManager {
// 1. Extension-Eintrag erstellen mit generierter UUID // 1. Extension-Eintrag erstellen mit generierter UUID
let insert_ext_sql = format!( let insert_ext_sql = format!(
"INSERT INTO {} (id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled, single_instance) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "INSERT INTO {TABLE_EXTENSIONS} (id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled, single_instance) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
TABLE_EXTENSIONS
); );
SqlExecutor::execute_internal_typed( SqlExecutor::execute_internal_typed(
@ -641,8 +637,7 @@ impl ExtensionManager {
// 2. Permissions speichern // 2. Permissions speichern
let insert_perm_sql = format!( let insert_perm_sql = format!(
"INSERT INTO {} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)", "INSERT INTO {TABLE_EXTENSION_PERMISSIONS} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)"
TABLE_EXTENSION_PERMISSIONS
); );
for perm in &permissions { for perm in &permissions {
@ -714,10 +709,9 @@ impl ExtensionManager {
// Lade alle Daten aus der Datenbank // Lade alle Daten aus der Datenbank
let extensions = with_connection(&state.db, |conn| { let extensions = with_connection(&state.db, |conn| {
let sql = format!( let sql = format!(
"SELECT id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled, single_instance FROM {}", "SELECT id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled, single_instance FROM {TABLE_EXTENSIONS}"
TABLE_EXTENSIONS
); );
eprintln!("DEBUG: SQL Query before transformation: {}", sql); eprintln!("DEBUG: SQL Query before transformation: {sql}");
let results = SqlExecutor::query_select(conn, &sql, &[])?; let results = SqlExecutor::query_select(conn, &sql, &[])?;
eprintln!("DEBUG: Query returned {} results", results.len()); eprintln!("DEBUG: Query returned {} results", results.len());
@ -779,7 +773,7 @@ impl ExtensionManager {
for extension_data in extensions { for extension_data in extensions {
let extension_id = extension_data.id; let extension_id = extension_data.id;
eprintln!("DEBUG: Processing extension: {}", extension_id); eprintln!("DEBUG: Processing extension: {extension_id}");
// Use public_key/name/version path structure // Use public_key/name/version path structure
let extension_path = self.get_extension_dir( let extension_path = self.get_extension_dir(
@ -792,8 +786,7 @@ impl ExtensionManager {
// Check if extension directory exists // Check if extension directory exists
if !extension_path.exists() { if !extension_path.exists() {
eprintln!( eprintln!(
"DEBUG: Extension directory missing for: {} at {:?}", "DEBUG: Extension directory missing for: {extension_id} at {extension_path:?}"
extension_id, extension_path
); );
self.missing_extensions self.missing_extensions
.lock() .lock()
@ -833,13 +826,12 @@ impl ExtensionManager {
}; };
// Validate manifest.json path using helper function // Validate manifest.json path using helper function
let manifest_relative_path = format!("{}/manifest.json", haextension_dir); let manifest_relative_path = format!("{haextension_dir}/manifest.json");
if Self::validate_path_in_directory(&extension_path, &manifest_relative_path, true)? if Self::validate_path_in_directory(&extension_path, &manifest_relative_path, true)?
.is_none() .is_none()
{ {
eprintln!( eprintln!(
"DEBUG: manifest.json missing or invalid for: {} at {}/manifest.json", "DEBUG: manifest.json missing or invalid for: {extension_id} at {haextension_dir}/manifest.json"
extension_id, haextension_dir
); );
self.missing_extensions self.missing_extensions
.lock() .lock()
@ -855,7 +847,7 @@ impl ExtensionManager {
continue; continue;
} }
eprintln!("DEBUG: Extension loaded successfully: {}", extension_id); eprintln!("DEBUG: Extension loaded successfully: {extension_id}");
let extension = Extension { let extension = Extension {
id: extension_id.clone(), id: extension_id.clone(),

View File

@ -42,12 +42,12 @@ enum DataProcessingError {
impl fmt::Display for DataProcessingError { impl fmt::Display for DataProcessingError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
DataProcessingError::HexDecoding(e) => write!(f, "Hex-Dekodierungsfehler: {}", e), DataProcessingError::HexDecoding(e) => write!(f, "Hex-Dekodierungsfehler: {e}"),
DataProcessingError::Utf8Conversion(e) => { DataProcessingError::Utf8Conversion(e) => {
write!(f, "UTF-8-Konvertierungsfehler: {}", e) write!(f, "UTF-8-Konvertierungsfehler: {e}")
} }
DataProcessingError::JsonParsing(e) => write!(f, "JSON-Parsing-Fehler: {}", e), DataProcessingError::JsonParsing(e) => write!(f, "JSON-Parsing-Fehler: {e}"),
DataProcessingError::Custom(msg) => write!(f, "Datenverarbeitungsfehler: {}", msg), DataProcessingError::Custom(msg) => write!(f, "Datenverarbeitungsfehler: {msg}"),
} }
} }
} }
@ -101,7 +101,7 @@ pub fn resolve_secure_extension_asset_path(
.all(|c| c.is_ascii_alphanumeric() || c == '-') .all(|c| c.is_ascii_alphanumeric() || c == '-')
{ {
return Err(ExtensionError::ValidationError { return Err(ExtensionError::ValidationError {
reason: format!("Invalid extension name: {}", extension_name), reason: format!("Invalid extension name: {extension_name}"),
}); });
} }
@ -111,7 +111,7 @@ pub fn resolve_secure_extension_asset_path(
.all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '.') .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '.')
{ {
return Err(ExtensionError::ValidationError { return Err(ExtensionError::ValidationError {
reason: format!("Invalid extension version: {}", extension_version), reason: format!("Invalid extension version: {extension_version}"),
}); });
} }
@ -146,11 +146,10 @@ pub fn resolve_secure_extension_asset_path(
Ok(canonical_path) Ok(canonical_path)
} else { } else {
eprintln!( eprintln!(
"SECURITY WARNING: Path traversal attempt blocked: {}", "SECURITY WARNING: Path traversal attempt blocked: {requested_asset_path}"
requested_asset_path
); );
Err(ExtensionError::SecurityViolation { Err(ExtensionError::SecurityViolation {
reason: format!("Path traversal attempt: {}", requested_asset_path), reason: format!("Path traversal attempt: {requested_asset_path}"),
}) })
} }
} }
@ -159,11 +158,10 @@ pub fn resolve_secure_extension_asset_path(
Ok(final_path) Ok(final_path)
} else { } else {
eprintln!( eprintln!(
"SECURITY WARNING: Invalid asset path: {}", "SECURITY WARNING: Invalid asset path: {requested_asset_path}"
requested_asset_path
); );
Err(ExtensionError::SecurityViolation { Err(ExtensionError::SecurityViolation {
reason: format!("Invalid asset path: {}", requested_asset_path), reason: format!("Invalid asset path: {requested_asset_path}"),
}) })
} }
} }
@ -184,7 +182,7 @@ pub fn extension_protocol_handler(
// Only allow same-protocol requests or tauri origin // Only allow same-protocol requests or tauri origin
// For null/empty origin (initial load), use wildcard // For null/empty origin (initial load), use wildcard
let protocol_prefix = format!("{}://", EXTENSION_PROTOCOL_NAME); let protocol_prefix = format!("{EXTENSION_PROTOCOL_NAME}://");
let allowed_origin = if origin.starts_with(&protocol_prefix) || origin == get_tauri_origin() { let allowed_origin = if origin.starts_with(&protocol_prefix) || origin == get_tauri_origin() {
origin origin
} else if origin.is_empty() || origin == "null" { } else if origin.is_empty() || origin == "null" {
@ -216,9 +214,9 @@ pub fn extension_protocol_handler(
.and_then(|v| v.to_str().ok()) .and_then(|v| v.to_str().ok())
.unwrap_or(""); .unwrap_or("");
println!("Protokoll Handler für: {}", uri_ref); println!("Protokoll Handler für: {uri_ref}");
println!("Origin: {}", origin); println!("Origin: {origin}");
println!("Referer: {}", referer); println!("Referer: {referer}");
let path_str = uri_ref.path(); let path_str = uri_ref.path();
@ -227,16 +225,16 @@ pub fn extension_protocol_handler(
// - Desktop: haex-extension://<base64>/{assetPath} // - Desktop: haex-extension://<base64>/{assetPath}
// - Android: http://localhost/{base64}/{assetPath} // - Android: http://localhost/{base64}/{assetPath}
let host = uri_ref.host().unwrap_or(""); let host = uri_ref.host().unwrap_or("");
println!("URI Host: {}", host); println!("URI Host: {host}");
let (info, segments_after_version) = if host == "localhost" || host == format!("{}.localhost", EXTENSION_PROTOCOL_NAME).as_str() { let (info, segments_after_version) = if host == "localhost" || host == format!("{EXTENSION_PROTOCOL_NAME}.localhost").as_str() {
// Android format: http://haex-extension.localhost/{base64}/{assetPath} // Android format: http://haex-extension.localhost/{base64}/{assetPath}
// Extract base64 from first path segment // Extract base64 from first path segment
println!("Android format detected: http://{}/...", host); println!("Android format detected: http://{host}/...");
let mut segments_iter = path_str.split('/').filter(|s| !s.is_empty()); let mut segments_iter = path_str.split('/').filter(|s| !s.is_empty());
if let Some(first_segment) = segments_iter.next() { if let Some(first_segment) = segments_iter.next() {
println!("First path segment (base64): {}", first_segment); println!("First path segment (base64): {first_segment}");
match BASE64_STANDARD.decode(first_segment) { match BASE64_STANDARD.decode(first_segment) {
Ok(decoded_bytes) => match String::from_utf8(decoded_bytes) { Ok(decoded_bytes) => match String::from_utf8(decoded_bytes) {
Ok(json_str) => match serde_json::from_str::<ExtensionInfo>(&json_str) { Ok(json_str) => match serde_json::from_str::<ExtensionInfo>(&json_str) {
@ -252,29 +250,29 @@ pub fn extension_protocol_handler(
(info, remaining) (info, remaining)
} }
Err(e) => { Err(e) => {
eprintln!("Failed to parse JSON from base64 path: {}", e); eprintln!("Failed to parse JSON from base64 path: {e}");
return Response::builder() return Response::builder()
.status(400) .status(400)
.header("Access-Control-Allow-Origin", allowed_origin) .header("Access-Control-Allow-Origin", allowed_origin)
.body(Vec::from(format!("Invalid extension info in base64 path: {}", e))) .body(Vec::from(format!("Invalid extension info in base64 path: {e}")))
.map_err(|e| e.into()); .map_err(|e| e.into());
} }
}, },
Err(e) => { Err(e) => {
eprintln!("Failed to decode UTF-8 from base64 path: {}", e); eprintln!("Failed to decode UTF-8 from base64 path: {e}");
return Response::builder() return Response::builder()
.status(400) .status(400)
.header("Access-Control-Allow-Origin", allowed_origin) .header("Access-Control-Allow-Origin", allowed_origin)
.body(Vec::from(format!("Invalid UTF-8 in base64 path: {}", e))) .body(Vec::from(format!("Invalid UTF-8 in base64 path: {e}")))
.map_err(|e| e.into()); .map_err(|e| e.into());
} }
}, },
Err(e) => { Err(e) => {
eprintln!("Failed to decode base64 from path: {}", e); eprintln!("Failed to decode base64 from path: {e}");
return Response::builder() return Response::builder()
.status(400) .status(400)
.header("Access-Control-Allow-Origin", allowed_origin) .header("Access-Control-Allow-Origin", allowed_origin)
.body(Vec::from(format!("Invalid base64 in path: {}", e))) .body(Vec::from(format!("Invalid base64 in path: {e}")))
.map_err(|e| e.into()); .map_err(|e| e.into());
} }
} }
@ -311,35 +309,35 @@ pub fn extension_protocol_handler(
(info, segments) (info, segments)
} }
Err(e) => { Err(e) => {
eprintln!("Failed to parse JSON from base64 host: {}", e); eprintln!("Failed to parse JSON from base64 host: {e}");
return Response::builder() return Response::builder()
.status(400) .status(400)
.header("Access-Control-Allow-Origin", allowed_origin) .header("Access-Control-Allow-Origin", allowed_origin)
.body(Vec::from(format!("Invalid extension info in base64 host: {}", e))) .body(Vec::from(format!("Invalid extension info in base64 host: {e}")))
.map_err(|e| e.into()); .map_err(|e| e.into());
} }
}, },
Err(e) => { Err(e) => {
eprintln!("Failed to decode UTF-8 from base64 host: {}", e); eprintln!("Failed to decode UTF-8 from base64 host: {e}");
return Response::builder() return Response::builder()
.status(400) .status(400)
.header("Access-Control-Allow-Origin", allowed_origin) .header("Access-Control-Allow-Origin", allowed_origin)
.body(Vec::from(format!("Invalid UTF-8 in base64 host: {}", e))) .body(Vec::from(format!("Invalid UTF-8 in base64 host: {e}")))
.map_err(|e| e.into()); .map_err(|e| e.into());
} }
}, },
Err(e) => { Err(e) => {
eprintln!("Failed to decode base64 host: {}", e); eprintln!("Failed to decode base64 host: {e}");
return Response::builder() return Response::builder()
.status(400) .status(400)
.header("Access-Control-Allow-Origin", allowed_origin) .header("Access-Control-Allow-Origin", allowed_origin)
.body(Vec::from(format!("Invalid base64 in host: {}", e))) .body(Vec::from(format!("Invalid base64 in host: {e}")))
.map_err(|e| e.into()); .map_err(|e| e.into());
} }
} }
} else { } else {
// No base64 host - use path-based parsing (for localhost/Android/Windows) // No base64 host - use path-based parsing (for localhost/Android/Windows)
parse_extension_info_from_path(path_str, origin, uri_ref, referer, &allowed_origin)? parse_extension_info_from_path(path_str, origin, uri_ref, referer, allowed_origin)?
}; };
// Construct asset path from remaining segments // Construct asset path from remaining segments
@ -353,8 +351,8 @@ pub fn extension_protocol_handler(
&raw_asset_path &raw_asset_path
}; };
println!("Path: {}", path_str); println!("Path: {path_str}");
println!("Asset to load: {}", asset_to_load); println!("Asset to load: {asset_to_load}");
let absolute_secure_path = resolve_secure_extension_asset_path( let absolute_secure_path = resolve_secure_extension_asset_path(
app_handle, app_handle,
@ -362,7 +360,7 @@ pub fn extension_protocol_handler(
&info.public_key, &info.public_key,
&info.name, &info.name,
&info.version, &info.version,
&asset_to_load, asset_to_load,
)?; )?;
println!("Resolved path: {}", absolute_secure_path.display()); println!("Resolved path: {}", absolute_secure_path.display());
@ -497,7 +495,7 @@ fn parse_encoded_info_from_origin_or_uri_or_referer_or_cache(
if let Ok(hex) = parse_from_origin(origin) { if let Ok(hex) = parse_from_origin(origin) {
if let Ok(info) = process_hex_encoded_json(&hex) { if let Ok(info) = process_hex_encoded_json(&hex) {
cache_extension_info(&info); // Cache setzen cache_extension_info(&info); // Cache setzen
println!("Parsed und gecached aus Origin: {}", hex); println!("Parsed und gecached aus Origin: {hex}");
return Ok(info); return Ok(info);
} }
} }
@ -507,17 +505,17 @@ fn parse_encoded_info_from_origin_or_uri_or_referer_or_cache(
if let Ok(hex) = parse_from_uri_path(uri_ref) { if let Ok(hex) = parse_from_uri_path(uri_ref) {
if let Ok(info) = process_hex_encoded_json(&hex) { if let Ok(info) = process_hex_encoded_json(&hex) {
cache_extension_info(&info); // Cache setzen cache_extension_info(&info); // Cache setzen
println!("Parsed und gecached aus URI: {}", hex); println!("Parsed und gecached aus URI: {hex}");
return Ok(info); return Ok(info);
} }
} }
println!("Fallback zu Referer-Parsing: {}", referer); println!("Fallback zu Referer-Parsing: {referer}");
if !referer.is_empty() && referer != "null" { if !referer.is_empty() && referer != "null" {
if let Ok(hex) = parse_from_uri_string(referer) { if let Ok(hex) = parse_from_uri_string(referer) {
if let Ok(info) = process_hex_encoded_json(&hex) { if let Ok(info) = process_hex_encoded_json(&hex) {
cache_extension_info(&info); // Cache setzen cache_extension_info(&info); // Cache setzen
println!("Parsed und gecached aus Referer: {}", hex); println!("Parsed und gecached aus Referer: {hex}");
return Ok(info); return Ok(info);
} }
} }
@ -609,11 +607,6 @@ fn validate_and_return_hex(segment: &str) -> Result<String, DataProcessingError>
Ok(segment.to_string()) Ok(segment.to_string())
} }
fn encode_hex_for_log(info: &ExtensionInfo) -> String {
let json_str = serde_json::to_string(info).unwrap_or_default();
hex::encode(json_str.as_bytes())
}
// Helper function to parse extension info from path segments // Helper function to parse extension info from path segments
fn parse_extension_info_from_path( fn parse_extension_info_from_path(
path_str: &str, path_str: &str,
@ -627,11 +620,11 @@ fn parse_extension_info_from_path(
match (segments_iter.next(), segments_iter.next(), segments_iter.next()) { match (segments_iter.next(), segments_iter.next(), segments_iter.next()) {
(Some(public_key), Some(name), Some(version)) => { (Some(public_key), Some(name), Some(version)) => {
println!("=== Extension Protocol Handler (path-based) ==="); println!("=== Extension Protocol Handler (path-based) ===");
println!("Full URI: {}", uri_ref); println!("Full URI: {uri_ref}");
println!("Parsed from path segments:"); println!("Parsed from path segments:");
println!(" PublicKey: {}", public_key); println!(" PublicKey: {public_key}");
println!(" Name: {}", name); println!(" Name: {name}");
println!(" Version: {}", version); println!(" Version: {version}");
let info = ExtensionInfo { let info = ExtensionInfo {
public_key: public_key.to_string(), public_key: public_key.to_string(),
@ -653,7 +646,7 @@ fn parse_extension_info_from_path(
) { ) {
Ok(decoded) => { Ok(decoded) => {
println!("=== Extension Protocol Handler (legacy hex format) ==="); println!("=== Extension Protocol Handler (legacy hex format) ===");
println!("Full URI: {}", uri_ref); println!("Full URI: {uri_ref}");
println!("Decoded info:"); println!("Decoded info:");
println!(" PublicKey: {}", decoded.public_key); println!(" PublicKey: {}", decoded.public_key);
println!(" Name: {}", decoded.name); println!(" Name: {}", decoded.name);
@ -670,8 +663,8 @@ fn parse_extension_info_from_path(
Ok((decoded, segments)) Ok((decoded, segments))
} }
Err(e) => { Err(e) => {
eprintln!("Fehler beim Parsen (alle Fallbacks): {}", e); eprintln!("Fehler beim Parsen (alle Fallbacks): {e}");
Err(format!("Ungültige Anfrage: {}", e).into()) Err(format!("Ungültige Anfrage: {e}").into())
} }
} }
} }

View File

@ -70,8 +70,7 @@ pub fn copy_directory(
use std::path::PathBuf; use std::path::PathBuf;
println!( println!(
"Kopiere Verzeichnis von '{}' nach '{}'", "Kopiere Verzeichnis von '{source}' nach '{destination}'"
source, destination
); );
let source_path = PathBuf::from(&source); let source_path = PathBuf::from(&source);
@ -81,7 +80,7 @@ pub fn copy_directory(
return Err(ExtensionError::Filesystem { return Err(ExtensionError::Filesystem {
source: std::io::Error::new( source: std::io::Error::new(
std::io::ErrorKind::NotFound, std::io::ErrorKind::NotFound,
format!("Source directory '{}' not found", source), format!("Source directory '{source}' not found"),
), ),
}); });
} }
@ -93,7 +92,7 @@ pub fn copy_directory(
fs_extra::dir::copy(&source_path, &destination_path, &options).map_err(|e| { fs_extra::dir::copy(&source_path, &destination_path, &options).map_err(|e| {
ExtensionError::Filesystem { ExtensionError::Filesystem {
source: std::io::Error::new(std::io::ErrorKind::Other, e.to_string()), source: std::io::Error::other(e.to_string()),
} }
})?; })?;
Ok(()) Ok(())

View File

@ -18,20 +18,20 @@ impl ExtensionCrypto {
signature_hex: &str, signature_hex: &str,
) -> Result<(), String> { ) -> Result<(), String> {
let public_key_bytes = let public_key_bytes =
hex::decode(public_key_hex).map_err(|e| format!("Invalid public key: {}", e))?; hex::decode(public_key_hex).map_err(|e| format!("Invalid public key: {e}"))?;
let public_key = VerifyingKey::from_bytes(&public_key_bytes.try_into().unwrap()) let public_key = VerifyingKey::from_bytes(&public_key_bytes.try_into().unwrap())
.map_err(|e| format!("Invalid public key: {}", e))?; .map_err(|e| format!("Invalid public key: {e}"))?;
let signature_bytes = let signature_bytes =
hex::decode(signature_hex).map_err(|e| format!("Invalid signature: {}", e))?; hex::decode(signature_hex).map_err(|e| format!("Invalid signature: {e}"))?;
let signature = Signature::from_bytes(&signature_bytes.try_into().unwrap()); let signature = Signature::from_bytes(&signature_bytes.try_into().unwrap());
let content_hash = let content_hash =
hex::decode(content_hash_hex).map_err(|e| format!("Invalid content hash: {}", e))?; hex::decode(content_hash_hex).map_err(|e| format!("Invalid content hash: {e}"))?;
public_key public_key
.verify(&content_hash, &signature) .verify(&content_hash, &signature)
.map_err(|e| format!("Signature verification failed: {}", e)) .map_err(|e| format!("Signature verification failed: {e}"))
} }
/// Berechnet Hash eines Verzeichnisses (für Verifikation) /// Berechnet Hash eines Verzeichnisses (für Verifikation)
@ -71,7 +71,7 @@ impl ExtensionCrypto {
if !canonical_manifest_path.starts_with(&canonical_dir) { if !canonical_manifest_path.starts_with(&canonical_dir) {
return Err(ExtensionError::ManifestError { return Err(ExtensionError::ManifestError {
reason: format!("Manifest path resolves outside of extension directory (potential path traversal)"), reason: "Manifest path resolves outside of extension directory (potential path traversal)".to_string(),
}); });
} }
@ -90,7 +90,7 @@ impl ExtensionCrypto {
let mut manifest: serde_json::Value = let mut manifest: serde_json::Value =
serde_json::from_str(&content_str).map_err(|e| { serde_json::from_str(&content_str).map_err(|e| {
ExtensionError::ManifestError { ExtensionError::ManifestError {
reason: format!("Cannot parse manifest JSON: {}", e), reason: format!("Cannot parse manifest JSON: {e}"),
} }
})?; })?;
@ -107,7 +107,7 @@ impl ExtensionCrypto {
let canonical_manifest_content = let canonical_manifest_content =
serde_json::to_string_pretty(&manifest).map_err(|e| { serde_json::to_string_pretty(&manifest).map_err(|e| {
ExtensionError::ManifestError { ExtensionError::ManifestError {
reason: format!("Failed to serialize manifest: {}", e), reason: format!("Failed to serialize manifest: {e}"),
} }
})?; })?;

View File

@ -3,7 +3,7 @@
use crate::crdt::hlc::HlcService; use crate::crdt::hlc::HlcService;
use crate::crdt::transformer::CrdtTransformer; use crate::crdt::transformer::CrdtTransformer;
use crate::crdt::trigger; use crate::crdt::trigger;
use crate::database::core::{convert_value_ref_to_json, parse_sql_statements, ValueConverter}; use crate::database::core::{convert_value_ref_to_json, parse_sql_statements};
use crate::database::error::DatabaseError; use crate::database::error::DatabaseError;
use rusqlite::{params_from_iter, types::Value as SqliteValue, ToSql, Transaction}; use rusqlite::{params_from_iter, types::Value as SqliteValue, ToSql, Transaction};
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
@ -52,14 +52,14 @@ impl SqlExecutor {
} }
let sql_str = statement.to_string(); let sql_str = statement.to_string();
eprintln!("DEBUG: Transformed execute SQL: {}", sql_str); eprintln!("DEBUG: Transformed execute SQL: {sql_str}");
// Führe Statement aus // Führe Statement aus
tx.execute(&sql_str, params) tx.execute(&sql_str, params)
.map_err(|e| DatabaseError::ExecutionError { .map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(), sql: sql_str.clone(),
table: None, table: None,
reason: format!("Execute failed: {}", e), reason: format!("Execute failed: {e}"),
})?; })?;
// Trigger-Logik für CREATE TABLE // Trigger-Logik für CREATE TABLE
@ -70,7 +70,7 @@ impl SqlExecutor {
.trim_matches('"') .trim_matches('"')
.trim_matches('`') .trim_matches('`')
.to_string(); .to_string();
eprintln!("DEBUG: Setting up triggers for table: {}", table_name_str); eprintln!("DEBUG: Setting up triggers for table: {table_name_str}");
trigger::setup_triggers_for_table(tx, &table_name_str, false)?; trigger::setup_triggers_for_table(tx, &table_name_str, false)?;
} }
@ -115,7 +115,7 @@ impl SqlExecutor {
} }
let sql_str = statement.to_string(); let sql_str = statement.to_string();
eprintln!("DEBUG: Transformed SQL (with RETURNING): {}", sql_str); eprintln!("DEBUG: Transformed SQL (with RETURNING): {sql_str}");
// Prepare und query ausführen // Prepare und query ausführen
let mut stmt = tx let mut stmt = tx
@ -170,7 +170,7 @@ impl SqlExecutor {
.trim_matches('"') .trim_matches('"')
.trim_matches('`') .trim_matches('`')
.to_string(); .to_string();
eprintln!("DEBUG: Setting up triggers for table (RETURNING): {}", table_name_str); eprintln!("DEBUG: Setting up triggers for table (RETURNING): {table_name_str}");
trigger::setup_triggers_for_table(tx, &table_name_str, false)?; trigger::setup_triggers_for_table(tx, &table_name_str, false)?;
} }
@ -186,7 +186,7 @@ impl SqlExecutor {
) -> Result<HashSet<String>, DatabaseError> { ) -> Result<HashSet<String>, DatabaseError> {
let sql_params: Vec<SqliteValue> = params let sql_params: Vec<SqliteValue> = params
.iter() .iter()
.map(|v| crate::database::core::ValueConverter::json_to_rusqlite_value(v)) .map(crate::database::core::ValueConverter::json_to_rusqlite_value)
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let param_refs: Vec<&dyn ToSql> = sql_params.iter().map(|p| p as &dyn ToSql).collect(); let param_refs: Vec<&dyn ToSql> = sql_params.iter().map(|p| p as &dyn ToSql).collect();
Self::execute_internal_typed(tx, hlc_service, sql, &param_refs) Self::execute_internal_typed(tx, hlc_service, sql, &param_refs)
@ -201,7 +201,7 @@ impl SqlExecutor {
) -> Result<(HashSet<String>, Vec<Vec<JsonValue>>), DatabaseError> { ) -> Result<(HashSet<String>, Vec<Vec<JsonValue>>), DatabaseError> {
let sql_params: Vec<SqliteValue> = params let sql_params: Vec<SqliteValue> = params
.iter() .iter()
.map(|v| crate::database::core::ValueConverter::json_to_rusqlite_value(v)) .map(crate::database::core::ValueConverter::json_to_rusqlite_value)
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let param_refs: Vec<&dyn ToSql> = sql_params.iter().map(|p| p as &dyn ToSql).collect(); let param_refs: Vec<&dyn ToSql> = sql_params.iter().map(|p| p as &dyn ToSql).collect();
Self::query_internal_typed(tx, hlc_service, sql, &param_refs) Self::query_internal_typed(tx, hlc_service, sql, &param_refs)
@ -252,12 +252,12 @@ impl SqlExecutor {
let stmt_to_execute = ast_vec.pop().unwrap(); let stmt_to_execute = ast_vec.pop().unwrap();
let transformed_sql = stmt_to_execute.to_string(); let transformed_sql = stmt_to_execute.to_string();
eprintln!("DEBUG: SELECT (no transformation): {}", transformed_sql); eprintln!("DEBUG: SELECT (no transformation): {transformed_sql}");
// Convert JSON params to SQLite values // Convert JSON params to SQLite values
let sql_params: Vec<SqliteValue> = params let sql_params: Vec<SqliteValue> = params
.iter() .iter()
.map(|v| crate::database::core::ValueConverter::json_to_rusqlite_value(v)) .map(crate::database::core::ValueConverter::json_to_rusqlite_value)
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let mut prepared_stmt = conn.prepare(&transformed_sql)?; let mut prepared_stmt = conn.prepare(&transformed_sql)?;

View File

@ -13,10 +13,8 @@ use crate::AppState;
use rusqlite::params_from_iter; use rusqlite::params_from_iter;
use rusqlite::types::Value as SqlValue; use rusqlite::types::Value as SqlValue;
use rusqlite::Transaction; use rusqlite::Transaction;
use serde_json::json;
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use sqlparser::ast::{Statement, TableFactor, TableObject}; use sqlparser::ast::{Statement, TableFactor, TableObject};
use std::collections::HashSet;
use tauri::State; use tauri::State;
/// Führt Statements mit korrekter Parameter-Bindung aus /// Führt Statements mit korrekter Parameter-Bindung aus
@ -185,7 +183,7 @@ pub async fn extension_sql_execute(
if let Statement::CreateTable(ref create_table_details) = statement { if let Statement::CreateTable(ref create_table_details) = statement {
// Extract table name and remove quotes (both " and `) // Extract table name and remove quotes (both " and `)
let raw_name = create_table_details.name.to_string(); let raw_name = create_table_details.name.to_string();
println!("DEBUG: Raw table name from AST: {:?}", raw_name); println!("DEBUG: Raw table name from AST: {raw_name:?}");
println!("DEBUG: Raw table name chars: {:?}", raw_name.chars().collect::<Vec<_>>()); println!("DEBUG: Raw table name chars: {:?}", raw_name.chars().collect::<Vec<_>>());
let table_name_str = raw_name let table_name_str = raw_name
@ -193,17 +191,15 @@ pub async fn extension_sql_execute(
.trim_matches('`') .trim_matches('`')
.to_string(); .to_string();
println!("DEBUG: Cleaned table name: {:?}", table_name_str); println!("DEBUG: Cleaned table name: {table_name_str:?}");
println!("DEBUG: Cleaned table name chars: {:?}", table_name_str.chars().collect::<Vec<_>>()); println!("DEBUG: Cleaned table name chars: {:?}", table_name_str.chars().collect::<Vec<_>>());
println!( println!(
"Table '{}' created by extension, setting up CRDT triggers...", "Table '{table_name_str}' created by extension, setting up CRDT triggers..."
table_name_str
); );
trigger::setup_triggers_for_table(&tx, &table_name_str, false)?; trigger::setup_triggers_for_table(&tx, &table_name_str, false)?;
println!( println!(
"Triggers for table '{}' successfully created.", "Triggers for table '{table_name_str}' successfully created."
table_name_str
); );
} }

View File

@ -174,7 +174,7 @@ impl serde::Serialize for ExtensionError {
let mut state = serializer.serialize_struct("ExtensionError", 4)?; let mut state = serializer.serialize_struct("ExtensionError", 4)?;
state.serialize_field("code", &self.code())?; state.serialize_field("code", &self.code())?;
state.serialize_field("type", &format!("{:?}", self))?; state.serialize_field("type", &format!("{self:?}"))?;
state.serialize_field("message", &self.to_string())?; state.serialize_field("message", &self.to_string())?;
if let Some(ext_id) = self.extension_id() { if let Some(ext_id) = self.extension_id() {

View File

@ -133,7 +133,7 @@ fn validate_path_pattern(pattern: &str) -> Result<(), ExtensionError> {
// Check for path traversal attempts // Check for path traversal attempts
if pattern.contains("../") || pattern.contains("..\\") { if pattern.contains("../") || pattern.contains("..\\") {
return Err(ExtensionError::SecurityViolation { return Err(ExtensionError::SecurityViolation {
reason: format!("Path traversal detected in pattern: {}", pattern), reason: format!("Path traversal detected in pattern: {pattern}"),
}); });
} }
@ -177,7 +177,7 @@ pub fn resolve_path_pattern(
"$TEMP" => "Temp", "$TEMP" => "Temp",
_ => { _ => {
return Err(ExtensionError::ValidationError { return Err(ExtensionError::ValidationError {
reason: format!("Unknown base directory variable: {}", base_var), reason: format!("Unknown base directory variable: {base_var}"),
}); });
} }
}; };

View File

@ -52,7 +52,7 @@ pub async fn get_all_extensions(
.extension_manager .extension_manager
.load_installed_extensions(&app_handle, &state) .load_installed_extensions(&app_handle, &state)
.await .await
.map_err(|e| format!("Failed to load extensions: {:?}", e))?; .map_err(|e| format!("Failed to load extensions: {e:?}"))?;
/* } */ /* } */
let mut extensions = Vec::new(); let mut extensions = Vec::new();
@ -292,12 +292,12 @@ pub async fn load_dev_extension(
let (host, port, haextension_dir) = if config_path.exists() { let (host, port, haextension_dir) = if config_path.exists() {
let config_content = let config_content =
std::fs::read_to_string(&config_path).map_err(|e| ExtensionError::ValidationError { std::fs::read_to_string(&config_path).map_err(|e| ExtensionError::ValidationError {
reason: format!("Failed to read haextension.config.json: {}", e), reason: format!("Failed to read haextension.config.json: {e}"),
})?; })?;
let config: HaextensionConfig = let config: HaextensionConfig =
serde_json::from_str(&config_content).map_err(|e| ExtensionError::ValidationError { serde_json::from_str(&config_content).map_err(|e| ExtensionError::ValidationError {
reason: format!("Failed to parse haextension.config.json: {}", e), reason: format!("Failed to parse haextension.config.json: {e}"),
})?; })?;
(config.dev.host, config.dev.port, config.dev.haextension_dir) (config.dev.host, config.dev.port, config.dev.haextension_dir)
@ -306,23 +306,22 @@ pub async fn load_dev_extension(
(default_host(), default_port(), default_haextension_dir()) (default_host(), default_port(), default_haextension_dir())
}; };
let dev_server_url = format!("http://{}:{}", host, port); let dev_server_url = format!("http://{host}:{port}");
eprintln!("📡 Dev server URL: {}", dev_server_url); eprintln!("📡 Dev server URL: {dev_server_url}");
eprintln!("📁 Haextension directory: {}", haextension_dir); eprintln!("📁 Haextension directory: {haextension_dir}");
// 1.5. Check if dev server is running // 1.5. Check if dev server is running
if !check_dev_server_health(&dev_server_url).await { if !check_dev_server_health(&dev_server_url).await {
return Err(ExtensionError::ValidationError { return Err(ExtensionError::ValidationError {
reason: format!( reason: format!(
"Dev server at {} is not reachable. Please start your dev server first (e.g., 'npm run dev')", "Dev server at {dev_server_url} is not reachable. Please start your dev server first (e.g., 'npm run dev')"
dev_server_url
), ),
}); });
} }
eprintln!("✅ Dev server is reachable"); eprintln!("✅ Dev server is reachable");
// 2. Validate and build path to manifest: <extension_path>/<haextension_dir>/manifest.json // 2. Validate and build path to manifest: <extension_path>/<haextension_dir>/manifest.json
let manifest_relative_path = format!("{}/manifest.json", haextension_dir); let manifest_relative_path = format!("{haextension_dir}/manifest.json");
let manifest_path = ExtensionManager::validate_path_in_directory( let manifest_path = ExtensionManager::validate_path_in_directory(
&extension_path_buf, &extension_path_buf,
&manifest_relative_path, &manifest_relative_path,
@ -330,15 +329,14 @@ pub async fn load_dev_extension(
)? )?
.ok_or_else(|| ExtensionError::ManifestError { .ok_or_else(|| ExtensionError::ManifestError {
reason: format!( reason: format!(
"Manifest not found at: {}/manifest.json. Make sure you run 'npx @haexhub/sdk init' first.", "Manifest not found at: {haextension_dir}/manifest.json. Make sure you run 'npx @haexhub/sdk init' first."
haextension_dir
), ),
})?; })?;
// 3. Read and parse manifest // 3. Read and parse manifest
let manifest_content = let manifest_content =
std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError { std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError {
reason: format!("Failed to read manifest: {}", e), reason: format!("Failed to read manifest: {e}"),
})?; })?;
let manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?; let manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?;
@ -406,7 +404,7 @@ pub fn remove_dev_extension(
if let Some(id) = to_remove { if let Some(id) = to_remove {
dev_exts.remove(&id); dev_exts.remove(&id);
eprintln!("✅ Dev extension removed: {}", name); eprintln!("✅ Dev extension removed: {name}");
Ok(()) Ok(())
} else { } else {
Err(ExtensionError::NotFound { public_key, name }) Err(ExtensionError::NotFound { public_key, name })

View File

@ -28,8 +28,7 @@ impl PermissionManager {
})?; })?;
let sql = format!( let sql = format!(
"INSERT INTO {} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)", "INSERT INTO {TABLE_EXTENSION_PERMISSIONS} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)"
TABLE_EXTENSION_PERMISSIONS
); );
for perm in permissions { for perm in permissions {
@ -76,8 +75,7 @@ impl PermissionManager {
let db_perm: HaexExtensionPermissions = permission.into(); let db_perm: HaexExtensionPermissions = permission.into();
let sql = format!( let sql = format!(
"UPDATE {} SET resource_type = ?, action = ?, target = ?, constraints = ?, status = ? WHERE id = ?", "UPDATE {TABLE_EXTENSION_PERMISSIONS} SET resource_type = ?, action = ?, target = ?, constraints = ?, status = ? WHERE id = ?"
TABLE_EXTENSION_PERMISSIONS
); );
let params = params![ let params = params![
@ -111,7 +109,7 @@ impl PermissionManager {
reason: "Failed to lock HLC service".to_string(), reason: "Failed to lock HLC service".to_string(),
})?; })?;
let sql = format!("UPDATE {} SET status = ? WHERE id = ?", TABLE_EXTENSION_PERMISSIONS); let sql = format!("UPDATE {TABLE_EXTENSION_PERMISSIONS} SET status = ? WHERE id = ?");
let params = params![new_status.as_str(), permission_id]; let params = params![new_status.as_str(), permission_id];
SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params)?; SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params)?;
tx.commit().map_err(DatabaseError::from) tx.commit().map_err(DatabaseError::from)
@ -133,7 +131,7 @@ impl PermissionManager {
})?; })?;
// Echtes DELETE - wird vom CrdtTransformer zu UPDATE umgewandelt // Echtes DELETE - wird vom CrdtTransformer zu UPDATE umgewandelt
let sql = format!("DELETE FROM {} WHERE id = ?", TABLE_EXTENSION_PERMISSIONS); let sql = format!("DELETE FROM {TABLE_EXTENSION_PERMISSIONS} WHERE id = ?");
SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params![permission_id])?; SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params![permission_id])?;
tx.commit().map_err(DatabaseError::from) tx.commit().map_err(DatabaseError::from)
}).map_err(ExtensionError::from) }).map_err(ExtensionError::from)
@ -152,7 +150,7 @@ impl PermissionManager {
reason: "Failed to lock HLC service".to_string(), reason: "Failed to lock HLC service".to_string(),
})?; })?;
let sql = format!("DELETE FROM {} WHERE extension_id = ?", TABLE_EXTENSION_PERMISSIONS); let sql = format!("DELETE FROM {TABLE_EXTENSION_PERMISSIONS} WHERE extension_id = ?");
SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params![extension_id])?; SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params![extension_id])?;
tx.commit().map_err(DatabaseError::from) tx.commit().map_err(DatabaseError::from)
}).map_err(ExtensionError::from) }).map_err(ExtensionError::from)
@ -164,7 +162,7 @@ impl PermissionManager {
hlc_service: &crate::crdt::hlc::HlcService, hlc_service: &crate::crdt::hlc::HlcService,
extension_id: &str, extension_id: &str,
) -> Result<(), DatabaseError> { ) -> Result<(), DatabaseError> {
let sql = format!("DELETE FROM {} WHERE extension_id = ?", TABLE_EXTENSION_PERMISSIONS); let sql = format!("DELETE FROM {TABLE_EXTENSION_PERMISSIONS} WHERE extension_id = ?");
SqlExecutor::execute_internal_typed(tx, hlc_service, &sql, params![extension_id])?; SqlExecutor::execute_internal_typed(tx, hlc_service, &sql, params![extension_id])?;
Ok(()) Ok(())
} }
@ -174,7 +172,7 @@ impl PermissionManager {
extension_id: &str, extension_id: &str,
) -> Result<Vec<ExtensionPermission>, ExtensionError> { ) -> Result<Vec<ExtensionPermission>, ExtensionError> {
with_connection(&app_state.db, |conn| { with_connection(&app_state.db, |conn| {
let sql = format!("SELECT * FROM {} WHERE extension_id = ?", TABLE_EXTENSION_PERMISSIONS); let sql = format!("SELECT * FROM {TABLE_EXTENSION_PERMISSIONS} WHERE extension_id = ?");
let mut stmt = conn.prepare(&sql).map_err(DatabaseError::from)?; let mut stmt = conn.prepare(&sql).map_err(DatabaseError::from)?;
let perms_iter = stmt.query_map(params![extension_id], |row| { let perms_iter = stmt.query_map(params![extension_id], |row| {
@ -209,7 +207,7 @@ impl PermissionManager {
.extension_manager .extension_manager
.get_extension(extension_id) .get_extension(extension_id)
.ok_or_else(|| ExtensionError::ValidationError { .ok_or_else(|| ExtensionError::ValidationError {
reason: format!("Extension with ID {} not found", extension_id), reason: format!("Extension with ID {extension_id} not found"),
})?; })?;
// Build expected table prefix: {publicKey}__{extensionName}__ // Build expected table prefix: {publicKey}__{extensionName}__
@ -238,8 +236,8 @@ impl PermissionManager {
if !has_permission { if !has_permission {
return Err(ExtensionError::permission_denied( return Err(ExtensionError::permission_denied(
extension_id, extension_id,
&format!("{:?}", action), &format!("{action:?}"),
&format!("database table '{}'", table_name), &format!("database table '{table_name}'"),
)); ));
} }
@ -415,7 +413,7 @@ impl PermissionManager {
"db" => Ok(ResourceType::Db), "db" => Ok(ResourceType::Db),
"shell" => Ok(ResourceType::Shell), "shell" => Ok(ResourceType::Shell),
_ => Err(DatabaseError::SerializationError { _ => Err(DatabaseError::SerializationError {
reason: format!("Unknown resource type: {}", s), reason: format!("Unknown resource type: {s}"),
}), }),
} }
} }
@ -423,8 +421,7 @@ impl PermissionManager {
fn matches_path_pattern(pattern: &str, path: &str) -> bool { fn matches_path_pattern(pattern: &str, path: &str) -> bool {
if pattern.ends_with("/*") { if let Some(prefix) = pattern.strip_suffix("/*") {
let prefix = &pattern[..pattern.len() - 2];
return path.starts_with(prefix); return path.starts_with(prefix);
} }

View File

@ -267,7 +267,7 @@ impl ResourceType {
"db" => Ok(ResourceType::Db), "db" => Ok(ResourceType::Db),
"shell" => Ok(ResourceType::Shell), "shell" => Ok(ResourceType::Shell),
_ => Err(ExtensionError::ValidationError { _ => Err(ExtensionError::ValidationError {
reason: format!("Unknown resource type: {}", s), reason: format!("Unknown resource type: {s}"),
}), }),
} }
} }
@ -301,7 +301,7 @@ impl Action {
ResourceType::Fs => Ok(Action::Filesystem(FsAction::from_str(s)?)), ResourceType::Fs => Ok(Action::Filesystem(FsAction::from_str(s)?)),
ResourceType::Http => { ResourceType::Http => {
let action: HttpAction = let action: HttpAction =
serde_json::from_str(&format!("\"{}\"", s)).map_err(|_| { serde_json::from_str(&format!("\"{s}\"")).map_err(|_| {
ExtensionError::InvalidActionString { ExtensionError::InvalidActionString {
input: s.to_string(), input: s.to_string(),
resource_type: "http".to_string(), resource_type: "http".to_string(),
@ -329,7 +329,7 @@ impl PermissionStatus {
"granted" => Ok(PermissionStatus::Granted), "granted" => Ok(PermissionStatus::Granted),
"denied" => Ok(PermissionStatus::Denied), "denied" => Ok(PermissionStatus::Denied),
_ => Err(ExtensionError::ValidationError { _ => Err(ExtensionError::ValidationError {
reason: format!("Unknown permission status: {}", s), reason: format!("Unknown permission status: {s}"),
}), }),
} }
} }

View File

@ -17,7 +17,7 @@ impl SqlPermissionValidator {
fn is_own_table(extension_id: &str, table_name: &str) -> bool { fn is_own_table(extension_id: &str, table_name: &str) -> bool {
// Tabellennamen sind im Format: {keyHash}_{extensionName}_{tableName} // Tabellennamen sind im Format: {keyHash}_{extensionName}_{tableName}
// extension_id ist der keyHash der Extension // extension_id ist der keyHash der Extension
table_name.starts_with(&format!("{}_", extension_id)) table_name.starts_with(&format!("{extension_id}_"))
} }
/// Validiert ein SQL-Statement gegen die Permissions einer Extension /// Validiert ein SQL-Statement gegen die Permissions einer Extension
@ -45,7 +45,7 @@ impl SqlPermissionValidator {
Self::validate_schema_statement(app_state, extension_id, &statement).await Self::validate_schema_statement(app_state, extension_id, &statement).await
} }
_ => Err(ExtensionError::ValidationError { _ => Err(ExtensionError::ValidationError {
reason: format!("Statement type not allowed: {}", sql), reason: format!("Statement type not allowed: {sql}"),
}), }),
} }
} }

View File

@ -26,7 +26,7 @@ pub fn run() {
let state = app_handle.state::<AppState>(); let state = app_handle.state::<AppState>();
// Rufe den Handler mit allen benötigten Parametern auf // Rufe den Handler mit allen benötigten Parametern auf
match extension::core::extension_protocol_handler(state, &app_handle, &request) { match extension::core::extension_protocol_handler(state, app_handle, &request) {
Ok(response) => response, Ok(response) => response,
Err(e) => { Err(e) => {
eprintln!( eprintln!(
@ -38,11 +38,10 @@ pub fn run() {
.status(500) .status(500)
.header("Content-Type", "text/plain") .header("Content-Type", "text/plain")
.body(Vec::from(format!( .body(Vec::from(format!(
"Interner Serverfehler im Protokollhandler: {}", "Interner Serverfehler im Protokollhandler: {e}"
e
))) )))
.unwrap_or_else(|build_err| { .unwrap_or_else(|build_err| {
eprintln!("Konnte Fehler-Response nicht erstellen: {}", build_err); eprintln!("Konnte Fehler-Response nicht erstellen: {build_err}");
tauri::http::Response::builder() tauri::http::Response::builder()
.status(500) .status(500)
.body(Vec::new()) .body(Vec::new())