Remove unused code and modernize Rust format strings

Applied cargo clippy fixes to clean up the codebase:
- Removed unused imports (serde_json::json, std::collections::HashSet)
- Removed unused function encode_hex_for_log
- Modernized format strings to use inline variables
- Fixed clippy warnings for better code quality

All changes were applied automatically by cargo clippy --fix.
2025-11-02 14:48:01 +01:00
parent 6ceb22f014
commit a52e1b43fa
19 changed files with 209 additions and 280 deletions
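
The "inline variables" change the message refers to is Rust's inline format arguments (stable since Rust 1.58), which clippy's uninlined_format_args lint rewrites automatically. A minimal sketch of the before/after, reusing one of the messages touched in this commit; the function names are invented for illustration:

// Before: arguments are passed positionally after the format string.
fn before(node_id_str: &str, e: &str) -> String {
    format!("Stored device ID is not a valid UUID: {}. Error: {}", node_id_str, e)
}

// After: the variables are captured inline in the format string (Rust 1.58+).
fn after(node_id_str: &str, e: &str) -> String {
    format!("Stored device ID is not a valid UUID: {node_id_str}. Error: {e}")
}

fn main() {
    // Both forms produce identical output; cargo clippy --fix only changes the spelling.
    assert_eq!(before("abc", "invalid length"), after("abc", "invalid length"));
}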


@@ -74,15 +74,14 @@ impl HlcService {
// Parse the string into a Uuid object.
let uuid = Uuid::parse_str(&node_id_str).map_err(|e| {
HlcError::ParseNodeId(format!(
"Stored device ID is not a valid UUID: {}. Error: {}",
node_id_str, e
"Stored device ID is not a valid UUID: {node_id_str}. Error: {e}"
))
})?;
// Get the raw 16 bytes and build the uhlc::ID from them.
// The `*` dereferences the `&[u8; 16]` to `[u8; 16]`, which `try_from` expects.
let node_id = ID::try_from(*uuid.as_bytes()).map_err(|e| {
HlcError::ParseNodeId(format!("Invalid node ID format from device store: {:?}", e))
HlcError::ParseNodeId(format!("Invalid node ID format from device store: {e:?}"))
})?;
// 2. Create an HLC instance with a stable identity
@@ -95,8 +94,7 @@ impl HlcService {
if let Some(last_timestamp) = Self::load_last_timestamp(conn)? {
hlc.update_with_timestamp(&last_timestamp).map_err(|e| {
HlcError::Parse(format!(
"Failed to update HLC with persisted timestamp: {:?}",
e
"Failed to update HLC with persisted timestamp: {e:?}"
))
})?;
}
@@ -119,7 +117,7 @@ impl HlcService {
if let Some(s) = value.as_str() {
// This is our success case. We have a &str and can
// return a copy of it.
println!("Gefundene und validierte Geräte-ID: {}", s);
println!("Gefundene und validierte Geräte-ID: {s}");
if Uuid::parse_str(s).is_ok() {
// Success case: the value is a string AND a valid UUID.
// We can return from the function directly with this value.
@@ -183,19 +181,19 @@ impl HlcService {
let hlc = hlc_guard.as_mut().ok_or(HlcError::NotInitialized)?;
hlc.update_with_timestamp(timestamp)
.map_err(|e| HlcError::Parse(format!("Failed to update HLC: {:?}", e)))
.map_err(|e| HlcError::Parse(format!("Failed to update HLC: {e:?}")))
}
/// Loads the last persisted timestamp from the database.
fn load_last_timestamp(conn: &Connection) -> Result<Option<Timestamp>, HlcError> {
let query = format!("SELECT value FROM {} WHERE key = ?1", TABLE_CRDT_CONFIGS);
let query = format!("SELECT value FROM {TABLE_CRDT_CONFIGS} WHERE key = ?1");
match conn.query_row(&query, params![HLC_TIMESTAMP_TYPE], |row| {
row.get::<_, String>(0)
}) {
Ok(state_str) => {
let timestamp = Timestamp::from_str(&state_str).map_err(|e| {
HlcError::ParseTimestamp(format!("Invalid timestamp format: {:?}", e))
HlcError::ParseTimestamp(format!("Invalid timestamp format: {e:?}"))
})?;
Ok(Some(timestamp))
}
@@ -209,9 +207,8 @@ impl HlcService {
let timestamp_str = timestamp.to_string();
tx.execute(
&format!(
"INSERT INTO {} (key, value) VALUES (?1, ?2)
ON CONFLICT(key) DO UPDATE SET value = excluded.value",
TABLE_CRDT_CONFIGS
"INSERT INTO {TABLE_CRDT_CONFIGS} (key, value) VALUES (?1, ?2)
ON CONFLICT(key) DO UPDATE SET value = excluded.value"
),
params![HLC_TIMESTAMP_TYPE, timestamp_str],
)?;


@@ -32,17 +32,16 @@ pub enum CrdtSetupError {
impl Display for CrdtSetupError {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {}", e),
CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {e}"),
CrdtSetupError::HlcColumnMissing {
table_name,
column_name,
} => write!(
f,
"Table '{}' is missing the required hlc column '{}'",
table_name, column_name
"Table '{table_name}' is missing the required hlc column '{column_name}'"
),
CrdtSetupError::PrimaryKeyMissing { table_name } => {
write!(f, "Table '{}' has no primary key", table_name)
write!(f, "Table '{table_name}' has no primary key")
}
}
}
@@ -129,7 +128,7 @@ pub fn setup_triggers_for_table(
let delete_trigger_sql = generate_delete_trigger_sql(table_name, &pks, &cols_to_track);
if recreate {
drop_triggers_for_table(&tx, table_name)?;
drop_triggers_for_table(tx, table_name)?;
}
tx.execute_batch(&insert_trigger_sql)?;
@@ -143,13 +142,11 @@ pub fn setup_triggers_for_table(
pub fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<ColumnInfo>> {
if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}",
table_name
))
.into());
"Invalid or unsafe table name provided: {table_name}"
)));
}
let sql = format!("PRAGMA table_info(\"{}\");", table_name);
let sql = format!("PRAGMA table_info(\"{table_name}\");");
let mut stmt = conn.prepare(&sql)?;
let rows = stmt.query_map([], ColumnInfo::from_row)?;
rows.collect()
@@ -163,8 +160,7 @@ pub fn drop_triggers_for_table(
) -> Result<(), CrdtSetupError> {
if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}",
table_name
"Invalid or unsafe table name provided: {table_name}"
))
.into());
}
@@ -177,8 +173,7 @@ pub fn drop_triggers_for_table(
drop_trigger_sql(DELETE_TRIGGER_TPL.replace("{TABLE_NAME}", table_name));
let sql_batch = format!(
"{}\n{}\n{}",
drop_insert_trigger_sql, drop_update_trigger_sql, drop_delete_trigger_sql
"{drop_insert_trigger_sql}\n{drop_update_trigger_sql}\n{drop_delete_trigger_sql}"
);
tx.execute_batch(&sql_batch)?;
@@ -244,33 +239,22 @@ pub fn drop_triggers_for_table(
fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks
.iter()
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk))
.map(|pk| format!("'{pk}', NEW.\"{pk}\""))
.collect::<Vec<_>>()
.join(", ");
let column_inserts = if cols.is_empty() {
// Only PKs -> simple insert into the log
format!(
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks)
VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}));",
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks)
VALUES ({UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'INSERT', '{table_name}', json_object({pk_json_payload}));"
)
} else {
cols.iter().fold(String::new(), |mut acc, col| {
writeln!(
&mut acc,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}), '{column}', json_object('value', NEW.\"{column}\"));",
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
VALUES ({UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'INSERT', '{table_name}', json_object({pk_json_payload}), '{col}', json_object('value', NEW.\"{col}\"));"
).unwrap();
acc
})
@@ -290,14 +274,14 @@ fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
/// Generates the SQL for dropping a trigger.
fn drop_trigger_sql(trigger_name: String) -> String {
format!("DROP TRIGGER IF EXISTS \"{}\";", trigger_name)
format!("DROP TRIGGER IF EXISTS \"{trigger_name}\";")
}
/// Generates the SQL for the UPDATE trigger.
fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks
.iter()
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk))
.map(|pk| format!("'{pk}', NEW.\"{pk}\""))
.collect::<Vec<_>>()
.join(", ");
@@ -308,16 +292,10 @@ fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
for col in cols {
writeln!(
&mut body,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
SELECT {uuid_fn}(), NEW.\"{hlc_col}\", 'UPDATE', '{table}', json_object({pk_payload}), '{column}',
json_object('value', NEW.\"{column}\"), json_object('value', OLD.\"{column}\")
WHERE NEW.\"{column}\" IS NOT OLD.\"{column}\";",
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
SELECT {UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'UPDATE', '{table_name}', json_object({pk_json_payload}), '{col}',
json_object('value', NEW.\"{col}\"), json_object('value', OLD.\"{col}\")
WHERE NEW.\"{col}\" IS NOT OLD.\"{col}\";"
).unwrap();
}
}
@@ -341,7 +319,7 @@ fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks
.iter()
.map(|pk| format!("'{}', OLD.\"{}\"", pk, pk))
.map(|pk| format!("'{pk}', OLD.\"{pk}\""))
.collect::<Vec<_>>()
.join(", ");
@@ -352,28 +330,17 @@ fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
for col in cols {
writeln!(
&mut body,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}), '{column}',
json_object('value', OLD.\"{column}\"));",
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
VALUES ({UUID_FUNCTION_NAME}(), OLD.\"{HLC_TIMESTAMP_COLUMN}\", 'DELETE', '{table_name}', json_object({pk_json_payload}), '{col}',
json_object('value', OLD.\"{col}\"));"
).unwrap();
}
} else {
// Only PKs -> minimal delete log
writeln!(
&mut body,
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks)
VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}));",
log_table = TABLE_CRDT_LOGS,
uuid_fn = UUID_FUNCTION_NAME,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks)
VALUES ({UUID_FUNCTION_NAME}(), OLD.\"{HLC_TIMESTAMP_COLUMN}\", 'DELETE', '{table_name}', json_object({pk_json_payload}));"
)
.unwrap();
}
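
To make the generator's output concrete, here is a small sketch (not repository code) that reproduces the format! call from the DELETE-trigger loop above for a hypothetical table notes with primary key id and one tracked column title. The values haex_crdt_logs, haex_uuid and haex_timestamp are placeholders for the TABLE_CRDT_LOGS, UUID_FUNCTION_NAME and HLC_TIMESTAMP_COLUMN constants, whose actual values do not appear in this diff:

fn main() {
    // Placeholders standing in for constants that are not shown in this diff.
    let (log_table, uuid_fn, hlc_col) = ("haex_crdt_logs", "haex_uuid", "haex_timestamp");
    let (table_name, pk, col) = ("notes", "id", "title");
    // Same shape as pk_json_payload in generate_delete_trigger_sql above.
    let pk_json_payload = format!("'{pk}', OLD.\"{pk}\"");
    // One iteration of the column loop, with inline format arguments.
    let stmt = format!(
        "INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table_name}', json_object({pk_json_payload}), '{col}',
json_object('value', OLD.\"{col}\"));"
    );
    println!("{stmt}");
}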