mirror of https://github.com/haexhub/haex-hub.git (synced 2025-12-17 06:30:50 +01:00)
fixed trigger
@@ -1,21 +1,19 @@
// src-tauri/src/crdt/insert_transformer.rs
// INSERT-specific CRDT transformations (ON CONFLICT, RETURNING)

use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::crdt::trigger::HLC_TIMESTAMP_COLUMN;
use crate::database::error::DatabaseError;
use sqlparser::ast::{Expr, Ident, Insert, SelectItem, SetExpr, Value};
use uhlc::Timestamp;

/// Helper struct for INSERT transformations
pub struct InsertTransformer {
    tombstone_column: &'static str,
    hlc_timestamp_column: &'static str,
}

impl InsertTransformer {
    pub fn new() -> Self {
        Self {
            tombstone_column: TOMBSTONE_COLUMN,
            hlc_timestamp_column: HLC_TIMESTAMP_COLUMN,
        }
    }
@@ -58,11 +56,9 @@ impl InsertTransformer {
        insert_stmt: &mut Insert,
        timestamp: &Timestamp,
    ) -> Result<(), DatabaseError> {
        // Add both haex_timestamp and haex_tombstone columns if not exists
        // Add haex_timestamp column if not exists
        let hlc_col_index =
            Self::find_or_add_column(&mut insert_stmt.columns, self.hlc_timestamp_column);
        let tombstone_col_index =
            Self::find_or_add_column(&mut insert_stmt.columns, self.tombstone_column);

        // ON CONFLICT logic removed entirely!
        // With hard deletes there are no tombstone entries left to reactivate
@@ -74,24 +70,15 @@ impl InsertTransformer {
                for row in &mut values.rows {
                    let hlc_value =
                        Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
                    let tombstone_value =
                        Expr::Value(Value::Number("0".to_string(), false).into());

                    Self::set_or_push_value(row, hlc_col_index, hlc_value);
                    Self::set_or_push_value(row, tombstone_col_index, tombstone_value);
                }
            }
            SetExpr::Select(select) => {
                let hlc_value =
                    Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
                let tombstone_value = Expr::Value(Value::Number("0".to_string(), false).into());

                Self::set_or_push_projection(&mut select.projection, hlc_col_index, hlc_value);
                Self::set_or_push_projection(
                    &mut select.projection,
                    tombstone_col_index,
                    tombstone_value,
                );
            }
            _ => {
                return Err(DatabaseError::UnsupportedStatement {
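To make the effect of the INSERT transformation concrete, here is a rough sketch of how a plain statement is rewritten. This is an illustration only: the table notes and its columns are hypothetical, the HLC string is a placeholder, and whether haex_tombstone is also injected depends on which side of this diff is active.

-- statement as issued by the caller (hypothetical table)
INSERT INTO notes (id, title) VALUES ('a1', 'hello');

-- statement after the transformer has appended the CRDT column(s)
INSERT INTO notes (id, title, haex_timestamp) VALUES ('a1', 'hello', '<hlc timestamp string>');
-- the pre-change code additionally appended haex_tombstone with the literal 0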
@@ -1,12 +1,12 @@
// src-tauri/src/crdt/transformer.rs

use crate::crdt::insert_transformer::InsertTransformer;
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::crdt::trigger::HLC_TIMESTAMP_COLUMN;
use crate::database::error::DatabaseError;
use crate::table_names::{TABLE_CRDT_CONFIGS, TABLE_CRDT_LOGS};
use sqlparser::ast::{
    Assignment, AssignmentTarget, ColumnDef, DataType, Expr, Ident, ObjectName,
    ObjectNamePart, Statement, TableFactor, TableObject, Value,
    Assignment, AssignmentTarget, ColumnDef, DataType, Expr, Ident, ObjectName, ObjectNamePart,
    Statement, TableFactor, TableObject, Value,
};
use std::borrow::Cow;
use std::collections::HashSet;
@@ -15,13 +15,11 @@ use uhlc::Timestamp;
/// Configuration for CRDT columns
#[derive(Clone)]
struct CrdtColumns {
    tombstone: &'static str,
    hlc_timestamp: &'static str,
}

impl CrdtColumns {
    const DEFAULT: Self = Self {
        tombstone: TOMBSTONE_COLUMN,
        hlc_timestamp: HLC_TIMESTAMP_COLUMN,
    };

@@ -37,13 +35,6 @@ impl CrdtColumns {

    /// Adds the CRDT columns to a table definition
    fn add_to_table_definition(&self, columns: &mut Vec<ColumnDef>) {
        if !columns.iter().any(|c| c.name.value == self.tombstone) {
            columns.push(ColumnDef {
                name: Ident::new(self.tombstone),
                data_type: DataType::Integer(None),
                options: vec![],
            });
        }
        if !columns.iter().any(|c| c.name.value == self.hlc_timestamp) {
            columns.push(ColumnDef {
                name: Ident::new(self.hlc_timestamp),
@@ -86,7 +77,7 @@ impl CrdtTransformer {
    // =================================================================
    // PUBLIC API METHODS
    // =================================================================

    pub fn transform_execute_statement_with_table_info(
        &self,
        stmt: &mut Statement,
@@ -171,7 +162,7 @@ impl CrdtTransformer {
            Statement::Update {
                table, assignments, ..
            } => {
                if let TableFactor::Table { name, ..} = &table.relation {
                if let TableFactor::Table { name, .. } = &table.relation {
                    if self.is_crdt_sync_table(name) {
                        assignments.push(self.columns.create_hlc_assignment(hlc_timestamp));
                    }
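For orientation, a sketch of what add_to_table_definition does to a CREATE TABLE statement. The table notes is hypothetical, the haex_timestamp column type is assumed here (its ColumnDef is cut off above), and the hunk counts merely suggest that the haex_tombstone branch is the part removed by this commit.

-- statement as issued (hypothetical table)
CREATE TABLE notes (id TEXT PRIMARY KEY, title TEXT);

-- before this commit: both CRDT columns were appended
CREATE TABLE notes (id TEXT PRIMARY KEY, title TEXT, haex_tombstone INTEGER, haex_timestamp TEXT);

-- after this commit: only the HLC timestamp column appears to be appended
CREATE TABLE notes (id TEXT PRIMARY KEY, title TEXT, haex_timestamp TEXT);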
@@ -12,18 +12,16 @@ const UPDATE_TRIGGER_TPL: &str = "z_crdt_{TABLE_NAME}_update";
const DELETE_TRIGGER_TPL: &str = "z_crdt_{TABLE_NAME}_delete";

//const SYNC_ACTIVE_KEY: &str = "sync_active";
pub const TOMBSTONE_COLUMN: &str = "haex_tombstone";

pub const HLC_TIMESTAMP_COLUMN: &str = "haex_timestamp";

/// Name of the custom UUID generation function (registered in database::core::open_and_init_db)
pub const UUID_FUNCTION_NAME: &str = "gen_uuid";

#[derive(Debug)]
pub enum CrdtSetupError {
    /// Wraps an error originating from the rusqlite library.
    DatabaseError(rusqlite::Error),
    /// The table has no tombstone column, which is a CRDT prerequisite.
    TombstoneColumnMissing {
        table_name: String,
        column_name: String,
    },
    HlcColumnMissing {
        table_name: String,
        column_name: String,
@@ -37,14 +35,6 @@ impl Display for CrdtSetupError {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {}", e),
            CrdtSetupError::TombstoneColumnMissing {
                table_name,
                column_name,
            } => write!(
                f,
                "Table '{}' is missing the required tombstone column '{}'",
                table_name, column_name
            ),
            CrdtSetupError::HlcColumnMissing {
                table_name,
                column_name,
@@ -110,13 +100,6 @@ pub fn setup_triggers_for_table(
        return Ok(TriggerSetupResult::TableNotFound);
    }

    if !columns.iter().any(|c| c.name == TOMBSTONE_COLUMN) {
        return Err(CrdtSetupError::TombstoneColumnMissing {
            table_name: table_name.to_string(),
            column_name: TOMBSTONE_COLUMN.to_string(),
        });
    }

    if !columns.iter().any(|c| c.name == HLC_TIMESTAMP_COLUMN) {
        return Err(CrdtSetupError::HlcColumnMissing {
            table_name: table_name.to_string(),
@@ -138,7 +121,7 @@ pub fn setup_triggers_for_table(

    let cols_to_track: Vec<String> = columns
        .iter()
        .filter(|c| !c.is_pk) //&& c.name != TOMBSTONE_COLUMN && c.name != HLC_TIMESTAMP_COLUMN
        .filter(|c| !c.is_pk)
        .map(|c| c.name.clone())
        .collect();

@@ -269,9 +252,10 @@ fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
    let column_inserts = if cols.is_empty() {
        // Only primary keys -> a single, simple insert into the log
        format!(
            "INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks)
             VALUES (NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}));",
            "INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks)
             VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}));",
            log_table = TABLE_CRDT_LOGS,
            uuid_fn = UUID_FUNCTION_NAME,
            hlc_col = HLC_TIMESTAMP_COLUMN,
            table = table_name,
            pk_payload = pk_json_payload
@@ -280,9 +264,10 @@ fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
        cols.iter().fold(String::new(), |mut acc, col| {
            writeln!(
                &mut acc,
                "INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
                 VALUES (NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}), '{column}', json_object('value', NEW.\"{column}\"));",
                "INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
                 VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}), '{column}', json_object('value', NEW.\"{column}\"));",
                log_table = TABLE_CRDT_LOGS,
                uuid_fn = UUID_FUNCTION_NAME,
                hlc_col = HLC_TIMESTAMP_COLUMN,
                table = table_name,
                pk_payload = pk_json_payload,
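Putting the templates together, the generated INSERT trigger ends up looking roughly like the following. This is a hedged sketch: the table notes with primary key id and tracked column title is hypothetical, the log table name haex_crdt_logs and the trigger name z_crdt_notes_insert are assumptions (TABLE_CRDT_LOGS and the INSERT trigger template are not shown in this diff), and the CREATE TRIGGER wrapper is inferred from the *_TPL constants above.

CREATE TRIGGER z_crdt_notes_insert
AFTER INSERT ON notes
BEGIN
    INSERT INTO haex_crdt_logs (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
    VALUES (gen_uuid(), NEW."haex_timestamp", 'INSERT', 'notes', json_object('id', NEW."id"), 'title', json_object('value', NEW."title"));
END;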
@@ -324,11 +309,12 @@ fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
    for col in cols {
        writeln!(
            &mut body,
            "INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
             SELECT NEW.\"{hlc_col}\", 'UPDATE', '{table}', json_object({pk_payload}), '{column}',
            "INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
             SELECT {uuid_fn}(), NEW.\"{hlc_col}\", 'UPDATE', '{table}', json_object({pk_payload}), '{column}',
             json_object('value', NEW.\"{column}\"), json_object('value', OLD.\"{column}\")
             WHERE NEW.\"{column}\" IS NOT OLD.\"{column}\";",
            log_table = TABLE_CRDT_LOGS,
            uuid_fn = UUID_FUNCTION_NAME,
            hlc_col = HLC_TIMESTAMP_COLUMN,
            table = table_name,
            pk_payload = pk_json_payload,
@@ -367,10 +353,11 @@ fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
        for col in cols {
            writeln!(
                &mut body,
                "INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
                 VALUES (OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}), '{column}',
                "INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
                 VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}), '{column}',
                 json_object('value', OLD.\"{column}\"));",
                log_table = TABLE_CRDT_LOGS,
                uuid_fn = UUID_FUNCTION_NAME,
                hlc_col = HLC_TIMESTAMP_COLUMN,
                table = table_name,
                pk_payload = pk_json_payload,
@@ -381,13 +368,15 @@ fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
        // Only primary keys -> minimal delete log
        writeln!(
            &mut body,
            "INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks)
             VALUES (OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}));",
            "INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks)
             VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}));",
            log_table = TABLE_CRDT_LOGS,
            uuid_fn = UUID_FUNCTION_NAME,
            hlc_col = HLC_TIMESTAMP_COLUMN,
            table = table_name,
            pk_payload = pk_json_payload
        ).unwrap();
        )
        .unwrap();
    }

    let trigger_name = DELETE_TRIGGER_TPL.replace("{TABLE_NAME}", table_name);

@@ -1,9 +1,11 @@
// src-tauri/src/database/core.rs

use crate::crdt::trigger::UUID_FUNCTION_NAME;
use crate::database::error::DatabaseError;
use crate::database::DbConnection;
use crate::extension::database::executor::SqlExecutor;
use base64::{engine::general_purpose::STANDARD, Engine as _};
use rusqlite::functions::FunctionFlags;
use rusqlite::types::Value as SqlValue;
use rusqlite::{
    types::{Value as RusqliteValue, ValueRef},
@@ -13,6 +15,7 @@ use serde_json::Value as JsonValue;
use sqlparser::ast::{Expr, Query, Select, SetExpr, Statement, TableFactor, TableObject};
use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::Parser;
use uuid::Uuid;

/// Opens and initializes an encrypted database
pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connection, DatabaseError> {
@@ -34,6 +37,19 @@ pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connectio
        reason: e.to_string(),
    })?;

    // Register custom UUID function for SQLite triggers
    conn.create_scalar_function(
        UUID_FUNCTION_NAME,
        0,
        FunctionFlags::SQLITE_UTF8 | FunctionFlags::SQLITE_DETERMINISTIC,
        |_ctx| {
            Ok(Uuid::new_v4().to_string())
        },
    )
    .map_err(|e| DatabaseError::DatabaseError {
        reason: format!("Failed to register {} function: {}", UUID_FUNCTION_NAME, e),
    })?;

    let journal_mode: String = conn
        .query_row("PRAGMA journal_mode=WAL;", [], |row| row.get(0))
        .map_err(|e| DatabaseError::PragmaError {

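Once the connection is open, the registered scalar function is available to every statement and trigger running on that connection. A minimal sanity check, hypothetical usage that is not part of the commit:

-- yields a UUID v4 string; the CRDT triggers use it to fill the log table's id column
SELECT gen_uuid();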
@@ -16,8 +16,6 @@ pub struct HaexSettings {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_tombstone: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_timestamp: Option<String>,
}

@@ -28,8 +26,7 @@ impl HaexSettings {
            key: row.get(1)?,
            r#type: row.get(2)?,
            value: row.get(3)?,
            haex_tombstone: row.get(4)?,
            haex_timestamp: row.get(5)?,
            haex_timestamp: row.get(4)?,
        })
    }
}
@@ -54,8 +51,6 @@ pub struct HaexExtensions {
    pub icon: Option<String>,
    pub signature: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_tombstone: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_timestamp: Option<String>,
}

@@ -73,8 +68,7 @@ impl HaexExtensions {
            enabled: row.get(8)?,
            icon: row.get(9)?,
            signature: row.get(10)?,
            haex_tombstone: row.get(11)?,
            haex_timestamp: row.get(12)?,
            haex_timestamp: row.get(11)?,
        })
    }
}
@@ -83,8 +77,7 @@ impl HaexExtensions {
#[serde(rename_all = "camelCase")]
pub struct HaexExtensionPermissions {
    pub id: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub extension_id: Option<String>,
    pub extension_id: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
@@ -99,8 +92,6 @@ pub struct HaexExtensionPermissions {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub updated_at: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_tombstone: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_timestamp: Option<String>,
}

@@ -116,8 +107,7 @@ impl HaexExtensionPermissions {
            status: row.get(6)?,
            created_at: row.get(7)?,
            updated_at: row.get(8)?,
            haex_tombstone: row.get(9)?,
            haex_timestamp: row.get(10)?,
            haex_timestamp: row.get(9)?,
        })
    }
}
@@ -200,3 +190,51 @@ impl HaexCrdtConfigs {
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexDesktopItems {
    pub id: String,
    pub workspace_id: String,
    pub item_type: String,
    pub reference_id: String,
    pub position_x: i64,
    pub position_y: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_timestamp: Option<String>,
}

impl HaexDesktopItems {
    pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
        Ok(Self {
            id: row.get(0)?,
            workspace_id: row.get(1)?,
            item_type: row.get(2)?,
            reference_id: row.get(3)?,
            position_x: row.get(4)?,
            position_y: row.get(5)?,
            haex_timestamp: row.get(6)?,
        })
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexWorkspaces {
    pub id: String,
    pub name: String,
    pub position: i64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_timestamp: Option<String>,
}

impl HaexWorkspaces {
    pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
        Ok(Self {
            id: row.get(0)?,
            name: row.get(1)?,
            position: row.get(2)?,
            haex_timestamp: row.get(3)?,
        })
    }
}

src-tauri/src/database/init.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
// src-tauri/src/database/init.rs
// Database initialization utilities (trigger setup, etc.)

use crate::crdt::trigger;
use crate::database::error::DatabaseError;
use crate::table_names::{
    TABLE_DESKTOP_ITEMS,
    TABLE_EXTENSIONS,
    TABLE_EXTENSION_PERMISSIONS,
    TABLE_NOTIFICATIONS,
    TABLE_SETTINGS,
    TABLE_WORKSPACES,
};
use rusqlite::{params, Connection};

/// List of all CRDT tables that need triggers (password tables excluded; those live in the extension)
const CRDT_TABLES: &[&str] = &[
    TABLE_SETTINGS,
    TABLE_EXTENSIONS,
    TABLE_EXTENSION_PERMISSIONS,
    TABLE_NOTIFICATIONS,
    TABLE_WORKSPACES,
    TABLE_DESKTOP_ITEMS,
];

/// Checks whether the triggers have already been initialized and creates them if necessary
///
/// This function is called the first time a template DB is opened.
/// It creates all CRDT triggers for the defined tables and records
/// the initialization in haex_settings.
///
/// On migrations (ALTER TABLE) the triggers are recreated automatically,
/// so no versioning is needed.
pub fn ensure_triggers_initialized(conn: &mut Connection) -> Result<bool, DatabaseError> {
    let tx = conn.transaction()?;

    // Check if triggers already initialized
    let check_sql = format!(
        "SELECT value FROM {} WHERE key = ? AND type = ?",
        TABLE_SETTINGS
    );
    let initialized: Option<String> = tx
        .query_row(
            &check_sql,
            params!["triggers_initialized", "system"],
            |row| row.get(0),
        )
        .ok();

    if initialized.is_some() {
        eprintln!("DEBUG: Triggers already initialized, skipping");
        tx.commit()?; // Important: still commit the transaction
        return Ok(true); // true = was already initialized
    }

    eprintln!("INFO: Initializing CRDT triggers for database...");

    // Create triggers for all CRDT tables
    for table_name in CRDT_TABLES {
        eprintln!("  - Setting up triggers for: {}", table_name);
        trigger::setup_triggers_for_table(&tx, table_name, false)?;
    }

    tx.commit()?;
    eprintln!("INFO: ✓ CRDT triggers created successfully (flag pending)");
    Ok(false) // false = was just initialized
}

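With the placeholders resolved, the two statements involved in the initialization flag look roughly like this (haex_settings is the value of TABLE_SETTINGS per the doc comment above; the UUID literal is a placeholder):

-- executed by ensure_triggers_initialized to detect a previous run
SELECT value FROM haex_settings WHERE key = 'triggers_initialized' AND type = 'system';

-- executed later by initialize_session (via execute_with_crdt) when the triggers were just created
INSERT INTO haex_settings (id, key, type, value) VALUES ('<uuid>', 'triggers_initialized', 'system', '1');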
@@ -3,11 +3,13 @@
pub mod core;
pub mod error;
pub mod generated;
pub mod init;

use crate::crdt::hlc::HlcService;
use crate::database::core::execute_with_crdt;
use crate::database::error::DatabaseError;
use crate::extension::database::executor::SqlExecutor;
use crate::table_names::TABLE_CRDT_CONFIGS;
use crate::table_names::{TABLE_CRDT_CONFIGS, TABLE_SETTINGS};
use crate::AppState;
use rusqlite::Connection;
use serde::{Deserialize, Serialize};
@@ -76,7 +78,8 @@ pub fn sql_query_with_crdt(

    core::with_connection(&state.db, |conn| {
        let tx = conn.transaction().map_err(DatabaseError::from)?;
        let (_modified_tables, result) = SqlExecutor::query_internal(&tx, &hlc_service, &sql, &params)?;
        let (_modified_tables, result) =
            SqlExecutor::query_internal(&tx, &hlc_service, &sql, &params)?;
        tx.commit().map_err(DatabaseError::from)?;
        Ok(result)
    })
@@ -417,9 +420,12 @@ fn initialize_session(
    state: &State<'_, AppState>,
) -> Result<(), DatabaseError> {
    // 1. Establish the raw database connection
    let conn = core::open_and_init_db(path, key, false)?;
    let mut conn = core::open_and_init_db(path, key, false)?;

    // 2. Initialize the HLC service
    // 2. Ensure CRDT triggers are initialized (for template DB)
    let triggers_were_already_initialized = init::ensure_triggers_initialized(&mut conn)?;

    // 3. Initialize the HLC service
    let hlc_service = HlcService::try_initialize(&conn, app_handle).map_err(|e| {
        // We convert the HlcError into a DatabaseError
        DatabaseError::ExecutionError {
@@ -429,16 +435,53 @@ fn initialize_session(
        }
    })?;

    // 3. Store everything in the global AppState
    // 4. Store everything in the global AppState
    let mut db_guard = state.db.0.lock().map_err(|e| DatabaseError::LockError {
        reason: e.to_string(),
    })?;
    // Important: we will not need db_guard much longer,
    // because 'execute_with_crdt' calls 'with_connection', which
    // has to lock 'state.db' itself.
    // We must release the guard *before* calling 'execute_with_crdt'
    // to avoid a deadlock.
    // But first we have to move 'conn' into it.
    *db_guard = Some(conn);
    drop(db_guard);

    let mut hlc_guard = state.hlc.lock().map_err(|e| DatabaseError::LockError {
        reason: e.to_string(),
    })?;
    *hlc_guard = hlc_service;

    // IMPORTANT: do *not* release hlc_guard, because 'execute_with_crdt'
    // expects a reference to the guard.

    // 5. NEW STEP: set the flag via CRDT if necessary
    if !triggers_were_already_initialized {
        eprintln!("INFO: Setting 'triggers_initialized' flag via CRDT...");

        let insert_sql = format!(
            "INSERT INTO {} (id, key, type, value) VALUES (?, ?, ?, ?)",
            TABLE_SETTINGS
        );

        // execute_with_crdt expects Vec<JsonValue>, not the params! macro
        let params_vec: Vec<JsonValue> = vec![
            JsonValue::String(uuid::Uuid::new_v4().to_string()),
            JsonValue::String("triggers_initialized".to_string()),
            JsonValue::String("system".to_string()),
            JsonValue::String("1".to_string()),
        ];

        // Now we can safely call 'execute_with_crdt',
        // since the AppState is initialized.
        execute_with_crdt(
            insert_sql, params_vec, &state.db, // the &DbConnection (the mutex)
            &hlc_guard, // the held MutexGuard
        )?;

        eprintln!("INFO: ✓ 'triggers_initialized' flag set.");
    }

    Ok(())
}

@@ -155,7 +155,6 @@ impl ExtensionPermissions {
            .and_then(|c| serde_json::from_value::<PermissionConstraints>(c.clone()).ok()),
            status: p.status.clone().unwrap_or(PermissionStatus::Ask),
            haex_timestamp: None,
            haex_tombstone: None,
        })
    }
}

@@ -52,7 +52,7 @@ impl SqlExecutor {
        }

        let sql_str = statement.to_string();
        eprintln!("DEBUG: Transformed SQL: {}", sql_str);
        eprintln!("DEBUG: Transformed execute SQL: {}", sql_str);

        // Execute the statement
        tx.execute(&sql_str, params)

@@ -165,8 +165,6 @@ pub struct ExtensionPermission {
    pub constraints: Option<PermissionConstraints>,
    pub status: PermissionStatus,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_tombstone: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub haex_timestamp: Option<String>,
}

@@ -341,9 +339,9 @@ impl From<&ExtensionPermission> for crate::database::generated::HaexExtensionPer
    fn from(perm: &ExtensionPermission) -> Self {
        Self {
            id: perm.id.clone(),
            extension_id: Some(perm.extension_id.clone()),
            extension_id: perm.extension_id.clone(),
            resource_type: Some(perm.resource_type.as_str().to_string()),
            action: Some(perm.action.as_str()),
            action: Some(perm.action.as_str().to_string()),
            target: Some(perm.target.clone()),
            constraints: perm
                .constraints
@@ -352,7 +350,6 @@ impl From<&ExtensionPermission> for crate::database::generated::HaexExtensionPer
            status: perm.status.as_str().to_string(),
            created_at: None,
            updated_at: None,
            haex_tombstone: perm.haex_tombstone,
            haex_timestamp: perm.haex_timestamp.clone(),
        }
    }
@@ -382,13 +379,12 @@ impl From<crate::database::generated::HaexExtensionPermissions> for ExtensionPer

        Self {
            id: db_perm.id,
            extension_id: db_perm.extension_id.unwrap_or_default(),
            extension_id: db_perm.extension_id,
            resource_type,
            action,
            target: db_perm.target.unwrap_or_default(),
            constraints,
            status,
            haex_tombstone: db_perm.haex_tombstone,
            haex_timestamp: db_perm.haex_timestamp,
        }
    }
