use window system

This commit is contained in:
2025-10-20 19:14:05 +02:00
parent a291619f63
commit 2b8f1781f3
51 changed files with 6687 additions and 2070 deletions

View File

@ -0,0 +1,10 @@
CREATE TABLE `haex_workspaces` (
`id` text PRIMARY KEY NOT NULL,
`name` text NOT NULL,
`position` integer DEFAULT 0 NOT NULL,
`created_at` integer NOT NULL,
`haex_tombstone` integer,
`haex_timestamp` text
);
--> statement-breakpoint
ALTER TABLE `haex_desktop_items` ADD `workspace_id` text NOT NULL REFERENCES haex_workspaces(id);
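A hedged sketch of how the two tables relate after this migration (the IDs, workspace name, and extension reference are hypothetical; only the item columns visible in this commit are shown, the full haex_desktop_items schema sits in one of the suppressed diffs):

-- create a workspace, then place a desktop item on it via the new workspace_id foreign key
INSERT INTO haex_workspaces (id, name, position, created_at)
VALUES ('ws-0001', 'Main', 0, 1760817142);

INSERT INTO haex_desktop_items (id, workspace_id, item_type, reference_id, position_x, position_y)
VALUES ('item-0001', 'ws-0001', 'extension', 'my-extension-id', 0, 0);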

View File

@ -0,0 +1 @@
CREATE UNIQUE INDEX `haex_workspaces_name_unique` ON `haex_workspaces` (`name`);

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -29,6 +29,20 @@
"when": 1760611690801,
"tag": "0003_daily_polaris",
"breakpoints": true
},
{
"idx": 4,
"version": "6",
"when": 1760817142340,
"tag": "0004_mature_viper",
"breakpoints": true
},
{
"idx": 5,
"version": "6",
"when": 1760964548034,
"tag": "0005_tidy_yellowjacket",
"breakpoints": true
}
]
}

View File

@ -10,7 +10,9 @@ import {
import tableNames from '../tableNames.json'
// Helper function to add common CRDT columns (haexTombstone and haexTimestamp)
export const withCrdtColumns = <T extends Record<string, SQLiteColumnBuilderBase>>(
export const withCrdtColumns = <
T extends Record<string, SQLiteColumnBuilderBase>,
>(
columns: T,
columnNames: { haexTombstone: string; haexTimestamp: string },
) => ({
@ -132,6 +134,30 @@ export const haexNotifications = sqliteTable(
export type InsertHaexNotifications = typeof haexNotifications.$inferInsert
export type SelectHaexNotifications = typeof haexNotifications.$inferSelect
export const haexWorkspaces = sqliteTable(
tableNames.haex.workspaces.name,
withCrdtColumns(
{
id: text(tableNames.haex.workspaces.columns.id)
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
name: text(tableNames.haex.workspaces.columns.name).notNull(),
position: integer(tableNames.haex.workspaces.columns.position)
.notNull()
.default(0),
createdAt: integer(tableNames.haex.workspaces.columns.createdAt, {
mode: 'timestamp',
})
.notNull()
.$defaultFn(() => new Date()),
},
tableNames.haex.workspaces.columns,
),
(table) => [unique().on(table.name)],
)
export type InsertHaexWorkspaces = typeof haexWorkspaces.$inferInsert
export type SelectHaexWorkspaces = typeof haexWorkspaces.$inferSelect
export const haexDesktopItems = sqliteTable(
tableNames.haex.desktop_items.name,
withCrdtColumns(
@ -139,10 +165,15 @@ export const haexDesktopItems = sqliteTable(
id: text(tableNames.haex.desktop_items.columns.id)
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
workspaceId: text(tableNames.haex.desktop_items.columns.workspaceId)
.notNull()
.references(() => haexWorkspaces.id),
itemType: text(tableNames.haex.desktop_items.columns.itemType, {
enum: ['extension', 'file', 'folder'],
}).notNull(),
referenceId: text(tableNames.haex.desktop_items.columns.referenceId).notNull(), // extensionId for extensions, filePath for files/folders
referenceId: text(
tableNames.haex.desktop_items.columns.referenceId,
).notNull(), // extensionId for extensions, filePath for files/folders
positionX: integer(tableNames.haex.desktop_items.columns.positionX)
.notNull()
.default(0),

View File

@ -63,10 +63,22 @@
"haexTimestamp": "haex_timestamp"
}
},
"workspaces": {
"name": "haex_workspaces",
"columns": {
"id": "id",
"name": "name",
"position": "position",
"createdAt": "created_at",
"haexTombstone": "haex_tombstone",
"haexTimestamp": "haex_timestamp"
}
},
"desktop_items": {
"name": "haex_desktop_items",
"columns": {
"id": "id",
"workspaceId": "workspace_id",
"itemType": "item_type",
"referenceId": "reference_id",
"positionX": "position_x",

Binary file not shown.

View File

@ -18,6 +18,7 @@ struct Haex {
extensions: TableDefinition,
extension_permissions: TableDefinition,
notifications: TableDefinition,
desktop_items: TableDefinition,
crdt: Crdt,
}
@ -107,6 +108,16 @@ pub const COL_NOTIFICATIONS_TITLE: &str = "{c_notif_title}";
pub const COL_NOTIFICATIONS_TYPE: &str = "{c_notif_type}";
pub const COL_NOTIFICATIONS_HAEX_TOMBSTONE: &str = "{c_notif_tombstone}";
// --- Table: haex_desktop_items ---
pub const TABLE_DESKTOP_ITEMS: &str = "{t_desktop_items}";
pub const COL_DESKTOP_ITEMS_ID: &str = "{c_desktop_id}";
pub const COL_DESKTOP_ITEMS_ITEM_TYPE: &str = "{c_desktop_itemType}";
pub const COL_DESKTOP_ITEMS_REFERENCE_ID: &str = "{c_desktop_referenceId}";
pub const COL_DESKTOP_ITEMS_POSITION_X: &str = "{c_desktop_positionX}";
pub const COL_DESKTOP_ITEMS_POSITION_Y: &str = "{c_desktop_positionY}";
pub const COL_DESKTOP_ITEMS_HAEX_TOMBSTONE: &str = "{c_desktop_tombstone}";
pub const COL_DESKTOP_ITEMS_HAEX_TIMESTAMP: &str = "{c_desktop_timestamp}";
// --- Table: haex_crdt_logs ---
pub const TABLE_CRDT_LOGS: &str = "{t_crdt_logs}";
pub const COL_CRDT_LOGS_ID: &str = "{c_crdt_logs_id}";
@ -181,6 +192,15 @@ pub const COL_CRDT_CONFIGS_VALUE: &str = "{c_crdt_configs_value}";
c_notif_title = haex.notifications.columns["title"],
c_notif_type = haex.notifications.columns["type"],
c_notif_tombstone = haex.notifications.columns["haexTombstone"],
// Desktop Items
t_desktop_items = haex.desktop_items.name,
c_desktop_id = haex.desktop_items.columns["id"],
c_desktop_itemType = haex.desktop_items.columns["itemType"],
c_desktop_referenceId = haex.desktop_items.columns["referenceId"],
c_desktop_positionX = haex.desktop_items.columns["positionX"],
c_desktop_positionY = haex.desktop_items.columns["positionY"],
c_desktop_tombstone = haex.desktop_items.columns["haexTombstone"],
c_desktop_timestamp = haex.desktop_items.columns["haexTimestamp"],
// CRDT Logs
t_crdt_logs = haex.crdt.logs.name,
c_crdt_logs_id = haex.crdt.logs.columns["id"],

View File

@ -0,0 +1,159 @@
// src-tauri/src/crdt/insert_transformer.rs
// INSERT-specific CRDT transformations (ON CONFLICT, RETURNING)
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use sqlparser::ast::{
Assignment, AssignmentTarget, BinaryOperator, Expr, Ident, Insert, ObjectNamePart,
OnConflict, OnConflictAction, OnInsert, SelectItem, SetExpr, Value,
};
use uhlc::Timestamp;
/// Helper struct for INSERT transformations
pub struct InsertTransformer {
tombstone_column: &'static str,
hlc_timestamp_column: &'static str,
}
impl InsertTransformer {
pub fn new() -> Self {
Self {
tombstone_column: TOMBSTONE_COLUMN,
hlc_timestamp_column: HLC_TIMESTAMP_COLUMN,
}
}
/// Transforms INSERT statements (adds the HLC timestamp and handles tombstone conflicts).
/// Automatically adds RETURNING for the primary keys so the executor knows the actual PKs.
pub fn transform_insert(
&self,
insert_stmt: &mut Insert,
timestamp: &Timestamp,
primary_keys: &[String],
foreign_keys: &[String],
) -> Result<(), DatabaseError> {
// Add both haex_timestamp and haex_tombstone columns
insert_stmt
.columns
.push(Ident::new(self.hlc_timestamp_column));
insert_stmt.columns.push(Ident::new(self.tombstone_column));
// Add RETURNING for all primary keys (unless already present)
// This lets us know the actual PK values after ON CONFLICT
if insert_stmt.returning.is_none() && !primary_keys.is_empty() {
insert_stmt.returning = Some(
primary_keys
.iter()
.map(|pk| SelectItem::UnnamedExpr(Expr::Identifier(Ident::new(pk))))
.collect(),
);
}
// Set ON CONFLICT to get upsert behavior for tombstoned entries
// This allows deleted entries to be reused
if insert_stmt.on.is_none() {
// ON CONFLICT DO UPDATE SET ...
// On a conflict, update all columns except the CRDT columns
// Build UPDATE assignments for every column except CRDT columns, primary keys, and foreign keys
let mut assignments = Vec::new();
for column in insert_stmt.columns.iter() {
let col_name = &column.value;
// Skip CRDT columns
if col_name == self.hlc_timestamp_column || col_name == self.tombstone_column {
continue;
}
// Skip primary key columns to avoid FOREIGN KEY conflicts
if primary_keys.contains(col_name) {
continue;
}
// Skip foreign key columns to avoid FOREIGN KEY conflicts
// If an FK pointed to a new ID that does not exist yet, the constraint would fail
if foreign_keys.contains(col_name) {
continue;
}
// excluded.column_name references the new values from the INSERT
assignments.push(Assignment {
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![
ObjectNamePart::Identifier(column.clone()),
])),
value: Expr::CompoundIdentifier(vec![Ident::new("excluded"), column.clone()]),
});
}
// Add the HLC timestamp update (using the passed-in timestamp)
assignments.push(Assignment {
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![ObjectNamePart::Identifier(
Ident::new(self.hlc_timestamp_column),
)])),
value: Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into()),
});
// Set the tombstone to 0 (reactivate the entry)
assignments.push(Assignment {
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![ObjectNamePart::Identifier(
Ident::new(self.tombstone_column),
)])),
value: Expr::Value(Value::Number("0".to_string(), false).into()),
});
// Apply ON CONFLICT only when tombstone = 1 (the entry was deleted)
// Otherwise the INSERT should fail (UNIQUE constraint error)
let tombstone_condition = Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new(self.tombstone_column))),
op: BinaryOperator::Eq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
};
insert_stmt.on = Some(OnInsert::OnConflict(OnConflict {
conflict_target: None, // Applies to all UNIQUE constraints
action: OnConflictAction::DoUpdate(sqlparser::ast::DoUpdate {
assignments,
selection: Some(tombstone_condition),
}),
}));
}
match insert_stmt.source.as_mut() {
Some(query) => match &mut *query.body {
SetExpr::Values(values) => {
for row in &mut values.rows {
// Add haex_timestamp value
row.push(Expr::Value(
Value::SingleQuotedString(timestamp.to_string()).into(),
));
// Add haex_tombstone value (0 = not deleted)
row.push(Expr::Value(Value::Number("0".to_string(), false).into()));
}
}
SetExpr::Select(select) => {
let hlc_expr =
Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
select.projection.push(SelectItem::UnnamedExpr(hlc_expr));
// Add haex_tombstone value (0 = not deleted)
let tombstone_expr = Expr::Value(Value::Number("0".to_string(), false).into());
select
.projection
.push(SelectItem::UnnamedExpr(tombstone_expr));
}
_ => {
return Err(DatabaseError::UnsupportedStatement {
sql: insert_stmt.to_string(),
reason: "INSERT with unsupported source type".to_string(),
});
}
},
None => {
return Err(DatabaseError::UnsupportedStatement {
reason: "INSERT statement has no source".to_string(),
sql: insert_stmt.to_string(),
});
}
}
Ok(())
}
}
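To make the transformation concrete, a minimal before/after sketch using the haex_workspaces table from the migration above; the HLC timestamp is shown as a placeholder, the rendered SQL is approximate, and SQLite 3.35+ is assumed because the generated upsert omits an explicit conflict target:

-- issued by the application
INSERT INTO haex_workspaces (id, name, position, created_at)
VALUES ('ws-0001', 'Main', 0, 1760817142);

-- after transform_insert: CRDT columns, the tombstone-guarded upsert, and RETURNING are appended
INSERT INTO haex_workspaces (id, name, position, created_at, haex_timestamp, haex_tombstone)
VALUES ('ws-0001', 'Main', 0, 1760817142, '<hlc-timestamp>', 0)
ON CONFLICT DO UPDATE SET
  name = excluded.name,
  position = excluded.position,
  created_at = excluded.created_at,
  haex_timestamp = '<hlc-timestamp>',
  haex_tombstone = 0
WHERE haex_tombstone = 1
RETURNING id;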

View File

@ -1,3 +1,5 @@
pub mod hlc;
pub mod insert_transformer;
pub mod query_transformer;
pub mod transformer;
pub mod trigger;

View File

@ -0,0 +1,515 @@
// src-tauri/src/crdt/query_transformer.rs
// SELECT-specific CRDT transformations (tombstone filtering)
use crate::crdt::trigger::{TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use sqlparser::ast::{
BinaryOperator, Expr, Ident, ObjectName, SelectItem, SetExpr, TableFactor, Value,
};
use std::collections::HashSet;
/// Helper struct for SELECT transformations
pub struct QueryTransformer {
tombstone_column: &'static str,
}
impl QueryTransformer {
pub fn new() -> Self {
Self {
tombstone_column: TOMBSTONE_COLUMN,
}
}
/// Transforms query statements (adds tombstone filters)
pub fn transform_query_recursive(
&self,
query: &mut sqlparser::ast::Query,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)
}
/// Recursively handles all SetExpr variants with full subquery support
fn add_tombstone_filters_recursive(
&self,
set_expr: &mut SetExpr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
self.add_tombstone_filters_to_select(select, excluded_tables)?;
// Also transform subqueries in projections
for projection in &mut select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
_ => {} // Ignore wildcard projections
}
}
// Transform subqueries in WHERE
if let Some(where_clause) = &mut select.selection {
self.transform_expression_subqueries(where_clause, excluded_tables)?;
}
// Transform subqueries in GROUP BY
match &mut select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - no expressions to transform
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.transform_expression_subqueries(group_expr, excluded_tables)?;
}
}
}
// Transform subqueries in HAVING
if let Some(having) = &mut select.having {
self.transform_expression_subqueries(having, excluded_tables)?;
}
}
SetExpr::SetOperation { left, right, .. } => {
self.add_tombstone_filters_recursive(left, excluded_tables)?;
self.add_tombstone_filters_recursive(right, excluded_tables)?;
}
SetExpr::Query(query) => {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)?;
}
SetExpr::Values(values) => {
// Also transform subqueries in VALUES lists
for row in &mut values.rows {
for expr in row {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {} // Other cases
}
Ok(())
}
/// Transforms subqueries inside expressions
fn transform_expression_subqueries(
&self,
expr: &mut Expr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match expr {
// Plain subqueries
Expr::Subquery(query) => {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)?;
}
// EXISTS subqueries
Expr::Exists { subquery, .. } => {
self.add_tombstone_filters_recursive(&mut subquery.body, excluded_tables)?;
}
// IN subqueries
Expr::InSubquery {
expr: left_expr,
subquery,
..
} => {
self.transform_expression_subqueries(left_expr, excluded_tables)?;
self.add_tombstone_filters_recursive(&mut subquery.body, excluded_tables)?;
}
// ANY/ALL subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Binary operations
Expr::BinaryOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Unary operations
Expr::UnaryOp {
expr: inner_expr, ..
} => {
self.transform_expression_subqueries(inner_expr, excluded_tables)?;
}
// Nested expressions
Expr::Nested(nested) => {
self.transform_expression_subqueries(nested, excluded_tables)?;
}
// CASE expressions
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.transform_expression_subqueries(op, excluded_tables)?;
}
for case_when in conditions {
self.transform_expression_subqueries(&mut case_when.condition, excluded_tables)?;
self.transform_expression_subqueries(&mut case_when.result, excluded_tables)?;
}
if let Some(else_res) = else_result {
self.transform_expression_subqueries(else_res, excluded_tables)?;
}
}
// Function calls
Expr::Function(func) => match &mut func.args {
sqlparser::ast::FunctionArguments::List(sqlparser::ast::FunctionArgumentList {
args,
..
}) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {}
},
// BETWEEN
Expr::Between {
expr: main_expr,
low,
high,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
self.transform_expression_subqueries(low, excluded_tables)?;
self.transform_expression_subqueries(high, excluded_tables)?;
}
// IN list
Expr::InList {
expr: main_expr,
list,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
for list_expr in list {
self.transform_expression_subqueries(list_expr, excluded_tables)?;
}
}
// IS NULL/IS NOT NULL
Expr::IsNull(inner) | Expr::IsNotNull(inner) => {
self.transform_expression_subqueries(inner, excluded_tables)?;
}
// Other expression types need no transformation
_ => {}
}
Ok(())
}
/// Creates a tombstone filter for a table
pub fn create_tombstone_filter(&self, table_alias: Option<&str>) -> Expr {
let column_expr = match table_alias {
Some(alias) => {
Expr::CompoundIdentifier(vec![Ident::new(alias), Ident::new(self.tombstone_column)])
}
None => {
Expr::Identifier(Ident::new(self.tombstone_column))
}
};
Expr::BinaryOp {
left: Box::new(column_expr),
op: BinaryOperator::NotEq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
}
}
/// Normalizes table names (strips quote characters)
pub fn normalize_table_name(&self, name: &ObjectName) -> String {
let name_str = name.to_string().to_lowercase();
name_str.trim_matches('`').trim_matches('"').to_string()
}
/// Adds tombstone filters to SELECT statements
pub fn add_tombstone_filters_to_select(
&self,
select: &mut sqlparser::ast::Select,
excluded_tables: &HashSet<&str>,
) -> Result<(), DatabaseError> {
// Collect all CRDT tables together with their aliases
let mut crdt_tables = Vec::new();
for twj in &select.from {
if let TableFactor::Table { name, alias, .. } = &twj.relation {
let table_name_str = self.normalize_table_name(name);
if !excluded_tables.contains(table_name_str.as_str()) {
let table_alias = alias.as_ref().map(|a| a.name.value.as_str());
crdt_tables.push((name.clone(), table_alias));
}
}
}
if crdt_tables.is_empty() {
return Ok(());
}
// Check which tombstone columns are already referenced in the WHERE clause
let explicitly_filtered_tables = if let Some(where_clause) = &select.selection {
self.find_explicitly_filtered_tombstone_tables(where_clause, &crdt_tables)
} else {
HashSet::new()
};
// Create filters only for tables that are not already filtered explicitly
let mut tombstone_filters = Vec::new();
for (table_name, table_alias) in crdt_tables {
let table_name_string = table_name.to_string();
let table_key = table_alias.unwrap_or(&table_name_string);
if !explicitly_filtered_tables.contains(table_key) {
tombstone_filters.push(self.create_tombstone_filter(table_alias));
}
}
// Add the automatic filters
if !tombstone_filters.is_empty() {
let combined_filter = tombstone_filters
.into_iter()
.reduce(|acc, expr| Expr::BinaryOp {
left: Box::new(acc),
op: BinaryOperator::And,
right: Box::new(expr),
})
.unwrap();
match &mut select.selection {
Some(existing) => {
*existing = Expr::BinaryOp {
left: Box::new(existing.clone()),
op: BinaryOperator::And,
right: Box::new(combined_filter),
};
}
None => {
select.selection = Some(combined_filter);
}
}
}
Ok(())
}
/// Finds all tables that already have explicit tombstone filters in the WHERE clause
fn find_explicitly_filtered_tombstone_tables(
&self,
where_expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
) -> HashSet<String> {
let mut filtered_tables = HashSet::new();
self.scan_expression_for_tombstone_references(
where_expr,
crdt_tables,
&mut filtered_tables,
);
filtered_tables
}
/// Recursively scans an expression tree for tombstone column references
fn scan_expression_for_tombstone_references(
&self,
expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) {
match expr {
Expr::Identifier(ident) => {
if ident.value == self.tombstone_column && crdt_tables.len() == 1 {
let table_name_str = crdt_tables[0].0.to_string();
let table_key = crdt_tables[0].1.unwrap_or(&table_name_str);
filtered_tables.insert(table_key.to_string());
}
}
Expr::CompoundIdentifier(idents) => {
if idents.len() == 2 && idents[1].value == self.tombstone_column {
let table_ref = &idents[0].value;
for (table_name, alias) in crdt_tables {
let table_name_str = table_name.to_string();
if table_ref == &table_name_str || alias.map_or(false, |a| a == table_ref) {
filtered_tables.insert(table_ref.clone());
break;
}
}
}
}
Expr::BinaryOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
Expr::UnaryOp { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Nested(nested) => {
self.scan_expression_for_tombstone_references(nested, crdt_tables, filtered_tables);
}
Expr::InList { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Between { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::IsNull(expr) | Expr::IsNotNull(expr) => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Function(func) => {
if let sqlparser::ast::FunctionArguments::List(
sqlparser::ast::FunctionArgumentList { args, .. },
) = &func.args
{
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
}
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.scan_expression_for_tombstone_references(op, crdt_tables, filtered_tables);
}
for case_when in conditions {
self.scan_expression_for_tombstone_references(
&case_when.condition,
crdt_tables,
filtered_tables,
);
self.scan_expression_for_tombstone_references(
&case_when.result,
crdt_tables,
filtered_tables,
);
}
if let Some(else_res) = else_result {
self.scan_expression_for_tombstone_references(
else_res,
crdt_tables,
filtered_tables,
);
}
}
Expr::Subquery(query) => {
self.analyze_query_for_tombstone_references(query, crdt_tables, filtered_tables)
.ok();
}
Expr::Exists { subquery, .. } => {
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::InSubquery { expr, subquery, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
_ => {}
}
}
fn analyze_query_for_tombstone_references(
&self,
query: &sqlparser::ast::Query,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
self.analyze_set_expr_for_tombstone_references(&query.body, crdt_tables, filtered_tables)
}
fn analyze_set_expr_for_tombstone_references(
&self,
set_expr: &SetExpr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
if let Some(where_clause) = &select.selection {
self.scan_expression_for_tombstone_references(
where_clause,
crdt_tables,
filtered_tables,
);
}
for projection in &select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
_ => {}
}
}
match &select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.scan_expression_for_tombstone_references(
group_expr,
crdt_tables,
filtered_tables,
);
}
}
}
if let Some(having) = &select.having {
self.scan_expression_for_tombstone_references(
having,
crdt_tables,
filtered_tables,
);
}
}
SetExpr::SetOperation { left, right, .. } => {
self.analyze_set_expr_for_tombstone_references(left, crdt_tables, filtered_tables)?;
self.analyze_set_expr_for_tombstone_references(
right,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Query(query) => {
self.analyze_set_expr_for_tombstone_references(
&query.body,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Values(values) => {
for row in &values.rows {
for expr in row {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {}
}
Ok(())
}
}
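A minimal before/after sketch of the tombstone filtering, again using haex_workspaces as the illustrative CRDT table (rendered SQL approximate):

-- issued by the application
SELECT id, name FROM haex_workspaces WHERE position > 0;

-- after transformation: an automatic tombstone filter is ANDed onto the WHERE clause
SELECT id, name FROM haex_workspaces WHERE position > 0 AND haex_tombstone <> 1;

-- a query that already references the tombstone column is left untouched,
-- which keeps deliberately inspecting deleted rows possible
SELECT id, name FROM haex_workspaces WHERE haex_tombstone = 1;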

View File

@ -1,9 +1,12 @@
use crate::crdt::insert_transformer::InsertTransformer;
use crate::crdt::query_transformer::QueryTransformer;
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use crate::table_names::{TABLE_CRDT_CONFIGS, TABLE_CRDT_LOGS};
use sqlparser::ast::{
Assignment, AssignmentTarget, BinaryOperator, ColumnDef, DataType, Expr, Ident, Insert,
ObjectName, ObjectNamePart, SelectItem, SetExpr, Statement, TableFactor, TableObject, Value,
Assignment, AssignmentTarget, BinaryOperator, ColumnDef, DataType, Expr, Ident,
ObjectName, ObjectNamePart, Statement, TableFactor, TableObject,
Value,
};
use std::borrow::Cow;
use std::collections::HashSet;
@ -112,7 +115,10 @@ impl CrdtTransformer {
pub fn transform_select_statement(&self, stmt: &mut Statement) -> Result<(), DatabaseError> {
match stmt {
Statement::Query(query) => self.transform_query_recursive(query),
Statement::Query(query) => {
let query_transformer = QueryTransformer::new();
query_transformer.transform_query_recursive(query, &self.excluded_tables)
}
// Catch all other cases and return an error
_ => Err(DatabaseError::UnsupportedStatement {
sql: stmt.to_string(),
@ -121,10 +127,12 @@ impl CrdtTransformer {
}
}
pub fn transform_execute_statement(
/// Transforms statements WITH access to table information (recommended)
pub fn transform_execute_statement_with_table_info(
&self,
stmt: &mut Statement,
hlc_timestamp: &Timestamp,
tx: &rusqlite::Transaction,
) -> Result<Option<String>, DatabaseError> {
match stmt {
Statement::CreateTable(create_table) => {
@ -141,7 +149,100 @@ impl CrdtTransformer {
Statement::Insert(insert_stmt) => {
if let TableObject::TableName(name) = &insert_stmt.table {
if self.is_crdt_sync_table(name) {
self.transform_insert(insert_stmt, hlc_timestamp)?;
// Fetch the table information to identify PKs and FKs
let table_name_str = self.normalize_table_name(name);
let columns = crate::crdt::trigger::get_table_schema(tx, &table_name_str)
.map_err(|e| DatabaseError::ExecutionError {
sql: format!("PRAGMA table_info('{}')", table_name_str),
reason: e.to_string(),
table: Some(table_name_str.to_string()),
})?;
let primary_keys: Vec<String> = columns
.iter()
.filter(|c| c.is_pk)
.map(|c| c.name.clone())
.collect();
let foreign_keys = crate::crdt::trigger::get_foreign_key_columns(tx, &table_name_str)
.map_err(|e| DatabaseError::ExecutionError {
sql: format!("PRAGMA foreign_key_list('{}')", table_name_str),
reason: e.to_string(),
table: Some(table_name_str.to_string()),
})?;
let insert_transformer = InsertTransformer::new();
insert_transformer.transform_insert(insert_stmt, hlc_timestamp, &primary_keys, &foreign_keys)?;
}
}
Ok(None)
}
Statement::Update {
table, assignments, ..
} => {
if let TableFactor::Table { name, .. } = &table.relation {
if self.is_crdt_sync_table(name) {
assignments.push(self.columns.create_hlc_assignment(hlc_timestamp));
}
}
Ok(None)
}
Statement::Delete(del_stmt) => {
if let Some(table_name) = self.extract_table_name_from_delete(del_stmt) {
let table_name_str = self.normalize_table_name(&table_name);
let is_crdt = self.is_crdt_sync_table(&table_name);
eprintln!("DEBUG DELETE (with_table_info): table='{}', is_crdt_sync={}, normalized='{}'",
table_name, is_crdt, table_name_str);
if is_crdt {
eprintln!("DEBUG: Transforming DELETE to UPDATE for table '{}'", table_name_str);
self.transform_delete_to_update(stmt, hlc_timestamp)?;
}
Ok(None)
} else {
Err(DatabaseError::UnsupportedStatement {
sql: del_stmt.to_string(),
reason: "DELETE from non-table source or multiple tables".to_string(),
})
}
}
Statement::AlterTable { name, .. } => {
if self.is_crdt_sync_table(name) {
Ok(Some(self.normalize_table_name(name).into_owned()))
} else {
Ok(None)
}
}
_ => Ok(None),
}
}
pub fn transform_execute_statement(
&self,
stmt: &mut Statement,
hlc_timestamp: &Timestamp,
) -> Result<Option<String>, DatabaseError> {
// For INSERT statements without a connection we use an empty PK list
// That means ALL columns are set in the ON CONFLICT UPDATE
// This is a fallback for the case that no connection is available
match stmt {
Statement::CreateTable(create_table) => {
if self.is_crdt_sync_table(&create_table.name) {
self.columns
.add_to_table_definition(&mut create_table.columns);
Ok(Some(
self.normalize_table_name(&create_table.name).into_owned(),
))
} else {
Ok(None)
}
}
Statement::Insert(insert_stmt) => {
if let TableObject::TableName(name) = &insert_stmt.table {
if self.is_crdt_sync_table(name) {
// Without a connection: empty PK and FK lists (all columns get updated)
let insert_transformer = InsertTransformer::new();
insert_transformer.transform_insert(insert_stmt, hlc_timestamp, &[], &[])?;
}
}
Ok(None)
@ -180,560 +281,6 @@ impl CrdtTransformer {
}
}
/// Transformiert Query-Statements (fügt Tombstone-Filter hinzu)
fn transform_query_recursive(
&self,
query: &mut sqlparser::ast::Query,
) -> Result<(), DatabaseError> {
self.add_tombstone_filters_recursive(&mut query.body)
}
/// Rekursive Behandlung aller SetExpr-Typen mit vollständiger Subquery-Unterstützung
fn add_tombstone_filters_recursive(&self, set_expr: &mut SetExpr) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
self.add_tombstone_filters_to_select(select)?;
// Transformiere auch Subqueries in Projektionen
for projection in &mut select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.transform_expression_subqueries(expr)?;
}
_ => {} // Wildcard projections ignorieren
}
}
// Transformiere Subqueries in WHERE
if let Some(where_clause) = &mut select.selection {
self.transform_expression_subqueries(where_clause)?;
}
// Transformiere Subqueries in GROUP BY
match &mut select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - keine Expressions zu transformieren
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.transform_expression_subqueries(group_expr)?;
}
}
}
// Transformiere Subqueries in HAVING
if let Some(having) = &mut select.having {
self.transform_expression_subqueries(having)?;
}
}
SetExpr::SetOperation { left, right, .. } => {
self.add_tombstone_filters_recursive(left)?;
self.add_tombstone_filters_recursive(right)?;
}
SetExpr::Query(query) => {
self.add_tombstone_filters_recursive(&mut query.body)?;
}
SetExpr::Values(values) => {
// Transformiere auch Subqueries in Values-Listen
for row in &mut values.rows {
for expr in row {
self.transform_expression_subqueries(expr)?;
}
}
}
_ => {} // Andere Fälle
}
Ok(())
}
/// Transformiert Subqueries innerhalb von Expressions
fn transform_expression_subqueries(&self, expr: &mut Expr) -> Result<(), DatabaseError> {
match expr {
// Einfache Subqueries
Expr::Subquery(query) => {
self.add_tombstone_filters_recursive(&mut query.body)?;
}
// EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.add_tombstone_filters_recursive(&mut subquery.body)?;
}
// IN Subqueries
Expr::InSubquery {
expr: left_expr,
subquery,
..
} => {
self.transform_expression_subqueries(left_expr)?;
self.add_tombstone_filters_recursive(&mut subquery.body)?;
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.transform_expression_subqueries(left)?;
self.transform_expression_subqueries(right)?;
}
// Binäre Operationen
Expr::BinaryOp { left, right, .. } => {
self.transform_expression_subqueries(left)?;
self.transform_expression_subqueries(right)?;
}
// Unäre Operationen
Expr::UnaryOp {
expr: inner_expr, ..
} => {
self.transform_expression_subqueries(inner_expr)?;
}
// Verschachtelte Ausdrücke
Expr::Nested(nested) => {
self.transform_expression_subqueries(nested)?;
}
// CASE-Ausdrücke
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.transform_expression_subqueries(op)?;
}
for case_when in conditions {
self.transform_expression_subqueries(&mut case_when.condition)?;
self.transform_expression_subqueries(&mut case_when.result)?;
}
if let Some(else_res) = else_result {
self.transform_expression_subqueries(else_res)?;
}
}
// Funktionsaufrufe
Expr::Function(func) => match &mut func.args {
sqlparser::ast::FunctionArguments::List(sqlparser::ast::FunctionArgumentList {
args,
..
}) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.transform_expression_subqueries(expr)?;
}
}
}
_ => {}
},
// BETWEEN
Expr::Between {
expr: main_expr,
low,
high,
..
} => {
self.transform_expression_subqueries(main_expr)?;
self.transform_expression_subqueries(low)?;
self.transform_expression_subqueries(high)?;
}
// IN Liste
Expr::InList {
expr: main_expr,
list,
..
} => {
self.transform_expression_subqueries(main_expr)?;
for list_expr in list {
self.transform_expression_subqueries(list_expr)?;
}
}
// IS NULL/IS NOT NULL
Expr::IsNull(inner) | Expr::IsNotNull(inner) => {
self.transform_expression_subqueries(inner)?;
}
// Andere Expression-Typen benötigen keine Transformation
_ => {}
}
Ok(())
}
/// Fügt Tombstone-Filter zu SELECT-Statements hinzu (nur wenn nicht explizit in WHERE gesetzt)
fn add_tombstone_filters_to_select(
&self,
select: &mut sqlparser::ast::Select,
) -> Result<(), DatabaseError> {
// Sammle alle CRDT-Tabellen mit ihren Aliasen
let mut crdt_tables = Vec::new();
for twj in &select.from {
if let TableFactor::Table { name, alias, .. } = &twj.relation {
if self.is_crdt_sync_table(name) {
let table_alias = alias.as_ref().map(|a| a.name.value.as_str());
crdt_tables.push((name.clone(), table_alias));
}
}
}
if crdt_tables.is_empty() {
return Ok(());
}
// Prüfe, welche Tombstone-Spalten bereits in der WHERE-Klausel referenziert werden
let explicitly_filtered_tables = if let Some(where_clause) = &select.selection {
self.find_explicitly_filtered_tombstone_tables(where_clause, &crdt_tables)
} else {
HashSet::new()
};
// Erstelle Filter nur für Tabellen, die noch nicht explizit gefiltert werden
let mut tombstone_filters = Vec::new();
for (table_name, table_alias) in crdt_tables {
let table_name_string = table_name.to_string();
let table_key = table_alias.unwrap_or(&table_name_string);
if !explicitly_filtered_tables.contains(table_key) {
tombstone_filters.push(self.columns.create_tombstone_filter(table_alias));
}
}
// Füge die automatischen Filter hinzu
if !tombstone_filters.is_empty() {
let combined_filter = tombstone_filters
.into_iter()
.reduce(|acc, expr| Expr::BinaryOp {
left: Box::new(acc),
op: BinaryOperator::And,
right: Box::new(expr),
})
.unwrap();
match &mut select.selection {
Some(existing) => {
*existing = Expr::BinaryOp {
left: Box::new(existing.clone()),
op: BinaryOperator::And,
right: Box::new(combined_filter),
};
}
None => {
select.selection = Some(combined_filter);
}
}
}
Ok(())
}
/// Findet alle Tabellen, die bereits explizit Tombstone-Filter in der WHERE-Klausel haben
fn find_explicitly_filtered_tombstone_tables(
&self,
where_expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
) -> HashSet<String> {
let mut filtered_tables = HashSet::new();
self.scan_expression_for_tombstone_references(
where_expr,
crdt_tables,
&mut filtered_tables,
);
filtered_tables
}
/// Rekursiv durchsucht einen Expression-Baum nach Tombstone-Spalten-Referenzen
fn scan_expression_for_tombstone_references(
&self,
expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) {
match expr {
// Einfache Spaltenreferenz: tombstone = ?
Expr::Identifier(ident) => {
if ident.value == self.columns.tombstone {
// Wenn keine Tabelle spezifiziert ist und es nur eine CRDT-Tabelle gibt
if crdt_tables.len() == 1 {
let table_name_str = crdt_tables[0].0.to_string();
let table_key = crdt_tables[0].1.unwrap_or(&table_name_str);
filtered_tables.insert(table_key.to_string());
}
}
}
// Qualifizierte Spaltenreferenz: table.tombstone = ? oder alias.tombstone = ?
Expr::CompoundIdentifier(idents) => {
if idents.len() == 2 && idents[1].value == self.columns.tombstone {
let table_ref = &idents[0].value;
// Prüfe, ob es eine unserer CRDT-Tabellen ist (nach Name oder Alias)
for (table_name, alias) in crdt_tables {
let table_name_str = table_name.to_string();
if table_ref == &table_name_str || alias.map_or(false, |a| a == table_ref) {
filtered_tables.insert(table_ref.clone());
break;
}
}
}
}
// Binäre Operationen: AND, OR, etc.
Expr::BinaryOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
// Unäre Operationen: NOT, etc.
Expr::UnaryOp { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// Verschachtelte Ausdrücke
Expr::Nested(nested) => {
self.scan_expression_for_tombstone_references(nested, crdt_tables, filtered_tables);
}
// IN-Klauseln
Expr::InList { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// BETWEEN-Klauseln
Expr::Between { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// IS NULL/IS NOT NULL
Expr::IsNull(expr) | Expr::IsNotNull(expr) => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// Funktionsaufrufe - KORRIGIERT
Expr::Function(func) => {
match &func.args {
sqlparser::ast::FunctionArguments::List(
sqlparser::ast::FunctionArgumentList { args, .. },
) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {} // Andere FunctionArguments-Varianten ignorieren
}
}
// CASE-Ausdrücke - KORRIGIERT
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.scan_expression_for_tombstone_references(op, crdt_tables, filtered_tables);
}
for case_when in conditions {
self.scan_expression_for_tombstone_references(
&case_when.condition,
crdt_tables,
filtered_tables,
);
self.scan_expression_for_tombstone_references(
&case_when.result,
crdt_tables,
filtered_tables,
);
}
if let Some(else_res) = else_result {
self.scan_expression_for_tombstone_references(
else_res,
crdt_tables,
filtered_tables,
);
}
}
// Subqueries mit vollständiger Unterstützung
Expr::Subquery(query) => {
self.transform_query_recursive_for_tombstone_analysis(
query,
crdt_tables,
filtered_tables,
)
.ok();
}
// EXISTS/NOT EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.transform_query_recursive_for_tombstone_analysis(
subquery,
crdt_tables,
filtered_tables,
)
.ok();
}
// IN/NOT IN Subqueries
Expr::InSubquery { expr, subquery, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
self.transform_query_recursive_for_tombstone_analysis(
subquery,
crdt_tables,
filtered_tables,
)
.ok();
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
// Andere Expression-Typen ignorieren wir für jetzt
_ => {}
}
}
/// Analysiert eine Subquery und sammelt Tombstone-Referenzen
fn transform_query_recursive_for_tombstone_analysis(
&self,
query: &sqlparser::ast::Query,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
self.analyze_set_expr_for_tombstone_references(&query.body, crdt_tables, filtered_tables)
}
/// Rekursiv analysiert SetExpr für Tombstone-Referenzen
fn analyze_set_expr_for_tombstone_references(
&self,
set_expr: &SetExpr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
// Analysiere WHERE-Klausel
if let Some(where_clause) = &select.selection {
self.scan_expression_for_tombstone_references(
where_clause,
crdt_tables,
filtered_tables,
);
}
// Analysiere alle Projektionen (können auch Subqueries enthalten)
for projection in &select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
_ => {} // Wildcard projections ignorieren
}
}
// Analysiere GROUP BY
match &select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - keine Expressions zu analysieren
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.scan_expression_for_tombstone_references(
group_expr,
crdt_tables,
filtered_tables,
);
}
}
}
// Analysiere HAVING
if let Some(having) = &select.having {
self.scan_expression_for_tombstone_references(
having,
crdt_tables,
filtered_tables,
);
}
}
SetExpr::SetOperation { left, right, .. } => {
self.analyze_set_expr_for_tombstone_references(left, crdt_tables, filtered_tables)?;
self.analyze_set_expr_for_tombstone_references(
right,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Query(query) => {
self.analyze_set_expr_for_tombstone_references(
&query.body,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Values(values) => {
// Analysiere Values-Listen
for row in &values.rows {
for expr in row {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {} // Andere Varianten
}
Ok(())
}
/// Transformiert INSERT-Statements (fügt HLC-Timestamp hinzu)
fn transform_insert(
&self,
insert_stmt: &mut Insert,
timestamp: &Timestamp,
) -> Result<(), DatabaseError> {
// Add both haex_timestamp and haex_tombstone columns
insert_stmt
.columns
.push(Ident::new(self.columns.hlc_timestamp));
insert_stmt
.columns
.push(Ident::new(self.columns.tombstone));
match insert_stmt.source.as_mut() {
Some(query) => match &mut *query.body {
SetExpr::Values(values) => {
for row in &mut values.rows {
// Add haex_timestamp value
row.push(Expr::Value(
Value::SingleQuotedString(timestamp.to_string()).into(),
));
// Add haex_tombstone value (0 = not deleted)
row.push(Expr::Value(
Value::Number("0".to_string(), false).into(),
));
}
}
SetExpr::Select(select) => {
let hlc_expr =
Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
select.projection.push(SelectItem::UnnamedExpr(hlc_expr));
// Add haex_tombstone value (0 = not deleted)
let tombstone_expr =
Expr::Value(Value::Number("0".to_string(), false).into());
select.projection.push(SelectItem::UnnamedExpr(tombstone_expr));
}
_ => {
return Err(DatabaseError::UnsupportedStatement {
sql: insert_stmt.to_string(),
reason: "INSERT with unsupported source type".to_string(),
});
}
},
None => {
return Err(DatabaseError::UnsupportedStatement {
reason: "INSERT statement has no source".to_string(),
sql: insert_stmt.to_string(),
});
}
}
Ok(())
}
/// Transformiert DELETE zu UPDATE (soft delete)
fn transform_delete_to_update(
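The diff is truncated inside transform_delete_to_update; as a hedged sketch of the soft delete it performs (illustrative id, placeholder HLC value, the exact rewrite lives in the suppressed part of the file):

-- issued by the application
DELETE FROM haex_workspaces WHERE id = 'ws-0001';

-- rewritten so the row is tombstoned instead of physically removed
UPDATE haex_workspaces
SET haex_tombstone = 1, haex_timestamp = '<hlc-timestamp>'
WHERE id = 'ws-0001';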

View File

@ -78,14 +78,14 @@ pub enum TriggerSetupResult {
TableNotFound,
}
#[derive(Debug)]
struct ColumnInfo {
name: String,
is_pk: bool,
#[derive(Debug, Clone)]
pub struct ColumnInfo {
pub name: String,
pub is_pk: bool,
}
impl ColumnInfo {
fn from_row(row: &Row) -> RusqliteResult<Self> {
pub fn from_row(row: &Row) -> RusqliteResult<Self> {
Ok(ColumnInfo {
name: row.get("name")?,
is_pk: row.get::<_, i64>("pk")? > 0,
@ -155,7 +155,7 @@ pub fn setup_triggers_for_table(
}
/// Fetches the schema for a given table.
fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<ColumnInfo>> {
pub fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<ColumnInfo>> {
if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}",
@ -170,6 +170,29 @@ fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<C
rows.collect()
}
/// Fetches all foreign key columns of a table.
/// Returns a list of the column names that are foreign keys.
pub fn get_foreign_key_columns(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<String>> {
if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}",
table_name
))
.into());
}
let sql = format!("PRAGMA foreign_key_list(\"{}\");", table_name);
let mut stmt = conn.prepare(&sql)?;
// foreign_key_list returns the columns: id, seq, table, from, to, on_update, on_delete, match
// We need the "from" column, which holds the name of the FK column in the current table
let rows = stmt.query_map([], |row| {
row.get::<_, String>("from")
})?;
rows.collect()
}
pub fn drop_triggers_for_table(
tx: &Transaction, // Operates directly on a transaction
table_name: &str,
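For reference, a sketch of what this PRAGMA yields for haex_desktop_items once the workspace foreign key from the 0004 migration exists (the on_update/on_delete/match values are SQLite defaults, assuming the constraint was declared without explicit actions):

PRAGMA foreign_key_list("haex_desktop_items");
-- id | seq | table           | from         | to | on_update | on_delete | match
-- 0  | 0   | haex_workspaces | workspace_id | id | NO ACTION | NO ACTION | NONE
-- get_foreign_key_columns collects the "from" values, here: ["workspace_id"]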

View File

@ -2,6 +2,7 @@
use crate::database::error::DatabaseError;
use crate::database::DbConnection;
use crate::extension::database::executor::SqlExecutor;
use base64::{engine::general_purpose::STANDARD, Engine as _};
use rusqlite::types::Value as SqlValue;
use rusqlite::{
@ -79,6 +80,16 @@ pub fn parse_sql_statements(sql: &str) -> Result<Vec<Statement>, DatabaseError>
})
}
/// Checks whether a statement has a RETURNING clause (AST-based, safe)
pub fn statement_has_returning(statement: &Statement) -> bool {
match statement {
Statement::Insert(insert) => insert.returning.is_some(),
Statement::Update { returning, .. } => returning.is_some(),
Statement::Delete(delete) => delete.returning.is_some(),
_ => false,
}
}
pub struct ValueConverter;
impl ValueConverter {
@ -116,6 +127,25 @@ impl ValueConverter {
}
}
/// Execute SQL with CRDT transformation (for the Drizzle integration)
/// Drizzle should call this function to get CRDT support
pub fn execute_with_crdt(
sql: String,
params: Vec<JsonValue>,
connection: &DbConnection,
hlc_service: &std::sync::MutexGuard<crate::crdt::hlc::HlcService>,
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
with_connection(connection, |conn| {
let tx = conn.transaction().map_err(DatabaseError::from)?;
let _modified_tables = SqlExecutor::execute_internal(&tx, hlc_service, &sql, &params)?;
tx.commit().map_err(DatabaseError::from)?;
// For Drizzle: return an empty array (as with execute without RETURNING)
Ok(vec![])
})
}
/// Execute SQL WITHOUT CRDT transformation (for special cases)
pub fn execute(
sql: String,
params: Vec<JsonValue>,
@ -245,7 +275,7 @@ pub fn select(
}
/// Converts a rusqlite ValueRef to JSON
fn convert_value_ref_to_json(value_ref: ValueRef) -> Result<JsonValue, DatabaseError> {
pub fn convert_value_ref_to_json(value_ref: ValueRef) -> Result<JsonValue, DatabaseError> {
let json_val = match value_ref {
ValueRef::Null => JsonValue::Null,
ValueRef::Integer(i) => JsonValue::Number(i.into()),

View File

@ -6,6 +6,7 @@ pub mod generated;
use crate::crdt::hlc::HlcService;
use crate::database::error::DatabaseError;
use crate::extension::database::executor::SqlExecutor;
use crate::table_names::TABLE_CRDT_CONFIGS;
use crate::AppState;
use rusqlite::Connection;
@ -42,6 +43,36 @@ pub fn sql_execute(
core::execute(sql, params, &state.db)
}
#[tauri::command]
pub fn sql_execute_with_crdt(
sql: String,
params: Vec<JsonValue>,
state: State<'_, AppState>,
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
let hlc_service = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned {
reason: "Failed to lock HLC service".to_string(),
})?;
core::execute_with_crdt(sql, params, &state.db, &hlc_service)
}
#[tauri::command]
pub fn sql_query_with_crdt(
sql: String,
params: Vec<JsonValue>,
state: State<'_, AppState>,
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
let hlc_service = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned {
reason: "Failed to lock HLC service".to_string(),
})?;
core::with_connection(&state.db, |conn| {
let tx = conn.transaction().map_err(DatabaseError::from)?;
let result = SqlExecutor::query_internal(&tx, &hlc_service, &sql, &params)?;
tx.commit().map_err(DatabaseError::from)?;
Ok(result)
})
}
/// Resolves a database name to the full vault path
fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, DatabaseError> {
// Make sure the name has a .db extension

View File

@ -4,7 +4,7 @@ use crate::extension::core::manifest::{EditablePermissions, ExtensionManifest, E
use crate::extension::core::types::{copy_directory, Extension, ExtensionSource};
use crate::extension::core::ExtensionPermissions;
use crate::extension::crypto::ExtensionCrypto;
use crate::extension::database::executor::SqlExecutor;
use crate::extension::database::executor::{PkRemappingContext, SqlExecutor};
use crate::extension::error::ExtensionError;
use crate::extension::permissions::manager::PermissionManager;
use crate::extension::permissions::types::ExtensionPermission;
@ -315,7 +315,8 @@ impl ExtensionManager {
name: extension_name.to_string(),
})?;
eprintln!("DEBUG: Removing extension with ID: {}", extension.id);
eprintln!("DEBUG: Extension name: {}, version: {}", extension_name, extension_version);
// Delete permissions and the extension entry in one transaction
with_connection(&state.db, |conn| {
@ -326,6 +327,7 @@ impl ExtensionManager {
})?;
// Delete all permissions with extension_id
eprintln!("DEBUG: Deleting permissions for extension_id: {}", extension.id);
PermissionManager::delete_permissions_in_transaction(
&tx,
&hlc_service,
@ -334,6 +336,7 @@ impl ExtensionManager {
// Delete the extension entry with extension_id
let sql = format!("DELETE FROM {} WHERE id = ?", TABLE_EXTENSIONS);
eprintln!("DEBUG: Executing SQL: {} with id = {}", sql, extension.id);
SqlExecutor::execute_internal_typed(
&tx,
&hlc_service,
@ -341,9 +344,12 @@ impl ExtensionManager {
rusqlite::params![&extension.id],
)?;
eprintln!("DEBUG: Committing transaction");
tx.commit().map_err(DatabaseError::from)
})?;
eprintln!("DEBUG: Transaction committed successfully");
// Remove from the in-memory manager
self.remove_extension(public_key, extension_name)?;
@ -460,20 +466,25 @@ impl ExtensionManager {
let permissions = custom_permissions.to_internal_permissions(&extension_id);
// Store the extension entry and permissions in one transaction
with_connection(&state.db, |conn| {
let actual_extension_id = with_connection(&state.db, |conn| {
let tx = conn.transaction().map_err(DatabaseError::from)?;
let hlc_service = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned {
reason: "Failed to lock HLC service".to_string(),
})?;
// Create a PK remapping context for the whole transaction
// This enables automatic FK remapping when the extension INSERT hits ON CONFLICT
let mut pk_context = PkRemappingContext::new();
// 1. Create the extension entry with a generated UUID
// IMPORTANT: RETURNING is added automatically by the CRDT transformer
let insert_ext_sql = format!(
"INSERT INTO {} (id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO {} (id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) RETURNING id",
TABLE_EXTENSIONS
);
SqlExecutor::execute_internal_typed(
let (_tables, returning_results) = SqlExecutor::query_internal_typed_with_context(
&tx,
&hlc_service,
&insert_ext_sql,
@ -490,11 +501,28 @@ impl ExtensionManager {
extracted.manifest.description,
true, // enabled
],
&mut pk_context,
)?;
// Use the actual ID from the database (important with ON CONFLICT)
// The haex_extensions table has a single-column PK named "id"
let actual_extension_id = returning_results
.first()
.and_then(|row| row.first())
.and_then(|val| val.as_str())
.map(|s| s.to_string())
.unwrap_or_else(|| extension_id.clone());
eprintln!(
"DEBUG: Extension UUID - Generated: {}, Actual from DB: {}",
extension_id, actual_extension_id
);
// 2. Store the permissions (or update them if they already exist)
// Use a plain INSERT - the CRDT transformation adds ON CONFLICT automatically
// FK values (extension_id) are remapped automatically if the extension INSERT hit ON CONFLICT
let insert_perm_sql = format!(
"INSERT OR REPLACE INTO {} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO {} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)",
TABLE_EXTENSION_PERMISSIONS
);
@ -502,7 +530,7 @@ impl ExtensionManager {
use crate::database::generated::HaexExtensionPermissions;
let db_perm: HaexExtensionPermissions = perm.into();
SqlExecutor::execute_internal_typed(
SqlExecutor::execute_internal_typed_with_context(
&tx,
&hlc_service,
&insert_perm_sql,
@ -515,15 +543,16 @@ impl ExtensionManager {
db_perm.constraints,
db_perm.status,
],
&mut pk_context,
)?;
}
tx.commit().map_err(DatabaseError::from)?;
Ok(extension_id.clone())
Ok(actual_extension_id.clone())
})?;
let extension = Extension {
id: extension_id.clone(),
id: actual_extension_id.clone(), // Use the actual_extension_id from the transaction
source: ExtensionSource::Production {
path: extensions_dir.clone(),
version: extracted.manifest.version.clone(),
@ -535,7 +564,7 @@ impl ExtensionManager {
self.add_production_extension(extension)?;
Ok(extension_id)
Ok(actual_extension_id) // Return the actual_extension_id to the caller
}
/// Scans the filesystem at startup and loads all installed extensions.
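To illustrate why the install flow reads the id back (hypothetical values; TABLE_EXTENSIONS is assumed to resolve to haex_extensions, and the table is assumed to carry a UNIQUE constraint that identifies the extension): if a tombstoned row for the same extension already exists, the transformed INSERT reactivates it, and RETURNING reports the id that is actually stored rather than the freshly generated UUID.

-- 'new-uuid' was generated locally, but a tombstoned row with id 'existing-uuid' already exists
INSERT INTO haex_extensions (id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled)
VALUES ('new-uuid', 'my-extension', '1.0.0', 'alice', 'index.html', NULL, '<public-key>', '<signature>', NULL, NULL, 1)
RETURNING id;
-- returns 'existing-uuid' when the upsert fires; that value becomes actual_extension_id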

View File

@ -3,38 +3,123 @@
use crate::crdt::hlc::HlcService;
use crate::crdt::transformer::CrdtTransformer;
use crate::crdt::trigger;
use crate::database::core::{parse_sql_statements, ValueConverter};
use crate::database::core::{convert_value_ref_to_json, parse_sql_statements, ValueConverter};
use crate::database::error::DatabaseError;
use rusqlite::{params_from_iter, Params, Transaction};
use rusqlite::{params_from_iter, types::Value as SqliteValue, ToSql, Transaction};
use serde_json::Value as JsonValue;
use sqlparser::ast::Statement;
use std::collections::HashSet;
use sqlparser::ast::{Insert, Statement, TableObject};
use std::collections::{HashMap, HashSet};
/// Represents the PK values for one row (can be a single or a composite key)
#[derive(Debug, Clone, PartialEq, Eq)]
struct PkValues {
/// column_name -> value
values: HashMap<String, String>,
}
impl PkValues {
fn new() -> Self {
Self {
values: HashMap::new(),
}
}
fn insert(&mut self, column: String, value: String) {
self.values.insert(column, value);
}
fn get(&self, column: &str) -> Option<&String> {
self.values.get(column)
}
}
/// Context for PK remapping during a transaction
/// Tracks, per table, which PKs were supposed to be inserted vs. which are actually in the DB
#[derive(Debug, Default)]
pub struct PkRemappingContext {
/// Per table: a list of (original_pk_values, actual_pk_values) mappings
/// Only stored when original != actual (i.e. ON CONFLICT changed the PK)
mappings: HashMap<String, Vec<(PkValues, PkValues)>>,
}
impl PkRemappingContext {
pub fn new() -> Self {
Self::default()
}
/// Adds a mapping for a table, but only when original != actual
/// original and actual are the PK values before and after the INSERT
fn add_mapping(&mut self, table: String, original: PkValues, actual: PkValues) {
// Only store when actually different (an ON CONFLICT took place)
if original != actual {
eprintln!(
"DEBUG: PK Remapping for table '{}': {:?} -> {:?}",
table, original.values, actual.values
);
self.mappings
.entry(table)
.or_insert_with(Vec::new)
.push((original, actual));
}
}
/// Tries to remap an FK value
/// referenced_table: the table the FK points to
/// referenced_column: the PK column in the referenced_table
/// value: the FK value to be replaced
fn remap_fk_value(
&self,
referenced_table: &str,
referenced_column: &str,
value: &str,
) -> String {
self.mappings
.get(referenced_table)
.and_then(|mappings| {
mappings.iter().find_map(|(original, actual)| {
if original.get(referenced_column)? == value {
let actual_val = actual.get(referenced_column)?.clone();
eprintln!(
"DEBUG: FK Remapping for {}.{}: {} -> {}",
referenced_table, referenced_column, value, actual_val
);
Some(actual_val)
} else {
None
}
})
})
.unwrap_or_else(|| value.to_string())
}
}
/// SQL executor WITHOUT permission checks - for internal use
pub struct SqlExecutor;
impl SqlExecutor {
pub fn execute_internal_typed<P>(
/// Executes a SQL statement WITHOUT RETURNING (with CRDT and PK remapping).
/// Supports automatic FK remapping when earlier INSERTs triggered ON CONFLICT.
///
/// This variant accepts &[&dyn ToSql] directly (as produced by rusqlite::params![]).
/// Returns: modified_schema_tables
pub fn execute_internal_typed_with_context(
tx: &Transaction,
hlc_service: &HlcService,
sql: &str,
params: P, // accepts anything rusqlite understands as parameters
) -> Result<HashSet<String>, DatabaseError>
where
P: Params,
{
params: &[&dyn ToSql],
pk_context: &mut PkRemappingContext,
) -> Result<HashSet<String>, DatabaseError> {
let mut ast_vec = parse_sql_statements(sql)?;
// Make sure we process exactly ONE statement. That is safer.
if ast_vec.len() != 1 {
return Err(DatabaseError::ExecutionError {
sql: sql.to_string(),
reason: "execute_internal_typed sollte nur ein einzelnes SQL-Statement erhalten"
reason: "execute_internal_typed_with_context sollte nur ein einzelnes SQL-Statement erhalten"
.to_string(),
table: None,
});
}
// Take the single statement out of the vector.
let mut statement = ast_vec.pop().unwrap();
let transformer = CrdtTransformer::new();
@ -46,23 +131,72 @@ impl SqlExecutor {
})?;
let mut modified_schema_tables = HashSet::new();
if let Some(table_name) =
transformer.transform_execute_statement(&mut statement, &hlc_timestamp)?
{
if let Some(table_name) = transformer.transform_execute_statement_with_table_info(
&mut statement,
&hlc_timestamp,
tx,
)? {
modified_schema_tables.insert(table_name);
}
// Execute the transformed statement.
// `params` is only moved here once now, which is correct.
let sql_str = statement.to_string();
tx.execute(&sql_str, params)
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: None,
reason: e.to_string(),
})?;
eprintln!("DEBUG: Transformed SQL: {}", sql_str);
// The trigger logic for CREATE TABLE is kept.
// Special handling for INSERT statements (with FK remapping, WITHOUT RETURNING)
if let Statement::Insert(ref insert_stmt) = statement {
if let TableObject::TableName(ref table_name) = insert_stmt.table {
let table_name_str = table_name
.to_string()
.trim_matches('`')
.trim_matches('"')
.to_string();
// Convert params into a Vec so they can be modified
let mut param_vec = params_to_vec(params, tx)?;
// Fetch foreign key information
let fk_info = get_fk_info(tx, &table_name_str)?;
// Remap FK values in the params (if mappings exist)
remap_fk_params(insert_stmt, &mut param_vec, &fk_info, pk_context)?;
// Run the INSERT. query() is used instead of execute(), and the returned rows are
// drained explicitly: rusqlite only steps a prepared statement while its rows are
// consumed, so without draining them the INSERT would never actually run.
let param_refs: Vec<&dyn ToSql> =
param_vec.iter().map(|v| v as &dyn ToSql).collect();
let mut stmt = tx
.prepare(&sql_str)
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: format!("Prepare failed: {}", e),
})?;
let mut rows = stmt
.query(params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: format!("Query execution failed: {}", e),
})?;
while let Some(_row) = rows.next().map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: format!("Row iteration failed: {}", e),
})? {}
}
} else {
// Execute non-INSERT statements as usual
tx.execute(&sql_str, params)
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: None,
reason: e.to_string(),
})?;
}
// Trigger logic for CREATE TABLE
if let Statement::CreateTable(create_table_details) = statement {
let table_name_str = create_table_details.name.to_string();
trigger::setup_triggers_for_table(tx, &table_name_str, false)?;
@ -70,7 +204,193 @@ impl SqlExecutor {
Ok(modified_schema_tables)
}
/// Executes a SQL statement WITH RETURNING (with CRDT and PK remapping).
/// Supports automatic FK remapping when earlier INSERTs triggered ON CONFLICT.
///
/// This variant accepts &[&dyn ToSql] directly (as produced by rusqlite::params![]).
/// Returns: (modified_schema_tables, returning_results)
/// returning_results contains ALL RETURNING columns for INSERT/UPDATE/DELETE with RETURNING.
pub fn query_internal_typed_with_context(
tx: &Transaction,
hlc_service: &HlcService,
sql: &str,
params: &[&dyn ToSql],
pk_context: &mut PkRemappingContext,
) -> Result<(HashSet<String>, Vec<Vec<JsonValue>>), DatabaseError> {
let mut ast_vec = parse_sql_statements(sql)?;
if ast_vec.len() != 1 {
return Err(DatabaseError::ExecutionError {
sql: sql.to_string(),
reason: "query_internal_typed_with_context sollte nur ein einzelnes SQL-Statement erhalten"
.to_string(),
table: None,
});
}
let mut statement = ast_vec.pop().unwrap();
let transformer = CrdtTransformer::new();
let hlc_timestamp =
hlc_service
.new_timestamp_and_persist(tx)
.map_err(|e| DatabaseError::HlcError {
reason: e.to_string(),
})?;
let mut modified_schema_tables = HashSet::new();
if let Some(table_name) = transformer.transform_execute_statement_with_table_info(
&mut statement,
&hlc_timestamp,
tx,
)? {
modified_schema_tables.insert(table_name);
}
let sql_str = statement.to_string();
eprintln!("DEBUG: Transformed SQL (with RETURNING): {}", sql_str);
// Special handling for INSERT statements (with PK remapping + RETURNING)
if let Statement::Insert(ref insert_stmt) = statement {
if let TableObject::TableName(ref table_name) = insert_stmt.table {
let table_name_str = table_name
.to_string()
.trim_matches('`')
.trim_matches('"')
.to_string();
// Convert params into a Vec so they can be modified
let mut param_vec = params_to_vec(params, tx)?;
// Fetch the table schema to identify PKs and FKs
let table_columns =
trigger::get_table_schema(tx, &table_name_str).map_err(|e| {
DatabaseError::ExecutionError {
sql: format!("PRAGMA table_info('{}')", table_name_str),
reason: e.to_string(),
table: Some(table_name_str.clone()),
}
})?;
let pk_columns: Vec<String> = table_columns
.iter()
.filter(|c| c.is_pk)
.map(|c| c.name.clone())
.collect();
// Fetch foreign key information
let fk_info = get_fk_info(tx, &table_name_str)?;
// 1. Extract the original PK values from the params (before FK remapping)
let original_pk =
extract_pk_values_from_params(insert_stmt, &param_vec, &pk_columns)?;
// 2. Remap FK values in the params (if mappings exist)
remap_fk_params(insert_stmt, &mut param_vec, &fk_info, pk_context)?;
// 3. Execute the INSERT via query() so the RETURNING rows can be read
let mut stmt = tx
.prepare(&sql_str)
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: e.to_string(),
})?;
let num_columns = stmt.column_count();
let param_refs: Vec<&dyn ToSql> =
param_vec.iter().map(|v| v as &dyn ToSql).collect();
let mut rows = stmt
.query(params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: e.to_string(),
})?;
let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
// 4. Read ALL RETURNING values and record the PK mapping
while let Some(row) = rows.next().map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: e.to_string(),
})? {
// Extract the PK values for PK remapping
let actual_pk = extract_pk_values_from_row(&row, &pk_columns)?;
pk_context.add_mapping(
table_name_str.clone(),
original_pk.clone(),
actual_pk.clone(),
);
// Extract ALL columns for the RETURNING result
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns {
let value_ref =
row.get_ref(i)
.map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Failed to get column {}: {}", i, e),
})?;
let json_val = convert_value_ref_to_json(value_ref)?;
row_values.push(json_val);
}
result_vec.push(row_values);
}
return Ok((modified_schema_tables, result_vec));
}
}
// For UPDATE/DELETE with RETURNING: use query() (no PK remapping needed)
let mut stmt = tx
.prepare(&sql_str)
.map_err(|e| DatabaseError::PrepareError {
reason: e.to_string(),
})?;
let num_columns = stmt.column_count();
let mut rows = stmt.query(params).map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
while let Some(row) = rows.next().map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Row iteration error: {}", e),
})? {
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns {
let value_ref = row
.get_ref(i)
.map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Failed to get column {}: {}", i, e),
})?;
let json_val = convert_value_ref_to_json(value_ref)?;
row_values.push(json_val);
}
result_vec.push(row_values);
}
Ok((modified_schema_tables, result_vec))
}
/// Legacy method without a PK remapping context
pub fn execute_internal_typed(
tx: &Transaction,
hlc_service: &HlcService,
sql: &str,
params: &[&dyn ToSql],
) -> Result<HashSet<String>, DatabaseError> {
let mut context = PkRemappingContext::new();
Self::execute_internal_typed_with_context(tx, hlc_service, sql, params, &mut context)
}
/// Executes SQL (with CRDT transformation) - WITHOUT permission check.
/// Wrapper around execute_internal_typed for JsonValue parameters.
/// Uses the PK remapping logic for INSERT with ON CONFLICT.
pub fn execute_internal(
tx: &Transaction,
hlc_service: &HlcService,
@ -87,50 +407,18 @@ impl SqlExecutor {
});
}
// SQL parsing
let mut ast_vec = parse_sql_statements(sql)?;
// Convert JsonValue params to SqliteValue
let params_converted: Vec<SqliteValue> = params
.iter()
.map(ValueConverter::json_to_rusqlite_value)
.collect::<Result<Vec<_>, _>>()?;
let transformer = CrdtTransformer::new();
// Convert to &dyn ToSql references
let param_refs: Vec<&dyn ToSql> =
params_converted.iter().map(|v| v as &dyn ToSql).collect();
// Generate HLC timestamp
let hlc_timestamp =
hlc_service
.new_timestamp_and_persist(tx)
.map_err(|e| DatabaseError::HlcError {
reason: e.to_string(),
})?;
// Transform statements
let mut modified_schema_tables = HashSet::new();
for statement in &mut ast_vec {
if let Some(table_name) =
transformer.transform_execute_statement(statement, &hlc_timestamp)?
{
modified_schema_tables.insert(table_name);
}
}
// Convert parameters
let sql_values = ValueConverter::convert_params(params)?;
// Execute statements
for statement in ast_vec {
let sql_str = statement.to_string();
tx.execute(&sql_str, params_from_iter(sql_values.iter()))
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: None,
reason: e.to_string(),
})?;
if let Statement::CreateTable(create_table_details) = statement {
let table_name_str = create_table_details.name.to_string();
trigger::setup_triggers_for_table(tx, &table_name_str, false)?;
}
}
Ok(modified_schema_tables)
// Call execute_internal_typed (with PK remapping!)
Self::execute_internal_typed(tx, hlc_service, sql, &param_refs)
}
/// Executes a SELECT (with CRDT transformation) - WITHOUT permission check
@ -206,4 +494,240 @@ impl SqlExecutor {
Ok(results)
}
/// Executes SQL with CRDT transformation and returns the RETURNING results.
/// Specifically for INSERT/UPDATE/DELETE with RETURNING (Drizzle integration).
/// Uses PK remapping for INSERT operations.
pub fn query_internal(
tx: &Transaction,
hlc_service: &HlcService,
sql: &str,
params: &[JsonValue],
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
// Parameter validation
let total_placeholders = sql.matches('?').count();
if total_placeholders != params.len() {
return Err(DatabaseError::ParameterMismatchError {
expected: total_placeholders,
provided: params.len(),
sql: sql.to_string(),
});
}
// Convert parameters
let params_converted: Vec<SqliteValue> = params
.iter()
.map(ValueConverter::json_to_rusqlite_value)
.collect::<Result<Vec<_>, _>>()?;
// Convert to &dyn ToSql references
let param_refs: Vec<&dyn ToSql> =
params_converted.iter().map(|v| v as &dyn ToSql).collect();
// Call query_internal_typed_with_context (with PK remapping!)
let mut context = PkRemappingContext::new();
let (_tables, results) = Self::query_internal_typed_with_context(
tx,
hlc_service,
sql,
&param_refs,
&mut context,
)?;
Ok(results)
}
}
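// Illustrative sketch of how the two *_with_context variants are meant to be combined
// (not part of the executor API; table and column names follow the workspace schema used
// elsewhere in this commit, and the concrete SQL and values are placeholders):
#[allow(dead_code)]
fn example_insert_with_fk_remapping(
    tx: &Transaction,
    hlc_service: &HlcService,
) -> Result<(), DatabaseError> {
    let mut ctx = PkRemappingContext::new();

    // 1. INSERT that may hit the UNIQUE(name) constraint. ON CONFLICT resolves to the
    //    existing row, and RETURNING id reveals which primary key actually ended up in the DB.
    let (_tables, _rows) = SqlExecutor::query_internal_typed_with_context(
        tx,
        hlc_service,
        "INSERT INTO haex_workspaces (id, name, position, created_at) VALUES (?, ?, ?, ?) \
         ON CONFLICT(name) DO UPDATE SET position = excluded.position RETURNING id",
        rusqlite::params!["uuid-new", "Default", 0i64, 0i64],
        &mut ctx,
    )?;

    // 2. Dependent INSERT: if step 1 resolved to a different id, the workspace_id
    //    parameter is rewritten from the shared context before execution.
    SqlExecutor::execute_internal_typed_with_context(
        tx,
        hlc_service,
        "INSERT INTO haex_desktop_items (id, workspace_id, item_type, reference_id) \
         VALUES (?, ?, ?, ?)",
        rusqlite::params!["uuid-item", "uuid-new", "extension", "ext-1"],
        &mut ctx,
    )?;

    Ok(())
}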
// =========================
// Helper functions for FK remapping
// =========================
/// Structures FK information for easy lookups
#[derive(Debug)]
struct FkInfo {
/// column_name -> (referenced_table, referenced_column)
mappings: HashMap<String, (String, String)>,
}
/// Fetches the foreign key information for a table.
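/// For example, for a column declared as `workspace_id TEXT REFERENCES haex_workspaces(id)`,
/// `PRAGMA foreign_key_list` reports from = "workspace_id", table = "haex_workspaces",
/// to = "id", which ends up as mappings["workspace_id"] = ("haex_workspaces", "id").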
fn get_fk_info(tx: &Transaction, table_name: &str) -> Result<FkInfo, DatabaseError> {
// Use PRAGMA foreign_key_list to fetch the FK relationships
let sql = format!("PRAGMA foreign_key_list('{}');", table_name);
let mut stmt = tx
.prepare(&sql)
.map_err(|e| DatabaseError::ExecutionError {
sql: sql.clone(),
reason: e.to_string(),
table: Some(table_name.to_string()),
})?;
let mut mappings = HashMap::new();
let rows = stmt
.query_map([], |row| {
Ok((
row.get::<_, String>("from")?, // FK column in this table
row.get::<_, String>("table")?, // referenced table
row.get::<_, String>("to")?, // referenced column
))
})
.map_err(|e| DatabaseError::ExecutionError {
sql,
reason: e.to_string(),
table: Some(table_name.to_string()),
})?;
for row in rows {
let (from_col, ref_table, ref_col) = row.map_err(|e| DatabaseError::ExecutionError {
sql: format!("PRAGMA foreign_key_list('{}')", table_name),
reason: e.to_string(),
table: Some(table_name.to_string()),
})?;
mappings.insert(from_col, (ref_table, ref_col));
}
Ok(FkInfo { mappings })
}
/// Konvertiert &[&dyn ToSql] zu Vec<SqliteValue> für Manipulation
/// Nutzt einen Dummy-Query um die Parameter-Werte zu extrahieren
fn params_to_vec(
params: &[&dyn ToSql],
tx: &Transaction,
) -> Result<Vec<SqliteValue>, DatabaseError> {
let mut values = Vec::new();
// Build a dummy query with exactly as many placeholders as we have parameters,
// e.g. "SELECT ?, ?, ?"
if params.is_empty() {
return Ok(values);
}
let placeholders = vec!["?"; params.len()].join(", ");
let dummy_sql = format!("SELECT {}", placeholders);
let mut stmt = tx
.prepare(&dummy_sql)
.map_err(|e| DatabaseError::ExecutionError {
sql: dummy_sql.clone(),
reason: format!("Failed to prepare dummy query: {}", e),
table: None,
})?;
// Run the query and extract the values from the row
let mut rows = stmt
.query(params)
.map_err(|e| DatabaseError::ExecutionError {
sql: dummy_sql.clone(),
reason: format!("Failed to execute dummy query: {}", e),
table: None,
})?;
if let Some(row) = rows.next().map_err(|e| DatabaseError::ExecutionError {
sql: dummy_sql,
reason: format!("Failed to read dummy query result: {}", e),
table: None,
})? {
// Extract all column values
for i in 0..params.len() {
let value: SqliteValue = row.get(i).map_err(|e| DatabaseError::ExecutionError {
sql: format!("SELECT ..."),
reason: format!("Failed to extract value at index {}: {}", i, e),
table: None,
})?;
values.push(value);
}
}
Ok(values)
}
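// Minimal sketch of the round trip above (an in-memory database stands in for the real
// vault connection; the parameter values are placeholders):
#[cfg(test)]
mod params_to_vec_tests {
    use super::*;
    use rusqlite::Connection;

    #[test]
    fn round_trips_params_through_the_dummy_select() {
        let mut conn = Connection::open_in_memory().unwrap();
        let tx = conn.transaction().unwrap();

        let values = params_to_vec(rusqlite::params!["uuid-a", 42i64], &tx).unwrap();

        assert_eq!(values[0], SqliteValue::Text("uuid-a".to_string()));
        assert_eq!(values[1], SqliteValue::Integer(42));
    }
}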
/// Extracts the PK values from the INSERT parameters
fn extract_pk_values_from_params(
insert_stmt: &Insert,
params: &[SqliteValue],
pk_columns: &[String],
) -> Result<PkValues, DatabaseError> {
let mut pk_values = PkValues::new();
// Find the positions of the PK columns in the INSERT column list
for pk_col in pk_columns {
if let Some(pos) = insert_stmt.columns.iter().position(|c| &c.value == pk_col) {
// Get the parameter value at this position
if pos < params.len() {
// Convert the SqliteValue to a String
let value_str = value_to_string(&params[pos]);
pk_values.insert(pk_col.clone(), value_str);
}
}
}
Ok(pk_values)
}
/// Remaps FK values in the parameters based on the PK remapping context
fn remap_fk_params(
insert_stmt: &Insert,
params: &mut Vec<SqliteValue>,
fk_info: &FkInfo,
pk_context: &PkRemappingContext,
) -> Result<(), DatabaseError> {
// For every FK column: check whether remapping is needed
for (col_name, (ref_table, ref_col)) in &fk_info.mappings {
// Find the position of the FK column in the INSERT column list
if let Some(pos) = insert_stmt
.columns
.iter()
.position(|c| &c.value == col_name)
{
if pos < params.len() {
// Get the current FK value (as a String)
let current_value = value_to_string(&params[pos]);
// Try to remap it
let new_value = pk_context.remap_fk_value(ref_table, ref_col, &current_value);
if new_value != current_value {
// Replace the parameter value
params[pos] = SqliteValue::Text(new_value);
eprintln!(
"DEBUG: Remapped FK {}={} to {:?}",
col_name, current_value, params[pos]
);
}
}
}
}
Ok(())
}
/// Helper: converts a SqliteValue to a String for comparisons
fn value_to_string(value: &SqliteValue) -> String {
match value {
SqliteValue::Null => "NULL".to_string(),
SqliteValue::Integer(i) => i.to_string(),
SqliteValue::Real(r) => r.to_string(),
SqliteValue::Text(s) => s.clone(),
SqliteValue::Blob(b) => format!("BLOB({} bytes)", b.len()),
}
}
/// Extracts the PK values from a RETURNING row
fn extract_pk_values_from_row(
row: &rusqlite::Row,
pk_columns: &[String],
) -> Result<PkValues, DatabaseError> {
let mut pk_values = PkValues::new();
for (idx, pk_col) in pk_columns.iter().enumerate() {
// RETURNING yields the PKs in the order they appear in the RETURNING clause
let value: String = row.get(idx).map_err(|e| DatabaseError::ExecutionError {
sql: "RETURNING clause".to_string(),
reason: format!("Failed to extract PK column '{}': {}", pk_col, e),
table: None,
})?;
pk_values.insert(pk_col.clone(), value);
}
Ok(pk_values)
}
View File
@ -71,6 +71,8 @@ pub fn run() {
database::list_vaults,
database::open_encrypted_database,
database::sql_execute,
database::sql_execute_with_crdt,
database::sql_query_with_crdt,
database::sql_select,
database::vault_exists,
extension::database::extension_sql_execute,