refactored rust sql and drizzle

This commit is contained in:
2025-10-22 15:05:36 +02:00
parent 9ea057e943
commit f70e924cc3
27 changed files with 3968 additions and 2747 deletions

View File

@ -23,46 +23,47 @@
"@nuxt/icon": "2.0.0",
"@nuxt/ui": "4.0.0",
"@nuxtjs/i18n": "10.0.6",
"@pinia/nuxt": "^0.11.1",
"@tailwindcss/vite": "^4.1.10",
"@tauri-apps/api": "^2.5.0",
"@tauri-apps/plugin-dialog": "^2.2.2",
"@tauri-apps/plugin-fs": "^2.3.0",
"@pinia/nuxt": "^0.11.2",
"@tailwindcss/vite": "^4.1.15",
"@tauri-apps/api": "^2.9.0",
"@tauri-apps/plugin-dialog": "^2.4.0",
"@tauri-apps/plugin-fs": "^2.4.2",
"@tauri-apps/plugin-http": "2.5.2",
"@tauri-apps/plugin-notification": "2.3.1",
"@tauri-apps/plugin-opener": "^2.3.0",
"@tauri-apps/plugin-os": "^2.2.2",
"@tauri-apps/plugin-opener": "^2.5.0",
"@tauri-apps/plugin-os": "^2.3.1",
"@tauri-apps/plugin-sql": "2.3.0",
"@tauri-apps/plugin-store": "^2.2.1",
"@tauri-apps/plugin-store": "^2.4.0",
"@vueuse/components": "^13.9.0",
"@vueuse/core": "^13.4.0",
"@vueuse/core": "^13.9.0",
"@vueuse/gesture": "^2.0.0",
"@vueuse/nuxt": "^13.4.0",
"drizzle-orm": "^0.44.2",
"eslint": "^9.34.0",
"@vueuse/nuxt": "^13.9.0",
"drizzle-orm": "^0.44.6",
"eslint": "^9.38.0",
"fuse.js": "^7.1.0",
"nuxt": "^4.0.3",
"nuxt-zod-i18n": "^1.12.0",
"swiper": "^12.0.2",
"tailwindcss": "^4.1.10",
"vue": "^3.5.20",
"vue-router": "^4.5.1",
"zod": "4.1.5"
"nuxt": "^4.1.3",
"nuxt-zod-i18n": "^1.12.1",
"swiper": "^12.0.3",
"tailwindcss": "^4.1.15",
"vue": "^3.5.22",
"vue-router": "^4.6.3",
"zod": "^3.25.76"
},
"devDependencies": {
"@iconify/json": "^2.2.351",
"@iconify-json/hugeicons": "^1.2.17",
"@iconify/json": "^2.2.398",
"@iconify/tailwind4": "^1.0.6",
"@libsql/client": "^0.15.15",
"@tauri-apps/cli": "^2.5.0",
"@types/node": "^24.6.2",
"@tauri-apps/cli": "^2.9.0",
"@types/node": "^24.9.1",
"@vitejs/plugin-vue": "6.0.1",
"@vue/compiler-sfc": "^3.5.17",
"drizzle-kit": "^0.31.2",
"globals": "^16.2.0",
"@vue/compiler-sfc": "^3.5.22",
"drizzle-kit": "^0.31.5",
"globals": "^16.4.0",
"prettier": "3.6.2",
"tsx": "^4.20.6",
"tw-animate-css": "^1.3.8",
"typescript": "^5.8.3",
"tw-animate-css": "^1.4.0",
"typescript": "^5.9.3",
"vite": "7.1.3",
"vue-tsc": "3.0.6"
},

pnpm-lock.yaml (generated): 3905 changed lines

File diff suppressed because it is too large

View File

@ -0,0 +1,3 @@
DROP INDEX `haex_workspaces_name_unique`;--> statement-breakpoint
CREATE UNIQUE INDEX `haex_workspaces_position_unique` ON `haex_workspaces` (`position`);--> statement-breakpoint
CREATE UNIQUE INDEX `haex_settings_key_type_value_unique` ON `haex_settings` (`key`,`type`,`value`);

File diff suppressed because it is too large

View File

@ -43,6 +43,13 @@
"when": 1760964548034,
"tag": "0005_tidy_yellowjacket",
"breakpoints": true
},
{
"idx": 6,
"version": "6",
"when": 1761137108127,
"tag": "0006_gigantic_bloodaxe",
"breakpoints": true
}
]
}

View File

@ -21,18 +21,22 @@ export const withCrdtColumns = <
haexTimestamp: text(columnNames.haexTimestamp),
})
export const haexSettings = sqliteTable(tableNames.haex.settings.name, {
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
key: text(),
type: text(),
value: text(),
haexTombstone: integer(tableNames.haex.settings.columns.haexTombstone, {
mode: 'boolean',
}),
haexTimestamp: text(tableNames.haex.settings.columns.haexTimestamp),
})
export const haexSettings = sqliteTable(
tableNames.haex.settings.name,
{
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
key: text(),
type: text(),
value: text(),
haexTombstone: integer(tableNames.haex.settings.columns.haexTombstone, {
mode: 'boolean',
}),
haexTimestamp: text(tableNames.haex.settings.columns.haexTimestamp),
},
(table) => [unique().on(table.key, table.type, table.value)],
)
export type InsertHaexSettings = typeof haexSettings.$inferInsert
export type SelectHaexSettings = typeof haexSettings.$inferSelect
@ -153,7 +157,7 @@ export const haexWorkspaces = sqliteTable(
},
tableNames.haex.workspaces.columns,
),
(table) => [unique().on(table.name)],
(table) => [unique().on(table.position)],
)
export type InsertHaexWorkspaces = typeof haexWorkspaces.$inferInsert
export type SelectHaexWorkspaces = typeof haexWorkspaces.$inferSelect

Binary file not shown.

View File

@ -4,8 +4,8 @@
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use sqlparser::ast::{
Assignment, AssignmentTarget, BinaryOperator, Expr, Ident, Insert, ObjectNamePart,
OnConflict, OnConflictAction, OnInsert, SelectItem, SetExpr, Value,
Assignment, AssignmentTarget, BinaryOperator, Expr, Ident, Insert, ObjectNamePart, OnConflict,
OnConflictAction, OnInsert, SelectItem, SetExpr, Value,
};
use uhlc::Timestamp;
@ -23,6 +23,37 @@ impl InsertTransformer {
}
}
fn find_or_add_column(columns: &mut Vec<Ident>, col_name: &'static str) -> usize {
match columns.iter().position(|c| c.value == col_name) {
Some(index) => index, // Found: return its index.
None => {
// Not found: append the column.
columns.push(Ident::new(col_name));
columns.len() - 1 // index of the element that was just pushed
}
}
}
/// If `index` equals the row length, the value is pushed instead of replaced.
fn set_or_push_value(row: &mut Vec<Expr>, index: usize, value: Expr) {
if index < row.len() {
// Column was already present: replace its value (likely `?` or NULL)
row[index] = value;
} else {
// Column was not present: append the value
row.push(value);
}
}
fn set_or_push_projection(projection: &mut Vec<SelectItem>, index: usize, value: Expr) {
let item = SelectItem::UnnamedExpr(value);
if index < projection.len() {
projection[index] = item;
} else {
projection.push(item);
}
}
/// Transforms INSERT statements (adds the HLC timestamp and handles tombstone conflicts)
/// Automatically adds RETURNING for the primary keys so the executor knows the actual PK values
pub fn transform_insert(
@ -32,11 +63,11 @@ impl InsertTransformer {
primary_keys: &[String],
foreign_keys: &[String],
) -> Result<(), DatabaseError> {
// Add both haex_timestamp and haex_tombstone columns
insert_stmt
.columns
.push(Ident::new(self.hlc_timestamp_column));
insert_stmt.columns.push(Ident::new(self.tombstone_column));
// Add the haex_timestamp and haex_tombstone columns if they are not already present
let hlc_col_index =
Self::find_or_add_column(&mut insert_stmt.columns, self.hlc_timestamp_column);
let tombstone_col_index =
Self::find_or_add_column(&mut insert_stmt.columns, self.tombstone_column);
// Add RETURNING for all primary keys (if not already present)
// This lets us know the actual PK values after ON CONFLICT
@ -57,7 +88,7 @@ impl InsertTransformer {
// Build UPDATE assignments for all columns except CRDT columns, primary keys, and foreign keys
let mut assignments = Vec::new();
for column in insert_stmt.columns.iter() {
for column in insert_stmt.columns.clone().iter() {
let col_name = &column.value;
// Skip CRDT columns
@ -87,17 +118,17 @@ impl InsertTransformer {
// Add the HLC timestamp update (using the passed-in timestamp)
assignments.push(Assignment {
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![ObjectNamePart::Identifier(
Ident::new(self.hlc_timestamp_column),
)])),
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![
ObjectNamePart::Identifier(Ident::new(self.hlc_timestamp_column)),
])),
value: Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into()),
});
// Set the tombstone to 0 (reactivate the entry)
assignments.push(Assignment {
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![ObjectNamePart::Identifier(
Ident::new(self.tombstone_column),
)])),
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![
ObjectNamePart::Identifier(Ident::new(self.tombstone_column)),
])),
value: Expr::Value(Value::Number("0".to_string(), false).into()),
});
@ -122,23 +153,26 @@ impl InsertTransformer {
Some(query) => match &mut *query.body {
SetExpr::Values(values) => {
for row in &mut values.rows {
// Add haex_timestamp value
row.push(Expr::Value(
Value::SingleQuotedString(timestamp.to_string()).into(),
));
// Add haex_tombstone value (0 = not deleted)
row.push(Expr::Value(Value::Number("0".to_string(), false).into()));
let hlc_value =
Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
let tombstone_value =
Expr::Value(Value::Number("0".to_string(), false).into());
Self::set_or_push_value(row, hlc_col_index, hlc_value);
Self::set_or_push_value(row, tombstone_col_index, tombstone_value);
}
}
SetExpr::Select(select) => {
let hlc_expr =
let hlc_value =
Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
select.projection.push(SelectItem::UnnamedExpr(hlc_expr));
// Add haex_tombstone value (0 = not deleted)
let tombstone_expr = Expr::Value(Value::Number("0".to_string(), false).into());
select
.projection
.push(SelectItem::UnnamedExpr(tombstone_expr));
let tombstone_value = Expr::Value(Value::Number("0".to_string(), false).into());
Self::set_or_push_projection(&mut select.projection, hlc_col_index, hlc_value);
Self::set_or_push_projection(
&mut select.projection,
tombstone_col_index,
tombstone_value,
);
}
_ => {
return Err(DatabaseError::UnsupportedStatement {
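The two alignment helpers introduced above (find_or_add_column and set_or_push_value) are the heart of this change: the CRDT columns are only appended when they are missing from the statement, and each VALUES row is updated at the matching index so columns and values stay positionally in sync. A minimal standalone sketch of that contract, with plain strings and a simplified stand-in for sqlparser's Ident (hypothetical values):

// Simplified stand-in for sqlparser::ast::Ident, just for this sketch.
struct Col { value: String }
impl Col { fn new(s: &str) -> Self { Self { value: s.into() } } }

// Returns the index of an existing column, or appends the column and returns its new index.
fn find_or_add_column(columns: &mut Vec<Col>, col_name: &str) -> usize {
    match columns.iter().position(|c| c.value == col_name) {
        Some(index) => index,
        None => { columns.push(Col::new(col_name)); columns.len() - 1 }
    }
}

// Replaces the value at `index`, or pushes it when the column was just appended.
fn set_or_push_value(row: &mut Vec<String>, index: usize, value: String) {
    if index < row.len() { row[index] = value; } else { row.push(value); }
}

fn main() {
    // INSERT INTO t (id, haex_timestamp) VALUES (?, NULL): the timestamp column already exists.
    let mut columns = vec![Col::new("id"), Col::new("haex_timestamp")];
    let mut row = vec!["?".to_string(), "NULL".to_string()];

    // Existing column: index 1 is reused and its placeholder is replaced in place.
    let ts_idx = find_or_add_column(&mut columns, "haex_timestamp");
    set_or_push_value(&mut row, ts_idx, "'hlc-ts'".to_string());

    // Missing column: it is appended and its value is pushed, so positions stay aligned.
    let tomb_idx = find_or_add_column(&mut columns, "haex_tombstone");
    set_or_push_value(&mut row, tomb_idx, "0".to_string());

    assert_eq!(columns.len(), row.len());
    assert_eq!(row, vec!["?".to_string(), "'hlc-ts'".to_string(), "0".to_string()]);
}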

View File

@ -1,5 +1,5 @@
pub mod hlc;
pub mod insert_transformer;
pub mod query_transformer;
//pub mod query_transformer;
pub mod transformer;
pub mod trigger;

View File

@ -1,515 +0,0 @@
// src-tauri/src/crdt/query_transformer.rs
// SELECT-specific CRDT transformations (tombstone filtering)
use crate::crdt::trigger::{TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use sqlparser::ast::{
BinaryOperator, Expr, Ident, ObjectName, SelectItem, SetExpr, TableFactor, Value,
};
use std::collections::HashSet;
/// Helper struct for SELECT transformations
pub struct QueryTransformer {
tombstone_column: &'static str,
}
impl QueryTransformer {
pub fn new() -> Self {
Self {
tombstone_column: TOMBSTONE_COLUMN,
}
}
/// Transforms query statements (adds tombstone filters)
pub fn transform_query_recursive(
&self,
query: &mut sqlparser::ast::Query,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)
}
/// Recursively handles all SetExpr variants with full subquery support
fn add_tombstone_filters_recursive(
&self,
set_expr: &mut SetExpr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
self.add_tombstone_filters_to_select(select, excluded_tables)?;
// Transformiere auch Subqueries in Projektionen
for projection in &mut select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
_ => {} // Wildcard projections ignorieren
}
}
// Transformiere Subqueries in WHERE
if let Some(where_clause) = &mut select.selection {
self.transform_expression_subqueries(where_clause, excluded_tables)?;
}
// Transformiere Subqueries in GROUP BY
match &mut select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - keine Expressions zu transformieren
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.transform_expression_subqueries(group_expr, excluded_tables)?;
}
}
}
// Transformiere Subqueries in HAVING
if let Some(having) = &mut select.having {
self.transform_expression_subqueries(having, excluded_tables)?;
}
}
SetExpr::SetOperation { left, right, .. } => {
self.add_tombstone_filters_recursive(left, excluded_tables)?;
self.add_tombstone_filters_recursive(right, excluded_tables)?;
}
SetExpr::Query(query) => {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)?;
}
SetExpr::Values(values) => {
// Transformiere auch Subqueries in Values-Listen
for row in &mut values.rows {
for expr in row {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {} // Andere Fälle
}
Ok(())
}
/// Transforms subqueries inside expressions
fn transform_expression_subqueries(
&self,
expr: &mut Expr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match expr {
// Einfache Subqueries
Expr::Subquery(query) => {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)?;
}
// EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.add_tombstone_filters_recursive(&mut subquery.body, excluded_tables)?;
}
// IN Subqueries
Expr::InSubquery {
expr: left_expr,
subquery,
..
} => {
self.transform_expression_subqueries(left_expr, excluded_tables)?;
self.add_tombstone_filters_recursive(&mut subquery.body, excluded_tables)?;
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Binäre Operationen
Expr::BinaryOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Unäre Operationen
Expr::UnaryOp {
expr: inner_expr, ..
} => {
self.transform_expression_subqueries(inner_expr, excluded_tables)?;
}
// Verschachtelte Ausdrücke
Expr::Nested(nested) => {
self.transform_expression_subqueries(nested, excluded_tables)?;
}
// CASE-Ausdrücke
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.transform_expression_subqueries(op, excluded_tables)?;
}
for case_when in conditions {
self.transform_expression_subqueries(&mut case_when.condition, excluded_tables)?;
self.transform_expression_subqueries(&mut case_when.result, excluded_tables)?;
}
if let Some(else_res) = else_result {
self.transform_expression_subqueries(else_res, excluded_tables)?;
}
}
// Funktionsaufrufe
Expr::Function(func) => match &mut func.args {
sqlparser::ast::FunctionArguments::List(sqlparser::ast::FunctionArgumentList {
args,
..
}) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {}
},
// BETWEEN
Expr::Between {
expr: main_expr,
low,
high,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
self.transform_expression_subqueries(low, excluded_tables)?;
self.transform_expression_subqueries(high, excluded_tables)?;
}
// IN Liste
Expr::InList {
expr: main_expr,
list,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
for list_expr in list {
self.transform_expression_subqueries(list_expr, excluded_tables)?;
}
}
// IS NULL/IS NOT NULL
Expr::IsNull(inner) | Expr::IsNotNull(inner) => {
self.transform_expression_subqueries(inner, excluded_tables)?;
}
// Andere Expression-Typen benötigen keine Transformation
_ => {}
}
Ok(())
}
/// Creates a tombstone filter for a table
pub fn create_tombstone_filter(&self, table_alias: Option<&str>) -> Expr {
let column_expr = match table_alias {
Some(alias) => {
Expr::CompoundIdentifier(vec![Ident::new(alias), Ident::new(self.tombstone_column)])
}
None => {
Expr::Identifier(Ident::new(self.tombstone_column))
}
};
Expr::BinaryOp {
left: Box::new(column_expr),
op: BinaryOperator::NotEq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
}
}
/// Normalizes table names (strips quote characters)
pub fn normalize_table_name(&self, name: &ObjectName) -> String {
let name_str = name.to_string().to_lowercase();
name_str.trim_matches('`').trim_matches('"').to_string()
}
/// Adds tombstone filters to SELECT statements
pub fn add_tombstone_filters_to_select(
&self,
select: &mut sqlparser::ast::Select,
excluded_tables: &HashSet<&str>,
) -> Result<(), DatabaseError> {
// Sammle alle CRDT-Tabellen mit ihren Aliasen
let mut crdt_tables = Vec::new();
for twj in &select.from {
if let TableFactor::Table { name, alias, .. } = &twj.relation {
let table_name_str = self.normalize_table_name(name);
if !excluded_tables.contains(table_name_str.as_str()) {
let table_alias = alias.as_ref().map(|a| a.name.value.as_str());
crdt_tables.push((name.clone(), table_alias));
}
}
}
if crdt_tables.is_empty() {
return Ok(());
}
// Prüfe, welche Tombstone-Spalten bereits in der WHERE-Klausel referenziert werden
let explicitly_filtered_tables = if let Some(where_clause) = &select.selection {
self.find_explicitly_filtered_tombstone_tables(where_clause, &crdt_tables)
} else {
HashSet::new()
};
// Erstelle Filter nur für Tabellen, die noch nicht explizit gefiltert werden
let mut tombstone_filters = Vec::new();
for (table_name, table_alias) in crdt_tables {
let table_name_string = table_name.to_string();
let table_key = table_alias.unwrap_or(&table_name_string);
if !explicitly_filtered_tables.contains(table_key) {
tombstone_filters.push(self.create_tombstone_filter(table_alias));
}
}
// Füge die automatischen Filter hinzu
if !tombstone_filters.is_empty() {
let combined_filter = tombstone_filters
.into_iter()
.reduce(|acc, expr| Expr::BinaryOp {
left: Box::new(acc),
op: BinaryOperator::And,
right: Box::new(expr),
})
.unwrap();
match &mut select.selection {
Some(existing) => {
*existing = Expr::BinaryOp {
left: Box::new(existing.clone()),
op: BinaryOperator::And,
right: Box::new(combined_filter),
};
}
None => {
select.selection = Some(combined_filter);
}
}
}
Ok(())
}
/// Finds all tables that already have an explicit tombstone filter in the WHERE clause
fn find_explicitly_filtered_tombstone_tables(
&self,
where_expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
) -> HashSet<String> {
let mut filtered_tables = HashSet::new();
self.scan_expression_for_tombstone_references(
where_expr,
crdt_tables,
&mut filtered_tables,
);
filtered_tables
}
/// Recursively scans an expression tree for tombstone column references
fn scan_expression_for_tombstone_references(
&self,
expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) {
match expr {
Expr::Identifier(ident) => {
if ident.value == self.tombstone_column && crdt_tables.len() == 1 {
let table_name_str = crdt_tables[0].0.to_string();
let table_key = crdt_tables[0].1.unwrap_or(&table_name_str);
filtered_tables.insert(table_key.to_string());
}
}
Expr::CompoundIdentifier(idents) => {
if idents.len() == 2 && idents[1].value == self.tombstone_column {
let table_ref = &idents[0].value;
for (table_name, alias) in crdt_tables {
let table_name_str = table_name.to_string();
if table_ref == &table_name_str || alias.map_or(false, |a| a == table_ref) {
filtered_tables.insert(table_ref.clone());
break;
}
}
}
}
Expr::BinaryOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
Expr::UnaryOp { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Nested(nested) => {
self.scan_expression_for_tombstone_references(nested, crdt_tables, filtered_tables);
}
Expr::InList { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Between { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::IsNull(expr) | Expr::IsNotNull(expr) => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Function(func) => {
if let sqlparser::ast::FunctionArguments::List(
sqlparser::ast::FunctionArgumentList { args, .. },
) = &func.args
{
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
}
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.scan_expression_for_tombstone_references(op, crdt_tables, filtered_tables);
}
for case_when in conditions {
self.scan_expression_for_tombstone_references(
&case_when.condition,
crdt_tables,
filtered_tables,
);
self.scan_expression_for_tombstone_references(
&case_when.result,
crdt_tables,
filtered_tables,
);
}
if let Some(else_res) = else_result {
self.scan_expression_for_tombstone_references(
else_res,
crdt_tables,
filtered_tables,
);
}
}
Expr::Subquery(query) => {
self.analyze_query_for_tombstone_references(query, crdt_tables, filtered_tables)
.ok();
}
Expr::Exists { subquery, .. } => {
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::InSubquery { expr, subquery, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
_ => {}
}
}
fn analyze_query_for_tombstone_references(
&self,
query: &sqlparser::ast::Query,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
self.analyze_set_expr_for_tombstone_references(&query.body, crdt_tables, filtered_tables)
}
fn analyze_set_expr_for_tombstone_references(
&self,
set_expr: &SetExpr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
if let Some(where_clause) = &select.selection {
self.scan_expression_for_tombstone_references(
where_clause,
crdt_tables,
filtered_tables,
);
}
for projection in &select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
_ => {}
}
}
match &select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.scan_expression_for_tombstone_references(
group_expr,
crdt_tables,
filtered_tables,
);
}
}
}
if let Some(having) = &select.having {
self.scan_expression_for_tombstone_references(
having,
crdt_tables,
filtered_tables,
);
}
}
SetExpr::SetOperation { left, right, .. } => {
self.analyze_set_expr_for_tombstone_references(left, crdt_tables, filtered_tables)?;
self.analyze_set_expr_for_tombstone_references(
right,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Query(query) => {
self.analyze_set_expr_for_tombstone_references(
&query.body,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Values(values) => {
for row in &values.rows {
for expr in row {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {}
}
Ok(())
}
}

View File

@ -1,12 +1,12 @@
// src-tauri/src/crdt/transformer.rs
use crate::crdt::insert_transformer::InsertTransformer;
use crate::crdt::query_transformer::QueryTransformer;
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use crate::table_names::{TABLE_CRDT_CONFIGS, TABLE_CRDT_LOGS};
use sqlparser::ast::{
Assignment, AssignmentTarget, BinaryOperator, ColumnDef, DataType, Expr, Ident,
ObjectName, ObjectNamePart, Statement, TableFactor, TableObject,
Value,
Assignment, AssignmentTarget, BinaryOperator, ColumnDef, DataType, Expr, Ident, ObjectName,
ObjectNamePart, SelectItem, SetExpr, Statement, TableFactor, TableObject, Value,
};
use std::borrow::Cow;
use std::collections::HashSet;
@ -38,11 +38,7 @@ impl CrdtColumns {
}
};
Expr::BinaryOp {
left: Box::new(column_expr),
op: BinaryOperator::NotEq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
}
Expr::IsNotTrue(Box::new(column_expr))
}
/// Creates a tombstone assignment for UPDATE/DELETE
@ -113,11 +109,15 @@ impl CrdtTransformer {
Cow::Owned(name_str.trim_matches('`').trim_matches('"').to_string())
}
// =================================================================
// PUBLIC API METHODS
// =================================================================
pub fn transform_select_statement(&self, stmt: &mut Statement) -> Result<(), DatabaseError> {
match stmt {
Statement::Query(query) => {
let query_transformer = QueryTransformer::new();
query_transformer.transform_query_recursive(query, &self.excluded_tables)
// Now delegates to the private method on this struct
self.transform_select_query_recursive(&mut query.body, &self.excluded_tables)
}
// Catch all other cases and return an error
_ => Err(DatabaseError::UnsupportedStatement {
@ -165,15 +165,21 @@ impl CrdtTransformer {
.map(|c| c.name.clone())
.collect();
let foreign_keys = crate::crdt::trigger::get_foreign_key_columns(tx, &table_name_str)
.map_err(|e| DatabaseError::ExecutionError {
sql: format!("PRAGMA foreign_key_list('{}')", table_name_str),
reason: e.to_string(),
table: Some(table_name_str.to_string()),
})?;
let foreign_keys =
crate::crdt::trigger::get_foreign_key_columns(tx, &table_name_str)
.map_err(|e| DatabaseError::ExecutionError {
sql: format!("PRAGMA foreign_key_list('{}')", table_name_str),
reason: e.to_string(),
table: Some(table_name_str.to_string()),
})?;
let insert_transformer = InsertTransformer::new();
insert_transformer.transform_insert(insert_stmt, hlc_timestamp, &primary_keys, &foreign_keys)?;
insert_transformer.transform_insert(
insert_stmt,
hlc_timestamp,
&primary_keys,
&foreign_keys,
)?;
}
}
Ok(None)
@ -195,7 +201,10 @@ impl CrdtTransformer {
eprintln!("DEBUG DELETE (with_table_info): table='{}', is_crdt_sync={}, normalized='{}'",
table_name, is_crdt, table_name_str);
if is_crdt {
eprintln!("DEBUG: Transforming DELETE to UPDATE for table '{}'", table_name_str);
eprintln!(
"DEBUG: Transforming DELETE to UPDATE for table '{}'",
table_name_str
);
self.transform_delete_to_update(stmt, hlc_timestamp)?;
}
Ok(None)
@ -242,7 +251,12 @@ impl CrdtTransformer {
if self.is_crdt_sync_table(name) {
// Without a connection: empty PK and FK lists (all columns are updated)
let insert_transformer = InsertTransformer::new();
insert_transformer.transform_insert(insert_stmt, hlc_timestamp, &[], &[])?;
insert_transformer.transform_insert(
insert_stmt,
hlc_timestamp,
&[],
&[],
)?;
}
}
Ok(None)
@ -281,6 +295,9 @@ impl CrdtTransformer {
}
}
// =================================================================
// PRIVATE HELPERS (DELETE/UPDATE)
// =================================================================
/// Transforms DELETE into UPDATE (soft delete)
fn transform_delete_to_update(
@ -341,4 +358,475 @@ impl CrdtTransformer {
None
}
}
// =================================================================
// PRIVATE HELPERS (SELECT TRANSFORMATION)
// (these methods were moved over from the old `query_transformer.rs`)
// =================================================================
/// Recursively handles all SetExpr variants with full subquery support
fn transform_select_query_recursive(
&self,
set_expr: &mut SetExpr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
self.add_tombstone_filters_to_select(select, excluded_tables)?;
// Also transform subqueries in projections
for projection in &mut select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
_ => {} // ignore wildcard projections
}
}
// Transform subqueries in WHERE
if let Some(where_clause) = &mut select.selection {
self.transform_expression_subqueries(where_clause, excluded_tables)?;
}
// Transform subqueries in GROUP BY
match &mut select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - no expressions to transform
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.transform_expression_subqueries(group_expr, excluded_tables)?;
}
}
}
// Transform subqueries in HAVING
if let Some(having) = &mut select.having {
self.transform_expression_subqueries(having, excluded_tables)?;
}
}
SetExpr::SetOperation { left, right, .. } => {
self.transform_select_query_recursive(left, excluded_tables)?;
self.transform_select_query_recursive(right, excluded_tables)?;
}
SetExpr::Query(query) => {
self.transform_select_query_recursive(&mut query.body, excluded_tables)?;
}
SetExpr::Values(values) => {
// Also transform subqueries in VALUES lists
for row in &mut values.rows {
for expr in row {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {} // other cases
}
Ok(())
}
/// Transforms subqueries inside expressions
fn transform_expression_subqueries(
&self,
expr: &mut Expr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match expr {
// Plain subqueries
Expr::Subquery(query) => {
self.transform_select_query_recursive(&mut query.body, excluded_tables)?;
}
// EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.transform_select_query_recursive(&mut subquery.body, excluded_tables)?;
}
// IN Subqueries
Expr::InSubquery {
expr: left_expr,
subquery,
..
} => {
self.transform_expression_subqueries(left_expr, excluded_tables)?;
self.transform_select_query_recursive(&mut subquery.body, excluded_tables)?;
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Binary operations
Expr::BinaryOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Unary operations
Expr::UnaryOp {
expr: inner_expr, ..
} => {
self.transform_expression_subqueries(inner_expr, excluded_tables)?;
}
// Nested expressions
Expr::Nested(nested) => {
self.transform_expression_subqueries(nested, excluded_tables)?;
}
// CASE expressions
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.transform_expression_subqueries(op, excluded_tables)?;
}
for case_when in conditions {
self.transform_expression_subqueries(
&mut case_when.condition,
excluded_tables,
)?;
self.transform_expression_subqueries(&mut case_when.result, excluded_tables)?;
}
if let Some(else_res) = else_result {
self.transform_expression_subqueries(else_res, excluded_tables)?;
}
}
// Function calls
Expr::Function(func) => match &mut func.args {
sqlparser::ast::FunctionArguments::List(sqlparser::ast::FunctionArgumentList {
args,
..
}) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {}
},
// BETWEEN
Expr::Between {
expr: main_expr,
low,
high,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
self.transform_expression_subqueries(low, excluded_tables)?;
self.transform_expression_subqueries(high, excluded_tables)?;
}
// IN list
Expr::InList {
expr: main_expr,
list,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
for list_expr in list {
self.transform_expression_subqueries(list_expr, excluded_tables)?;
}
}
// IS NULL/IS NOT NULL
Expr::IsNull(inner) | Expr::IsNotNull(inner) => {
self.transform_expression_subqueries(inner, excluded_tables)?;
}
// Other expression types need no transformation
_ => {}
}
Ok(())
}
/// Adds tombstone filters to SELECT statements
fn add_tombstone_filters_to_select(
&self,
select: &mut sqlparser::ast::Select,
excluded_tables: &HashSet<&str>,
) -> Result<(), DatabaseError> {
// Collect all CRDT tables together with their aliases
let mut crdt_tables = Vec::new();
for twj in &select.from {
if let TableFactor::Table { name, alias, .. } = &twj.relation {
// Uses the central CrdtTransformer logic
if self.is_crdt_sync_table(name) {
let table_alias = alias.as_ref().map(|a| a.name.value.as_str());
crdt_tables.push((name.clone(), table_alias));
}
}
}
if crdt_tables.is_empty() {
return Ok(());
}
// Check which tombstone columns are already referenced in the WHERE clause
let explicitly_filtered_tables = if let Some(where_clause) = &select.selection {
self.find_explicitly_filtered_tombstone_tables(where_clause, &crdt_tables)
} else {
HashSet::new()
};
// Only create filters for tables that are not yet filtered explicitly
let mut tombstone_filters = Vec::new();
for (table_name, table_alias) in crdt_tables {
let table_name_string = table_name.to_string();
let table_key = table_alias.unwrap_or(&table_name_string);
if !explicitly_filtered_tables.contains(table_key) {
// Uses the central CrdtColumns logic
tombstone_filters.push(self.columns.create_tombstone_filter(table_alias));
}
}
// Add the automatic filters
if !tombstone_filters.is_empty() {
let combined_filter = tombstone_filters
.into_iter()
.reduce(|acc, expr| Expr::BinaryOp {
left: Box::new(acc),
op: BinaryOperator::And,
right: Box::new(expr),
})
.unwrap();
match &mut select.selection {
Some(existing) => {
*existing = Expr::BinaryOp {
left: Box::new(existing.clone()),
op: BinaryOperator::And,
right: Box::new(combined_filter),
};
}
None => {
select.selection = Some(combined_filter);
}
}
}
Ok(())
}
/// Finds all tables that already have an explicit tombstone filter in the WHERE clause
fn find_explicitly_filtered_tombstone_tables(
&self,
where_expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
) -> HashSet<String> {
let mut filtered_tables = HashSet::new();
self.scan_expression_for_tombstone_references(
where_expr,
crdt_tables,
&mut filtered_tables,
);
filtered_tables
}
/// Recursively scans an expression tree for tombstone column references
fn scan_expression_for_tombstone_references(
&self,
expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) {
match expr {
Expr::Identifier(ident) => {
// Uses the central CrdtColumns configuration
if ident.value == self.columns.tombstone && crdt_tables.len() == 1 {
let table_name_str = crdt_tables[0].0.to_string();
let table_key = crdt_tables[0].1.unwrap_or(&table_name_str);
filtered_tables.insert(table_key.to_string());
}
}
Expr::CompoundIdentifier(idents) => {
// Uses the central CrdtColumns configuration
if idents.len() == 2 && idents[1].value == self.columns.tombstone {
let table_ref = &idents[0].value;
for (table_name, alias) in crdt_tables {
let table_name_str = table_name.to_string();
if table_ref == &table_name_str || alias.map_or(false, |a| a == table_ref) {
filtered_tables.insert(table_ref.clone());
break;
}
}
}
}
Expr::BinaryOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
Expr::UnaryOp { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Nested(nested) => {
self.scan_expression_for_tombstone_references(nested, crdt_tables, filtered_tables);
}
Expr::InList { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Between { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::IsNull(expr) | Expr::IsNotNull(expr) => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Function(func) => {
if let sqlparser::ast::FunctionArguments::List(
sqlparser::ast::FunctionArgumentList { args, .. },
) = &func.args
{
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
}
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.scan_expression_for_tombstone_references(op, crdt_tables, filtered_tables);
}
for case_when in conditions {
self.scan_expression_for_tombstone_references(
&case_when.condition,
crdt_tables,
filtered_tables,
);
self.scan_expression_for_tombstone_references(
&case_when.result,
crdt_tables,
filtered_tables,
);
}
if let Some(else_res) = else_result {
self.scan_expression_for_tombstone_references(
else_res,
crdt_tables,
filtered_tables,
);
}
}
Expr::Subquery(query) => {
self.analyze_query_for_tombstone_references(query, crdt_tables, filtered_tables)
.ok();
}
Expr::Exists { subquery, .. } => {
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::InSubquery { expr, subquery, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
_ => {}
}
}
fn analyze_query_for_tombstone_references(
&self,
query: &sqlparser::ast::Query,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
self.analyze_set_expr_for_tombstone_references(&query.body, crdt_tables, filtered_tables)
}
fn analyze_set_expr_for_tombstone_references(
&self,
set_expr: &SetExpr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
if let Some(where_clause) = &select.selection {
self.scan_expression_for_tombstone_references(
where_clause,
crdt_tables,
filtered_tables,
);
}
for projection in &select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
_ => {}
}
}
match &select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.scan_expression_for_tombstone_references(
group_expr,
crdt_tables,
filtered_tables,
);
}
}
}
if let Some(having) = &select.having {
self.scan_expression_for_tombstone_references(
having,
crdt_tables,
filtered_tables,
);
}
}
SetExpr::SetOperation { left, right, .. } => {
self.analyze_set_expr_for_tombstone_references(left, crdt_tables, filtered_tables)?;
self.analyze_set_expr_for_tombstone_references(
right,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Query(query) => {
self.analyze_set_expr_for_tombstone_references(
&query.body,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Values(values) => {
for row in &values.rows {
for expr in row {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {}
}
Ok(())
}
}
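One subtle behavioural change in this file: create_tombstone_filter now builds Expr::IsNotTrue instead of the old `haex_tombstone != 1` comparison, which also keeps rows whose tombstone column is still NULL (NULL <> 1 evaluates to NULL and would have dropped them, while NULL IS NOT TRUE is true). A small standalone sketch of how the two variants render, assuming the pinned sqlparser version exposes Expr::IsNotTrue with the usual Display impls:

use sqlparser::ast::{BinaryOperator, Expr, Ident, Value};

fn main() {
    let column = Expr::Identifier(Ident::new("haex_tombstone"));

    // Old filter: a NULL tombstone makes the comparison NULL, so the row would be filtered out.
    let old_filter = Expr::BinaryOp {
        left: Box::new(column.clone()),
        op: BinaryOperator::NotEq,
        right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
    };

    // New filter: NULL IS NOT TRUE is true, so never-deleted rows with a NULL tombstone survive.
    let new_filter = Expr::IsNotTrue(Box::new(column));

    println!("{old_filter}"); // haex_tombstone <> 1
    println!("{new_filter}"); // haex_tombstone IS NOT TRUE
}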

View File

@ -15,7 +15,6 @@ use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::Parser;
/// Öffnet und initialisiert eine Datenbank mit Verschlüsselung
///
pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connection, DatabaseError> {
let flags = if create {
OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_CREATE
@ -159,47 +158,23 @@ pub fn execute(
let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect();
with_connection(connection, |conn| {
// Check if the SQL contains RETURNING clause
let has_returning = sql.to_uppercase().contains("RETURNING");
if has_returning {
// Use prepare + query for RETURNING statements
let mut stmt = conn
.prepare(&sql)
.map_err(|e| DatabaseError::PrepareError {
reason: e.to_string(),
})?;
if sql.to_uppercase().contains("RETURNING") {
let mut stmt = conn.prepare(&sql)?;
let num_columns = stmt.column_count();
let mut rows = stmt
.query(&params_sql[..])
.map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
let mut rows = stmt.query(&params_sql[..])?;
let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
while let Some(row) = rows.next().map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Row iteration error: {}", e),
})? {
while let Some(row) = rows.next()? {
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns {
let value_ref =
row.get_ref(i)
.map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Failed to get column {}: {}", i, e),
})?;
let value_ref = row.get_ref(i)?;
let json_val = convert_value_ref_to_json(value_ref)?;
row_values.push(json_val);
}
result_vec.push(row_values);
}
Ok(result_vec)
} else {
// For non-RETURNING statements, just execute and return empty array
conn.execute(&sql, &params_sql[..]).map_err(|e| {
let table_name = extract_primary_table_name_from_sql(&sql).unwrap_or(None);
DatabaseError::ExecutionError {
@ -208,7 +183,6 @@ pub fn execute(
table: table_name,
}
})?;
Ok(vec![])
}
})
@ -238,40 +212,20 @@ pub fn select(
let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect();
with_connection(connection, |conn| {
let mut stmt = conn
.prepare(&sql)
.map_err(|e| DatabaseError::PrepareError {
reason: e.to_string(),
})?;
let mut stmt = conn.prepare(&sql)?;
let num_columns = stmt.column_count();
let mut rows = stmt
.query(&params_sql[..])
.map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
let mut rows = stmt.query(&params_sql[..])?;
let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
while let Some(row) = rows.next().map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Row iteration error: {}", e),
})? {
while let Some(row) = rows.next()? {
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns {
let value_ref = row
.get_ref(i)
.map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Failed to get column {}: {}", i, e),
})?;
let value_ref = row.get_ref(i)?;
let json_val = convert_value_ref_to_json(value_ref)?;
row_values.push(json_val);
}
result_vec.push(row_values);
}
Ok(result_vec)
})
}
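The slimmed-down execute path above branches on a plain substring check for RETURNING and leans on `?` conversions instead of hand-written map_err closures. A compact, self-contained sketch of the same control flow against an in-memory rusqlite connection (values are read as optional strings only to keep the sketch short; assumes a SQLite build new enough for RETURNING):

use rusqlite::{Connection, Result, ToSql};

// Executes a statement; rows are collected only when the SQL contains a RETURNING clause.
fn execute(conn: &Connection, sql: &str, params: &[&dyn ToSql]) -> Result<Vec<Vec<Option<String>>>> {
    if sql.to_uppercase().contains("RETURNING") {
        let mut stmt = conn.prepare(sql)?;
        let num_columns = stmt.column_count();
        let mut rows = stmt.query(params)?;
        let mut result = Vec::new();
        while let Some(row) = rows.next()? {
            let mut row_values = Vec::with_capacity(num_columns);
            for i in 0..num_columns {
                // Simplification for the sketch: every column is read as an optional string.
                row_values.push(row.get::<_, Option<String>>(i)?);
            }
            result.push(row_values);
        }
        Ok(result)
    } else {
        // No RETURNING clause: just execute and hand back an empty result set.
        conn.execute(sql, params)?;
        Ok(vec![])
    }
}

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch("CREATE TABLE t (id TEXT PRIMARY KEY);")?;
    let rows = execute(&conn, "INSERT INTO t (id) VALUES ('a') RETURNING id", &[])?;
    println!("{rows:?}"); // [[Some("a")]]
    Ok(())
}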

View File

@ -204,15 +204,17 @@ pub fn list_vaults(app_handle: AppHandle) -> Result<Vec<VaultInfo>, DatabaseErro
/// Checks if a vault with the given name exists
#[tauri::command]
pub fn vault_exists(app_handle: AppHandle, db_name: String) -> Result<bool, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &db_name)?;
pub fn vault_exists(app_handle: AppHandle, vault_name: String) -> Result<bool, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &vault_name)?;
Ok(Path::new(&vault_path).exists())
}
/// Deletes a vault database file
#[tauri::command]
pub fn delete_vault(app_handle: AppHandle, db_name: String) -> Result<String, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &db_name)?;
pub fn delete_vault(app_handle: AppHandle, vault_name: String) -> Result<String, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &vault_name)?;
let vault_shm_path = format!("{}-shm", vault_path);
let vault_wal_path = format!("{}-wal", vault_path);
if !Path::new(&vault_path).exists() {
return Err(DatabaseError::IoError {
@ -221,12 +223,26 @@ pub fn delete_vault(app_handle: AppHandle, db_name: String) -> Result<String, Da
});
}
if Path::new(&vault_shm_path).exists() {
fs::remove_file(&vault_shm_path).map_err(|e| DatabaseError::IoError {
path: vault_shm_path.clone(),
reason: format!("Failed to delete vault: {}", e),
})?;
}
if Path::new(&vault_wal_path).exists() {
fs::remove_file(&vault_wal_path).map_err(|e| DatabaseError::IoError {
path: vault_wal_path.clone(),
reason: format!("Failed to delete vault: {}", e),
})?;
}
fs::remove_file(&vault_path).map_err(|e| DatabaseError::IoError {
path: vault_path.clone(),
reason: format!("Failed to delete vault: {}", e),
})?;
Ok(format!("Vault '{}' successfully deleted", db_name))
Ok(format!("Vault '{}' successfully deleted", vault_name))
}
#[tauri::command]
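Removing the -shm and -wal sidecar files along with the main database file is what fully deletes a SQLite vault that runs in WAL mode; a condensed standalone sketch of the same cleanup using only the standard library (hypothetical path):

use std::fs;
use std::io;
use std::path::Path;

// Removes a SQLite database file together with its WAL-mode sidecar files, if they exist.
fn remove_sqlite_db(db_path: &str) -> io::Result<()> {
    for path in [
        format!("{db_path}-shm"),
        format!("{db_path}-wal"),
        db_path.to_string(),
    ] {
        if Path::new(&path).exists() {
            fs::remove_file(&path)?;
        }
    }
    Ok(())
}

fn main() -> io::Result<()> {
    // Hypothetical path; in delete_vault above it comes from get_vault_path().
    remove_sqlite_db("/tmp/example-vault.db")
}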

View File

@ -10,7 +10,8 @@ use crate::extension::permissions::manager::PermissionManager;
use crate::extension::permissions::types::ExtensionPermission;
use crate::table_names::{TABLE_EXTENSIONS, TABLE_EXTENSION_PERMISSIONS};
use crate::AppState;
use std::collections::HashMap;
use serde_json::Value as JsonValue;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::io::Cursor;
use std::path::PathBuf;
@ -470,9 +471,12 @@ impl ExtensionManager {
let actual_extension_id = with_connection(&state.db, |conn| {
let tx = conn.transaction().map_err(DatabaseError::from)?;
let hlc_service = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned {
let hlc_service_guard = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned {
reason: "Failed to lock HLC service".to_string(),
})?;
// Clone so the MutexGuard can be dropped before potentially long-running DB operations
let hlc_service = hlc_service_guard.clone();
drop(hlc_service_guard);
// Create a PK remapping context for the entire transaction
// This enables automatic FK remapping when ON CONFLICT fires for the extension row
@ -485,34 +489,35 @@ impl ExtensionManager {
TABLE_EXTENSIONS
);
let (_tables, returning_results) = SqlExecutor::query_internal_typed_with_context(
&tx,
&hlc_service,
&insert_ext_sql,
rusqlite::params![
extension_id,
extracted.manifest.name,
extracted.manifest.version,
extracted.manifest.author,
extracted.manifest.entry,
extracted.manifest.icon,
extracted.manifest.public_key,
extracted.manifest.signature,
extracted.manifest.homepage,
extracted.manifest.description,
true, // enabled
],
&mut pk_context,
)?;
let (_tables, returning_results): (HashSet<String>, Vec<Vec<JsonValue>>) =
SqlExecutor::query_internal_typed_with_context(
&tx,
&hlc_service,
&insert_ext_sql,
rusqlite::params![
extension_id,
extracted.manifest.name,
extracted.manifest.version,
extracted.manifest.author,
extracted.manifest.entry,
extracted.manifest.icon,
extracted.manifest.public_key,
extracted.manifest.signature,
extracted.manifest.homepage,
extracted.manifest.description,
true, // enabled
],
&mut pk_context,
)?;
// Use the actual ID from the database (important with ON CONFLICT)
// The haex_extensions table has a single-column PK named "id"
let actual_extension_id = returning_results
.first()
.and_then(|row| row.first())
.and_then(|val| val.as_str())
.map(|s| s.to_string())
.unwrap_or_else(|| extension_id.clone());
.first() // first row (the inner Vec<JsonValue>, e.g. Some(&["uuid-string"]))
.and_then(|row_array| row_array.first()) // first element of that row (e.g. Some(&JsonValue::String("uuid-string")))
.and_then(|val| val.as_str()) // convert to &str (e.g. Some("uuid-string"))
.map(|s| s.to_string()) // convert to String
.unwrap_or_else(|| extension_id.clone()); // fall back to the generated id
eprintln!(
"DEBUG: Extension UUID - Generated: {}, Actual from DB: {}",

View File

@ -5,10 +5,9 @@ use crate::crdt::transformer::CrdtTransformer;
use crate::crdt::trigger;
use crate::database::core::{convert_value_ref_to_json, parse_sql_statements, ValueConverter};
use crate::database::error::DatabaseError;
use crate::database::DbConnection;
use rusqlite::Connection;
use rusqlite::{params_from_iter, types::Value as SqliteValue, ToSql, Transaction};
use serde_json::Value as JsonValue;
use serde_json::{Map, Value as JsonValue};
use sqlparser::ast::{Insert, Statement, TableObject};
use std::collections::{HashMap, HashSet};
@ -142,7 +141,7 @@ impl SqlExecutor {
}
let sql_str = statement.to_string();
eprintln!("DEBUG: Transformed SQL: {}", sql_str);
eprintln!("DEBUG: Transformed SQL (execute path): {}", sql_str);
// Special handling for INSERT statements (with FK remapping, WITHOUT RETURNING)
if let Statement::Insert(ref insert_stmt) = statement {
@ -162,7 +161,6 @@ impl SqlExecutor {
// Remap FK values in params (if mappings exist)
remap_fk_params(insert_stmt, &mut param_vec, &fk_info, pk_context)?;
// Execute the INSERT via execute()
let param_refs: Vec<&dyn ToSql> =
param_vec.iter().map(|v| v as &dyn ToSql).collect();
@ -174,19 +172,15 @@ impl SqlExecutor {
reason: format!("Prepare failed: {}", e),
})?;
let _ = stmt
let mut rows = stmt
.query(params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: format!("Query execution failed: {}", e),
reason: format!("INSERT query execution failed: {}", e),
})?;
/* tx.execute(&sql_str, params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: e.to_string(),
})?; */
let _ = rows.next()?;
}
} else {
// Execute non-INSERT statements normally
@ -194,7 +188,7 @@ impl SqlExecutor {
.map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(),
table: None,
reason: e.to_string(),
reason: format!("Execute failed: {}", e),
})?;
}
@ -300,9 +294,16 @@ impl SqlExecutor {
reason: e.to_string(),
})?;
let num_columns = stmt.column_count();
let column_names: Vec<String> = stmt
.column_names()
.into_iter()
.map(|s| s.to_string())
.collect();
let num_columns = column_names.len();
let param_refs: Vec<&dyn ToSql> =
param_vec.iter().map(|v| v as &dyn ToSql).collect();
let mut rows = stmt
.query(params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError {
@ -329,6 +330,7 @@ impl SqlExecutor {
// Extract ALL columns for the RETURNING result
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns {
let value_ref =
row.get_ref(i)
@ -353,6 +355,7 @@ impl SqlExecutor {
})?;
let num_columns = stmt.column_count();
let mut rows = stmt.query(params).map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
@ -463,11 +466,14 @@ impl SqlExecutor {
transformer.transform_select_statement(&mut stmt_to_execute)?;
let transformed_sql = stmt_to_execute.to_string();
eprintln!("DEBUG: Transformed SELECT: {}", transformed_sql);
let mut prepared_stmt = conn.prepare(&transformed_sql)?;
let num_columns = prepared_stmt.column_count();
let mut rows = prepared_stmt
.query(params_from_iter(sql_params.iter()))
.query(params_from_iter(&sql_params[..]))
.map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
@ -719,13 +725,14 @@ fn extract_pk_values_from_row(
) -> Result<PkValues, DatabaseError> {
let mut pk_values = PkValues::new();
for (idx, pk_col) in pk_columns.iter().enumerate() {
// RETURNING yields the PKs in the order they appear in the RETURNING clause
let value: String = row.get(idx).map_err(|e| DatabaseError::ExecutionError {
sql: "RETURNING clause".to_string(),
reason: format!("Failed to extract PK column '{}': {}", pk_col, e),
table: None,
})?;
for pk_col in pk_columns.iter() {
let value: String =
row.get(pk_col.as_str())
.map_err(|e| DatabaseError::ExecutionError {
sql: "RETURNING clause".to_string(),
reason: format!("Failed to extract PK column '{}': {}", pk_col, e),
table: None,
})?;
pk_values.insert(pk_col.clone(), value);
}
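extract_pk_values_from_row now reads each primary key by column name instead of by position, so the lookup no longer depends on the order of the RETURNING projection. A minimal rusqlite sketch of that name-based access (in-memory table, hypothetical column names; assumes a SQLite build with RETURNING support):

use rusqlite::{Connection, Result};

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch("CREATE TABLE t (id TEXT PRIMARY KEY, name TEXT, haex_timestamp TEXT);")?;

    let mut stmt = conn.prepare(
        "INSERT INTO t (id, name, haex_timestamp) VALUES ('pk-1', 'a', 'ts') RETURNING name, haex_timestamp, id",
    )?;
    let mut rows = stmt.query([])?;

    if let Some(row) = rows.next()? {
        // Positional access would have to know that `id` is the third RETURNING column;
        // access by column name stays correct regardless of the projection order.
        let id: String = row.get("id")?;
        assert_eq!(id, "pk-1");
    }
    Ok(())
}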

View File

@ -317,15 +317,6 @@ fn count_sql_placeholders(sql: &str) -> usize {
sql.matches('?').count()
}
/// Truncates SQL for error messages
/* fn truncate_sql(sql: &str, max_length: usize) -> String {
if sql.len() <= max_length {
sql.to_string()
} else {
format!("{}...", &sql[..max_length])
}
} */
#[cfg(test)]
mod tests {
use super::*;

View File

@ -15,11 +15,6 @@
}
}
:root {
--ui-header-height: 74px;
}
.swiper-slide {
isolation: isolate; /* for each slide */
contain: layout style; /* contains the context without a performance hit */
@theme {
--spacing-header: 3.5rem; /* 72px - or your preferred value */
}

View File

@ -248,6 +248,7 @@
<script setup lang="ts">
import { Swiper, SwiperSlide } from 'swiper/vue'
import { Navigation } from 'swiper/modules'
import type { Swiper as SwiperType } from 'swiper'
import 'swiper/css'
import 'swiper/css/navigation'
@ -273,14 +274,8 @@ const {
isOverviewMode,
} = storeToRefs(workspaceStore)
// Swiper instance
// Control Swiper touch behavior (disable during icon/window drag)
// Mouse position tracking
const { x: mouseX } = useMouse()
// Desktop element ref
const desktopEl = useTemplateRef('desktopEl')
// Track desktop viewport size reactively

View File

@ -1,44 +1,46 @@
<template>
<div class="flex flex-col w-full h-full overflow-hidden">
<UPageHeader
as="header"
:ui="{
root: [
'bg-default border-b border-accented sticky top-0 z-50 py-0 px-8',
],
wrapper: [
'pt-6 flex flex-col sm:flex-row sm:items-center sm:justify-between gap-4',
],
}"
>
<template #title>
<div class="flex items-center">
<UiLogoHaexhub class="size-12 shrink-0" />
<div ref="headerRef">
<UPageHeader
as="header"
:ui="{
root: [
'bg-default border-b border-accented sticky top-0 z-50 pt-2 px-8 h-header',
],
wrapper: [
'flex flex-col sm:flex-row sm:items-center sm:justify-between gap-4',
],
}"
>
<template #title>
<div class="flex items-center">
<UiLogoHaexhub class="size-12 shrink-0" />
<NuxtLinkLocale
class="link text-base-content link-neutral text-xl font-semibold no-underline flex items-center"
:to="{ name: 'desktop' }"
<NuxtLinkLocale
class="link text-base-content link-neutral text-xl font-semibold no-underline flex items-center"
:to="{ name: 'desktop' }"
>
<UiTextGradient class="text-nowrap">
{{ currentVaultName }}
</UiTextGradient>
</NuxtLinkLocale>
</div>
</template>
<template #links>
<UButton
color="neutral"
variant="outline"
:block="isSmallScreen"
@click="isOverviewMode = !isOverviewMode"
icon="i-bi-person-workspace"
size="lg"
>
<UiTextGradient class="text-nowrap">
{{ currentVaultName }}
</UiTextGradient>
</NuxtLinkLocale>
</div>
</template>
<template #links>
<UButton
color="neutral"
variant="outline"
:block="isSmallScreen"
@click="isOverviewMode = !isOverviewMode"
icon="i-bi-person-workspace"
size="lg"
>
</UButton>
<HaexExtensionLauncher :block="isSmallScreen" />
</template>
</UPageHeader>
</UButton>
<HaexExtensionLauncher :block="isSmallScreen" />
</template>
</UPageHeader>
</div>
<main class="flex-1 overflow-hidden bg-elevated">
<NuxtPage />

View File

@ -62,7 +62,7 @@
>
<Icon
name="mdi:trash-can-outline"
@click="removeVaultAsync(vault.name)"
@click="prepareRemoveVault(vault.name)"
/>
</UButton>
</div>
@ -81,24 +81,52 @@
</div>
</div>
</div>
<UiDialogConfirm
v-model:open="showRemoveDialog"
:title="t('remove.title')"
:description="t('remove.description', { vaultName: vaultToBeRemoved })"
@confirm="onConfirmRemoveAsync"
/>
</div>
</template>
<script setup lang="ts">
import { openUrl } from '@tauri-apps/plugin-opener'
import type { Locale } from 'vue-i18n'
import type { VaultInfo } from '@bindings/VaultInfo'
definePageMeta({
name: 'vaultOpen',
})
const { t, setLocale } = useI18n()
const passwordPromptOpen = ref(false)
const selectedVault = ref<IVaultInfo>()
const selectedVault = ref<VaultInfo>()
const showRemoveDialog = ref(false)
const { syncLastVaultsAsync, removeVaultAsync } = useLastVaultStore()
const { lastVaults } = storeToRefs(useLastVaultStore())
const vaultToBeRemoved = ref('')
const prepareRemoveVault = (vaultName: string) => {
vaultToBeRemoved.value = vaultName
showRemoveDialog.value = true
}
const toast = useToast()
const onConfirmRemoveAsync = async () => {
try {
await removeVaultAsync(vaultToBeRemoved.value)
showRemoveDialog.value = false
await syncLastVaultsAsync()
} catch (error) {
toast.add({
color: 'error',
description: JSON.stringify(error),
})
}
}
onMounted(async () => {
try {
await syncLastVaultsAsync()
@ -116,7 +144,10 @@ const onSelectLocale = async (locale: Locale) => {
de:
welcome: 'Viel Spass mit'
lastUsed: 'Zuletzt verwendete Vaults'
sponsors: 'Supported by'
sponsors: Supported by
remove:
title: Vault löschen
description: Möchtest du die Vault {vaultName} wirklich löschen?
en:
welcome: 'Have fun with'

View File

@ -57,7 +57,10 @@ onMounted(async () => {
await loadExtensionsAsync()
await readNotificationsAsync()
if (!(await isKnownDeviceAsync())) {
const knownDevice = await isKnownDeviceAsync()
console.log('knownDevice', knownDevice)
if (!knownDevice) {
console.log('not known device')
newDeviceName.value = hostname.value ?? 'unknown'
showNewDeviceDialog.value = true

View File

@ -179,8 +179,9 @@ export const useWindowManagerStore = defineStore('windowManager', () => {
// Calculate viewport-aware size
const viewportWidth = window.innerWidth
const viewportHeight = window.innerHeight
const viewportHeight = window.innerHeight - 60
console.log('viewportHeight', window.innerHeight, viewportHeight)
const windowHeight = Math.min(height, viewportHeight)
// Adjust width proportionally if needed (optional)

View File

@ -57,9 +57,11 @@ export const useUiStore = defineStore('uiStore', () => {
colorMode.preference = currentThemeName.value
})
const viewportHeightWithoutHeader = ref(0)
return {
availableThemes,
//currentScreenSize,
viewportHeightWithoutHeader,
currentTheme,
currentThemeName,
defaultTheme,

View File

@ -136,43 +136,50 @@ const drizzleCallback = (async (
params: unknown[],
method: 'get' | 'run' | 'all' | 'values',
) => {
let rows: unknown[] = []
let rows: any[] = []
try {
if (isSelectQuery(sql)) {
// SELECT statements
rows = await invoke<unknown[]>('sql_select_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL select Error:', e, sql, params)
return []
})
} else if (hasReturning(sql)) {
// INSERT/UPDATE/DELETE with RETURNING → use query
rows = await invoke<unknown[]>('sql_query_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL query with CRDT Error:', e, sql, params)
return []
})
} else {
// INSERT/UPDATE/DELETE without RETURNING → use execute
await invoke<unknown[]>('sql_execute_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL execute with CRDT Error:', e, sql, params, rows)
return []
})
}
} catch (error) {
console.error('Fehler im drizzleCallback invoke:', error, {
sql,
params,
method,
})
}
console.log('drizzleCallback', method, sql, params)
console.log('drizzleCallback rows', rows)
if (isSelectQuery(sql)) {
// SELECT statements
rows = await invoke<unknown[]>('sql_select_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL select Error:', e, sql, params)
return []
})
} else if (hasReturning(sql)) {
// INSERT/UPDATE/DELETE with RETURNING → use query
rows = await invoke<unknown[]>('sql_query_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL query with CRDT Error:', e, sql, params)
return []
})
} else {
// INSERT/UPDATE/DELETE without RETURNING → use execute
await invoke<unknown[]>('sql_execute_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL execute with CRDT Error:', e, sql, params, rows)
return []
})
}
console.log('drizzle found', rows)
if (method === 'get') {
return { rows: rows.length > 0 ? [rows[0]] : [] }
} else {
return { rows }
return rows.length > 0 ? { rows: rows[0] } : { rows }
}
return { rows }
}) satisfies AsyncRemoteCallback

View File

@ -1,89 +1,30 @@
import { invoke } from '@tauri-apps/api/core'
import { load } from '@tauri-apps/plugin-store'
/* interface ILastVault {
lastUsed: Date
name: string
path: string
} */
export interface IVaultInfo {
name: string
path: string
lastAccess: Date
}
import type { VaultInfo } from '@bindings/VaultInfo'
export const useLastVaultStore = defineStore('lastVaultStore', () => {
const {
public: { haexVault },
} = useRuntimeConfig()
const lastVaults = ref<IVaultInfo[]>([])
const keyName = 'lastVaults'
const getStoreAsync = async () => {
return await load(haexVault.lastVaultFileName || 'lastVaults.json')
}
const lastVaults = ref<VaultInfo[]>([])
const syncLastVaultsAsync = async () => {
lastVaults.value =
(await listVaultsAsync()).sort(
(a, b) => +new Date(b.lastAccess) - +new Date(a.lastAccess),
(a, b) => +new Date(`${b.lastAccess}`) - +new Date(`${a.lastAccess}`),
) ?? []
return lastVaults.value
}
const listVaultsAsync = async () => {
lastVaults.value = await invoke<IVaultInfo[]>('list_vaults')
lastVaults.value = await invoke<VaultInfo[]>('list_vaults')
return lastVaults.value
}
const addVaultAsync = async ({
name,
path,
}: {
name?: string
path: string
}) => {
if (!lastVaults.value) await syncLastVaultsAsync()
const saveName = name || getFileNameFromPath(path)
lastVaults.value = lastVaults.value.filter((vault) => vault.path !== path)
lastVaults.value.push({ lastAccess: new Date(), name: saveName, path })
await saveLastVaultsAsync()
}
const removeVaultAsync = async (vaultPath: string) => {
lastVaults.value = lastVaults.value.filter(
(vault) => vault.path !== vaultPath,
)
await saveLastVaultsAsync()
}
const saveLastVaultsAsync = async () => {
const store = await getStoreAsync()
await store.set(keyName, lastVaults.value)
await syncLastVaultsAsync()
const removeVaultAsync = async (vaultName: string) => {
return await invoke('delete_vault', { vaultName })
}
return {
addVaultAsync,
syncLastVaultsAsync,
lastVaults,
removeVaultAsync,
saveLastVaultsAsync,
}
})
const getFileNameFromPath = (path: string) => {
const lastBackslashIndex = path.lastIndexOf('\\')
const lastSlashIndex = path.lastIndexOf('/')
const lastIndex = Math.max(lastBackslashIndex, lastSlashIndex)
const fileName = path.substring(lastIndex + 1)
return fileName
}

View File

@ -32,6 +32,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.locale),
})
console.log('found currentLocaleRow', currentLocaleRow)
if (currentLocaleRow?.value) {
const currentLocale = app.$i18n.availableLocales.find(
(locale) => locale === currentLocaleRow.value,
@ -70,6 +71,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.theme),
})
console.log('found currentThemeRow', currentThemeRow)
if (currentThemeRow?.value) {
const theme = availableThemes.value.find(
(theme) => theme.value === currentThemeRow.value,
@ -98,6 +100,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.vaultName),
})
console.log('found currentVaultNameRow', currentVaultNameRow)
if (currentVaultNameRow?.value) {
currentVaultName.value =
currentVaultNameRow.value || haexVault.defaultVaultName || 'HaexHub'
@ -129,7 +132,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
),
})
console.log('store: readDeviceNameAsync', deviceName)
return deviceName
return deviceName?.id ? deviceName : undefined
}
const addDeviceNameAsync = async ({