2 Commits

Author SHA1 Message Date
f70e924cc3 refactored rust sql and drizzle 2025-10-22 15:05:36 +02:00
9ea057e943 fixed drizzle rust logic 2025-10-21 16:29:13 +02:00
34 changed files with 4101 additions and 2841 deletions

View File

@ -23,46 +23,47 @@
"@nuxt/icon": "2.0.0", "@nuxt/icon": "2.0.0",
"@nuxt/ui": "4.0.0", "@nuxt/ui": "4.0.0",
"@nuxtjs/i18n": "10.0.6", "@nuxtjs/i18n": "10.0.6",
"@pinia/nuxt": "^0.11.1", "@pinia/nuxt": "^0.11.2",
"@tailwindcss/vite": "^4.1.10", "@tailwindcss/vite": "^4.1.15",
"@tauri-apps/api": "^2.5.0", "@tauri-apps/api": "^2.9.0",
"@tauri-apps/plugin-dialog": "^2.2.2", "@tauri-apps/plugin-dialog": "^2.4.0",
"@tauri-apps/plugin-fs": "^2.3.0", "@tauri-apps/plugin-fs": "^2.4.2",
"@tauri-apps/plugin-http": "2.5.2", "@tauri-apps/plugin-http": "2.5.2",
"@tauri-apps/plugin-notification": "2.3.1", "@tauri-apps/plugin-notification": "2.3.1",
"@tauri-apps/plugin-opener": "^2.3.0", "@tauri-apps/plugin-opener": "^2.5.0",
"@tauri-apps/plugin-os": "^2.2.2", "@tauri-apps/plugin-os": "^2.3.1",
"@tauri-apps/plugin-sql": "2.3.0", "@tauri-apps/plugin-sql": "2.3.0",
"@tauri-apps/plugin-store": "^2.2.1", "@tauri-apps/plugin-store": "^2.4.0",
"@vueuse/components": "^13.9.0", "@vueuse/components": "^13.9.0",
"@vueuse/core": "^13.4.0", "@vueuse/core": "^13.9.0",
"@vueuse/gesture": "^2.0.0", "@vueuse/gesture": "^2.0.0",
"@vueuse/nuxt": "^13.4.0", "@vueuse/nuxt": "^13.9.0",
"drizzle-orm": "^0.44.2", "drizzle-orm": "^0.44.6",
"eslint": "^9.34.0", "eslint": "^9.38.0",
"fuse.js": "^7.1.0", "fuse.js": "^7.1.0",
"nuxt": "^4.0.3", "nuxt": "^4.1.3",
"nuxt-zod-i18n": "^1.12.0", "nuxt-zod-i18n": "^1.12.1",
"swiper": "^12.0.2", "swiper": "^12.0.3",
"tailwindcss": "^4.1.10", "tailwindcss": "^4.1.15",
"vue": "^3.5.20", "vue": "^3.5.22",
"vue-router": "^4.5.1", "vue-router": "^4.6.3",
"zod": "4.1.5" "zod": "^3.25.76"
}, },
"devDependencies": { "devDependencies": {
"@iconify/json": "^2.2.351", "@iconify-json/hugeicons": "^1.2.17",
"@iconify/json": "^2.2.398",
"@iconify/tailwind4": "^1.0.6", "@iconify/tailwind4": "^1.0.6",
"@libsql/client": "^0.15.15", "@libsql/client": "^0.15.15",
"@tauri-apps/cli": "^2.5.0", "@tauri-apps/cli": "^2.9.0",
"@types/node": "^24.6.2", "@types/node": "^24.9.1",
"@vitejs/plugin-vue": "6.0.1", "@vitejs/plugin-vue": "6.0.1",
"@vue/compiler-sfc": "^3.5.17", "@vue/compiler-sfc": "^3.5.22",
"drizzle-kit": "^0.31.2", "drizzle-kit": "^0.31.5",
"globals": "^16.2.0", "globals": "^16.4.0",
"prettier": "3.6.2", "prettier": "3.6.2",
"tsx": "^4.20.6", "tsx": "^4.20.6",
"tw-animate-css": "^1.3.8", "tw-animate-css": "^1.4.0",
"typescript": "^5.8.3", "typescript": "^5.9.3",
"vite": "7.1.3", "vite": "7.1.3",
"vue-tsc": "3.0.6" "vue-tsc": "3.0.6"
}, },

pnpm-lock.yaml (generated, 3905 changed lines)

File diff suppressed because it is too large

View File

@ -0,0 +1,3 @@
DROP INDEX `haex_workspaces_name_unique`;--> statement-breakpoint
CREATE UNIQUE INDEX `haex_workspaces_position_unique` ON `haex_workspaces` (`position`);--> statement-breakpoint
CREATE UNIQUE INDEX `haex_settings_key_type_value_unique` ON `haex_settings` (`key`,`type`,`value`);
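For reference, migration 0006 drops the unique constraint on workspace names in favor of one on positions, and makes the (key, type, value) triple unique in haex_settings, so repeating the same triple fails unless the statement resolves the conflict itself. A minimal rusqlite sketch of that behavior, using a simplified stand-in table (no CRDT columns, made-up values), not the real schema:

use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    // Simplified stand-in for haex_settings plus the index added by migration 0006.
    conn.execute_batch(
        "CREATE TABLE haex_settings (id TEXT PRIMARY KEY, key TEXT, type TEXT, value TEXT);
         CREATE UNIQUE INDEX haex_settings_key_type_value_unique ON haex_settings (key, type, value);",
    )?;
    conn.execute(
        "INSERT INTO haex_settings (id, key, type, value) VALUES (?1, ?2, ?3, ?4)",
        ("a", "theme", "ui", "dark"),
    )?;
    // Same (key, type, value) triple again: rejected by the unique index ...
    assert!(conn
        .execute(
            "INSERT INTO haex_settings (id, key, type, value) VALUES (?1, ?2, ?3, ?4)",
            ("b", "theme", "ui", "dark"),
        )
        .is_err());
    // ... unless the statement declares how to handle the conflict.
    conn.execute(
        "INSERT INTO haex_settings (id, key, type, value) VALUES (?1, ?2, ?3, ?4)
         ON CONFLICT (key, type, value) DO NOTHING",
        ("c", "theme", "ui", "dark"),
    )?;
    Ok(())
}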

File diff suppressed because it is too large

View File

@ -43,6 +43,13 @@
"when": 1760964548034, "when": 1760964548034,
"tag": "0005_tidy_yellowjacket", "tag": "0005_tidy_yellowjacket",
"breakpoints": true "breakpoints": true
},
{
"idx": 6,
"version": "6",
"when": 1761137108127,
"tag": "0006_gigantic_bloodaxe",
"breakpoints": true
} }
] ]
} }

View File

@ -21,18 +21,22 @@ export const withCrdtColumns = <
haexTimestamp: text(columnNames.haexTimestamp), haexTimestamp: text(columnNames.haexTimestamp),
}) })
export const haexSettings = sqliteTable(tableNames.haex.settings.name, { export const haexSettings = sqliteTable(
id: text() tableNames.haex.settings.name,
.primaryKey() {
.$defaultFn(() => crypto.randomUUID()), id: text()
key: text(), .primaryKey()
type: text(), .$defaultFn(() => crypto.randomUUID()),
value: text(), key: text(),
haexTombstone: integer(tableNames.haex.settings.columns.haexTombstone, { type: text(),
mode: 'boolean', value: text(),
}), haexTombstone: integer(tableNames.haex.settings.columns.haexTombstone, {
haexTimestamp: text(tableNames.haex.settings.columns.haexTimestamp), mode: 'boolean',
}) }),
haexTimestamp: text(tableNames.haex.settings.columns.haexTimestamp),
},
(table) => [unique().on(table.key, table.type, table.value)],
)
export type InsertHaexSettings = typeof haexSettings.$inferInsert export type InsertHaexSettings = typeof haexSettings.$inferInsert
export type SelectHaexSettings = typeof haexSettings.$inferSelect export type SelectHaexSettings = typeof haexSettings.$inferSelect
@ -153,7 +157,7 @@ export const haexWorkspaces = sqliteTable(
}, },
tableNames.haex.workspaces.columns, tableNames.haex.workspaces.columns,
), ),
(table) => [unique().on(table.name)], (table) => [unique().on(table.position)],
) )
export type InsertHaexWorkspaces = typeof haexWorkspaces.$inferInsert export type InsertHaexWorkspaces = typeof haexWorkspaces.$inferInsert
export type SelectHaexWorkspaces = typeof haexWorkspaces.$inferSelect export type SelectHaexWorkspaces = typeof haexWorkspaces.$inferSelect

Binary file not shown.

View File

@ -4,8 +4,8 @@
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN}; use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError; use crate::database::error::DatabaseError;
use sqlparser::ast::{ use sqlparser::ast::{
Assignment, AssignmentTarget, BinaryOperator, Expr, Ident, Insert, ObjectNamePart, Assignment, AssignmentTarget, BinaryOperator, Expr, Ident, Insert, ObjectNamePart, OnConflict,
OnConflict, OnConflictAction, OnInsert, SelectItem, SetExpr, Value, OnConflictAction, OnInsert, SelectItem, SetExpr, Value,
}; };
use uhlc::Timestamp; use uhlc::Timestamp;
@ -23,6 +23,37 @@ impl InsertTransformer {
} }
} }
fn find_or_add_column(columns: &mut Vec<Ident>, col_name: &'static str) -> usize {
match columns.iter().position(|c| c.value == col_name) {
Some(index) => index, // Found: return its index.
None => {
// Not found: append it.
columns.push(Ident::new(col_name));
columns.len() - 1 // The index of the element just appended
}
}
}
/// If the index equals the length, the value is pushed instead.
fn set_or_push_value(row: &mut Vec<Expr>, index: usize, value: Expr) {
if index < row.len() {
// Column was already present; replace its value (usually a `?` placeholder or NULL)
row[index] = value;
} else {
// Column was not present; append the value
row.push(value);
}
}
fn set_or_push_projection(projection: &mut Vec<SelectItem>, index: usize, value: Expr) {
let item = SelectItem::UnnamedExpr(value);
if index < projection.len() {
projection[index] = item;
} else {
projection.push(item);
}
}
/// Transforms INSERT statements (adds the HLC timestamp and handles tombstone conflicts) /// Transforms INSERT statements (adds the HLC timestamp and handles tombstone conflicts)
/// Automatically appends RETURNING for primary keys so the executor knows the actual PKs /// Automatically appends RETURNING for primary keys so the executor knows the actual PKs
pub fn transform_insert( pub fn transform_insert(
@ -32,11 +63,11 @@ impl InsertTransformer {
primary_keys: &[String], primary_keys: &[String],
foreign_keys: &[String], foreign_keys: &[String],
) -> Result<(), DatabaseError> { ) -> Result<(), DatabaseError> {
// Add both haex_timestamp and haex_tombstone columns // Add both haex_timestamp and haex_tombstone columns if not exists
insert_stmt let hlc_col_index =
.columns Self::find_or_add_column(&mut insert_stmt.columns, self.hlc_timestamp_column);
.push(Ident::new(self.hlc_timestamp_column)); let tombstone_col_index =
insert_stmt.columns.push(Ident::new(self.tombstone_column)); Self::find_or_add_column(&mut insert_stmt.columns, self.tombstone_column);
// Add RETURNING for all primary keys (if not already present) // Add RETURNING for all primary keys (if not already present)
// This lets us know the actual PK values after ON CONFLICT // This lets us know the actual PK values after ON CONFLICT
@ -57,7 +88,7 @@ impl InsertTransformer {
// Create UPDATE assignments for all columns except CRDT columns, primary keys, and foreign keys // Create UPDATE assignments for all columns except CRDT columns, primary keys, and foreign keys
let mut assignments = Vec::new(); let mut assignments = Vec::new();
for column in insert_stmt.columns.iter() { for column in insert_stmt.columns.clone().iter() {
let col_name = &column.value; let col_name = &column.value;
// Skip CRDT columns // Skip CRDT columns
@ -87,17 +118,17 @@ impl InsertTransformer {
// Add the HLC timestamp update (using the supplied timestamp) // Add the HLC timestamp update (using the supplied timestamp)
assignments.push(Assignment { assignments.push(Assignment {
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![ObjectNamePart::Identifier( target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![
Ident::new(self.hlc_timestamp_column), ObjectNamePart::Identifier(Ident::new(self.hlc_timestamp_column)),
)])), ])),
value: Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into()), value: Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into()),
}); });
// Set the tombstone to 0 (reactivate the row) // Set the tombstone to 0 (reactivate the row)
assignments.push(Assignment { assignments.push(Assignment {
target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![ObjectNamePart::Identifier( target: AssignmentTarget::ColumnName(sqlparser::ast::ObjectName(vec![
Ident::new(self.tombstone_column), ObjectNamePart::Identifier(Ident::new(self.tombstone_column)),
)])), ])),
value: Expr::Value(Value::Number("0".to_string(), false).into()), value: Expr::Value(Value::Number("0".to_string(), false).into()),
}); });
@ -122,23 +153,26 @@ impl InsertTransformer {
Some(query) => match &mut *query.body { Some(query) => match &mut *query.body {
SetExpr::Values(values) => { SetExpr::Values(values) => {
for row in &mut values.rows { for row in &mut values.rows {
// Add haex_timestamp value let hlc_value =
row.push(Expr::Value( Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
Value::SingleQuotedString(timestamp.to_string()).into(), let tombstone_value =
)); Expr::Value(Value::Number("0".to_string(), false).into());
// Add haex_tombstone value (0 = not deleted)
row.push(Expr::Value(Value::Number("0".to_string(), false).into())); Self::set_or_push_value(row, hlc_col_index, hlc_value);
Self::set_or_push_value(row, tombstone_col_index, tombstone_value);
} }
} }
SetExpr::Select(select) => { SetExpr::Select(select) => {
let hlc_expr = let hlc_value =
Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into()); Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
select.projection.push(SelectItem::UnnamedExpr(hlc_expr)); let tombstone_value = Expr::Value(Value::Number("0".to_string(), false).into());
// Add haex_tombstone value (0 = not deleted)
let tombstone_expr = Expr::Value(Value::Number("0".to_string(), false).into()); Self::set_or_push_projection(&mut select.projection, hlc_col_index, hlc_value);
select Self::set_or_push_projection(
.projection &mut select.projection,
.push(SelectItem::UnnamedExpr(tombstone_expr)); tombstone_col_index,
tombstone_value,
);
} }
_ => { _ => {
return Err(DatabaseError::UnsupportedStatement { return Err(DatabaseError::UnsupportedStatement {
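The net effect of the two new helpers is that the CRDT columns are only appended when the caller's INSERT does not already name them; otherwise their existing positions are reused and the placeholder values overwritten, which presumably keeps statements coming from the Drizzle layer valid. A simplified sketch of that idea over plain strings (not the actual sqlparser-based code, and with a made-up timestamp literal):

fn find_or_add_column(columns: &mut Vec<String>, col_name: &str) -> usize {
    match columns.iter().position(|c| c.as_str() == col_name) {
        Some(index) => index, // already listed: reuse its position
        None => {
            columns.push(col_name.to_string()); // not listed: append it
            columns.len() - 1
        }
    }
}

fn set_or_push_value(row: &mut Vec<String>, index: usize, value: String) {
    if index < row.len() {
        row[index] = value; // column existed: overwrite the placeholder at that position
    } else {
        row.push(value); // column was appended: append the value as well
    }
}

fn main() {
    // The incoming INSERT already names haex_timestamp with a `?` placeholder.
    let mut columns = vec!["id".to_string(), "haex_timestamp".to_string()];
    let mut row = vec!["?".to_string(), "?".to_string()];

    let hlc_idx = find_or_add_column(&mut columns, "haex_timestamp"); // reuses index 1
    let tomb_idx = find_or_add_column(&mut columns, "haex_tombstone"); // appends as index 2

    set_or_push_value(&mut row, hlc_idx, "'<hlc timestamp>'".to_string());
    set_or_push_value(&mut row, tomb_idx, "0".to_string());

    assert_eq!(columns, ["id", "haex_timestamp", "haex_tombstone"]);
    assert_eq!(row, ["?", "'<hlc timestamp>'", "0"]);
}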

View File

@ -1,5 +1,5 @@
pub mod hlc; pub mod hlc;
pub mod insert_transformer; pub mod insert_transformer;
pub mod query_transformer; //pub mod query_transformer;
pub mod transformer; pub mod transformer;
pub mod trigger; pub mod trigger;

View File

@ -1,515 +0,0 @@
// src-tauri/src/crdt/query_transformer.rs
// SELECT-specific CRDT transformations (tombstone filtering)
use crate::crdt::trigger::{TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use sqlparser::ast::{
BinaryOperator, Expr, Ident, ObjectName, SelectItem, SetExpr, TableFactor, Value,
};
use std::collections::HashSet;
/// Helper struct for SELECT transformations
pub struct QueryTransformer {
tombstone_column: &'static str,
}
impl QueryTransformer {
pub fn new() -> Self {
Self {
tombstone_column: TOMBSTONE_COLUMN,
}
}
/// Transforms query statements (adds tombstone filters)
pub fn transform_query_recursive(
&self,
query: &mut sqlparser::ast::Query,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)
}
/// Recursively handles all SetExpr variants with full subquery support
fn add_tombstone_filters_recursive(
&self,
set_expr: &mut SetExpr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
self.add_tombstone_filters_to_select(select, excluded_tables)?;
// Also transform subqueries in projections
for projection in &mut select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
_ => {} // Ignore wildcard projections
}
}
// Transform subqueries in WHERE
if let Some(where_clause) = &mut select.selection {
self.transform_expression_subqueries(where_clause, excluded_tables)?;
}
// Transform subqueries in GROUP BY
match &mut select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - no expressions to transform
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.transform_expression_subqueries(group_expr, excluded_tables)?;
}
}
}
// Transform subqueries in HAVING
if let Some(having) = &mut select.having {
self.transform_expression_subqueries(having, excluded_tables)?;
}
}
SetExpr::SetOperation { left, right, .. } => {
self.add_tombstone_filters_recursive(left, excluded_tables)?;
self.add_tombstone_filters_recursive(right, excluded_tables)?;
}
SetExpr::Query(query) => {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)?;
}
SetExpr::Values(values) => {
// Also transform subqueries in VALUES lists
for row in &mut values.rows {
for expr in row {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {} // Other cases
}
Ok(())
}
/// Transforms subqueries inside expressions
fn transform_expression_subqueries(
&self,
expr: &mut Expr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match expr {
// Plain subqueries
Expr::Subquery(query) => {
self.add_tombstone_filters_recursive(&mut query.body, excluded_tables)?;
}
// EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.add_tombstone_filters_recursive(&mut subquery.body, excluded_tables)?;
}
// IN Subqueries
Expr::InSubquery {
expr: left_expr,
subquery,
..
} => {
self.transform_expression_subqueries(left_expr, excluded_tables)?;
self.add_tombstone_filters_recursive(&mut subquery.body, excluded_tables)?;
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Binary operations
Expr::BinaryOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Unary operations
Expr::UnaryOp {
expr: inner_expr, ..
} => {
self.transform_expression_subqueries(inner_expr, excluded_tables)?;
}
// Nested expressions
Expr::Nested(nested) => {
self.transform_expression_subqueries(nested, excluded_tables)?;
}
// CASE expressions
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.transform_expression_subqueries(op, excluded_tables)?;
}
for case_when in conditions {
self.transform_expression_subqueries(&mut case_when.condition, excluded_tables)?;
self.transform_expression_subqueries(&mut case_when.result, excluded_tables)?;
}
if let Some(else_res) = else_result {
self.transform_expression_subqueries(else_res, excluded_tables)?;
}
}
// Function calls
Expr::Function(func) => match &mut func.args {
sqlparser::ast::FunctionArguments::List(sqlparser::ast::FunctionArgumentList {
args,
..
}) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {}
},
// BETWEEN
Expr::Between {
expr: main_expr,
low,
high,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
self.transform_expression_subqueries(low, excluded_tables)?;
self.transform_expression_subqueries(high, excluded_tables)?;
}
// IN list
Expr::InList {
expr: main_expr,
list,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
for list_expr in list {
self.transform_expression_subqueries(list_expr, excluded_tables)?;
}
}
// IS NULL/IS NOT NULL
Expr::IsNull(inner) | Expr::IsNotNull(inner) => {
self.transform_expression_subqueries(inner, excluded_tables)?;
}
// Other expression types need no transformation
_ => {}
}
Ok(())
}
/// Creates a tombstone filter for a table
pub fn create_tombstone_filter(&self, table_alias: Option<&str>) -> Expr {
let column_expr = match table_alias {
Some(alias) => {
Expr::CompoundIdentifier(vec![Ident::new(alias), Ident::new(self.tombstone_column)])
}
None => {
Expr::Identifier(Ident::new(self.tombstone_column))
}
};
Expr::BinaryOp {
left: Box::new(column_expr),
op: BinaryOperator::NotEq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
}
}
/// Normalizes table names (strips quote characters)
pub fn normalize_table_name(&self, name: &ObjectName) -> String {
let name_str = name.to_string().to_lowercase();
name_str.trim_matches('`').trim_matches('"').to_string()
}
/// Adds tombstone filters to SELECT statements
pub fn add_tombstone_filters_to_select(
&self,
select: &mut sqlparser::ast::Select,
excluded_tables: &HashSet<&str>,
) -> Result<(), DatabaseError> {
// Collect all CRDT tables along with their aliases
let mut crdt_tables = Vec::new();
for twj in &select.from {
if let TableFactor::Table { name, alias, .. } = &twj.relation {
let table_name_str = self.normalize_table_name(name);
if !excluded_tables.contains(table_name_str.as_str()) {
let table_alias = alias.as_ref().map(|a| a.name.value.as_str());
crdt_tables.push((name.clone(), table_alias));
}
}
}
if crdt_tables.is_empty() {
return Ok(());
}
// Check which tombstone columns are already referenced in the WHERE clause
let explicitly_filtered_tables = if let Some(where_clause) = &select.selection {
self.find_explicitly_filtered_tombstone_tables(where_clause, &crdt_tables)
} else {
HashSet::new()
};
// Create filters only for tables that are not yet filtered explicitly
let mut tombstone_filters = Vec::new();
for (table_name, table_alias) in crdt_tables {
let table_name_string = table_name.to_string();
let table_key = table_alias.unwrap_or(&table_name_string);
if !explicitly_filtered_tables.contains(table_key) {
tombstone_filters.push(self.create_tombstone_filter(table_alias));
}
}
// Add the automatic filters
if !tombstone_filters.is_empty() {
let combined_filter = tombstone_filters
.into_iter()
.reduce(|acc, expr| Expr::BinaryOp {
left: Box::new(acc),
op: BinaryOperator::And,
right: Box::new(expr),
})
.unwrap();
match &mut select.selection {
Some(existing) => {
*existing = Expr::BinaryOp {
left: Box::new(existing.clone()),
op: BinaryOperator::And,
right: Box::new(combined_filter),
};
}
None => {
select.selection = Some(combined_filter);
}
}
}
Ok(())
}
/// Finds all tables that already have explicit tombstone filters in the WHERE clause
fn find_explicitly_filtered_tombstone_tables(
&self,
where_expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
) -> HashSet<String> {
let mut filtered_tables = HashSet::new();
self.scan_expression_for_tombstone_references(
where_expr,
crdt_tables,
&mut filtered_tables,
);
filtered_tables
}
/// Recursively scans an expression tree for tombstone column references
fn scan_expression_for_tombstone_references(
&self,
expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) {
match expr {
Expr::Identifier(ident) => {
if ident.value == self.tombstone_column && crdt_tables.len() == 1 {
let table_name_str = crdt_tables[0].0.to_string();
let table_key = crdt_tables[0].1.unwrap_or(&table_name_str);
filtered_tables.insert(table_key.to_string());
}
}
Expr::CompoundIdentifier(idents) => {
if idents.len() == 2 && idents[1].value == self.tombstone_column {
let table_ref = &idents[0].value;
for (table_name, alias) in crdt_tables {
let table_name_str = table_name.to_string();
if table_ref == &table_name_str || alias.map_or(false, |a| a == table_ref) {
filtered_tables.insert(table_ref.clone());
break;
}
}
}
}
Expr::BinaryOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
Expr::UnaryOp { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Nested(nested) => {
self.scan_expression_for_tombstone_references(nested, crdt_tables, filtered_tables);
}
Expr::InList { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Between { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::IsNull(expr) | Expr::IsNotNull(expr) => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Function(func) => {
if let sqlparser::ast::FunctionArguments::List(
sqlparser::ast::FunctionArgumentList { args, .. },
) = &func.args
{
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
}
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.scan_expression_for_tombstone_references(op, crdt_tables, filtered_tables);
}
for case_when in conditions {
self.scan_expression_for_tombstone_references(
&case_when.condition,
crdt_tables,
filtered_tables,
);
self.scan_expression_for_tombstone_references(
&case_when.result,
crdt_tables,
filtered_tables,
);
}
if let Some(else_res) = else_result {
self.scan_expression_for_tombstone_references(
else_res,
crdt_tables,
filtered_tables,
);
}
}
Expr::Subquery(query) => {
self.analyze_query_for_tombstone_references(query, crdt_tables, filtered_tables)
.ok();
}
Expr::Exists { subquery, .. } => {
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::InSubquery { expr, subquery, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
_ => {}
}
}
fn analyze_query_for_tombstone_references(
&self,
query: &sqlparser::ast::Query,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
self.analyze_set_expr_for_tombstone_references(&query.body, crdt_tables, filtered_tables)
}
fn analyze_set_expr_for_tombstone_references(
&self,
set_expr: &SetExpr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
if let Some(where_clause) = &select.selection {
self.scan_expression_for_tombstone_references(
where_clause,
crdt_tables,
filtered_tables,
);
}
for projection in &select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
_ => {}
}
}
match &select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.scan_expression_for_tombstone_references(
group_expr,
crdt_tables,
filtered_tables,
);
}
}
}
if let Some(having) = &select.having {
self.scan_expression_for_tombstone_references(
having,
crdt_tables,
filtered_tables,
);
}
}
SetExpr::SetOperation { left, right, .. } => {
self.analyze_set_expr_for_tombstone_references(left, crdt_tables, filtered_tables)?;
self.analyze_set_expr_for_tombstone_references(
right,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Query(query) => {
self.analyze_set_expr_for_tombstone_references(
&query.body,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Values(values) => {
for row in &values.rows {
for expr in row {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {}
}
Ok(())
}
}

View File

@ -1,12 +1,12 @@
// src-tauri/src/crdt/transformer.rs
use crate::crdt::insert_transformer::InsertTransformer; use crate::crdt::insert_transformer::InsertTransformer;
use crate::crdt::query_transformer::QueryTransformer;
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN}; use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError; use crate::database::error::DatabaseError;
use crate::table_names::{TABLE_CRDT_CONFIGS, TABLE_CRDT_LOGS}; use crate::table_names::{TABLE_CRDT_CONFIGS, TABLE_CRDT_LOGS};
use sqlparser::ast::{ use sqlparser::ast::{
Assignment, AssignmentTarget, BinaryOperator, ColumnDef, DataType, Expr, Ident, Assignment, AssignmentTarget, BinaryOperator, ColumnDef, DataType, Expr, Ident, ObjectName,
ObjectName, ObjectNamePart, Statement, TableFactor, TableObject, ObjectNamePart, SelectItem, SetExpr, Statement, TableFactor, TableObject, Value,
Value,
}; };
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashSet; use std::collections::HashSet;
@ -38,11 +38,7 @@ impl CrdtColumns {
} }
}; };
Expr::BinaryOp { Expr::IsNotTrue(Box::new(column_expr))
left: Box::new(column_expr),
op: BinaryOperator::NotEq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
}
} }
/// Creates a tombstone assignment for UPDATE/DELETE /// Creates a tombstone assignment for UPDATE/DELETE
@ -113,11 +109,15 @@ impl CrdtTransformer {
Cow::Owned(name_str.trim_matches('`').trim_matches('"').to_string()) Cow::Owned(name_str.trim_matches('`').trim_matches('"').to_string())
} }
// =================================================================
// PUBLIC API METHODS
// =================================================================
pub fn transform_select_statement(&self, stmt: &mut Statement) -> Result<(), DatabaseError> { pub fn transform_select_statement(&self, stmt: &mut Statement) -> Result<(), DatabaseError> {
match stmt { match stmt {
Statement::Query(query) => { Statement::Query(query) => {
let query_transformer = QueryTransformer::new(); // Now calls the private method on this struct
query_transformer.transform_query_recursive(query, &self.excluded_tables) self.transform_select_query_recursive(&mut query.body, &self.excluded_tables)
} }
// Catch all other cases and return an error // Catch all other cases and return an error
_ => Err(DatabaseError::UnsupportedStatement { _ => Err(DatabaseError::UnsupportedStatement {
@ -165,15 +165,21 @@ impl CrdtTransformer {
.map(|c| c.name.clone()) .map(|c| c.name.clone())
.collect(); .collect();
let foreign_keys = crate::crdt::trigger::get_foreign_key_columns(tx, &table_name_str) let foreign_keys =
.map_err(|e| DatabaseError::ExecutionError { crate::crdt::trigger::get_foreign_key_columns(tx, &table_name_str)
sql: format!("PRAGMA foreign_key_list('{}')", table_name_str), .map_err(|e| DatabaseError::ExecutionError {
reason: e.to_string(), sql: format!("PRAGMA foreign_key_list('{}')", table_name_str),
table: Some(table_name_str.to_string()), reason: e.to_string(),
})?; table: Some(table_name_str.to_string()),
})?;
let insert_transformer = InsertTransformer::new(); let insert_transformer = InsertTransformer::new();
insert_transformer.transform_insert(insert_stmt, hlc_timestamp, &primary_keys, &foreign_keys)?; insert_transformer.transform_insert(
insert_stmt,
hlc_timestamp,
&primary_keys,
&foreign_keys,
)?;
} }
} }
Ok(None) Ok(None)
@ -195,7 +201,10 @@ impl CrdtTransformer {
eprintln!("DEBUG DELETE (with_table_info): table='{}', is_crdt_sync={}, normalized='{}'", eprintln!("DEBUG DELETE (with_table_info): table='{}', is_crdt_sync={}, normalized='{}'",
table_name, is_crdt, table_name_str); table_name, is_crdt, table_name_str);
if is_crdt { if is_crdt {
eprintln!("DEBUG: Transforming DELETE to UPDATE for table '{}'", table_name_str); eprintln!(
"DEBUG: Transforming DELETE to UPDATE for table '{}'",
table_name_str
);
self.transform_delete_to_update(stmt, hlc_timestamp)?; self.transform_delete_to_update(stmt, hlc_timestamp)?;
} }
Ok(None) Ok(None)
@ -242,7 +251,12 @@ impl CrdtTransformer {
if self.is_crdt_sync_table(name) { if self.is_crdt_sync_table(name) {
// Without a connection: empty PK and FK lists (all columns get updated) // Without a connection: empty PK and FK lists (all columns get updated)
let insert_transformer = InsertTransformer::new(); let insert_transformer = InsertTransformer::new();
insert_transformer.transform_insert(insert_stmt, hlc_timestamp, &[], &[])?; insert_transformer.transform_insert(
insert_stmt,
hlc_timestamp,
&[],
&[],
)?;
} }
} }
Ok(None) Ok(None)
@ -281,6 +295,9 @@ impl CrdtTransformer {
} }
} }
// =================================================================
// PRIVATE HELPERS (DELETE/UPDATE)
// =================================================================
/// Transforms DELETE into UPDATE (soft delete) /// Transforms DELETE into UPDATE (soft delete)
fn transform_delete_to_update( fn transform_delete_to_update(
@ -341,4 +358,475 @@ impl CrdtTransformer {
None None
} }
} }
// =================================================================
// PRIVATE HELPERS (SELECT TRANSFORMATION)
// (These methods come from the old `query_transformer.rs`)
// =================================================================
/// Recursively handles all SetExpr variants with full subquery support
fn transform_select_query_recursive(
&self,
set_expr: &mut SetExpr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
self.add_tombstone_filters_to_select(select, excluded_tables)?;
// Also transform subqueries in projections
for projection in &mut select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
_ => {} // Ignore wildcard projections
}
}
// Transform subqueries in WHERE
if let Some(where_clause) = &mut select.selection {
self.transform_expression_subqueries(where_clause, excluded_tables)?;
}
// Transform subqueries in GROUP BY
match &mut select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - no expressions to transform
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.transform_expression_subqueries(group_expr, excluded_tables)?;
}
}
}
// Transform subqueries in HAVING
if let Some(having) = &mut select.having {
self.transform_expression_subqueries(having, excluded_tables)?;
}
}
SetExpr::SetOperation { left, right, .. } => {
self.transform_select_query_recursive(left, excluded_tables)?;
self.transform_select_query_recursive(right, excluded_tables)?;
}
SetExpr::Query(query) => {
self.transform_select_query_recursive(&mut query.body, excluded_tables)?;
}
SetExpr::Values(values) => {
// Also transform subqueries in VALUES lists
for row in &mut values.rows {
for expr in row {
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {} // Other cases
}
Ok(())
}
/// Transforms subqueries inside expressions
fn transform_expression_subqueries(
&self,
expr: &mut Expr,
excluded_tables: &std::collections::HashSet<&str>,
) -> Result<(), DatabaseError> {
match expr {
// Plain subqueries
Expr::Subquery(query) => {
self.transform_select_query_recursive(&mut query.body, excluded_tables)?;
}
// EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.transform_select_query_recursive(&mut subquery.body, excluded_tables)?;
}
// IN Subqueries
Expr::InSubquery {
expr: left_expr,
subquery,
..
} => {
self.transform_expression_subqueries(left_expr, excluded_tables)?;
self.transform_select_query_recursive(&mut subquery.body, excluded_tables)?;
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Binary operations
Expr::BinaryOp { left, right, .. } => {
self.transform_expression_subqueries(left, excluded_tables)?;
self.transform_expression_subqueries(right, excluded_tables)?;
}
// Unary operations
Expr::UnaryOp {
expr: inner_expr, ..
} => {
self.transform_expression_subqueries(inner_expr, excluded_tables)?;
}
// Nested expressions
Expr::Nested(nested) => {
self.transform_expression_subqueries(nested, excluded_tables)?;
}
// CASE expressions
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.transform_expression_subqueries(op, excluded_tables)?;
}
for case_when in conditions {
self.transform_expression_subqueries(
&mut case_when.condition,
excluded_tables,
)?;
self.transform_expression_subqueries(&mut case_when.result, excluded_tables)?;
}
if let Some(else_res) = else_result {
self.transform_expression_subqueries(else_res, excluded_tables)?;
}
}
// Function calls
Expr::Function(func) => match &mut func.args {
sqlparser::ast::FunctionArguments::List(sqlparser::ast::FunctionArgumentList {
args,
..
}) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.transform_expression_subqueries(expr, excluded_tables)?;
}
}
}
_ => {}
},
// BETWEEN
Expr::Between {
expr: main_expr,
low,
high,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
self.transform_expression_subqueries(low, excluded_tables)?;
self.transform_expression_subqueries(high, excluded_tables)?;
}
// IN list
Expr::InList {
expr: main_expr,
list,
..
} => {
self.transform_expression_subqueries(main_expr, excluded_tables)?;
for list_expr in list {
self.transform_expression_subqueries(list_expr, excluded_tables)?;
}
}
// IS NULL/IS NOT NULL
Expr::IsNull(inner) | Expr::IsNotNull(inner) => {
self.transform_expression_subqueries(inner, excluded_tables)?;
}
// Other expression types need no transformation
_ => {}
}
Ok(())
}
/// Adds tombstone filters to SELECT statements
fn add_tombstone_filters_to_select(
&self,
select: &mut sqlparser::ast::Select,
excluded_tables: &HashSet<&str>,
) -> Result<(), DatabaseError> {
// Collect all CRDT tables along with their aliases
let mut crdt_tables = Vec::new();
for twj in &select.from {
if let TableFactor::Table { name, alias, .. } = &twj.relation {
// Uses the central logic of CrdtTransformer
if self.is_crdt_sync_table(name) {
let table_alias = alias.as_ref().map(|a| a.name.value.as_str());
crdt_tables.push((name.clone(), table_alias));
}
}
}
if crdt_tables.is_empty() {
return Ok(());
}
// Check which tombstone columns are already referenced in the WHERE clause
let explicitly_filtered_tables = if let Some(where_clause) = &select.selection {
self.find_explicitly_filtered_tombstone_tables(where_clause, &crdt_tables)
} else {
HashSet::new()
};
// Create filters only for tables that are not yet filtered explicitly
let mut tombstone_filters = Vec::new();
for (table_name, table_alias) in crdt_tables {
let table_name_string = table_name.to_string();
let table_key = table_alias.unwrap_or(&table_name_string);
if !explicitly_filtered_tables.contains(table_key) {
// Uses the central logic of CrdtColumns
tombstone_filters.push(self.columns.create_tombstone_filter(table_alias));
}
}
// Add the automatic filters
if !tombstone_filters.is_empty() {
let combined_filter = tombstone_filters
.into_iter()
.reduce(|acc, expr| Expr::BinaryOp {
left: Box::new(acc),
op: BinaryOperator::And,
right: Box::new(expr),
})
.unwrap();
match &mut select.selection {
Some(existing) => {
*existing = Expr::BinaryOp {
left: Box::new(existing.clone()),
op: BinaryOperator::And,
right: Box::new(combined_filter),
};
}
None => {
select.selection = Some(combined_filter);
}
}
}
Ok(())
}
/// Finds all tables that already have explicit tombstone filters in the WHERE clause
fn find_explicitly_filtered_tombstone_tables(
&self,
where_expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
) -> HashSet<String> {
let mut filtered_tables = HashSet::new();
self.scan_expression_for_tombstone_references(
where_expr,
crdt_tables,
&mut filtered_tables,
);
filtered_tables
}
/// Recursively scans an expression tree for tombstone column references
fn scan_expression_for_tombstone_references(
&self,
expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) {
match expr {
Expr::Identifier(ident) => {
// Uses the central configuration of CrdtColumns
if ident.value == self.columns.tombstone && crdt_tables.len() == 1 {
let table_name_str = crdt_tables[0].0.to_string();
let table_key = crdt_tables[0].1.unwrap_or(&table_name_str);
filtered_tables.insert(table_key.to_string());
}
}
Expr::CompoundIdentifier(idents) => {
// Uses the central configuration of CrdtColumns
if idents.len() == 2 && idents[1].value == self.columns.tombstone {
let table_ref = &idents[0].value;
for (table_name, alias) in crdt_tables {
let table_name_str = table_name.to_string();
if table_ref == &table_name_str || alias.map_or(false, |a| a == table_ref) {
filtered_tables.insert(table_ref.clone());
break;
}
}
}
}
Expr::BinaryOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
Expr::UnaryOp { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Nested(nested) => {
self.scan_expression_for_tombstone_references(nested, crdt_tables, filtered_tables);
}
Expr::InList { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Between { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::IsNull(expr) | Expr::IsNotNull(expr) => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
Expr::Function(func) => {
if let sqlparser::ast::FunctionArguments::List(
sqlparser::ast::FunctionArgumentList { args, .. },
) = &func.args
{
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
}
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.scan_expression_for_tombstone_references(op, crdt_tables, filtered_tables);
}
for case_when in conditions {
self.scan_expression_for_tombstone_references(
&case_when.condition,
crdt_tables,
filtered_tables,
);
self.scan_expression_for_tombstone_references(
&case_when.result,
crdt_tables,
filtered_tables,
);
}
if let Some(else_res) = else_result {
self.scan_expression_for_tombstone_references(
else_res,
crdt_tables,
filtered_tables,
);
}
}
Expr::Subquery(query) => {
self.analyze_query_for_tombstone_references(query, crdt_tables, filtered_tables)
.ok();
}
Expr::Exists { subquery, .. } => {
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::InSubquery { expr, subquery, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
self.analyze_query_for_tombstone_references(subquery, crdt_tables, filtered_tables)
.ok();
}
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
_ => {}
}
}
fn analyze_query_for_tombstone_references(
&self,
query: &sqlparser::ast::Query,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
self.analyze_set_expr_for_tombstone_references(&query.body, crdt_tables, filtered_tables)
}
fn analyze_set_expr_for_tombstone_references(
&self,
set_expr: &SetExpr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
if let Some(where_clause) = &select.selection {
self.scan_expression_for_tombstone_references(
where_clause,
crdt_tables,
filtered_tables,
);
}
for projection in &select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
_ => {}
}
}
match &select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.scan_expression_for_tombstone_references(
group_expr,
crdt_tables,
filtered_tables,
);
}
}
}
if let Some(having) = &select.having {
self.scan_expression_for_tombstone_references(
having,
crdt_tables,
filtered_tables,
);
}
}
SetExpr::SetOperation { left, right, .. } => {
self.analyze_set_expr_for_tombstone_references(left, crdt_tables, filtered_tables)?;
self.analyze_set_expr_for_tombstone_references(
right,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Query(query) => {
self.analyze_set_expr_for_tombstone_references(
&query.body,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Values(values) => {
for row in &values.rows {
for expr in row {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {}
}
Ok(())
}
} }
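A side note on the filter change earlier in this file: the generated tombstone predicate moved from `haex_tombstone != 1` to `haex_tombstone IS NOT TRUE`, which keeps rows whose tombstone is NULL instead of silently dropping them (`NULL != 1` evaluates to NULL, while `NULL IS NOT TRUE` is true). A toy rusqlite sketch of that SQLite behavior, with a made-up table and data (IS NOT TRUE needs SQLite 3.23+, which rusqlite's bundled build provides):

use rusqlite::Connection;

fn count(conn: &Connection, where_clause: &str) -> rusqlite::Result<i64> {
    conn.query_row(
        &format!("SELECT COUNT(*) FROM t WHERE {where_clause}"),
        [],
        |row| row.get(0),
    )
}

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE t (id INTEGER, haex_tombstone INTEGER);
         INSERT INTO t VALUES (1, 0);    -- live row
         INSERT INTO t VALUES (2, 1);    -- soft-deleted row
         INSERT INTO t VALUES (3, NULL); -- tombstone never set",
    )?;

    // Old filter: NULL != 1 evaluates to NULL, so row 3 silently disappears.
    assert_eq!(count(&conn, "haex_tombstone != 1")?, 1);

    // New filter: NULL IS NOT TRUE is true, so rows 1 and 3 are both kept.
    assert_eq!(count(&conn, "haex_tombstone IS NOT TRUE")?, 2);
    Ok(())
}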

View File

@ -15,7 +15,6 @@ use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::Parser; use sqlparser::parser::Parser;
/// Opens and initializes an encrypted database /// Opens and initializes an encrypted database
///
pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connection, DatabaseError> { pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connection, DatabaseError> {
let flags = if create { let flags = if create {
OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_CREATE OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_CREATE
@ -159,45 +158,23 @@ pub fn execute(
let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect(); let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect();
with_connection(connection, |conn| { with_connection(connection, |conn| {
// Check if the SQL contains RETURNING clause if sql.to_uppercase().contains("RETURNING") {
let has_returning = sql.to_uppercase().contains("RETURNING"); let mut stmt = conn.prepare(&sql)?;
if has_returning {
// Use prepare + query for RETURNING statements
let mut stmt = conn.prepare(&sql).map_err(|e| DatabaseError::PrepareError {
reason: e.to_string(),
})?;
let num_columns = stmt.column_count(); let num_columns = stmt.column_count();
let mut rows = stmt let mut rows = stmt.query(&params_sql[..])?;
.query(&params_sql[..])
.map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
let mut result_vec: Vec<Vec<JsonValue>> = Vec::new(); let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
while let Some(row) = rows.next().map_err(|e| DatabaseError::RowProcessingError { while let Some(row) = rows.next()? {
reason: format!("Row iteration error: {}", e),
})? {
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns); let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns { for i in 0..num_columns {
let value_ref = row.get_ref(i).map_err(|e| { let value_ref = row.get_ref(i)?;
DatabaseError::RowProcessingError {
reason: format!("Failed to get column {}: {}", i, e),
}
})?;
let json_val = convert_value_ref_to_json(value_ref)?; let json_val = convert_value_ref_to_json(value_ref)?;
row_values.push(json_val); row_values.push(json_val);
} }
result_vec.push(row_values); result_vec.push(row_values);
} }
Ok(result_vec) Ok(result_vec)
} else { } else {
// For non-RETURNING statements, just execute and return empty array
conn.execute(&sql, &params_sql[..]).map_err(|e| { conn.execute(&sql, &params_sql[..]).map_err(|e| {
let table_name = extract_primary_table_name_from_sql(&sql).unwrap_or(None); let table_name = extract_primary_table_name_from_sql(&sql).unwrap_or(None);
DatabaseError::ExecutionError { DatabaseError::ExecutionError {
@ -206,7 +183,6 @@ pub fn execute(
table: table_name, table: table_name,
} }
})?; })?;
Ok(vec![]) Ok(vec![])
} }
}) })
@ -236,44 +212,34 @@ pub fn select(
let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect(); let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect();
with_connection(connection, |conn| { with_connection(connection, |conn| {
let mut stmt = conn let mut stmt = conn.prepare(&sql)?;
.prepare(&sql)
.map_err(|e| DatabaseError::PrepareError {
reason: e.to_string(),
})?;
let num_columns = stmt.column_count(); let num_columns = stmt.column_count();
let mut rows = stmt.query(&params_sql[..])?;
let mut rows = stmt
.query(&params_sql[..])
.map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
let mut result_vec: Vec<Vec<JsonValue>> = Vec::new(); let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
while let Some(row) = rows.next().map_err(|e| DatabaseError::RowProcessingError { while let Some(row) = rows.next()? {
reason: format!("Row iteration error: {}", e),
})? {
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns); let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns { for i in 0..num_columns {
let value_ref = row let value_ref = row.get_ref(i)?;
.get_ref(i)
.map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Failed to get column {}: {}", i, e),
})?;
let json_val = convert_value_ref_to_json(value_ref)?; let json_val = convert_value_ref_to_json(value_ref)?;
row_values.push(json_val); row_values.push(json_val);
} }
result_vec.push(row_values); result_vec.push(row_values);
} }
Ok(result_vec) Ok(result_vec)
}) })
} }
pub fn select_with_crdt(
sql: String,
params: Vec<JsonValue>,
connection: &DbConnection,
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
with_connection(&connection, |conn| {
SqlExecutor::select_internal(conn, &sql, &params)
})
}
/// Converts a rusqlite ValueRef to JSON /// Converts a rusqlite ValueRef to JSON
pub fn convert_value_ref_to_json(value_ref: ValueRef) -> Result<JsonValue, DatabaseError> { pub fn convert_value_ref_to_json(value_ref: ValueRef) -> Result<JsonValue, DatabaseError> {
let json_val = match value_ref { let json_val = match value_ref {
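On the RETURNING branch above: rusqlite's execute refuses statements that produce rows, so an INSERT ... RETURNING has to go through prepare and query, which is what the simplified error handling now does with plain `?`. A stripped-down sketch of that pattern, with a toy table and a minimal stand-in for convert_value_ref_to_json (assumes the rusqlite and serde_json crates and SQLite 3.35+ for RETURNING):

use rusqlite::{types::ValueRef, Connection};
use serde_json::{json, Value as JsonValue};

// Minimal stand-in for convert_value_ref_to_json.
fn to_json(v: ValueRef) -> JsonValue {
    match v {
        ValueRef::Null => JsonValue::Null,
        ValueRef::Integer(i) => json!(i),
        ValueRef::Real(f) => json!(f),
        ValueRef::Text(t) => json!(String::from_utf8_lossy(t)),
        ValueRef::Blob(b) => json!(b),
    }
}

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch("CREATE TABLE item (id TEXT PRIMARY KEY, name TEXT);")?;

    // RETURNING makes the statement yield rows, so prepare + query instead of execute.
    let mut stmt = conn.prepare("INSERT INTO item (id, name) VALUES (?1, ?2) RETURNING id")?;
    let num_columns = stmt.column_count();
    let mut rows = stmt.query(("abc-123", "demo"))?;

    let mut result: Vec<Vec<JsonValue>> = Vec::new();
    while let Some(row) = rows.next()? {
        let mut row_values = Vec::with_capacity(num_columns);
        for i in 0..num_columns {
            row_values.push(to_json(row.get_ref(i)?));
        }
        result.push(row_values);
    }
    assert_eq!(result, vec![vec![json!("abc-123")]]);
    Ok(())
}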

View File

@ -43,6 +43,15 @@ pub fn sql_execute(
core::execute(sql, params, &state.db) core::execute(sql, params, &state.db)
} }
#[tauri::command]
pub fn sql_select_with_crdt(
sql: String,
params: Vec<JsonValue>,
state: State<'_, AppState>,
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
core::select_with_crdt(sql, params, &state.db)
}
#[tauri::command] #[tauri::command]
pub fn sql_execute_with_crdt( pub fn sql_execute_with_crdt(
sql: String, sql: String,
@ -195,15 +204,17 @@ pub fn list_vaults(app_handle: AppHandle) -> Result<Vec<VaultInfo>, DatabaseErro
/// Checks if a vault with the given name exists /// Checks if a vault with the given name exists
#[tauri::command] #[tauri::command]
pub fn vault_exists(app_handle: AppHandle, db_name: String) -> Result<bool, DatabaseError> { pub fn vault_exists(app_handle: AppHandle, vault_name: String) -> Result<bool, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &db_name)?; let vault_path = get_vault_path(&app_handle, &vault_name)?;
Ok(Path::new(&vault_path).exists()) Ok(Path::new(&vault_path).exists())
} }
/// Deletes a vault database file /// Deletes a vault database file
#[tauri::command] #[tauri::command]
pub fn delete_vault(app_handle: AppHandle, db_name: String) -> Result<String, DatabaseError> { pub fn delete_vault(app_handle: AppHandle, vault_name: String) -> Result<String, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &db_name)?; let vault_path = get_vault_path(&app_handle, &vault_name)?;
let vault_shm_path = format!("{}-shm", vault_path);
let vault_wal_path = format!("{}-wal", vault_path);
if !Path::new(&vault_path).exists() { if !Path::new(&vault_path).exists() {
return Err(DatabaseError::IoError { return Err(DatabaseError::IoError {
@ -212,12 +223,26 @@ pub fn delete_vault(app_handle: AppHandle, db_name: String) -> Result<String, Da
}); });
} }
if Path::new(&vault_shm_path).exists() {
fs::remove_file(&vault_shm_path).map_err(|e| DatabaseError::IoError {
path: vault_shm_path.clone(),
reason: format!("Failed to delete vault: {}", e),
})?;
}
if Path::new(&vault_wal_path).exists() {
fs::remove_file(&vault_wal_path).map_err(|e| DatabaseError::IoError {
path: vault_wal_path.clone(),
reason: format!("Failed to delete vault: {}", e),
})?;
}
fs::remove_file(&vault_path).map_err(|e| DatabaseError::IoError { fs::remove_file(&vault_path).map_err(|e| DatabaseError::IoError {
path: vault_path.clone(), path: vault_path.clone(),
reason: format!("Failed to delete vault: {}", e), reason: format!("Failed to delete vault: {}", e),
})?; })?;
Ok(format!("Vault '{}' successfully deleted", db_name)) Ok(format!("Vault '{}' successfully deleted", vault_name))
} }
#[tauri::command] #[tauri::command]

View File

@ -10,7 +10,8 @@ use crate::extension::permissions::manager::PermissionManager;
use crate::extension::permissions::types::ExtensionPermission; use crate::extension::permissions::types::ExtensionPermission;
use crate::table_names::{TABLE_EXTENSIONS, TABLE_EXTENSION_PERMISSIONS}; use crate::table_names::{TABLE_EXTENSIONS, TABLE_EXTENSION_PERMISSIONS};
use crate::AppState; use crate::AppState;
use std::collections::HashMap; use serde_json::Value as JsonValue;
use std::collections::{HashMap, HashSet};
use std::fs; use std::fs;
use std::io::Cursor; use std::io::Cursor;
use std::path::PathBuf; use std::path::PathBuf;
@ -167,7 +168,6 @@ impl ExtensionManager {
Ok(specific_extension_dir) Ok(specific_extension_dir)
} }
pub fn add_production_extension(&self, extension: Extension) -> Result<(), ExtensionError> { pub fn add_production_extension(&self, extension: Extension) -> Result<(), ExtensionError> {
if extension.id.is_empty() { if extension.id.is_empty() {
return Err(ExtensionError::ValidationError { return Err(ExtensionError::ValidationError {
@ -223,11 +223,12 @@ impl ExtensionManager {
name: &str, name: &str,
) -> Result<Option<(String, Extension)>, ExtensionError> { ) -> Result<Option<(String, Extension)>, ExtensionError> {
// 1. Check dev extensions first (higher priority) // 1. Check dev extensions first (higher priority)
let dev_extensions = self.dev_extensions.lock().map_err(|e| { let dev_extensions =
ExtensionError::MutexPoisoned { self.dev_extensions
reason: e.to_string(), .lock()
} .map_err(|e| ExtensionError::MutexPoisoned {
})?; reason: e.to_string(),
})?;
for (id, ext) in dev_extensions.iter() { for (id, ext) in dev_extensions.iter() {
if ext.manifest.public_key == public_key && ext.manifest.name == name { if ext.manifest.public_key == public_key && ext.manifest.name == name {
@ -236,11 +237,12 @@ impl ExtensionManager {
} }
// 2. Check production extensions // 2. Check production extensions
let prod_extensions = self.production_extensions.lock().map_err(|e| { let prod_extensions =
ExtensionError::MutexPoisoned { self.production_extensions
reason: e.to_string(), .lock()
} .map_err(|e| ExtensionError::MutexPoisoned {
})?; reason: e.to_string(),
})?;
for (id, ext) in prod_extensions.iter() { for (id, ext) in prod_extensions.iter() {
if ext.manifest.public_key == public_key && ext.manifest.name == name { if ext.manifest.public_key == public_key && ext.manifest.name == name {
@ -262,11 +264,7 @@ impl ExtensionManager {
.map(|(_, ext)| ext)) .map(|(_, ext)| ext))
} }
pub fn remove_extension( pub fn remove_extension(&self, public_key: &str, name: &str) -> Result<(), ExtensionError> {
&self,
public_key: &str,
name: &str,
) -> Result<(), ExtensionError> {
let (id, _) = self let (id, _) = self
.find_extension_id_by_public_key_and_name(public_key, name)? .find_extension_id_by_public_key_and_name(public_key, name)?
.ok_or_else(|| ExtensionError::NotFound { .ok_or_else(|| ExtensionError::NotFound {
@ -276,11 +274,12 @@ impl ExtensionManager {
// Remove from dev extensions first // Remove from dev extensions first
{ {
let mut dev_extensions = self.dev_extensions.lock().map_err(|e| { let mut dev_extensions =
ExtensionError::MutexPoisoned { self.dev_extensions
reason: e.to_string(), .lock()
} .map_err(|e| ExtensionError::MutexPoisoned {
})?; reason: e.to_string(),
})?;
if dev_extensions.remove(&id).is_some() { if dev_extensions.remove(&id).is_some() {
return Ok(()); return Ok(());
} }
@ -288,11 +287,12 @@ impl ExtensionManager {
// Remove from production extensions // Remove from production extensions
{ {
let mut prod_extensions = self.production_extensions.lock().map_err(|e| { let mut prod_extensions =
ExtensionError::MutexPoisoned { self.production_extensions
reason: e.to_string(), .lock()
} .map_err(|e| ExtensionError::MutexPoisoned {
})?; reason: e.to_string(),
})?;
prod_extensions.remove(&id); prod_extensions.remove(&id);
} }
@ -316,7 +316,10 @@ impl ExtensionManager {
})?; })?;
eprintln!("DEBUG: Removing extension with ID: {}", extension.id); eprintln!("DEBUG: Removing extension with ID: {}", extension.id);
eprintln!("DEBUG: Extension name: {}, version: {}", extension_name, extension_version); eprintln!(
"DEBUG: Extension name: {}, version: {}",
extension_name, extension_version
);
// Lösche Permissions und Extension-Eintrag in einer Transaktion // Lösche Permissions und Extension-Eintrag in einer Transaktion
with_connection(&state.db, |conn| { with_connection(&state.db, |conn| {
@ -327,12 +330,11 @@ impl ExtensionManager {
})?; })?;
// Lösche alle Permissions mit extension_id // Lösche alle Permissions mit extension_id
eprintln!("DEBUG: Deleting permissions for extension_id: {}", extension.id); eprintln!(
PermissionManager::delete_permissions_in_transaction( "DEBUG: Deleting permissions for extension_id: {}",
&tx, extension.id
&hlc_service, );
&extension.id, PermissionManager::delete_permissions_in_transaction(&tx, &hlc_service, &extension.id)?;
)?;
// Delete the extension entry with this extension_id // Delete the extension entry with this extension_id
let sql = format!("DELETE FROM {} WHERE id = ?", TABLE_EXTENSIONS); let sql = format!("DELETE FROM {} WHERE id = ?", TABLE_EXTENSIONS);
@ -469,9 +471,12 @@ impl ExtensionManager {
let actual_extension_id = with_connection(&state.db, |conn| { let actual_extension_id = with_connection(&state.db, |conn| {
let tx = conn.transaction().map_err(DatabaseError::from)?; let tx = conn.transaction().map_err(DatabaseError::from)?;
let hlc_service = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned { let hlc_service_guard = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned {
reason: "Failed to lock HLC service".to_string(), reason: "Failed to lock HLC service".to_string(),
})?; })?;
// Clone so the MutexGuard is released before potentially long DB operations
let hlc_service = hlc_service_guard.clone();
drop(hlc_service_guard);
// Create the PK remapping context for the whole transaction // Create the PK remapping context for the whole transaction
// This enables automatic FK remapping when ON CONFLICT fires for an extension // This enables automatic FK remapping when ON CONFLICT fires for an extension
@ -484,34 +489,35 @@ impl ExtensionManager {
TABLE_EXTENSIONS TABLE_EXTENSIONS
); );
let (_tables, returning_results) = SqlExecutor::query_internal_typed_with_context( let (_tables, returning_results): (HashSet<String>, Vec<Vec<JsonValue>>) =
&tx, SqlExecutor::query_internal_typed_with_context(
&hlc_service, &tx,
&insert_ext_sql, &hlc_service,
rusqlite::params![ &insert_ext_sql,
extension_id, rusqlite::params![
extracted.manifest.name, extension_id,
extracted.manifest.version, extracted.manifest.name,
extracted.manifest.author, extracted.manifest.version,
extracted.manifest.entry, extracted.manifest.author,
extracted.manifest.icon, extracted.manifest.entry,
extracted.manifest.public_key, extracted.manifest.icon,
extracted.manifest.signature, extracted.manifest.public_key,
extracted.manifest.homepage, extracted.manifest.signature,
extracted.manifest.description, extracted.manifest.homepage,
true, // enabled extracted.manifest.description,
], true, // enabled
&mut pk_context, ],
)?; &mut pk_context,
)?;
// Use the actual ID from the database (important with ON CONFLICT) // Use the actual ID from the database (important with ON CONFLICT)
// The haex_extensions table has a single-column PK named "id" // The haex_extensions table has a single-column PK named "id"
let actual_extension_id = returning_results let actual_extension_id = returning_results
.first() .first() // Gets the first row (the inner Vec<JsonValue>, e.g. Some(&["uuid-string"]))
.and_then(|row| row.first()) .and_then(|row_array| row_array.first()) // Gets its first element (e.g. Some(&JsonValue::String("uuid-string")))
.and_then(|val| val.as_str()) .and_then(|val| val.as_str()) // Converts to &str (e.g. Some("uuid-string"))
.map(|s| s.to_string()) .map(|s| s.to_string()) // Converts to String
.unwrap_or_else(|| extension_id.clone()); .unwrap_or_else(|| extension_id.clone()); // Fallback
eprintln!( eprintln!(
"DEBUG: Extension UUID - Generated: {}, Actual from DB: {}", "DEBUG: Extension UUID - Generated: {}, Actual from DB: {}",
@ -573,6 +579,7 @@ impl ExtensionManager {
app_handle: &AppHandle, app_handle: &AppHandle,
state: &State<'_, AppState>, state: &State<'_, AppState>,
) -> Result<Vec<String>, ExtensionError> { ) -> Result<Vec<String>, ExtensionError> {
// Clear existing data
self.production_extensions self.production_extensions
.lock() .lock()
.map_err(|e| ExtensionError::MutexPoisoned { .map_err(|e| ExtensionError::MutexPoisoned {
@ -592,19 +599,22 @@ impl ExtensionManager {
})? })?
.clear(); .clear();
// Step 1: Load all data from the database in one go. // Load all data from the database
let extensions = with_connection(&state.db, |conn| { let extensions = with_connection(&state.db, |conn| {
let sql = format!( let sql = format!(
"SELECT id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled FROM {}", "SELECT id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled FROM {}",
TABLE_EXTENSIONS TABLE_EXTENSIONS
); );
eprintln!("DEBUG: SQL Query before transformation: {}", sql); eprintln!("DEBUG: SQL Query before transformation: {}", sql);
// select_internal now returns Vec<Vec<JsonValue>>
let results = SqlExecutor::select_internal(conn, &sql, &[])?; let results = SqlExecutor::select_internal(conn, &sql, &[])?;
eprintln!("DEBUG: Query returned {} results", results.len()); eprintln!("DEBUG: Query returned {} results", results.len());
let mut data = Vec::new(); let mut data = Vec::new();
for result in results { for row in results {
let id = result["id"] // We expect the values in the order of the SELECT statement
let id = row[0]
.as_str() .as_str()
.ok_or_else(|| DatabaseError::SerializationError { .ok_or_else(|| DatabaseError::SerializationError {
reason: "Missing id field".to_string(), reason: "Missing id field".to_string(),
@ -612,31 +622,31 @@ impl ExtensionManager {
.to_string(); .to_string();
let manifest = ExtensionManifest { let manifest = ExtensionManifest {
name: result["name"] name: row[1]
.as_str() .as_str()
.ok_or_else(|| DatabaseError::SerializationError { .ok_or_else(|| DatabaseError::SerializationError {
reason: "Missing name field".to_string(), reason: "Missing name field".to_string(),
})? })?
.to_string(), .to_string(),
version: result["version"] version: row[2]
.as_str() .as_str()
.ok_or_else(|| DatabaseError::SerializationError { .ok_or_else(|| DatabaseError::SerializationError {
reason: "Missing version field".to_string(), reason: "Missing version field".to_string(),
})? })?
.to_string(), .to_string(),
author: result["author"].as_str().map(String::from), author: row[3].as_str().map(String::from),
entry: result["entry"].as_str().unwrap_or("index.html").to_string(), entry: row[4].as_str().unwrap_or("index.html").to_string(),
icon: result["icon"].as_str().map(String::from), icon: row[5].as_str().map(String::from),
public_key: result["public_key"].as_str().unwrap_or("").to_string(), public_key: row[6].as_str().unwrap_or("").to_string(),
signature: result["signature"].as_str().unwrap_or("").to_string(), signature: row[7].as_str().unwrap_or("").to_string(),
permissions: ExtensionPermissions::default(), permissions: ExtensionPermissions::default(),
homepage: result["homepage"].as_str().map(String::from), homepage: row[8].as_str().map(String::from),
description: result["description"].as_str().map(String::from), description: row[9].as_str().map(String::from),
}; };
let enabled = result["enabled"] let enabled = row[10]
.as_bool() .as_bool()
.or_else(|| result["enabled"].as_i64().map(|v| v != 0)) .or_else(|| row[10].as_i64().map(|v| v != 0))
.unwrap_or(false); .unwrap_or(false);
data.push(ExtensionDataFromDb { data.push(ExtensionDataFromDb {
@ -684,10 +694,7 @@ impl ExtensionManager {
continue; continue;
} }
eprintln!( eprintln!("DEBUG: Extension loaded successfully: {}", extension_id);
"DEBUG: Extension loaded successfully: {}",
extension_id
);
let extension = Extension { let extension = Extension {
id: extension_id.clone(), id: extension_id.clone(),
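The hunk above clones the HLC service out of its mutex and then drops the guard so the lock is not held across the long-running transaction. A minimal, self-contained sketch of that lock-scoping pattern (the HlcService struct below is a hypothetical stand-in for the real service):

use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;

// Hypothetical stand-in for the service guarded by the mutex above.
#[derive(Clone)]
struct HlcService { node_id: String }

fn main() {
    let hlc = Arc::new(Mutex::new(HlcService { node_id: "node-a".into() }));

    // Clone while the guard is held, then let the guard go out of scope
    // so the lock is released before any slow database work starts.
    let local_hlc = {
        let guard = hlc.lock().expect("mutex poisoned");
        guard.clone()
    }; // guard dropped here

    thread::sleep(Duration::from_millis(10)); // stands in for the long DB transaction
    println!("working with {}", local_hlc.node_id);
}

The explicit drop(hlc_service_guard) in the diff achieves the same effect without introducing an extra scope.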

View File

@ -5,8 +5,9 @@ use crate::crdt::transformer::CrdtTransformer;
use crate::crdt::trigger; use crate::crdt::trigger;
use crate::database::core::{convert_value_ref_to_json, parse_sql_statements, ValueConverter}; use crate::database::core::{convert_value_ref_to_json, parse_sql_statements, ValueConverter};
use crate::database::error::DatabaseError; use crate::database::error::DatabaseError;
use rusqlite::Connection;
use rusqlite::{params_from_iter, types::Value as SqliteValue, ToSql, Transaction}; use rusqlite::{params_from_iter, types::Value as SqliteValue, ToSql, Transaction};
use serde_json::Value as JsonValue; use serde_json::{Map, Value as JsonValue};
use sqlparser::ast::{Insert, Statement, TableObject}; use sqlparser::ast::{Insert, Statement, TableObject};
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
@ -140,7 +141,7 @@ impl SqlExecutor {
} }
let sql_str = statement.to_string(); let sql_str = statement.to_string();
eprintln!("DEBUG: Transformed SQL: {}", sql_str); eprintln!("DEBUG: Transformed SQL (execute path): {}", sql_str);
// Special handling for INSERT statements (with FK remapping, WITHOUT RETURNING) // Special handling for INSERT statements (with FK remapping, WITHOUT RETURNING)
if let Statement::Insert(ref insert_stmt) = statement { if let Statement::Insert(ref insert_stmt) = statement {
@ -160,7 +161,6 @@ impl SqlExecutor {
// Remap FK values in params (if mappings exist) // Remap FK values in params (if mappings exist)
remap_fk_params(insert_stmt, &mut param_vec, &fk_info, pk_context)?; remap_fk_params(insert_stmt, &mut param_vec, &fk_info, pk_context)?;
// Run the INSERT via execute()
let param_refs: Vec<&dyn ToSql> = let param_refs: Vec<&dyn ToSql> =
param_vec.iter().map(|v| v as &dyn ToSql).collect(); param_vec.iter().map(|v| v as &dyn ToSql).collect();
@ -172,19 +172,15 @@ impl SqlExecutor {
reason: format!("Prepare failed: {}", e), reason: format!("Prepare failed: {}", e),
})?; })?;
let _ = stmt let mut rows = stmt
.query(params_from_iter(param_refs.iter())) .query(params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError { .map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(), sql: sql_str.clone(),
table: Some(table_name_str.clone()), table: Some(table_name_str.clone()),
reason: format!("Query execution failed: {}", e), reason: format!("INSERT query execution failed: {}", e),
})?; })?;
/* tx.execute(&sql_str, params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError { let _ = rows.next()?;
sql: sql_str.clone(),
table: Some(table_name_str.clone()),
reason: e.to_string(),
})?; */
} }
} else { } else {
// Execute non-INSERT statements normally // Execute non-INSERT statements normally
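The INSERT path above now prepares the statement, binds the parameters via query, and then steps the cursor once. In rusqlite, Statement::query only binds the parameters; the statement is not executed until the returned rows are advanced, which is why the added rows.next()? matters. A small standalone sketch of that behaviour against an in-memory database:

use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch("CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT);")?;

    {
        let mut stmt = conn.prepare("INSERT INTO t (name) VALUES (?1)")?;
        let mut rows = stmt.query(rusqlite::params!["alice"])?;
        // `query` is lazy: the INSERT only runs once the cursor is stepped.
        let _ = rows.next()?;
    }

    let count: i64 = conn.query_row("SELECT COUNT(*) FROM t", [], |r| r.get(0))?;
    println!("rows inserted: {count}"); // prints 1
    Ok(())
}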
@ -192,7 +188,7 @@ impl SqlExecutor {
.map_err(|e| DatabaseError::ExecutionError { .map_err(|e| DatabaseError::ExecutionError {
sql: sql_str.clone(), sql: sql_str.clone(),
table: None, table: None,
reason: e.to_string(), reason: format!("Execute failed: {}", e),
})?; })?;
} }
@ -298,9 +294,16 @@ impl SqlExecutor {
reason: e.to_string(), reason: e.to_string(),
})?; })?;
let num_columns = stmt.column_count(); let column_names: Vec<String> = stmt
.column_names()
.into_iter()
.map(|s| s.to_string())
.collect();
let num_columns = column_names.len();
let param_refs: Vec<&dyn ToSql> = let param_refs: Vec<&dyn ToSql> =
param_vec.iter().map(|v| v as &dyn ToSql).collect(); param_vec.iter().map(|v| v as &dyn ToSql).collect();
let mut rows = stmt let mut rows = stmt
.query(params_from_iter(param_refs.iter())) .query(params_from_iter(param_refs.iter()))
.map_err(|e| DatabaseError::ExecutionError { .map_err(|e| DatabaseError::ExecutionError {
@ -327,6 +330,7 @@ impl SqlExecutor {
// Extract ALL columns for the RETURNING result // Extract ALL columns for the RETURNING result
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns); let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns { for i in 0..num_columns {
let value_ref = let value_ref =
row.get_ref(i) row.get_ref(i)
@ -351,6 +355,7 @@ impl SqlExecutor {
})?; })?;
let num_columns = stmt.column_count(); let num_columns = stmt.column_count();
let mut rows = stmt.query(params).map_err(|e| DatabaseError::QueryError { let mut rows = stmt.query(params).map_err(|e| DatabaseError::QueryError {
reason: e.to_string(), reason: e.to_string(),
})?; })?;
@ -423,10 +428,10 @@ impl SqlExecutor {
/// Executes a SELECT (with CRDT transformation) - WITHOUT permission check /// Executes a SELECT (with CRDT transformation) - WITHOUT permission check
pub fn select_internal( pub fn select_internal(
conn: &rusqlite::Connection, conn: &Connection,
sql: &str, sql: &str,
params: &[JsonValue], params: &[JsonValue],
) -> Result<Vec<JsonValue>, DatabaseError> { ) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
// Parameter validation // Parameter validation
let total_placeholders = sql.matches('?').count(); let total_placeholders = sql.matches('?').count();
if total_placeholders != params.len() { if total_placeholders != params.len() {
@ -457,42 +462,43 @@ impl SqlExecutor {
let sql_params = ValueConverter::convert_params(params)?; let sql_params = ValueConverter::convert_params(params)?;
let transformer = CrdtTransformer::new(); let transformer = CrdtTransformer::new();
let last_statement = ast_vec.pop().unwrap(); let mut stmt_to_execute = ast_vec.pop().unwrap();
let mut stmt_to_execute = last_statement;
transformer.transform_select_statement(&mut stmt_to_execute)?; transformer.transform_select_statement(&mut stmt_to_execute)?;
let transformed_sql = stmt_to_execute.to_string(); let transformed_sql = stmt_to_execute.to_string();
let mut prepared_stmt = eprintln!("DEBUG: Transformed SELECT: {}", transformed_sql);
conn.prepare(&transformed_sql)
.map_err(|e| DatabaseError::ExecutionError {
sql: transformed_sql.clone(),
reason: e.to_string(),
table: None,
})?;
let column_names: Vec<String> = prepared_stmt let mut prepared_stmt = conn.prepare(&transformed_sql)?;
.column_names()
.into_iter()
.map(|s| s.to_string())
.collect();
let rows = prepared_stmt let num_columns = prepared_stmt.column_count();
.query_map(params_from_iter(sql_params.iter()), |row| {
crate::extension::database::row_to_json_value(row, &column_names) let mut rows = prepared_stmt
}) .query(params_from_iter(&sql_params[..]))
.map_err(|e| DatabaseError::QueryError { .map_err(|e| DatabaseError::QueryError {
reason: e.to_string(), reason: e.to_string(),
})?; })?;
let mut results = Vec::new(); let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
for row_result in rows {
results.push(row_result.map_err(|e| DatabaseError::RowProcessingError { while let Some(row) = rows.next().map_err(|e| DatabaseError::RowProcessingError {
reason: e.to_string(), reason: format!("Row iteration error: {}", e),
})?); })? {
let mut row_values: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns {
let value_ref = row
.get_ref(i)
.map_err(|e| DatabaseError::RowProcessingError {
reason: format!("Failed to get column {}: {}", i, e),
})?;
let json_val = convert_value_ref_to_json(value_ref)?;
row_values.push(json_val);
}
result_vec.push(row_values);
} }
Ok(results) Ok(result_vec)
} }
/// Executes SQL with CRDT transformation and returns the RETURNING results /// Executes SQL with CRDT transformation and returns the RETURNING results
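For reference, a compact standalone sketch of the positional row-to-JSON extraction that the rewritten select_internal performs; the value_ref_to_json helper below is a simplified assumption, not the project's convert_value_ref_to_json:

use rusqlite::{types::ValueRef, Connection};
use serde_json::{json, Value as JsonValue};

// Simplified mapping from SQLite values to JSON (assumption, not the real helper).
fn value_ref_to_json(v: ValueRef<'_>) -> JsonValue {
    match v {
        ValueRef::Null => JsonValue::Null,
        ValueRef::Integer(i) => json!(i),
        ValueRef::Real(f) => json!(f),
        ValueRef::Text(t) => json!(String::from_utf8_lossy(t)),
        ValueRef::Blob(b) => json!(b.to_vec()),
    }
}

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch("CREATE TABLE t (a INTEGER, b TEXT); INSERT INTO t VALUES (1, 'x');")?;

    let mut stmt = conn.prepare("SELECT a, b FROM t")?;
    let num_columns = stmt.column_count();
    let mut rows = stmt.query([])?;

    let mut result: Vec<Vec<JsonValue>> = Vec::new();
    while let Some(row) = rows.next()? {
        let mut values = Vec::with_capacity(num_columns);
        for i in 0..num_columns {
            values.push(value_ref_to_json(row.get_ref(i)?));
        }
        result.push(values);
    }
    println!("{result:?}"); // one row with two JSON values: 1 and "x"
    Ok(())
}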
@ -719,13 +725,14 @@ fn extract_pk_values_from_row(
) -> Result<PkValues, DatabaseError> { ) -> Result<PkValues, DatabaseError> {
let mut pk_values = PkValues::new(); let mut pk_values = PkValues::new();
for (idx, pk_col) in pk_columns.iter().enumerate() { for pk_col in pk_columns.iter() {
// RETURNING returns PKs in the order they appear in the RETURNING clause let value: String =
let value: String = row.get(idx).map_err(|e| DatabaseError::ExecutionError { row.get(pk_col.as_str())
sql: "RETURNING clause".to_string(), .map_err(|e| DatabaseError::ExecutionError {
reason: format!("Failed to extract PK column '{}': {}", pk_col, e), sql: "RETURNING clause".to_string(),
table: None, reason: format!("Failed to extract PK column '{}': {}", pk_col, e),
})?; table: None,
})?;
pk_values.insert(pk_col.clone(), value); pk_values.insert(pk_col.clone(), value);
} }

View File

@ -317,15 +317,6 @@ fn count_sql_placeholders(sql: &str) -> usize {
sql.matches('?').count() sql.matches('?').count()
} }
/// Truncates SQL for error messages
/* fn truncate_sql(sql: &str, max_length: usize) -> String {
if sql.len() <= max_length {
sql.to_string()
} else {
format!("{}...", &sql[..max_length])
}
} */
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -70,15 +70,16 @@ pub fn run() {
database::delete_vault, database::delete_vault,
database::list_vaults, database::list_vaults,
database::open_encrypted_database, database::open_encrypted_database,
database::sql_execute,
database::sql_execute_with_crdt, database::sql_execute_with_crdt,
database::sql_execute,
database::sql_query_with_crdt, database::sql_query_with_crdt,
database::sql_select_with_crdt,
database::sql_select, database::sql_select,
database::vault_exists, database::vault_exists,
extension::database::extension_sql_execute, extension::database::extension_sql_execute,
extension::database::extension_sql_select, extension::database::extension_sql_select,
extension::get_all_extensions,
extension::get_all_dev_extensions, extension::get_all_dev_extensions,
extension::get_all_extensions,
extension::get_extension_info, extension::get_extension_info,
extension::install_extension_with_permissions, extension::install_extension_with_permissions,
extension::is_extension_installed, extension::is_extension_installed,

View File

@ -15,11 +15,6 @@
} }
} }
:root { @theme {
--ui-header-height: 74px; --spacing-header: 3.5rem; /* 56px at the default root font size - or your preferred value */
}
.swiper-slide {
isolation: isolate; /* For each slide */
contain: layout style; /* Contains the context without a performance hit */
} }

View File

@ -17,6 +17,7 @@
class="w-full h-full" class="w-full h-full"
@swiper="onSwiperInit" @swiper="onSwiperInit"
@slide-change="onSlideChange" @slide-change="onSlideChange"
direction="vertical"
> >
<SwiperSlide <SwiperSlide
v-for="workspace in workspaces" v-for="workspace in workspaces"
@ -247,6 +248,7 @@
<script setup lang="ts"> <script setup lang="ts">
import { Swiper, SwiperSlide } from 'swiper/vue' import { Swiper, SwiperSlide } from 'swiper/vue'
import { Navigation } from 'swiper/modules' import { Navigation } from 'swiper/modules'
import type { Swiper as SwiperType } from 'swiper'
import 'swiper/css' import 'swiper/css'
import 'swiper/css/navigation' import 'swiper/css/navigation'
@ -272,14 +274,8 @@ const {
isOverviewMode, isOverviewMode,
} = storeToRefs(workspaceStore) } = storeToRefs(workspaceStore)
// Swiper instance
// Control Swiper touch behavior (disable during icon/window drag)
// Mouse position tracking
const { x: mouseX } = useMouse() const { x: mouseX } = useMouse()
// Desktop element ref
const desktopEl = useTemplateRef('desktopEl') const desktopEl = useTemplateRef('desktopEl')
// Track desktop viewport size reactively // Track desktop viewport size reactively

View File

@ -52,7 +52,7 @@
<p <p
v-if="extension.description" v-if="extension.description"
class="text-sm text-gray-600 dark:text-gray-300 mt-2 line-clamp-2" class="hidden @lg:flex text-sm text-gray-600 dark:text-gray-300 mt-2 line-clamp-2"
> >
{{ extension.description }} {{ extension.description }}
</p> </p>
@ -67,7 +67,9 @@
> >
<UIcon name="i-heroicons-check-circle-solid" /> <UIcon name="i-heroicons-check-circle-solid" />
<span v-if="!extension.installedVersion">{{ t('installed') }}</span> <span v-if="!extension.installedVersion">{{ t('installed') }}</span>
<span v-else>{{ t('installedVersion', { version: extension.installedVersion }) }}</span> <span v-else>{{
t('installedVersion', { version: extension.installedVersion })
}}</span>
</div> </div>
<div <div
v-if="extension.downloads" v-if="extension.downloads"
@ -114,10 +116,16 @@
<div class="flex items-center justify-between gap-2"> <div class="flex items-center justify-between gap-2">
<UButton <UButton
:label="getInstallButtonLabel()" :label="getInstallButtonLabel()"
:color="extension.isInstalled && !extension.installedVersion ? 'neutral' : 'primary'" :color="
extension.isInstalled && !extension.installedVersion
? 'neutral'
: 'primary'
"
:disabled="extension.isInstalled && !extension.installedVersion" :disabled="extension.isInstalled && !extension.installedVersion"
:icon=" :icon="
extension.isInstalled && !extension.installedVersion ? 'i-heroicons-check' : 'i-heroicons-arrow-down-tray' extension.isInstalled && !extension.installedVersion
? 'i-heroicons-check'
: 'i-heroicons-arrow-down-tray'
" "
size="sm" size="sm"
@click.stop="$emit('install')" @click.stop="$emit('install')"

View File

@ -1,8 +1,8 @@
<template> <template>
<div class="flex flex-col h-full"> <div class="flex flex-col h-full bg-default">
<!-- Header with Actions --> <!-- Header with Actions -->
<div <div
class="flex flex-col sm:flex-row sm:items-center justify-between gap-4 p-6 border-b border-gray-200 dark:border-gray-800" class="flex flex-col @lg:flex-row @lg:items-center justify-between gap-4 p-6 border-b border-gray-200 dark:border-gray-800"
> >
<div> <div>
<h1 class="text-2xl font-bold"> <h1 class="text-2xl font-bold">
@ -14,14 +14,14 @@
</div> </div>
<div <div
class="flex flex-col sm:flex-row items-stretch sm:items-center gap-3" class="flex flex-col @lg:flex-row items-stretch @lg:items-center gap-3"
> >
<!-- Marketplace Selector --> <!-- Marketplace Selector -->
<USelectMenu <USelectMenu
v-model="selectedMarketplace" v-model="selectedMarketplace"
:items="marketplaces" :items="marketplaces"
value-key="id" value-key="id"
class="w-full sm:w-48" class="w-full @lg:w-48"
> >
<template #leading> <template #leading>
<UIcon name="i-heroicons-building-storefront" /> <UIcon name="i-heroicons-building-storefront" />
@ -34,13 +34,14 @@
icon="i-heroicons-arrow-up-tray" icon="i-heroicons-arrow-up-tray"
color="neutral" color="neutral"
@click="onSelectExtensionAsync" @click="onSelectExtensionAsync"
block
/> />
</div> </div>
</div> </div>
<!-- Search and Filters --> <!-- Search and Filters -->
<div <div
class="flex flex-col sm:flex-row items-stretch sm:items-center gap-4 p-6 border-b border-gray-200 dark:border-gray-800" class="flex flex-col @lg:flex-row items-stretch @lg:items-center gap-4 p-6 border-b border-gray-200 dark:border-gray-800"
> >
<UInput <UInput
v-model="searchQuery" v-model="searchQuery"
@ -53,7 +54,7 @@
:items="categories" :items="categories"
:placeholder="t('filter.category')" :placeholder="t('filter.category')"
value-key="id" value-key="id"
class="w-full sm:w-48" class="w-full @lg:w-48"
> >
<template #leading> <template #leading>
<UIcon name="i-heroicons-tag" /> <UIcon name="i-heroicons-tag" />
@ -65,7 +66,7 @@
<div class="flex-1 overflow-auto p-6"> <div class="flex-1 overflow-auto p-6">
<div <div
v-if="filteredExtensions.length" v-if="filteredExtensions.length"
class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4" class="grid grid-cols-1 @md:grid-cols-2 @2xl:grid-cols-3 gap-4"
> >
<!-- Marketplace Extension Card --> <!-- Marketplace Extension Card -->
<HaexExtensionMarketplaceCard <HaexExtensionMarketplaceCard

View File

@ -1,8 +1,6 @@
<template> <template>
<div> <div class="w-full h-full bg-default">
<div <div class="grid grid-cols-2 p-2">
class="grid grid-rows-2 sm:grid-cols-2 sm:gap-2 p-2 max-w-2xl w-full h-fit"
>
<div class="p-2">{{ t('language') }}</div> <div class="p-2">{{ t('language') }}</div>
<div><UiDropdownLocale @select="onSelectLocaleAsync" /></div> <div><UiDropdownLocale @select="onSelectLocaleAsync" /></div>
@ -34,6 +32,8 @@
@change="onUpdateDeviceNameAsync" @change="onUpdateDeviceNameAsync"
/> />
</div> </div>
<div class="h-full"></div>
</div> </div>
</div> </div>
</template> </template>

View File

@ -5,7 +5,7 @@
:class="[ :class="[
'absolute bg-default/80 backdrop-blur-xl rounded-xl shadow-2xl overflow-hidden isolate', 'absolute bg-default/80 backdrop-blur-xl rounded-xl shadow-2xl overflow-hidden isolate',
'border border-gray-200 dark:border-gray-700 transition-all ease-out duration-600 ', 'border border-gray-200 dark:border-gray-700 transition-all ease-out duration-600 ',
'flex flex-col', 'flex flex-col @container',
{ 'select-none': isResizingOrDragging }, { 'select-none': isResizingOrDragging },
isActive ? 'z-50' : 'z-10', isActive ? 'z-50' : 'z-10',
]" ]"
@ -208,6 +208,7 @@ useDrag(
} }
} }
globalThis.getSelection()?.removeAllRanges()
emit('positionChanged', x.value, y.value) emit('positionChanged', x.value, y.value)
emit('sizeChanged', width.value, height.value) emit('sizeChanged', width.value, height.value)
emit('dragEnd') emit('dragEnd')

View File

@ -2,7 +2,7 @@
<UCard <UCard
class="cursor-pointer transition-all h-32 w-72 shrink-0 group duration-500" class="cursor-pointer transition-all h-32 w-72 shrink-0 group duration-500"
:class="[ :class="[
workspace.position === currentWorkspaceIndex workspace.id === currentWorkspace?.id
? 'ring-2 ring-secondary bg-secondary/10' ? 'ring-2 ring-secondary bg-secondary/10'
: 'hover:ring-2 hover:ring-gray-300', : 'hover:ring-2 hover:ring-gray-300',
]" ]"
@ -31,5 +31,5 @@ defineProps<{ workspace: IWorkspace }>()
const workspaceStore = useWorkspaceStore() const workspaceStore = useWorkspaceStore()
const { currentWorkspaceIndex } = storeToRefs(workspaceStore) const { currentWorkspace } = storeToRefs(workspaceStore)
</script> </script>

View File

@ -1,44 +1,46 @@
<template> <template>
<div class="flex flex-col w-full h-full overflow-hidden"> <div class="flex flex-col w-full h-full overflow-hidden">
<UPageHeader <div ref="headerRef">
as="header" <UPageHeader
:ui="{ as="header"
root: [ :ui="{
'bg-default border-b border-accented sticky top-0 z-50 py-0 px-8', root: [
], 'bg-default border-b border-accented sticky top-0 z-50 pt-2 px-8 h-header',
wrapper: [ ],
'pt-6 flex flex-col sm:flex-row sm:items-center sm:justify-between gap-4', wrapper: [
], 'flex flex-col sm:flex-row sm:items-center sm:justify-between gap-4',
}" ],
> }"
<template #title> >
<div class="flex items-center"> <template #title>
<UiLogoHaexhub class="size-12 shrink-0" /> <div class="flex items-center">
<UiLogoHaexhub class="size-12 shrink-0" />
<NuxtLinkLocale <NuxtLinkLocale
class="link text-base-content link-neutral text-xl font-semibold no-underline flex items-center" class="link text-base-content link-neutral text-xl font-semibold no-underline flex items-center"
:to="{ name: 'desktop' }" :to="{ name: 'desktop' }"
>
<UiTextGradient class="text-nowrap">
{{ currentVaultName }}
</UiTextGradient>
</NuxtLinkLocale>
</div>
</template>
<template #links>
<UButton
color="neutral"
variant="outline"
:block="isSmallScreen"
@click="isOverviewMode = !isOverviewMode"
icon="i-bi-person-workspace"
size="lg"
> >
<UiTextGradient class="text-nowrap"> </UButton>
{{ currentVaultName }} <HaexExtensionLauncher :block="isSmallScreen" />
</UiTextGradient> </template>
</NuxtLinkLocale> </UPageHeader>
</div> </div>
</template>
<template #links>
<UButton
color="neutral"
variant="outline"
:block="isSmallScreen"
@click="isOverviewMode = !isOverviewMode"
icon="i-bi-person-workspace"
size="lg"
>
</UButton>
<HaexExtensionLauncher :block="isSmallScreen" />
</template>
</UPageHeader>
<main class="flex-1 overflow-hidden bg-elevated"> <main class="flex-1 overflow-hidden bg-elevated">
<NuxtPage /> <NuxtPage />

View File

@ -62,7 +62,7 @@
> >
<Icon <Icon
name="mdi:trash-can-outline" name="mdi:trash-can-outline"
@click="removeVaultAsync(vault.name)" @click="prepareRemoveVault(vault.name)"
/> />
</UButton> </UButton>
</div> </div>
@ -81,24 +81,52 @@
</div> </div>
</div> </div>
</div> </div>
<UiDialogConfirm
v-model:open="showRemoveDialog"
:title="t('remove.title')"
:description="t('remove.description', { vaultName: vaultToBeRemoved })"
@confirm="onConfirmRemoveAsync"
/>
</div> </div>
</template> </template>
<script setup lang="ts"> <script setup lang="ts">
import { openUrl } from '@tauri-apps/plugin-opener' import { openUrl } from '@tauri-apps/plugin-opener'
import type { Locale } from 'vue-i18n' import type { Locale } from 'vue-i18n'
import type { VaultInfo } from '@bindings/VaultInfo'
definePageMeta({ definePageMeta({
name: 'vaultOpen', name: 'vaultOpen',
}) })
const { t, setLocale } = useI18n() const { t, setLocale } = useI18n()
const passwordPromptOpen = ref(false) const passwordPromptOpen = ref(false)
const selectedVault = ref<IVaultInfo>() const selectedVault = ref<VaultInfo>()
const showRemoveDialog = ref(false)
const { syncLastVaultsAsync, removeVaultAsync } = useLastVaultStore() const { syncLastVaultsAsync, removeVaultAsync } = useLastVaultStore()
const { lastVaults } = storeToRefs(useLastVaultStore()) const { lastVaults } = storeToRefs(useLastVaultStore())
const vaultToBeRemoved = ref('')
const prepareRemoveVault = (vaultName: string) => {
vaultToBeRemoved.value = vaultName
showRemoveDialog.value = true
}
const toast = useToast()
const onConfirmRemoveAsync = async () => {
try {
await removeVaultAsync(vaultToBeRemoved.value)
showRemoveDialog.value = false
await syncLastVaultsAsync()
} catch (error) {
toast.add({
color: 'error',
description: JSON.stringify(error),
})
}
}
onMounted(async () => { onMounted(async () => {
try { try {
await syncLastVaultsAsync() await syncLastVaultsAsync()
@ -116,7 +144,10 @@ const onSelectLocale = async (locale: Locale) => {
de: de:
welcome: 'Viel Spass mit' welcome: 'Viel Spass mit'
lastUsed: 'Zuletzt verwendete Vaults' lastUsed: 'Zuletzt verwendete Vaults'
sponsors: 'Supported by' sponsors: Supported by
remove:
title: Vault löschen
description: Möchtest du die Vault {vaultName} wirklich löschen?
en: en:
welcome: 'Have fun with' welcome: 'Have fun with'

View File

@ -57,7 +57,10 @@ onMounted(async () => {
await loadExtensionsAsync() await loadExtensionsAsync()
await readNotificationsAsync() await readNotificationsAsync()
if (!(await isKnownDeviceAsync())) { const knownDevice = await isKnownDeviceAsync()
console.log('knownDevice', knownDevice)
if (!knownDevice) {
console.log('not known device') console.log('not known device')
newDeviceName.value = hostname.value ?? 'unknown' newDeviceName.value = hostname.value ?? 'unknown'
showNewDeviceDialog.value = true showNewDeviceDialog.value = true

View File

@ -179,8 +179,9 @@ export const useWindowManagerStore = defineStore('windowManager', () => {
// Calculate viewport-aware size // Calculate viewport-aware size
const viewportWidth = window.innerWidth const viewportWidth = window.innerWidth
const viewportHeight = window.innerHeight const viewportHeight = window.innerHeight - 60
console.log('viewportHeight', window.innerHeight, viewportHeight)
const windowHeight = Math.min(height, viewportHeight) const windowHeight = Math.min(height, viewportHeight)
// Adjust width proportionally if needed (optional) // Adjust width proportionally if needed (optional)

View File

@ -38,6 +38,7 @@ export const useWorkspaceStore = defineStore('workspaceStore', () => {
.from(haexWorkspaces) .from(haexWorkspaces)
.orderBy(asc(haexWorkspaces.position)) .orderBy(asc(haexWorkspaces.position))
console.log('loadWorkspacesAsync', items)
workspaces.value = items workspaces.value = items
// Create default workspace if none exist // Create default workspace if none exist

View File

@ -57,9 +57,11 @@ export const useUiStore = defineStore('uiStore', () => {
colorMode.preference = currentThemeName.value colorMode.preference = currentThemeName.value
}) })
const viewportHeightWithoutHeader = ref(0)
return { return {
availableThemes, availableThemes,
//currentScreenSize, viewportHeightWithoutHeader,
currentTheme, currentTheme,
currentThemeName, currentThemeName,
defaultTheme, defaultTheme,

View File

@ -136,40 +136,50 @@ const drizzleCallback = (async (
params: unknown[], params: unknown[],
method: 'get' | 'run' | 'all' | 'values', method: 'get' | 'run' | 'all' | 'values',
) => { ) => {
let rows: unknown[] = [] let rows: any[] = []
try {
if (isSelectQuery(sql)) {
// SELECT statements
rows = await invoke<unknown[]>('sql_select_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL select Error:', e, sql, params)
return []
})
} else if (hasReturning(sql)) {
// INSERT/UPDATE/DELETE with RETURNING → use query
rows = await invoke<unknown[]>('sql_query_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL query with CRDT Error:', e, sql, params)
return []
})
} else {
// INSERT/UPDATE/DELETE without RETURNING → use execute
await invoke<unknown[]>('sql_execute_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL execute with CRDT Error:', e, sql, params, rows)
return []
})
}
} catch (error) {
console.error('Error in drizzleCallback invoke:', error, {
sql,
params,
method,
})
}
console.log('drizzleCallback', method, sql, params) console.log('drizzleCallback', method, sql, params)
console.log('drizzleCallback rows', rows)
if (isSelectQuery(sql)) {
// SELECT statements
rows = await invoke<unknown[]>('sql_select', { sql, params }).catch((e) => {
console.error('SQL select Error:', e, sql, params)
return []
})
} else if (hasReturning(sql)) {
// INSERT/UPDATE/DELETE with RETURNING → use query
rows = await invoke<unknown[]>('sql_query_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL query with CRDT Error:', e, sql, params)
return []
})
} else {
// INSERT/UPDATE/DELETE without RETURNING → use execute
await invoke<unknown[]>('sql_execute_with_crdt', {
sql,
params,
}).catch((e) => {
console.error('SQL execute with CRDT Error:', e, sql, params, rows)
return []
})
return { rows: undefined }
}
if (method === 'get') { if (method === 'get') {
return { rows: rows.length > 0 ? [rows[0]] : [] } return rows.length > 0 ? { rows: rows[0] } : { rows }
} else {
return { rows }
} }
return { rows }
}) satisfies AsyncRemoteCallback }) satisfies AsyncRemoteCallback

View File

@ -1,89 +1,30 @@
import { invoke } from '@tauri-apps/api/core' import { invoke } from '@tauri-apps/api/core'
import { load } from '@tauri-apps/plugin-store' import type { VaultInfo } from '@bindings/VaultInfo'
/* interface ILastVault {
lastUsed: Date
name: string
path: string
} */
export interface IVaultInfo {
name: string
path: string
lastAccess: Date
}
export const useLastVaultStore = defineStore('lastVaultStore', () => { export const useLastVaultStore = defineStore('lastVaultStore', () => {
const { const lastVaults = ref<VaultInfo[]>([])
public: { haexVault },
} = useRuntimeConfig()
const lastVaults = ref<IVaultInfo[]>([])
const keyName = 'lastVaults'
const getStoreAsync = async () => {
return await load(haexVault.lastVaultFileName || 'lastVaults.json')
}
const syncLastVaultsAsync = async () => { const syncLastVaultsAsync = async () => {
lastVaults.value = lastVaults.value =
(await listVaultsAsync()).sort( (await listVaultsAsync()).sort(
(a, b) => +new Date(b.lastAccess) - +new Date(a.lastAccess), (a, b) => +new Date(`${b.lastAccess}`) - +new Date(`${a.lastAccess}`),
) ?? [] ) ?? []
return lastVaults.value return lastVaults.value
} }
const listVaultsAsync = async () => { const listVaultsAsync = async () => {
lastVaults.value = await invoke<IVaultInfo[]>('list_vaults') lastVaults.value = await invoke<VaultInfo[]>('list_vaults')
return lastVaults.value return lastVaults.value
} }
const addVaultAsync = async ({ const removeVaultAsync = async (vaultName: string) => {
name, return await invoke('delete_vault', { vaultName })
path,
}: {
name?: string
path: string
}) => {
if (!lastVaults.value) await syncLastVaultsAsync()
const saveName = name || getFileNameFromPath(path)
lastVaults.value = lastVaults.value.filter((vault) => vault.path !== path)
lastVaults.value.push({ lastAccess: new Date(), name: saveName, path })
await saveLastVaultsAsync()
}
const removeVaultAsync = async (vaultPath: string) => {
lastVaults.value = lastVaults.value.filter(
(vault) => vault.path !== vaultPath,
)
await saveLastVaultsAsync()
}
const saveLastVaultsAsync = async () => {
const store = await getStoreAsync()
await store.set(keyName, lastVaults.value)
await syncLastVaultsAsync()
} }
return { return {
addVaultAsync,
syncLastVaultsAsync, syncLastVaultsAsync,
lastVaults, lastVaults,
removeVaultAsync, removeVaultAsync,
saveLastVaultsAsync,
} }
}) })
const getFileNameFromPath = (path: string) => {
const lastBackslashIndex = path.lastIndexOf('\\')
const lastSlashIndex = path.lastIndexOf('/')
const lastIndex = Math.max(lastBackslashIndex, lastSlashIndex)
const fileName = path.substring(lastIndex + 1)
return fileName
}

View File

@ -32,6 +32,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.locale), where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.locale),
}) })
console.log('found currentLocaleRow', currentLocaleRow)
if (currentLocaleRow?.value) { if (currentLocaleRow?.value) {
const currentLocale = app.$i18n.availableLocales.find( const currentLocale = app.$i18n.availableLocales.find(
(locale) => locale === currentLocaleRow.value, (locale) => locale === currentLocaleRow.value,
@ -70,6 +71,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.theme), where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.theme),
}) })
console.log('found currentThemeRow', currentThemeRow)
if (currentThemeRow?.value) { if (currentThemeRow?.value) {
const theme = availableThemes.value.find( const theme = availableThemes.value.find(
(theme) => theme.value === currentThemeRow.value, (theme) => theme.value === currentThemeRow.value,
@ -98,6 +100,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.vaultName), where: eq(schema.haexSettings.key, VaultSettingsKeyEnum.vaultName),
}) })
console.log('found currentVaultNameRow', currentVaultNameRow)
if (currentVaultNameRow?.value) { if (currentVaultNameRow?.value) {
currentVaultName.value = currentVaultName.value =
currentVaultNameRow.value || haexVault.defaultVaultName || 'HaexHub' currentVaultNameRow.value || haexVault.defaultVaultName || 'HaexHub'
@ -129,7 +132,7 @@ export const useVaultSettingsStore = defineStore('vaultSettingsStore', () => {
), ),
}) })
console.log('store: readDeviceNameAsync', deviceName) console.log('store: readDeviceNameAsync', deviceName)
return deviceName return deviceName?.id ? deviceName : undefined
} }
const addDeviceNameAsync = async ({ const addDeviceNameAsync = async ({