6 Commits

SHA1 Message Date
2b739b9e79 Improve database query handling with automatic fallback for RETURNING clauses 2025-11-07 01:39:44 +01:00
63849d86e1 Add sync backend infrastructure and improve grid snapping
- Implement crypto utilities for vault key management (Hybrid-Ansatz)
  - PBKDF2 key derivation with 600k iterations
  - AES-GCM encryption for vault keys and CRDT data
  - Optimized Base64 conversion with Buffer/btoa fallback
- Add Sync Engine Store for server communication
  - Vault key storage and retrieval
  - CRDT log push/pull operations
  - Supabase client integration
- Add Sync Orchestrator Store with realtime subscriptions
  - Event-driven sync (push after writes)
  - Supabase Realtime for instant sync
  - Sync status tracking per backend
- Add haex_sync_status table for reliable sync tracking
2025-11-05 17:08:49 +01:00
9adee46166 Bump version to 0.1.11 2025-11-05 01:08:33 +01:00
be7dff72dd Add sync backend infrastructure and improve grid snapping
- Add haexSyncBackends table with CRDT support for multi-backend sync
- Implement useSyncBackendsStore for managing sync server configurations
- Fix desktop icon grid snapping for all icon sizes (small to extra-large)
- Add Supabase client dependency for future sync implementation
- Generate database migration for sync_backends table
2025-11-05 01:08:09 +01:00
b465c117b0 Fix browser text selection during icon drag
- Add e.preventDefault() in handlePointerDown to prevent text selection
- Add @dragstart.prevent to prevent native browser drag
- Add select-none and @selectstart.prevent to workspace
- Add mouseleave event listener to reset drag state when leaving window
- Refactor grid positioning to use consistent iconPadding constant
2025-11-04 22:36:17 +01:00
731ae7cc47 Improve desktop grid positioning and spacing
- Increase icon spacing from 20px to 30px padding
- Add vertical grid offset (-30px) to start grid higher
- Remove screen-size dependent grid columns/rows (now fully dynamic)
- Fix dropzone visualization to use consistent snapToGrid function
- Clean up unused UI store dependencies
2025-11-04 16:39:08 +01:00
16 changed files with 2419 additions and 73 deletions

View File

@ -1,7 +1,7 @@
{
"name": "haex-hub",
"private": true,
"version": "0.1.8",
"version": "0.1.12",
"type": "module",
"scripts": {
"build": "nuxt build",
@ -24,6 +24,7 @@
"@nuxt/ui": "4.1.0",
"@nuxtjs/i18n": "10.0.6",
"@pinia/nuxt": "^0.11.2",
"@supabase/supabase-js": "^2.79.0",
"@tailwindcss/vite": "^4.1.16",
"@tauri-apps/api": "^2.9.0",
"@tauri-apps/plugin-dialog": "^2.4.2",

pnpm-lock.yaml generated (69 lines)
View File

@ -29,6 +29,9 @@ importers:
'@pinia/nuxt':
specifier: ^0.11.2
version: 0.11.2(magicast@0.5.1)(pinia@3.0.3(typescript@5.9.3)(vue@3.5.22(typescript@5.9.3)))
'@supabase/supabase-js':
specifier: ^2.79.0
version: 2.79.0
'@tailwindcss/vite':
specifier: ^4.1.16
version: 4.1.16(vite@7.1.12(@types/node@24.9.2)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))
@ -1894,6 +1897,30 @@ packages:
peerDependencies:
eslint: '>=9.0.0'
'@supabase/auth-js@2.79.0':
resolution: {integrity: sha512-p2GKvdbF9d/6C+dtS6iNcSicPr6eUfkvovD60HWlWsD+oOjC483DzFWrzGjNpBwnswhfMRP8Qn3rYA0VWaOfjw==}
engines: {node: '>=20.0.0'}
'@supabase/functions-js@2.79.0':
resolution: {integrity: sha512-WaiU6b+Z+ZfJOjFhpMKdajt42weiFUrA6TVW5oGd6WfPGajFiKZJJIAvuK0g7KDKaYowtQrOo5+Ais+PcuZ1qA==}
engines: {node: '>=20.0.0'}
'@supabase/postgrest-js@2.79.0':
resolution: {integrity: sha512-2i8EFm3/49ecjt6dk/TGVROBbtOmhryiC4NL3u0FBIrm2hqj+FvbELv1jjM6r+a6abnh+uzIV/bFsWHAa/k3/w==}
engines: {node: '>=20.0.0'}
'@supabase/realtime-js@2.79.0':
resolution: {integrity: sha512-foaZujNBycAqLizUcuLyyFyDitfPnEMVO4CiKXNwaMCDVMoVX4QR6n4gpJLUC5BGzc20Mte6vSJLbk4MN90Prw==}
engines: {node: '>=20.0.0'}
'@supabase/storage-js@2.79.0':
resolution: {integrity: sha512-PLSeKX1/BZhGWCT972w4TvVOCcw/xh4TsowtUBiZvPx4OdHT7dB1q0DXKwVUfKbWk5UUC+6XAq4ZU/ZCtdgn6w==}
engines: {node: '>=20.0.0'}
'@supabase/supabase-js@2.79.0':
resolution: {integrity: sha512-x9ndEaBSwoRnFOOZGhh2CeV69Uz4B/EOSGCbKysDhTiYakiCAdDXaNuLPluviKU/Aot+F7BglXZDZ0YJ3GpGrw==}
engines: {node: '>=20.0.0'}
'@swc/helpers@0.5.17':
resolution: {integrity: sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==}
@ -2116,6 +2143,9 @@ packages:
resolution: {integrity: sha512-EULJ8LApcVEPbrfND0cRQqutIOdiIgJ1Mgrhpy755r14xMohPTEpkV/k28SJvuOs9bHRFW8x+KeDAEPiGQPB9Q==}
deprecated: This is a stub types definition. parse-path provides its own type definitions, so you do not need this installed.
'@types/phoenix@1.6.6':
resolution: {integrity: sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A==}
'@types/resolve@1.20.2':
resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==}
@ -7556,6 +7586,43 @@ snapshots:
estraverse: 5.3.0
picomatch: 4.0.3
'@supabase/auth-js@2.79.0':
dependencies:
tslib: 2.8.1
'@supabase/functions-js@2.79.0':
dependencies:
tslib: 2.8.1
'@supabase/postgrest-js@2.79.0':
dependencies:
tslib: 2.8.1
'@supabase/realtime-js@2.79.0':
dependencies:
'@types/phoenix': 1.6.6
'@types/ws': 8.18.1
tslib: 2.8.1
ws: 8.18.3
transitivePeerDependencies:
- bufferutil
- utf-8-validate
'@supabase/storage-js@2.79.0':
dependencies:
tslib: 2.8.1
'@supabase/supabase-js@2.79.0':
dependencies:
'@supabase/auth-js': 2.79.0
'@supabase/functions-js': 2.79.0
'@supabase/postgrest-js': 2.79.0
'@supabase/realtime-js': 2.79.0
'@supabase/storage-js': 2.79.0
transitivePeerDependencies:
- bufferutil
- utf-8-validate
'@swc/helpers@0.5.17':
dependencies:
tslib: 2.8.1
@ -7740,6 +7807,8 @@ snapshots:
dependencies:
parse-path: 7.1.0
'@types/phoenix@1.6.6': {}
'@types/resolve@1.20.2': {}
'@types/web-bluetooth@0.0.20': {}

View File

@ -0,0 +1,10 @@
CREATE TABLE `haex_sync_backends` (
`id` text PRIMARY KEY NOT NULL,
`name` text NOT NULL,
`server_url` text NOT NULL,
`enabled` integer DEFAULT true NOT NULL,
`priority` integer DEFAULT 0 NOT NULL,
`created_at` text DEFAULT (CURRENT_TIMESTAMP),
`updated_at` integer,
`haex_timestamp` text
);

View File

@ -0,0 +1,843 @@
{
"version": "6",
"dialect": "sqlite",
"id": "bf82259e-9264-44e7-a60f-8cc14a1f22e2",
"prevId": "3aedf10c-2266-40f4-8549-0ff8b0588853",
"tables": {
"haex_crdt_configs": {
"name": "haex_crdt_configs",
"columns": {
"key": {
"name": "key",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"value": {
"name": "value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_crdt_logs": {
"name": "haex_crdt_logs",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"table_name": {
"name": "table_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"row_pks": {
"name": "row_pks",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"op_type": {
"name": "op_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"column_name": {
"name": "column_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"new_value": {
"name": "new_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"old_value": {
"name": "old_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"idx_haex_timestamp": {
"name": "idx_haex_timestamp",
"columns": [
"haex_timestamp"
],
"isUnique": false
},
"idx_table_row": {
"name": "idx_table_row",
"columns": [
"table_name",
"row_pks"
],
"isUnique": false
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_crdt_snapshots": {
"name": "haex_crdt_snapshots",
"columns": {
"snapshot_id": {
"name": "snapshot_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"created": {
"name": "created",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"epoch_hlc": {
"name": "epoch_hlc",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"location_url": {
"name": "location_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"file_size_bytes": {
"name": "file_size_bytes",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_desktop_items": {
"name": "haex_desktop_items",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"workspace_id": {
"name": "workspace_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"item_type": {
"name": "item_type",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"extension_id": {
"name": "extension_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"system_window_id": {
"name": "system_window_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"position_x": {
"name": "position_x",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"position_y": {
"name": "position_y",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_desktop_items_workspace_id_haex_workspaces_id_fk": {
"name": "haex_desktop_items_workspace_id_haex_workspaces_id_fk",
"tableFrom": "haex_desktop_items",
"tableTo": "haex_workspaces",
"columnsFrom": [
"workspace_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"haex_desktop_items_extension_id_haex_extensions_id_fk": {
"name": "haex_desktop_items_extension_id_haex_extensions_id_fk",
"tableFrom": "haex_desktop_items",
"tableTo": "haex_extensions",
"columnsFrom": [
"extension_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {
"item_reference": {
"name": "item_reference",
"value": "(\"haex_desktop_items\".\"item_type\" = 'extension' AND \"haex_desktop_items\".\"extension_id\" IS NOT NULL AND \"haex_desktop_items\".\"system_window_id\" IS NULL) OR (\"haex_desktop_items\".\"item_type\" = 'system' AND \"haex_desktop_items\".\"system_window_id\" IS NOT NULL AND \"haex_desktop_items\".\"extension_id\" IS NULL) OR (\"haex_desktop_items\".\"item_type\" = 'file' AND \"haex_desktop_items\".\"system_window_id\" IS NOT NULL AND \"haex_desktop_items\".\"extension_id\" IS NULL) OR (\"haex_desktop_items\".\"item_type\" = 'folder' AND \"haex_desktop_items\".\"system_window_id\" IS NOT NULL AND \"haex_desktop_items\".\"extension_id\" IS NULL)"
}
}
},
"haex_devices": {
"name": "haex_devices",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"device_id": {
"name": "device_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"haex_devices_device_id_unique": {
"name": "haex_devices_device_id_unique",
"columns": [
"device_id"
],
"isUnique": true
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_extension_permissions": {
"name": "haex_extension_permissions",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"extension_id": {
"name": "extension_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"resource_type": {
"name": "resource_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"action": {
"name": "action",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"target": {
"name": "target",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"constraints": {
"name": "constraints",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "'denied'"
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"haex_extension_permissions_extension_id_resource_type_action_target_unique": {
"name": "haex_extension_permissions_extension_id_resource_type_action_target_unique",
"columns": [
"extension_id",
"resource_type",
"action",
"target"
],
"isUnique": true
}
},
"foreignKeys": {
"haex_extension_permissions_extension_id_haex_extensions_id_fk": {
"name": "haex_extension_permissions_extension_id_haex_extensions_id_fk",
"tableFrom": "haex_extension_permissions",
"tableTo": "haex_extensions",
"columnsFrom": [
"extension_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_extensions": {
"name": "haex_extensions",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"public_key": {
"name": "public_key",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"version": {
"name": "version",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"author": {
"name": "author",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"description": {
"name": "description",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"entry": {
"name": "entry",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'index.html'"
},
"homepage": {
"name": "homepage",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"enabled": {
"name": "enabled",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": true
},
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"signature": {
"name": "signature",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"single_instance": {
"name": "single_instance",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"haex_extensions_public_key_name_unique": {
"name": "haex_extensions_public_key_name_unique",
"columns": [
"public_key",
"name"
],
"isUnique": true
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_notifications": {
"name": "haex_notifications",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"alt": {
"name": "alt",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"date": {
"name": "date",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"image": {
"name": "image",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"read": {
"name": "read",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"source": {
"name": "source",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"text": {
"name": "text",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"type": {
"name": "type",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_settings": {
"name": "haex_settings",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"device_id": {
"name": "device_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"key": {
"name": "key",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"type": {
"name": "type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"value": {
"name": "value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"haex_settings_device_id_key_type_unique": {
"name": "haex_settings_device_id_key_type_unique",
"columns": [
"device_id",
"key",
"type"
],
"isUnique": true
}
},
"foreignKeys": {
"haex_settings_device_id_haex_devices_id_fk": {
"name": "haex_settings_device_id_haex_devices_id_fk",
"tableFrom": "haex_settings",
"tableTo": "haex_devices",
"columnsFrom": [
"device_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_sync_backends": {
"name": "haex_sync_backends",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"server_url": {
"name": "server_url",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"enabled": {
"name": "enabled",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": true
},
"priority": {
"name": "priority",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_workspaces": {
"name": "haex_workspaces",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"device_id": {
"name": "device_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"position": {
"name": "position",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"background": {
"name": "background",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"haex_workspaces_position_unique": {
"name": "haex_workspaces_position_unique",
"columns": [
"position"
],
"isUnique": true
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View File

@ -22,6 +22,13 @@
"when": 1762263814375,
"tag": "0002_loose_quasimodo",
"breakpoints": true
},
{
"idx": 3,
"version": "6",
"when": 1762300795436,
"tag": "0003_luxuriant_deathstrike",
"breakpoints": true
}
]
}

View File

@ -18,6 +18,7 @@
@pointerdown.left="handlePointerDown"
@pointermove="handlePointerMove"
@pointerup="handlePointerUp"
@dragstart.prevent
@click.left="handleClick"
@dblclick="handleDoubleClick"
>
@ -176,6 +177,9 @@ const style = computed(() => ({
const handlePointerDown = (e: PointerEvent) => {
if (!draggableEl.value || !draggableEl.value.parentElement) return
// Prevent any text selection during drag
e.preventDefault()
isDragging.value = true
emit('dragStart', props.id, props.itemType, props.referenceId, iconWidth.value, iconHeight.value, x.value, y.value)

View File

@ -25,12 +25,13 @@
>
<UContextMenu :items="getWorkspaceContextMenuItems(workspace.id)">
<div
class="w-full h-full relative"
class="w-full h-full relative select-none"
:style="getWorkspaceBackgroundStyle(workspace)"
@click.self.stop="handleDesktopClick"
@mousedown.left.self="handleAreaSelectStart"
@dragover.prevent="handleDragOver"
@drop.prevent="handleDrop($event, workspace.id)"
@selectstart.prevent
>
<!-- Drop Target Zone (visible during drag) -->
<div
@ -301,7 +302,7 @@ const { x: mouseX, y: mouseY } = useMouse()
const dropTargetZone = computed(() => {
if (!isDragging.value) return null
// Use the actual icon position during drag, not the mouse position
// Use the actual icon position during drag
const iconX = currentDraggedItem.x
const iconY = currentDraggedItem.y
@ -313,11 +314,14 @@ const dropTargetZone = computed(() => {
currentDraggedItem.height || undefined,
)
// Show dropzone at snapped position with grid cell size
const cellSize = desktopStore.gridCellSize
return {
x: snapped.x,
y: snapped.y,
width: currentDraggedItem.width || desktopStore.gridCellSize,
height: currentDraggedItem.height || desktopStore.gridCellSize,
width: currentDraggedItem.width || cellSize,
height: currentDraggedItem.height || cellSize,
}
})
@ -732,6 +736,21 @@ watch(currentWorkspace, async () => {
}
})
// Reset drag state when mouse leaves the document (fixes stuck dropzone)
useEventListener(document, 'mouseleave', () => {
if (isDragging.value) {
isDragging.value = false
currentDraggedItem.id = ''
currentDraggedItem.itemType = ''
currentDraggedItem.referenceId = ''
currentDraggedItem.width = 0
currentDraggedItem.height = 0
currentDraggedItem.x = 0
currentDraggedItem.y = 0
allowSwipe.value = true
}
})
onMounted(async () => {
// Load workspaces first
await workspaceStore.loadWorkspacesAsync()

View File

@ -365,17 +365,37 @@ async function handleDatabaseMethodAsync(
switch (request.method) {
case 'haextension.db.query': {
const rows = await invoke<unknown[]>('extension_sql_select', {
sql: params.query || '',
params: params.params || [],
publicKey: extension.publicKey,
name: extension.name,
})
try {
const rows = await invoke<unknown[]>('extension_sql_select', {
sql: params.query || '',
params: params.params || [],
publicKey: extension.publicKey,
name: extension.name,
})
return {
rows,
rowsAffected: 0,
lastInsertId: undefined,
return {
rows,
rowsAffected: 0,
lastInsertId: undefined,
}
} catch (error: any) {
// If error is about non-SELECT statements (INSERT/UPDATE/DELETE with RETURNING),
// automatically retry with execute
if (error?.message?.includes('Only SELECT statements are allowed')) {
const rows = await invoke<unknown[]>('extension_sql_execute', {
sql: params.query || '',
params: params.params || [],
publicKey: extension.publicKey,
name: extension.name,
})
return {
rows,
rowsAffected: rows.length,
lastInsertId: undefined,
}
}
throw error
}
}
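For illustration, a minimal sketch of what this fallback enables from an extension's point of view. The haextension.db.query method and the retry through extension_sql_execute come from the hunk above; the requestDatabaseAsync helper and the notes table are hypothetical stand-ins for whatever bridge an extension actually uses.

// Hypothetical bridge helper; in the real app the request ends up in
// handleDatabaseMethodAsync shown above. Only the result shape matters here.
type DbResult = { rows: unknown[]; rowsAffected: number; lastInsertId?: number }

declare function requestDatabaseAsync(
  method: 'haextension.db.query',
  params: { query: string; params: unknown[] },
): Promise<DbResult>

// An INSERT ... RETURNING issued via the query method used to fail with
// "Only SELECT statements are allowed"; the host now retries it with
// extension_sql_execute, so the returned row still reaches the caller.
async function createNoteAsync(title: string) {
  const result = await requestDatabaseAsync('haextension.db.query', {
    query: 'INSERT INTO notes (title) VALUES (?) RETURNING id, title',
    params: [title],
  })
  return result.rows[0]
}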

View File

@ -48,3 +48,27 @@ export const haexCrdtConfigs = sqliteTable(tableNames.haex.crdt.configs.name, {
key: text().primaryKey(),
value: text(),
})
/**
* Sync Status Table (WITHOUT CRDT - local-only metadata)
* Tracks sync progress for each backend
*/
export const haexSyncStatus = sqliteTable(
'haex_sync_status',
{
id: text('id')
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
backendId: text('backend_id').notNull(),
// Last server sequence number received from pull
lastPullSequence: integer('last_pull_sequence'),
// Last HLC timestamp pushed to server
lastPushHlcTimestamp: text('last_push_hlc_timestamp'),
// Last successful sync timestamp
lastSyncAt: text('last_sync_at'),
// Sync error message if any
error: text('error'),
},
)
export type InsertHaexSyncStatus = typeof haexSyncStatus.$inferInsert
export type SelectHaexSyncStatus = typeof haexSyncStatus.$inferSelect

View File

@ -205,3 +205,30 @@ export const haexDesktopItems = sqliteTable(
)
export type InsertHaexDesktopItems = typeof haexDesktopItems.$inferInsert
export type SelectHaexDesktopItems = typeof haexDesktopItems.$inferSelect
export const haexSyncBackends = sqliteTable(
tableNames.haex.sync_backends.name,
withCrdtColumns({
id: text(tableNames.haex.sync_backends.columns.id)
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
name: text(tableNames.haex.sync_backends.columns.name).notNull(),
serverUrl: text(tableNames.haex.sync_backends.columns.serverUrl).notNull(),
enabled: integer(tableNames.haex.sync_backends.columns.enabled, {
mode: 'boolean',
})
.default(true)
.notNull(),
priority: integer(tableNames.haex.sync_backends.columns.priority)
.default(0)
.notNull(),
createdAt: text(tableNames.haex.sync_backends.columns.createdAt).default(
sql`(CURRENT_TIMESTAMP)`,
),
updatedAt: integer(tableNames.haex.sync_backends.columns.updatedAt, {
mode: 'timestamp',
}).$onUpdate(() => new Date()),
}),
)
export type InsertHaexSyncBackends = typeof haexSyncBackends.$inferInsert
export type SelectHaexSyncBackends = typeof haexSyncBackends.$inferSelect

View File

@ -102,6 +102,20 @@
"haexTimestamp": "haex_timestamp"
}
},
"sync_backends": {
"name": "haex_sync_backends",
"columns": {
"id": "id",
"name": "name",
"serverUrl": "server_url",
"enabled": "enabled",
"priority": "priority",
"createdAt": "created_at",
"updatedAt": "updated_at",
"haexTimestamp": "haex_timestamp"
}
},
"crdt": {
"logs": {

View File

@ -24,29 +24,29 @@ export const useDesktopStore = defineStore('desktopStore', () => {
const workspaceStore = useWorkspaceStore()
const { currentWorkspace } = storeToRefs(workspaceStore)
const { $i18n } = useNuxtApp()
const uiStore = useUiStore()
const { isSmallScreen } = storeToRefs(uiStore)
const deviceStore = useDeviceStore()
const settingsStore = useVaultSettingsStore()
$i18n.setLocaleMessage('de', {
desktop: de,
})
$i18n.setLocaleMessage('de', { desktop: de })
$i18n.setLocaleMessage('en', { desktop: en })
const desktopItems = ref<IDesktopItem[]>([])
const selectedItemIds = ref<Set<string>>(new Set())
// Desktop Grid Settings (stored in DB per device)
const iconSizePreset = ref<DesktopIconSizePreset>(DesktopIconSizePreset.medium)
const iconSizePreset = ref<DesktopIconSizePreset>(
DesktopIconSizePreset.medium,
)
// Get device internal ID from DB
const getDeviceInternalIdAsync = async () => {
if (!deviceStore.deviceId || !currentVault.value?.drizzle) return undefined
const device = await currentVault.value.drizzle.query.haexDevices.findFirst({
where: eq(haexDevices.deviceId, deviceStore.deviceId),
})
const device = await currentVault.value.drizzle.query.haexDevices.findFirst(
{
where: eq(haexDevices.deviceId, deviceStore.deviceId),
},
)
return device?.id ? device.id : undefined
}
@ -56,7 +56,8 @@ export const useDesktopStore = defineStore('desktopStore', () => {
const deviceInternalId = await getDeviceInternalIdAsync()
if (!deviceInternalId) return
const preset = await settingsStore.syncDesktopIconSizeAsync(deviceInternalId)
const preset =
await settingsStore.syncDesktopIconSizeAsync(deviceInternalId)
iconSizePreset.value = preset
}
@ -69,51 +70,55 @@ export const useDesktopStore = defineStore('desktopStore', () => {
iconSizePreset.value = preset
}
// Reactive grid settings based on screen size
const effectiveGridColumns = computed(() => {
return isSmallScreen.value ? 4 : 8
})
const effectiveGridRows = computed(() => {
return isSmallScreen.value ? 5 : 6
})
const effectiveIconSize = computed(() => {
return iconSizePresetValues[iconSizePreset.value]
})
const iconPadding = 30
// Calculate grid cell size based on icon size
const gridCellSize = computed(() => {
// Add padding around icon (20px extra for spacing)
return effectiveIconSize.value + 20
// Add padding around icon (30px extra for spacing)
return effectiveIconSize.value + iconPadding
})
// Snap position to grid (centers icon in cell)
// iconWidth and iconHeight are optional - if provided, they're used for centering
const snapToGrid = (x: number, y: number, iconWidth?: number, iconHeight?: number) => {
const snapToGrid = (
x: number,
y: number,
iconWidth?: number,
iconHeight?: number,
) => {
const cellSize = gridCellSize.value
const halfCell = cellSize / 2
// Use provided dimensions or fall back to the effective icon size (not cell size!)
const actualIconWidth = iconWidth || effectiveIconSize.value
const actualIconHeight = iconHeight || effectiveIconSize.value
// Calculate which grid cell the position falls into
const col = Math.floor(x / cellSize)
const row = Math.floor(y / cellSize)
// Add half the icon size to x/y to get the center point for snapping
const centerX = x + actualIconWidth / 2
const centerY = y + actualIconHeight / 2
// Use provided dimensions or fall back to cell size
const actualIconWidth = iconWidth || cellSize
const actualIconHeight = iconHeight || cellSize
// Find nearest grid cell center
// Grid cells are centered at: halfCell, halfCell + cellSize, halfCell + 2*cellSize, ...
// Which is: halfCell + (n * cellSize) for n = 0, 1, 2, ...
const col = Math.round((centerX - halfCell) / cellSize)
const row = Math.round((centerY - halfCell) / cellSize)
// Center the icon in the cell(s) it occupies
const cellsWide = Math.max(1, Math.ceil(actualIconWidth / cellSize))
const cellsHigh = Math.max(1, Math.ceil(actualIconHeight / cellSize))
// Calculate the center of the target grid cell
const gridCenterX = halfCell + col * cellSize
const gridCenterY = halfCell + row * cellSize
const totalWidth = cellsWide * cellSize
const totalHeight = cellsHigh * cellSize
const paddingX = (totalWidth - actualIconWidth) / 2
const paddingY = (totalHeight - actualIconHeight) / 2
// Calculate the top-left position that centers the icon in the cell
const snappedX = gridCenterX - actualIconWidth / 2
const snappedY = gridCenterY - actualIconHeight / 2
return {
x: col * cellSize + paddingX,
y: row * cellSize + paddingY,
x: snappedX,
y: snappedY,
}
}
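As a sanity check, a standalone sketch of the new snapping math with concrete numbers, assuming a 64px icon (medium preset) and the 30px iconPadding; the constants are examples, only the formula matches the hunk above.

// Standalone re-implementation of the center-based snapping, for illustration only.
const ICON_PADDING = 30
const ICON_SIZE = 64                        // assumed medium preset
const CELL_SIZE = ICON_SIZE + ICON_PADDING  // 94
const HALF_CELL = CELL_SIZE / 2             // 47

function snapToGridExample(x: number, y: number, iconWidth = ICON_SIZE, iconHeight = ICON_SIZE) {
  // Snap based on the icon's center, then place the top-left so the icon is centered in its cell.
  const centerX = x + iconWidth / 2
  const centerY = y + iconHeight / 2
  const col = Math.round((centerX - HALF_CELL) / CELL_SIZE)
  const row = Math.round((centerY - HALF_CELL) / CELL_SIZE)
  return {
    x: HALF_CELL + col * CELL_SIZE - iconWidth / 2,
    y: HALF_CELL + row * CELL_SIZE - iconHeight / 2,
  }
}

// Dropping a 64px icon at (100, 40):
// centerX = 132 -> col = round((132 - 47) / 94) = 1 -> x = 47 + 94 - 32 = 109
// centerY = 72  -> row = round((72 - 47) / 94)  = 0 -> y = 47 - 32      = 15
console.log(snapToGridExample(100, 40)) // { x: 109, y: 15 }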
@ -134,9 +139,12 @@ export const useDesktopStore = defineStore('desktopStore', () => {
.from(haexDesktopItems)
.where(eq(haexDesktopItems.workspaceId, currentWorkspace.value.id))
desktopItems.value = items.map(item => ({
desktopItems.value = items.map((item) => ({
...item,
referenceId: item.itemType === 'extension' ? item.extensionId! : item.systemWindowId!,
referenceId:
item.itemType === 'extension'
? item.extensionId!
: item.systemWindowId!,
}))
} catch (error) {
console.error('Fehler beim Laden der Desktop-Items:', error)
@ -165,7 +173,10 @@ export const useDesktopStore = defineStore('desktopStore', () => {
workspaceId: targetWorkspaceId,
itemType: itemType,
extensionId: itemType === 'extension' ? referenceId : null,
systemWindowId: itemType === 'system' || itemType === 'file' || itemType === 'folder' ? referenceId : null,
systemWindowId:
itemType === 'system' || itemType === 'file' || itemType === 'folder'
? referenceId
: null,
positionX: positionX,
positionY: positionY,
}
@ -178,7 +189,10 @@ export const useDesktopStore = defineStore('desktopStore', () => {
if (result.length > 0 && result[0]) {
const itemWithRef = {
...result[0],
referenceId: itemType === 'extension' ? result[0].extensionId! : result[0].systemWindowId!,
referenceId:
itemType === 'extension'
? result[0].extensionId!
: result[0].systemWindowId!,
}
desktopItems.value.push(itemWithRef)
return itemWithRef
@ -189,7 +203,7 @@ export const useDesktopStore = defineStore('desktopStore', () => {
itemType,
referenceId,
workspaceId: targetWorkspaceId,
position: { x: positionX, y: positionY }
position: { x: positionX, y: positionY },
})
// Log full error details
@ -226,7 +240,10 @@ export const useDesktopStore = defineStore('desktopStore', () => {
const item = result[0]
desktopItems.value[index] = {
...item,
referenceId: item.itemType === 'extension' ? item.extensionId! : item.systemWindowId!,
referenceId:
item.itemType === 'extension'
? item.extensionId!
: item.systemWindowId!,
}
}
}
@ -259,16 +276,14 @@ export const useDesktopStore = defineStore('desktopStore', () => {
itemType: DesktopItemType,
referenceId: string,
) => {
return desktopItems.value.find(
(item) => {
if (item.itemType !== itemType) return false
if (itemType === 'extension') {
return item.extensionId === referenceId
} else {
return item.systemWindowId === referenceId
}
},
)
return desktopItems.value.find((item) => {
if (item.itemType !== itemType) return false
if (itemType === 'extension') {
return item.extensionId === referenceId
} else {
return item.systemWindowId === referenceId
}
})
}
const openDesktopItem = (
@ -279,9 +294,9 @@ export const useDesktopStore = defineStore('desktopStore', () => {
const windowManager = useWindowManagerStore()
if (itemType === 'system') {
const systemWindow = windowManager.getAllSystemWindows().find(
(win) => win.id === referenceId,
)
const systemWindow = windowManager
.getAllSystemWindows()
.find((win) => win.id === referenceId)
if (systemWindow) {
windowManager.openWindowAsync({
@ -439,8 +454,6 @@ export const useDesktopStore = defineStore('desktopStore', () => {
iconSizePreset,
syncDesktopIconSizeAsync,
updateDesktopIconSizeAsync,
effectiveGridColumns,
effectiveGridRows,
effectiveIconSize,
gridCellSize,
snapToGrid,

src/stores/sync/backends.ts (new file, 130 lines)
View File

@ -0,0 +1,130 @@
import { eq } from 'drizzle-orm'
import {
haexSyncBackends,
type InsertHaexSyncBackends,
type SelectHaexSyncBackends,
} from '~/database/schemas'
export const useSyncBackendsStore = defineStore('syncBackendsStore', () => {
const { currentVault } = storeToRefs(useVaultStore())
const backends = ref<SelectHaexSyncBackends[]>([])
const enabledBackends = computed(() =>
backends.value.filter((b) => b.enabled),
)
const sortedBackends = computed(() =>
[...backends.value].sort((a, b) => (b.priority || 0) - (a.priority || 0)),
)
// Load all sync backends from database
const loadBackendsAsync = async () => {
if (!currentVault.value?.drizzle) {
console.error('No vault opened')
return
}
try {
const result = await currentVault.value.drizzle
.select()
.from(haexSyncBackends)
backends.value = result
} catch (error) {
console.error('Failed to load sync backends:', error)
throw error
}
}
// Add a new sync backend
const addBackendAsync = async (backend: InsertHaexSyncBackends) => {
if (!currentVault.value?.drizzle) {
throw new Error('No vault opened')
}
try {
const result = await currentVault.value.drizzle
.insert(haexSyncBackends)
.values(backend)
.returning()
if (result.length > 0 && result[0]) {
backends.value.push(result[0])
return result[0]
}
} catch (error) {
console.error('Failed to add sync backend:', error)
throw error
}
}
// Update an existing sync backend
const updateBackendAsync = async (
id: string,
updates: Partial<InsertHaexSyncBackends>,
) => {
if (!currentVault.value?.drizzle) {
throw new Error('No vault opened')
}
try {
const result = await currentVault.value.drizzle
.update(haexSyncBackends)
.set(updates)
.where(eq(haexSyncBackends.id, id))
.returning()
if (result.length > 0 && result[0]) {
const index = backends.value.findIndex((b) => b.id === id)
if (index !== -1) {
backends.value[index] = result[0]
}
return result[0]
}
} catch (error) {
console.error('Failed to update sync backend:', error)
throw error
}
}
// Delete a sync backend
const deleteBackendAsync = async (id: string) => {
if (!currentVault.value?.drizzle) {
throw new Error('No vault opened')
}
try {
await currentVault.value.drizzle
.delete(haexSyncBackends)
.where(eq(haexSyncBackends.id, id))
backends.value = backends.value.filter((b) => b.id !== id)
} catch (error) {
console.error('Failed to delete sync backend:', error)
throw error
}
}
// Enable/disable a backend
const toggleBackendAsync = async (id: string, enabled: boolean) => {
return updateBackendAsync(id, { enabled })
}
// Update backend priority (for sync order)
const updatePriorityAsync = async (id: string, priority: number) => {
return updateBackendAsync(id, { priority })
}
return {
backends,
enabledBackends,
sortedBackends,
loadBackendsAsync,
addBackendAsync,
updateBackendAsync,
deleteBackendAsync,
toggleBackendAsync,
updatePriorityAsync,
}
})
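Below, a minimal usage sketch for this store, assuming it runs in a Nuxt/Pinia setup context with a vault already open; the backend name and server URL are placeholders.

// Hypothetical setup-time wiring; method names match the store above.
const syncBackends = useSyncBackendsStore()

async function configureExampleBackendAsync() {
  await syncBackends.loadBackendsAsync()

  // Add a backend if none is configured yet; id, enabled and createdAt use their defaults.
  if (syncBackends.backends.length === 0) {
    await syncBackends.addBackendAsync({
      name: 'Primary sync server',
      serverUrl: 'https://sync.example.com',
      priority: 10,
    })
  }

  // enabledBackends filters out disabled entries, sortedBackends orders by priority (highest first).
  console.log(syncBackends.enabledBackends, syncBackends.sortedBackends)
}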

src/stores/sync/engine.ts (new file, 390 lines)
View File

@ -0,0 +1,390 @@
/**
* Sync Engine Store - Executes sync operations with haex-sync-server backends
* Handles vault key storage and CRDT log synchronization
*/
import { createClient } from '@supabase/supabase-js'
import type { SelectHaexCrdtLogs } from '~/database/schemas'
import {
encryptVaultKeyAsync,
decryptVaultKeyAsync,
encryptCrdtDataAsync,
decryptCrdtDataAsync,
generateVaultKey,
} from '~/utils/crypto/vaultKey'
interface VaultKeyCache {
[vaultId: string]: {
vaultKey: Uint8Array
timestamp: number
}
}
interface SyncLogData {
vaultId: string
encryptedData: string
nonce: string
haexTimestamp: string
sequence: number
}
interface PullLogsResponse {
logs: Array<{
id: string
userId: string
vaultId: string
encryptedData: string
nonce: string
haexTimestamp: string
sequence: number
createdAt: string
}>
hasMore: boolean
}
export const useSyncEngineStore = defineStore('syncEngineStore', () => {
const { currentVault, currentVaultId } = storeToRefs(useVaultStore())
const syncBackendsStore = useSyncBackendsStore()
// In-memory cache for decrypted vault keys (cleared on logout/vault close)
const vaultKeyCache = ref<VaultKeyCache>({})
// Supabase client (initialized with config from backend)
const supabaseClient = ref<ReturnType<typeof createClient> | null>(null)
/**
* Initializes Supabase client for a specific backend
*/
const initSupabaseClientAsync = async (backendId: string) => {
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
if (!backend) {
throw new Error('Backend not found')
}
// Get Supabase URL and anon key from server health check
const response = await fetch(backend.serverUrl)
if (!response.ok) {
throw new Error('Failed to connect to sync server')
}
const serverInfo = await response.json()
const supabaseUrl = serverInfo.supabaseUrl
// For now, we need to configure the anon key somewhere
// TODO: Store this in backend config or fetch from somewhere secure
const supabaseAnonKey = 'YOUR_SUPABASE_ANON_KEY'
supabaseClient.value = createClient(supabaseUrl, supabaseAnonKey)
}
/**
* Gets the current Supabase auth token
*/
const getAuthTokenAsync = async (): Promise<string | null> => {
if (!supabaseClient.value) {
return null
}
const {
data: { session },
} = await supabaseClient.value.auth.getSession()
return session?.access_token ?? null
}
/**
* Stores encrypted vault key on the server
*/
const storeVaultKeyAsync = async (
backendId: string,
vaultId: string,
password: string,
): Promise<void> => {
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
if (!backend) {
throw new Error('Backend not found')
}
// Generate new vault key
const vaultKey = generateVaultKey()
// Encrypt vault key with password
const encryptedData = await encryptVaultKeyAsync(vaultKey, password)
// Get auth token
const token = await getAuthTokenAsync()
if (!token) {
throw new Error('Not authenticated')
}
// Send to server
const response = await fetch(`${backend.serverUrl}/sync/vault-key`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${token}`,
},
body: JSON.stringify({
vaultId,
...encryptedData,
}),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(
`Failed to store vault key: ${error.error || response.statusText}`,
)
}
// Cache decrypted vault key
vaultKeyCache.value[vaultId] = {
vaultKey,
timestamp: Date.now(),
}
}
/**
* Retrieves and decrypts vault key from the server
*/
const getVaultKeyAsync = async (
backendId: string,
vaultId: string,
password: string,
): Promise<Uint8Array> => {
// Check cache first
const cached = vaultKeyCache.value[vaultId]
if (cached) {
return cached.vaultKey
}
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
if (!backend) {
throw new Error('Backend not found')
}
// Get auth token
const token = await getAuthTokenAsync()
if (!token) {
throw new Error('Not authenticated')
}
// Fetch from server
const response = await fetch(
`${backend.serverUrl}/sync/vault-key/${vaultId}`,
{
method: 'GET',
headers: {
'Authorization': `Bearer ${token}`,
},
},
)
if (response.status === 404) {
throw new Error('Vault key not found on server')
}
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(
`Failed to get vault key: ${error.error || response.statusText}`,
)
}
const data = await response.json()
// Decrypt vault key
const vaultKey = await decryptVaultKeyAsync(
data.encryptedVaultKey,
data.salt,
data.nonce,
password,
)
// Cache decrypted vault key
vaultKeyCache.value[vaultId] = {
vaultKey,
timestamp: Date.now(),
}
return vaultKey
}
/**
* Pushes CRDT logs to the server
*/
const pushLogsAsync = async (
backendId: string,
vaultId: string,
logs: SelectHaexCrdtLogs[],
): Promise<void> => {
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
if (!backend) {
throw new Error('Backend not found')
}
// Get vault key from cache
const cached = vaultKeyCache.value[vaultId]
if (!cached) {
throw new Error('Vault key not available. Please unlock vault first.')
}
const vaultKey = cached.vaultKey
// Get auth token
const token = await getAuthTokenAsync()
if (!token) {
throw new Error('Not authenticated')
}
// Encrypt each log entry
const encryptedLogs: SyncLogData[] = []
for (const log of logs) {
const { encryptedData, nonce } = await encryptCrdtDataAsync(
log,
vaultKey,
)
// Generate sequence number based on timestamp
const sequence = Date.now()
encryptedLogs.push({
vaultId,
encryptedData,
nonce,
haexTimestamp: log.haexTimestamp!,
sequence,
})
}
// Send to server
const response = await fetch(`${backend.serverUrl}/sync/push`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${token}`,
},
body: JSON.stringify({
vaultId,
logs: encryptedLogs,
}),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(
`Failed to push logs: ${error.error || response.statusText}`,
)
}
}
/**
* Pulls CRDT logs from the server
*/
const pullLogsAsync = async (
backendId: string,
vaultId: string,
afterSequence?: number,
limit?: number,
): Promise<SelectHaexCrdtLogs[]> => {
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
if (!backend) {
throw new Error('Backend not found')
}
// Get vault key from cache
const cached = vaultKeyCache.value[vaultId]
if (!cached) {
throw new Error('Vault key not available. Please unlock vault first.')
}
const vaultKey = cached.vaultKey
// Get auth token
const token = await getAuthTokenAsync()
if (!token) {
throw new Error('Not authenticated')
}
// Fetch from server
const response = await fetch(`${backend.serverUrl}/sync/pull`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${token}`,
},
body: JSON.stringify({
vaultId,
afterSequence,
limit: limit ?? 100,
}),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(
`Failed to pull logs: ${error.error || response.statusText}`,
)
}
const data: PullLogsResponse = await response.json()
// Decrypt each log entry
const decryptedLogs: SelectHaexCrdtLogs[] = []
for (const log of data.logs) {
try {
const decrypted = await decryptCrdtDataAsync<SelectHaexCrdtLogs>(
log.encryptedData,
log.nonce,
vaultKey,
)
decryptedLogs.push(decrypted)
} catch (error) {
console.error('Failed to decrypt log entry:', log.id, error)
// Skip corrupted entries
}
}
return decryptedLogs
}
/**
* Clears vault key from cache
*/
const clearVaultKeyCache = (vaultId?: string) => {
if (vaultId) {
delete vaultKeyCache.value[vaultId]
} else {
vaultKeyCache.value = {}
}
}
/**
* Health check - verifies server is reachable
*/
const healthCheckAsync = async (backendId: string): Promise<boolean> => {
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
if (!backend) {
return false
}
try {
const response = await fetch(backend.serverUrl)
return response.ok
} catch {
return false
}
}
return {
vaultKeyCache,
supabaseClient,
initSupabaseClientAsync,
getAuthTokenAsync,
storeVaultKeyAsync,
getVaultKeyAsync,
pushLogsAsync,
pullLogsAsync,
clearVaultKeyCache,
healthCheckAsync,
}
})
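A short sketch of the engine-level flow exposed above; the backend id, vault id and password are placeholders, it assumes a Supabase session already exists, and the empty log array stands in for rows read from haex_crdt_logs.

const syncEngine = useSyncEngineStore()

async function firstDeviceSetupAsync(backendId: string, vaultId: string, password: string) {
  // Resolve the Supabase URL via the backend's health endpoint and create the client.
  await syncEngine.initSupabaseClientAsync(backendId)

  // First device: generate a vault key and store it password-encrypted on the server.
  // Additional devices would call getVaultKeyAsync(backendId, vaultId, password) instead.
  await syncEngine.storeVaultKeyAsync(backendId, vaultId, password)

  // Push local CRDT logs; each entry is encrypted with the cached vault key before upload.
  await syncEngine.pushLogsAsync(backendId, vaultId, [])
}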

View File

@ -0,0 +1,525 @@
/**
* Sync Orchestrator Store - Orchestrates sync operations across all backends
* Uses Supabase Realtime subscriptions for instant sync
*/
import { eq, gt } from 'drizzle-orm'
import type { RealtimeChannel } from '@supabase/supabase-js'
import {
haexCrdtLogs,
haexSyncStatus,
type SelectHaexCrdtLogs,
type SelectHaexSyncStatus,
} from '~/database/schemas'
interface SyncState {
isConnected: boolean
isSyncing: boolean
error: string | null
subscription: RealtimeChannel | null
status: SelectHaexSyncStatus | null
}
interface BackendSyncState {
[backendId: string]: SyncState
}
export const useSyncOrchestratorStore = defineStore(
'syncOrchestratorStore',
() => {
const { currentVault, currentVaultId } = storeToRefs(useVaultStore())
const syncBackendsStore = useSyncBackendsStore()
const syncEngineStore = useSyncEngineStore()
// Sync state per backend
const syncStates = ref<BackendSyncState>({})
// Track if we're currently processing a local write
const isProcessingLocalWrite = ref(false)
/**
* Loads sync status from database for a backend
*/
const loadSyncStatusAsync = async (
backendId: string,
): Promise<SelectHaexSyncStatus | null> => {
if (!currentVault.value?.drizzle) {
throw new Error('No vault opened')
}
try {
const results = await currentVault.value.drizzle
.select()
.from(haexSyncStatus)
.where(eq(haexSyncStatus.backendId, backendId))
.limit(1)
return results[0] ?? null
} catch (error) {
console.error('Failed to load sync status:', error)
return null
}
}
/**
* Updates sync status in database
*/
const updateSyncStatusAsync = async (
backendId: string,
updates: Partial<SelectHaexSyncStatus>,
): Promise<void> => {
if (!currentVault.value?.drizzle) {
throw new Error('No vault opened')
}
try {
const existing = await loadSyncStatusAsync(backendId)
if (existing) {
// Update existing
await currentVault.value.drizzle
.update(haexSyncStatus)
.set({
...updates,
lastSyncAt: new Date().toISOString(),
})
.where(eq(haexSyncStatus.backendId, backendId))
} else {
// Insert new
await currentVault.value.drizzle.insert(haexSyncStatus).values({
backendId,
...updates,
lastSyncAt: new Date().toISOString(),
})
}
// Update local state
if (syncStates.value[backendId]) {
syncStates.value[backendId].status = await loadSyncStatusAsync(
backendId,
)
}
} catch (error) {
console.error('Failed to update sync status:', error)
throw error
}
}
/**
* Gets logs that need to be pushed to server (after last push HLC)
*/
const getLogsToPushAsync = async (
backendId: string,
): Promise<SelectHaexCrdtLogs[]> => {
if (!currentVault.value?.drizzle) {
throw new Error('No vault opened')
}
try {
const status = await loadSyncStatusAsync(backendId)
const lastPushHlc = status?.lastPushHlcTimestamp
const query = currentVault.value.drizzle
.select()
.from(haexCrdtLogs)
.orderBy(haexCrdtLogs.haexTimestamp)
if (lastPushHlc) {
return await query.where(
gt(haexCrdtLogs.haexTimestamp, lastPushHlc),
)
}
return await query
} catch (error) {
console.error('Failed to get logs to push:', error)
throw error
}
}
/**
* Applies remote logs to local database
*/
const applyRemoteLogsAsync = async (
logs: SelectHaexCrdtLogs[],
): Promise<void> => {
if (!currentVault.value?.drizzle) {
throw new Error('No vault opened')
}
try {
// Insert logs into local CRDT log table
for (const log of logs) {
await currentVault.value.drizzle
.insert(haexCrdtLogs)
.values(log)
.onConflictDoNothing() // Skip if already exists
}
// TODO: Apply CRDT log entries to actual data tables
// This requires replaying the operations from the log
console.log(`Applied ${logs.length} remote logs to local database`)
} catch (error) {
console.error('Failed to apply remote logs:', error)
throw error
}
}
/**
* Pushes local changes to a specific backend
*/
const pushToBackendAsync = async (backendId: string): Promise<void> => {
if (!currentVaultId.value) {
throw new Error('No vault opened')
}
const state = syncStates.value[backendId]
if (!state) {
throw new Error('Backend not initialized')
}
if (state.isSyncing) {
console.log(`Already syncing with backend ${backendId}`)
return
}
state.isSyncing = true
state.error = null
try {
// Get logs that need to be pushed
const logs = await getLogsToPushAsync(backendId)
if (logs.length === 0) {
console.log(`No logs to push to backend ${backendId}`)
return
}
await syncEngineStore.pushLogsAsync(
backendId,
currentVaultId.value,
logs,
)
// Update sync status with last pushed HLC timestamp
const lastHlc = logs[logs.length - 1]?.haexTimestamp
if (lastHlc) {
await updateSyncStatusAsync(backendId, {
lastPushHlcTimestamp: lastHlc,
})
}
console.log(`Pushed ${logs.length} logs to backend ${backendId}`)
} catch (error) {
console.error(`Failed to push to backend ${backendId}:`, error)
state.error = error instanceof Error ? error.message : 'Unknown error'
await updateSyncStatusAsync(backendId, {
error: state.error,
})
throw error
} finally {
state.isSyncing = false
}
}
/**
* Pulls changes from a specific backend
*/
const pullFromBackendAsync = async (backendId: string): Promise<void> => {
if (!currentVaultId.value) {
throw new Error('No vault opened')
}
const state = syncStates.value[backendId]
if (!state) {
throw new Error('Backend not initialized')
}
if (state.isSyncing) {
console.log(`Already syncing with backend ${backendId}`)
return
}
state.isSyncing = true
state.error = null
try {
const status = await loadSyncStatusAsync(backendId)
const afterSequence = status?.lastPullSequence ?? undefined
const remoteLogs = await syncEngineStore.pullLogsAsync(
backendId,
currentVaultId.value,
afterSequence,
100,
)
if (remoteLogs.length > 0) {
await applyRemoteLogsAsync(remoteLogs)
// Update sync status with last pulled sequence
// TODO: Get actual sequence from server response
const lastSequence = Date.now()
await updateSyncStatusAsync(backendId, {
lastPullSequence: lastSequence,
})
console.log(
`Pulled ${remoteLogs.length} logs from backend ${backendId}`,
)
}
} catch (error) {
console.error(`Failed to pull from backend ${backendId}:`, error)
state.error = error instanceof Error ? error.message : 'Unknown error'
await updateSyncStatusAsync(backendId, {
error: state.error,
})
throw error
} finally {
state.isSyncing = false
}
}
/**
* Handles incoming realtime changes from Supabase
*/
const handleRealtimeChangeAsync = async (
backendId: string,
payload: any,
) => {
console.log(`Realtime change from backend ${backendId}:`, payload)
// Don't process if we're currently writing locally to avoid loops
if (isProcessingLocalWrite.value) {
console.log('Skipping realtime change - local write in progress')
return
}
// Pull latest changes from this backend
try {
await pullFromBackendAsync(backendId)
} catch (error) {
console.error('Failed to handle realtime change:', error)
}
}
/**
* Subscribes to realtime changes from a backend
*/
const subscribeToBackendAsync = async (backendId: string): Promise<void> => {
if (!currentVaultId.value) {
throw new Error('No vault opened')
}
const state = syncStates.value[backendId]
if (!state) {
throw new Error('Backend not initialized')
}
if (state.subscription) {
console.log(`Already subscribed to backend ${backendId}`)
return
}
const client = syncEngineStore.supabaseClient
if (!client) {
throw new Error('Supabase client not initialized')
}
try {
// Subscribe to sync_logs table for this vault
const channel = client
.channel(`sync_logs:${currentVaultId.value}`)
.on(
'postgres_changes',
{
event: 'INSERT',
schema: 'public',
table: 'sync_logs',
filter: `vault_id=eq.${currentVaultId.value}`,
},
(payload) => {
handleRealtimeChangeAsync(backendId, payload).catch(console.error)
},
)
.subscribe((status) => {
if (status === 'SUBSCRIBED') {
state.isConnected = true
console.log(`Subscribed to backend ${backendId}`)
} else if (status === 'CHANNEL_ERROR' || status === 'TIMED_OUT') {
state.isConnected = false
state.error = `Subscription error: ${status}`
console.error(
`Subscription to backend ${backendId} failed: ${status}`,
)
}
})
state.subscription = channel
} catch (error) {
console.error(`Failed to subscribe to backend ${backendId}:`, error)
state.error = error instanceof Error ? error.message : 'Unknown error'
throw error
}
}
/**
* Unsubscribes from realtime changes
*/
const unsubscribeFromBackendAsync = async (
backendId: string,
): Promise<void> => {
const state = syncStates.value[backendId]
if (!state || !state.subscription) {
return
}
try {
await state.subscription.unsubscribe()
state.subscription = null
state.isConnected = false
console.log(`Unsubscribed from backend ${backendId}`)
} catch (error) {
console.error(`Failed to unsubscribe from backend ${backendId}:`, error)
}
}
/**
* Initializes sync for a backend
*/
const initBackendAsync = async (backendId: string): Promise<void> => {
if (syncStates.value[backendId]) {
console.log(`Backend ${backendId} already initialized`)
return
}
// Load sync status from database
const status = await loadSyncStatusAsync(backendId)
// Initialize state
syncStates.value[backendId] = {
isConnected: false,
isSyncing: false,
error: null,
subscription: null,
status,
}
try {
// Initial pull to get all existing data
await pullFromBackendAsync(backendId)
// Subscribe to realtime changes
await subscribeToBackendAsync(backendId)
} catch (error) {
console.error(`Failed to initialize backend ${backendId}:`, error)
throw error
}
}
/**
* Called after local write operations to push changes
*/
const onLocalWriteAsync = async (): Promise<void> => {
isProcessingLocalWrite.value = true
try {
// Push to all enabled backends in parallel
const enabledBackends = syncBackendsStore.enabledBackends
await Promise.allSettled(
enabledBackends.map((backend) => pushToBackendAsync(backend.id)),
)
} catch (error) {
console.error('Failed to push local changes:', error)
} finally {
isProcessingLocalWrite.value = false
}
}
/**
* Starts sync for all enabled backends
*/
const startSyncAsync = async (): Promise<void> => {
const enabledBackends = syncBackendsStore.enabledBackends
if (enabledBackends.length === 0) {
console.log('No enabled backends to sync with')
return
}
console.log(`Starting sync with ${enabledBackends.length} backends`)
for (const backend of enabledBackends) {
try {
await initBackendAsync(backend.id)
} catch (error) {
console.error(
`Failed to start sync with backend ${backend.id}:`,
error,
)
}
}
}
/**
* Stops sync for all backends
*/
const stopSyncAsync = async (): Promise<void> => {
console.log('Stopping sync for all backends')
for (const backendId of Object.keys(syncStates.value)) {
await unsubscribeFromBackendAsync(backendId)
}
syncStates.value = {}
}
/**
* Gets sync state for a specific backend
*/
const getSyncState = (backendId: string): SyncState | null => {
return syncStates.value[backendId] ?? null
}
/**
* Checks if any backend is currently syncing
*/
const isAnySyncing = computed(() => {
return Object.values(syncStates.value).some((state) => state.isSyncing)
})
/**
* Checks if all backends are connected
*/
const areAllConnected = computed(() => {
const enabledBackends = syncBackendsStore.enabledBackends
if (enabledBackends.length === 0) return false
return enabledBackends.every((backend) => {
const state = syncStates.value[backend.id]
return state?.isConnected ?? false
})
})
return {
syncStates,
isProcessingLocalWrite,
isAnySyncing,
areAllConnected,
loadSyncStatusAsync,
updateSyncStatusAsync,
getLogsToPushAsync,
applyRemoteLogsAsync,
pushToBackendAsync,
pullFromBackendAsync,
subscribeToBackendAsync,
unsubscribeFromBackendAsync,
initBackendAsync,
onLocalWriteAsync,
startSyncAsync,
stopSyncAsync,
getSyncState,
}
},
)
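For orientation, a sketch of the intended lifecycle as suggested by this store's API; where exactly these hooks live (vault open/close, post-write) is an assumption, only the call order follows the code above.

const syncOrchestrator = useSyncOrchestratorStore()

async function onVaultOpenedAsync() {
  // Initial pull plus realtime subscription for every enabled backend.
  await syncOrchestrator.startSyncAsync()
}

async function afterLocalWriteAsync() {
  // Event-driven push: call after committing local CRDT changes.
  await syncOrchestrator.onLocalWriteAsync()
}

async function onVaultClosedAsync() {
  // Unsubscribe all realtime channels and drop per-backend state.
  await syncOrchestrator.stopSyncAsync()
}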

View File

@ -0,0 +1,250 @@
/**
* Crypto utilities for Vault Key Management
* Implements the "Hybrid-Ansatz" for vault key encryption
*/
const PBKDF2_ITERATIONS = 600_000
const KEY_LENGTH = 256
const ALGORITHM = 'AES-GCM'
/**
* Derives a cryptographic key from a password using PBKDF2
*/
export async function deriveKeyFromPasswordAsync(
password: string,
salt: Uint8Array,
): Promise<CryptoKey> {
const encoder = new TextEncoder()
const passwordBuffer = encoder.encode(password)
// Ensure salt has a proper ArrayBuffer (not SharedArrayBuffer)
const saltBuffer = new Uint8Array(salt)
// Import password as key material
const keyMaterial = await crypto.subtle.importKey(
'raw',
passwordBuffer,
'PBKDF2',
false,
['deriveKey'],
)
// Derive key using PBKDF2
return await crypto.subtle.deriveKey(
{
name: 'PBKDF2',
salt: saltBuffer,
iterations: PBKDF2_ITERATIONS,
hash: 'SHA-256',
},
keyMaterial,
{ name: ALGORITHM, length: KEY_LENGTH },
false, // not extractable
['encrypt', 'decrypt'],
)
}
/**
* Generates a random vault key (32 bytes)
*/
export function generateVaultKey(): Uint8Array {
return crypto.getRandomValues(new Uint8Array(32))
}
/**
* Encrypts the vault key with a password-derived key
* Returns: { encryptedVaultKey, salt, nonce } all as Base64 strings
*/
export async function encryptVaultKeyAsync(
vaultKey: Uint8Array,
password: string,
): Promise<{
encryptedVaultKey: string
salt: string
nonce: string
}> {
// Generate random salt for PBKDF2
const salt = crypto.getRandomValues(new Uint8Array(32))
// Derive encryption key from password
const derivedKey = await deriveKeyFromPasswordAsync(password, salt)
// Generate random nonce for AES-GCM
const nonce = crypto.getRandomValues(new Uint8Array(12))
// Ensure vaultKey has proper ArrayBuffer
const vaultKeyBuffer = new Uint8Array(vaultKey)
// Encrypt vault key
const encryptedBuffer = await crypto.subtle.encrypt(
{
name: ALGORITHM,
iv: nonce,
},
derivedKey,
vaultKeyBuffer,
)
// Convert to Base64 for storage
return {
encryptedVaultKey: arrayBufferToBase64(encryptedBuffer),
salt: arrayBufferToBase64(salt),
nonce: arrayBufferToBase64(nonce),
}
}
/**
* Decrypts the vault key using the password
*/
export async function decryptVaultKeyAsync(
encryptedVaultKey: string,
salt: string,
nonce: string,
password: string,
): Promise<Uint8Array> {
// Convert Base64 to Uint8Array
const encryptedBuffer = base64ToArrayBuffer(encryptedVaultKey)
const saltBuffer = base64ToArrayBuffer(salt)
const nonceBuffer = base64ToArrayBuffer(nonce)
// Derive decryption key from password
const derivedKey = await deriveKeyFromPasswordAsync(password, saltBuffer)
// Ensure buffers have proper ArrayBuffer
const encryptedData = new Uint8Array(encryptedBuffer)
const iv = new Uint8Array(nonceBuffer)
// Decrypt vault key
const decryptedBuffer = await crypto.subtle.decrypt(
{
name: ALGORITHM,
iv,
},
derivedKey,
encryptedData,
)
return new Uint8Array(decryptedBuffer)
}
/**
* Encrypts CRDT log data with the vault key
*/
export async function encryptCrdtDataAsync(
data: object,
vaultKey: Uint8Array,
): Promise<{
encryptedData: string
nonce: string
}> {
// Ensure vaultKey has proper ArrayBuffer
const vaultKeyBuffer = new Uint8Array(vaultKey)
// Import vault key for encryption
const cryptoKey = await crypto.subtle.importKey(
'raw',
vaultKeyBuffer,
{ name: ALGORITHM },
false,
['encrypt'],
)
// Generate random nonce
const nonce = crypto.getRandomValues(new Uint8Array(12))
// Serialize data to JSON
const encoder = new TextEncoder()
const dataBuffer = encoder.encode(JSON.stringify(data))
// Encrypt data
const encryptedBuffer = await crypto.subtle.encrypt(
{
name: ALGORITHM,
iv: nonce,
},
cryptoKey,
dataBuffer,
)
return {
encryptedData: arrayBufferToBase64(encryptedBuffer),
nonce: arrayBufferToBase64(nonce),
}
}
/**
* Decrypts CRDT log data with the vault key
*/
export async function decryptCrdtDataAsync<T = object>(
encryptedData: string,
nonce: string,
vaultKey: Uint8Array,
): Promise<T> {
// Ensure vaultKey has proper ArrayBuffer
const vaultKeyBuffer = new Uint8Array(vaultKey)
// Import vault key for decryption
const cryptoKey = await crypto.subtle.importKey(
'raw',
vaultKeyBuffer,
{ name: ALGORITHM },
false,
['decrypt'],
)
// Convert Base64 to buffers
const encryptedBuffer = base64ToArrayBuffer(encryptedData)
const nonceBuffer = base64ToArrayBuffer(nonce)
// Ensure buffers have proper ArrayBuffer
const encryptedDataBuffer = new Uint8Array(encryptedBuffer)
const iv = new Uint8Array(nonceBuffer)
// Decrypt data
const decryptedBuffer = await crypto.subtle.decrypt(
{
name: ALGORITHM,
iv,
},
cryptoKey,
encryptedDataBuffer,
)
// Parse JSON
const decoder = new TextDecoder()
const jsonString = decoder.decode(decryptedBuffer)
return JSON.parse(jsonString) as T
}
// Utility functions for Base64 conversion
function arrayBufferToBase64(buffer: ArrayBuffer | Uint8Array): string {
const bytes = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer)
// Use Buffer for efficient base64 encoding (works in Node/Bun)
if (typeof Buffer !== 'undefined') {
return Buffer.from(bytes).toString('base64')
}
// Fallback to btoa for browser environments
let binary = ''
for (let i = 0; i < bytes.length; i++) {
const byte = bytes[i]
if (byte !== undefined) {
binary += String.fromCharCode(byte)
}
}
return btoa(binary)
}
function base64ToArrayBuffer(base64: string): Uint8Array {
// Use Buffer for efficient base64 decoding (works in Node/Bun)
if (typeof Buffer !== 'undefined') {
return new Uint8Array(Buffer.from(base64, 'base64'))
}
// Fallback to atob for browser environments
const binary = atob(base64)
const bytes = new Uint8Array(binary.length)
for (let i = 0; i < binary.length; i++) {
bytes[i] = binary.charCodeAt(i)
}
return bytes
}
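Finally, a round-trip sketch using only the exports above; the password and the CRDT payload are examples.

async function demoVaultKeyRoundTripAsync() {
  const password = 'correct horse battery staple'

  // 1. Generate a fresh vault key and encrypt it with the password (PBKDF2 + AES-GCM).
  const vaultKey = generateVaultKey()
  const stored = await encryptVaultKeyAsync(vaultKey, password)

  // 2. Later, possibly on another device: recover the vault key from the stored Base64 blobs.
  const recovered = await decryptVaultKeyAsync(
    stored.encryptedVaultKey,
    stored.salt,
    stored.nonce,
    password,
  )

  // 3. Encrypt and decrypt a CRDT log entry with the recovered vault key.
  const { encryptedData, nonce } = await encryptCrdtDataAsync(
    { tableName: 'haex_settings', opType: 'update' },
    recovered,
  )
  const entry = await decryptCrdtDataAsync<{ tableName: string; opType: string }>(
    encryptedData,
    nonce,
    recovered,
  )
  console.log(entry.tableName) // 'haex_settings'
}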