mirror of
https://github.com/haexhub/haex-hub.git
synced 2025-12-18 06:50:51 +01:00
Compare commits
19 Commits
v0.1.10
...
9583e2f44b
| Author | SHA1 | Date | |
|---|---|---|---|
| 9583e2f44b | |||
| d886fbd8bd | |||
| 9bad4008f2 | |||
| 203f81e775 | |||
| 554cb7762d | |||
| 5856a73e5b | |||
| 38cc6f36d4 | |||
| 0d4059e518 | |||
| c551641737 | |||
| 75093485bd | |||
| e1be08cb76 | |||
| 7d1f346c4b | |||
| af61972342 | |||
| 6187e32f89 | |||
| 43ba246174 | |||
| 2b739b9e79 | |||
| 63849d86e1 | |||
| 9adee46166 | |||
| be7dff72dd |
26
README.md
26
README.md
@ -168,6 +168,32 @@ pnpm install
|
||||
pnpm tauri dev
|
||||
```
|
||||
|
||||
#### 📦 Release Process
|
||||
|
||||
Create a new release using the automated scripts:
|
||||
|
||||
```bash
|
||||
# Patch release (0.1.13 → 0.1.14)
|
||||
pnpm release:patch
|
||||
|
||||
# Minor release (0.1.13 → 0.2.0)
|
||||
pnpm release:minor
|
||||
|
||||
# Major release (0.1.13 → 1.0.0)
|
||||
pnpm release:major
|
||||
```
|
||||
|
||||
The script automatically:
|
||||
1. Updates version in `package.json`
|
||||
2. Creates a git commit
|
||||
3. Creates a git tag
|
||||
4. Pushes to remote
|
||||
|
||||
GitHub Actions will then automatically:
|
||||
- Build desktop apps (macOS, Linux, Windows)
|
||||
- Build Android apps (APK and AAB)
|
||||
- Create and publish a GitHub release
|
||||
|
||||
#### 🧭 Summary
|
||||
|
||||
HaexHub aims to:
|
||||
|
||||
36
package.json
36
package.json
@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "haex-hub",
|
||||
"private": true,
|
||||
"version": "0.1.10",
|
||||
"version": "0.1.13",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "nuxt build",
|
||||
@ -14,6 +14,9 @@
|
||||
"generate": "nuxt generate",
|
||||
"postinstall": "nuxt prepare",
|
||||
"preview": "nuxt preview",
|
||||
"release:patch": "node scripts/release.js patch",
|
||||
"release:minor": "node scripts/release.js minor",
|
||||
"release:major": "node scripts/release.js major",
|
||||
"tauri:build:debug": "tauri build --debug",
|
||||
"tauri": "tauri"
|
||||
},
|
||||
@ -23,8 +26,9 @@
|
||||
"@nuxt/icon": "2.0.0",
|
||||
"@nuxt/ui": "4.1.0",
|
||||
"@nuxtjs/i18n": "10.0.6",
|
||||
"@pinia/nuxt": "^0.11.2",
|
||||
"@tailwindcss/vite": "^4.1.16",
|
||||
"@pinia/nuxt": "^0.11.3",
|
||||
"@supabase/supabase-js": "^2.80.0",
|
||||
"@tailwindcss/vite": "^4.1.17",
|
||||
"@tauri-apps/api": "^2.9.0",
|
||||
"@tauri-apps/plugin-dialog": "^2.4.2",
|
||||
"@tauri-apps/plugin-fs": "^2.4.4",
|
||||
@ -37,32 +41,32 @@
|
||||
"@vueuse/gesture": "^2.0.0",
|
||||
"@vueuse/nuxt": "^13.9.0",
|
||||
"drizzle-orm": "^0.44.7",
|
||||
"eslint": "^9.38.0",
|
||||
"eslint": "^9.39.1",
|
||||
"nuxt-zod-i18n": "^1.12.1",
|
||||
"swiper": "^12.0.3",
|
||||
"tailwindcss": "^4.1.16",
|
||||
"vue": "^3.5.22",
|
||||
"tailwindcss": "^4.1.17",
|
||||
"vue": "^3.5.24",
|
||||
"vue-router": "^4.6.3",
|
||||
"zod": "^3.25.76"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@iconify-json/hugeicons": "^1.2.17",
|
||||
"@iconify-json/lucide": "^1.2.71",
|
||||
"@iconify/json": "^2.2.401",
|
||||
"@iconify/tailwind4": "^1.0.6",
|
||||
"@iconify-json/lucide": "^1.2.72",
|
||||
"@iconify/json": "^2.2.404",
|
||||
"@iconify/tailwind4": "^1.1.0",
|
||||
"@libsql/client": "^0.15.15",
|
||||
"@tauri-apps/cli": "^2.9.1",
|
||||
"@types/node": "^24.9.1",
|
||||
"@tauri-apps/cli": "^2.9.3",
|
||||
"@types/node": "^24.10.0",
|
||||
"@vitejs/plugin-vue": "6.0.1",
|
||||
"@vue/compiler-sfc": "^3.5.22",
|
||||
"drizzle-kit": "^0.31.5",
|
||||
"globals": "^16.4.0",
|
||||
"nuxt": "^4.2.0",
|
||||
"@vue/compiler-sfc": "^3.5.24",
|
||||
"drizzle-kit": "^0.31.6",
|
||||
"globals": "^16.5.0",
|
||||
"nuxt": "^4.2.1",
|
||||
"prettier": "3.6.2",
|
||||
"tsx": "^4.20.6",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "^5.9.3",
|
||||
"vite": "^7.1.3",
|
||||
"vite": "^7.2.2",
|
||||
"vue-tsc": "3.0.6"
|
||||
},
|
||||
"prettier": {
|
||||
|
||||
3239
pnpm-lock.yaml
generated
3239
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
91
scripts/release.js
Executable file
91
scripts/release.js
Executable file
@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env node
// Release helper: bumps the semver in package.json, commits, tags (v<version>),
// and pushes both so CI can build and publish the release.
// Usage: node scripts/release.js <patch|minor|major>

import { readFileSync, writeFileSync } from 'fs';
import { execSync } from 'child_process';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const rootDir = join(__dirname, '..');

const versionType = process.argv[2];

if (!['patch', 'minor', 'major'].includes(versionType)) {
  console.error('Usage: pnpm release <patch|minor|major>');
  process.exit(1);
}

// Read current package.json
const packageJsonPath = join(rootDir, 'package.json');
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
const currentVersion = packageJson.version;

if (!currentVersion) {
  console.error('No version found in package.json');
  process.exit(1);
}

// FIX: validate the version shape before parsing. The original split/mapped
// blindly, so a malformed version (e.g. "0.1" or "1.2.3-beta") produced NaN
// components and would have committed/tagged something like "NaN.NaN.1".
if (!/^\d+\.\d+\.\d+$/.test(currentVersion)) {
  console.error(`Invalid version "${currentVersion}" in package.json (expected MAJOR.MINOR.PATCH)`);
  process.exit(1);
}

// Parse version
const [major, minor, patch] = currentVersion.split('.').map(Number);

// Calculate new version (versionType is already validated above)
let newVersion;
switch (versionType) {
  case 'major':
    newVersion = `${major + 1}.0.0`;
    break;
  case 'minor':
    newVersion = `${major}.${minor + 1}.0`;
    break;
  case 'patch':
    newVersion = `${major}.${minor}.${patch + 1}`;
    break;
}

console.log(`📦 Bumping version from ${currentVersion} to ${newVersion}`);

// Update package.json
packageJson.version = newVersion;
writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n');
console.log('✅ Updated package.json');

// Git operations. Track whether the bump commit landed so the catch block
// knows how far we got before a failure.
let committed = false;
try {
  // Refuse to proceed if anything besides package.json is dirty, so the
  // release commit contains only the version bump.
  const status = execSync('git status --porcelain', { encoding: 'utf8' });
  const hasOtherChanges = status
    .split('\n')
    .some((line) => line && !line.includes('package.json'));

  if (hasOtherChanges) {
    console.error('❌ There are uncommitted changes besides package.json. Please commit or stash them first.');
    process.exit(1);
  }

  // Add and commit package.json
  execSync('git add package.json', { stdio: 'inherit' });
  execSync(`git commit -m "Bump version to ${newVersion}"`, { stdio: 'inherit' });
  committed = true;
  console.log('✅ Committed version bump');

  // Create tag
  execSync(`git tag v${newVersion}`, { stdio: 'inherit' });
  console.log(`✅ Created tag v${newVersion}`);

  // Push changes and tag
  console.log('📤 Pushing to remote...');
  execSync('git push', { stdio: 'inherit' });
  execSync(`git push origin v${newVersion}`, { stdio: 'inherit' });
  console.log('✅ Pushed changes and tag');

  console.log('\n🎉 Release v' + newVersion + ' created successfully!');
  console.log('📋 GitHub Actions will now build and publish the release.');
} catch (error) {
  console.error('❌ Git operation failed:', error.message);
  if (!committed) {
    // Nothing landed in git yet — safe to restore package.json on disk.
    packageJson.version = currentVersion;
    writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2) + '\n');
    console.log('↩️ Rolled back package.json changes');
  } else {
    // FIX: the original rewrote package.json back to the old version here even
    // after the bump commit succeeded (e.g. when only the push failed), which
    // left the working tree dirty and contradicting HEAD. Leave the commit in
    // place and tell the user how to recover instead.
    console.error(`⚠️ The version bump commit (and possibly tag v${newVersion}) already exist locally.`);
    console.error('   Inspect with "git log -1" / "git tag", then retry the push or reset manually.');
  }
  process.exit(1);
}
|
||||
6
src-tauri/bindings/ExtensionErrorCode.ts
Normal file
6
src-tauri/bindings/ExtensionErrorCode.ts
Normal file
@ -0,0 +1,6 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Error codes for frontend handling
|
||||
*/
|
||||
export type ExtensionErrorCode = "SecurityViolation" | "NotFound" | "PermissionDenied" | "MutexPoisoned" | "Database" | "Filesystem" | "FilesystemWithPath" | "Http" | "Shell" | "Manifest" | "Validation" | "InvalidPublicKey" | "InvalidSignature" | "InvalidActionString" | "SignatureVerificationFailed" | "CalculateHash" | "Installation";
|
||||
6
src-tauri/bindings/SerializedExtensionError.ts
Normal file
6
src-tauri/bindings/SerializedExtensionError.ts
Normal file
@ -0,0 +1,6 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Serialized representation of ExtensionError for TypeScript
|
||||
*/
|
||||
export type SerializedExtensionError = { code: number, type: string, message: string, extension_id: string | null, };
|
||||
@ -30,10 +30,15 @@
|
||||
"fs:allow-resource-write-recursive",
|
||||
"fs:allow-download-read-recursive",
|
||||
"fs:allow-download-write-recursive",
|
||||
"fs:allow-temp-read-recursive",
|
||||
"fs:allow-temp-write-recursive",
|
||||
"fs:default",
|
||||
{
|
||||
"identifier": "fs:scope",
|
||||
"allow": [{ "path": "**" }]
|
||||
"allow": [
|
||||
{ "path": "**" },
|
||||
{ "path": "$TEMP/**" }
|
||||
]
|
||||
},
|
||||
"http:allow-fetch-send",
|
||||
"http:allow-fetch",
|
||||
@ -44,6 +49,12 @@
|
||||
"notification:allow-is-permission-granted",
|
||||
"notification:default",
|
||||
"opener:allow-open-url",
|
||||
{
|
||||
"identifier": "opener:allow-open-path",
|
||||
"allow": [
|
||||
{ "path": "$TEMP/**" }
|
||||
]
|
||||
},
|
||||
"opener:default",
|
||||
"os:allow-hostname",
|
||||
"os:default",
|
||||
|
||||
10
src-tauri/database/migrations/0003_luxuriant_deathstrike.sql
Normal file
10
src-tauri/database/migrations/0003_luxuriant_deathstrike.sql
Normal file
@ -0,0 +1,10 @@
|
||||
-- Migration 0003: registry of remote sync backends this app instance can use.
-- Matches the drizzle snapshot 0003 definition of `haex_sync_backends`.
CREATE TABLE `haex_sync_backends` (
	`id` text PRIMARY KEY NOT NULL,            -- backend record id (text UUID, app-generated)
	`name` text NOT NULL,                      -- user-facing display name
	`server_url` text NOT NULL,                -- base URL of the sync server
	`enabled` integer DEFAULT true NOT NULL,   -- boolean flag (SQLite stores as integer)
	`priority` integer DEFAULT 0 NOT NULL,     -- ordering among multiple backends; higher/lower semantics decided by app code — TODO confirm
	`created_at` text DEFAULT (CURRENT_TIMESTAMP),
	`updated_at` integer,                      -- NOTE(review): integer here vs text `created_at`; presumably a unix epoch — verify against app code
	`haex_timestamp` text                      -- CRDT HLC timestamp column, as on the other haex_* tables
);
|
||||
843
src-tauri/database/migrations/meta/0003_snapshot.json
Normal file
843
src-tauri/database/migrations/meta/0003_snapshot.json
Normal file
@ -0,0 +1,843 @@
|
||||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "bf82259e-9264-44e7-a60f-8cc14a1f22e2",
|
||||
"prevId": "3aedf10c-2266-40f4-8549-0ff8b0588853",
|
||||
"tables": {
|
||||
"haex_crdt_configs": {
|
||||
"name": "haex_crdt_configs",
|
||||
"columns": {
|
||||
"key": {
|
||||
"name": "key",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"value": {
|
||||
"name": "value",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_crdt_logs": {
|
||||
"name": "haex_crdt_logs",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"table_name": {
|
||||
"name": "table_name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"row_pks": {
|
||||
"name": "row_pks",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"op_type": {
|
||||
"name": "op_type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"column_name": {
|
||||
"name": "column_name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"new_value": {
|
||||
"name": "new_value",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"old_value": {
|
||||
"name": "old_value",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"idx_haex_timestamp": {
|
||||
"name": "idx_haex_timestamp",
|
||||
"columns": [
|
||||
"haex_timestamp"
|
||||
],
|
||||
"isUnique": false
|
||||
},
|
||||
"idx_table_row": {
|
||||
"name": "idx_table_row",
|
||||
"columns": [
|
||||
"table_name",
|
||||
"row_pks"
|
||||
],
|
||||
"isUnique": false
|
||||
}
|
||||
},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_crdt_snapshots": {
|
||||
"name": "haex_crdt_snapshots",
|
||||
"columns": {
|
||||
"snapshot_id": {
|
||||
"name": "snapshot_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"created": {
|
||||
"name": "created",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"epoch_hlc": {
|
||||
"name": "epoch_hlc",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"location_url": {
|
||||
"name": "location_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"file_size_bytes": {
|
||||
"name": "file_size_bytes",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_desktop_items": {
|
||||
"name": "haex_desktop_items",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"workspace_id": {
|
||||
"name": "workspace_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"item_type": {
|
||||
"name": "item_type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"extension_id": {
|
||||
"name": "extension_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"system_window_id": {
|
||||
"name": "system_window_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"position_x": {
|
||||
"name": "position_x",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"position_y": {
|
||||
"name": "position_y",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {
|
||||
"haex_desktop_items_workspace_id_haex_workspaces_id_fk": {
|
||||
"name": "haex_desktop_items_workspace_id_haex_workspaces_id_fk",
|
||||
"tableFrom": "haex_desktop_items",
|
||||
"tableTo": "haex_workspaces",
|
||||
"columnsFrom": [
|
||||
"workspace_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
},
|
||||
"haex_desktop_items_extension_id_haex_extensions_id_fk": {
|
||||
"name": "haex_desktop_items_extension_id_haex_extensions_id_fk",
|
||||
"tableFrom": "haex_desktop_items",
|
||||
"tableTo": "haex_extensions",
|
||||
"columnsFrom": [
|
||||
"extension_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {
|
||||
"item_reference": {
|
||||
"name": "item_reference",
|
||||
"value": "(\"haex_desktop_items\".\"item_type\" = 'extension' AND \"haex_desktop_items\".\"extension_id\" IS NOT NULL AND \"haex_desktop_items\".\"system_window_id\" IS NULL) OR (\"haex_desktop_items\".\"item_type\" = 'system' AND \"haex_desktop_items\".\"system_window_id\" IS NOT NULL AND \"haex_desktop_items\".\"extension_id\" IS NULL) OR (\"haex_desktop_items\".\"item_type\" = 'file' AND \"haex_desktop_items\".\"system_window_id\" IS NOT NULL AND \"haex_desktop_items\".\"extension_id\" IS NULL) OR (\"haex_desktop_items\".\"item_type\" = 'folder' AND \"haex_desktop_items\".\"system_window_id\" IS NOT NULL AND \"haex_desktop_items\".\"extension_id\" IS NULL)"
|
||||
}
|
||||
}
|
||||
},
|
||||
"haex_devices": {
|
||||
"name": "haex_devices",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"device_id": {
|
||||
"name": "device_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "(CURRENT_TIMESTAMP)"
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"haex_devices_device_id_unique": {
|
||||
"name": "haex_devices_device_id_unique",
|
||||
"columns": [
|
||||
"device_id"
|
||||
],
|
||||
"isUnique": true
|
||||
}
|
||||
},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_extension_permissions": {
|
||||
"name": "haex_extension_permissions",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"extension_id": {
|
||||
"name": "extension_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"resource_type": {
|
||||
"name": "resource_type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"action": {
|
||||
"name": "action",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"target": {
|
||||
"name": "target",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"constraints": {
|
||||
"name": "constraints",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status": {
|
||||
"name": "status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": "'denied'"
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "(CURRENT_TIMESTAMP)"
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"haex_extension_permissions_extension_id_resource_type_action_target_unique": {
|
||||
"name": "haex_extension_permissions_extension_id_resource_type_action_target_unique",
|
||||
"columns": [
|
||||
"extension_id",
|
||||
"resource_type",
|
||||
"action",
|
||||
"target"
|
||||
],
|
||||
"isUnique": true
|
||||
}
|
||||
},
|
||||
"foreignKeys": {
|
||||
"haex_extension_permissions_extension_id_haex_extensions_id_fk": {
|
||||
"name": "haex_extension_permissions_extension_id_haex_extensions_id_fk",
|
||||
"tableFrom": "haex_extension_permissions",
|
||||
"tableTo": "haex_extensions",
|
||||
"columnsFrom": [
|
||||
"extension_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_extensions": {
|
||||
"name": "haex_extensions",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"public_key": {
|
||||
"name": "public_key",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"version": {
|
||||
"name": "version",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"author": {
|
||||
"name": "author",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"description": {
|
||||
"name": "description",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"entry": {
|
||||
"name": "entry",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "'index.html'"
|
||||
},
|
||||
"homepage": {
|
||||
"name": "homepage",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"enabled": {
|
||||
"name": "enabled",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": true
|
||||
},
|
||||
"icon": {
|
||||
"name": "icon",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"signature": {
|
||||
"name": "signature",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"single_instance": {
|
||||
"name": "single_instance",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"haex_extensions_public_key_name_unique": {
|
||||
"name": "haex_extensions_public_key_name_unique",
|
||||
"columns": [
|
||||
"public_key",
|
||||
"name"
|
||||
],
|
||||
"isUnique": true
|
||||
}
|
||||
},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_notifications": {
|
||||
"name": "haex_notifications",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"alt": {
|
||||
"name": "alt",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"date": {
|
||||
"name": "date",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"icon": {
|
||||
"name": "icon",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"image": {
|
||||
"name": "image",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"read": {
|
||||
"name": "read",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"source": {
|
||||
"name": "source",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"text": {
|
||||
"name": "text",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"type": {
|
||||
"name": "type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_settings": {
|
||||
"name": "haex_settings",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"device_id": {
|
||||
"name": "device_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"key": {
|
||||
"name": "key",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"type": {
|
||||
"name": "type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"value": {
|
||||
"name": "value",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"haex_settings_device_id_key_type_unique": {
|
||||
"name": "haex_settings_device_id_key_type_unique",
|
||||
"columns": [
|
||||
"device_id",
|
||||
"key",
|
||||
"type"
|
||||
],
|
||||
"isUnique": true
|
||||
}
|
||||
},
|
||||
"foreignKeys": {
|
||||
"haex_settings_device_id_haex_devices_id_fk": {
|
||||
"name": "haex_settings_device_id_haex_devices_id_fk",
|
||||
"tableFrom": "haex_settings",
|
||||
"tableTo": "haex_devices",
|
||||
"columnsFrom": [
|
||||
"device_id"
|
||||
],
|
||||
"columnsTo": [
|
||||
"id"
|
||||
],
|
||||
"onDelete": "cascade",
|
||||
"onUpdate": "no action"
|
||||
}
|
||||
},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_sync_backends": {
|
||||
"name": "haex_sync_backends",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"server_url": {
|
||||
"name": "server_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"enabled": {
|
||||
"name": "enabled",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": true
|
||||
},
|
||||
"priority": {
|
||||
"name": "priority",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "(CURRENT_TIMESTAMP)"
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"haex_workspaces": {
|
||||
"name": "haex_workspaces",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"device_id": {
|
||||
"name": "device_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"position": {
|
||||
"name": "position",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"background": {
|
||||
"name": "background",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {
|
||||
"haex_workspaces_position_unique": {
|
||||
"name": "haex_workspaces_position_unique",
|
||||
"columns": [
|
||||
"position"
|
||||
],
|
||||
"isUnique": true
|
||||
}
|
||||
},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
@ -22,6 +22,13 @@
|
||||
"when": 1762263814375,
|
||||
"tag": "0002_loose_quasimodo",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 3,
|
||||
"version": "6",
|
||||
"when": 1762300795436,
|
||||
"tag": "0003_luxuriant_deathstrike",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
Binary file not shown.
@ -1 +1 @@
|
||||
{"default":{"identifier":"default","description":"Capability for the main window","local":true,"windows":["main"],"permissions":["core:default","core:webview:allow-create-webview-window","core:webview:allow-create-webview","core:webview:allow-webview-show","core:webview:default","core:window:allow-create","core:window:allow-get-all-windows","core:window:allow-show","core:window:default","dialog:default","fs:allow-appconfig-read-recursive","fs:allow-appconfig-write-recursive","fs:allow-appdata-read-recursive","fs:allow-appdata-write-recursive","fs:allow-applocaldata-read-recursive","fs:allow-applocaldata-write-recursive","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-mkdir","fs:allow-exists","fs:allow-remove","fs:allow-resource-read-recursive","fs:allow-resource-write-recursive","fs:allow-download-read-recursive","fs:allow-download-write-recursive","fs:default",{"identifier":"fs:scope","allow":[{"path":"**"}]},"http:allow-fetch-send","http:allow-fetch","http:default","notification:allow-create-channel","notification:allow-list-channels","notification:allow-notify","notification:allow-is-permission-granted","notification:default","opener:allow-open-url","opener:default","os:allow-hostname","os:default","store:default"]}}
|
||||
{"default":{"identifier":"default","description":"Capability for the main window","local":true,"windows":["main"],"permissions":["core:default","core:webview:allow-create-webview-window","core:webview:allow-create-webview","core:webview:allow-webview-show","core:webview:default","core:window:allow-create","core:window:allow-get-all-windows","core:window:allow-show","core:window:default","dialog:default","fs:allow-appconfig-read-recursive","fs:allow-appconfig-write-recursive","fs:allow-appdata-read-recursive","fs:allow-appdata-write-recursive","fs:allow-applocaldata-read-recursive","fs:allow-applocaldata-write-recursive","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-mkdir","fs:allow-exists","fs:allow-remove","fs:allow-resource-read-recursive","fs:allow-resource-write-recursive","fs:allow-download-read-recursive","fs:allow-download-write-recursive","fs:allow-temp-read-recursive","fs:allow-temp-write-recursive","fs:default",{"identifier":"fs:scope","allow":[{"path":"**"},{"path":"$TEMP/**"}]},"http:allow-fetch-send","http:allow-fetch","http:default","notification:allow-create-channel","notification:allow-list-channels","notification:allow-notify","notification:allow-is-permission-granted","notification:default","opener:allow-open-url",{"identifier":"opener:allow-open-path","allow":[{"path":"$TEMP/**"}]},"opener:default","os:allow-hostname","os:default","store:default"]}}
|
||||
@ -1,6 +1,6 @@
|
||||
use crate::extension::error::ExtensionError;
|
||||
use crate::extension::permissions::types::{
|
||||
Action, DbAction, ExtensionPermission, FsAction, HttpAction, PermissionConstraints,
|
||||
Action, DbAction, ExtensionPermission, FsAction, WebAction, PermissionConstraints,
|
||||
PermissionStatus, ResourceType, ShellAction,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -117,7 +117,7 @@ impl ExtensionPermissions {
|
||||
}
|
||||
if let Some(entries) = &self.http {
|
||||
for p in entries {
|
||||
if let Some(perm) = Self::create_internal(extension_id, ResourceType::Http, p) {
|
||||
if let Some(perm) = Self::create_internal(extension_id, ResourceType::Web, p) {
|
||||
permissions.push(perm);
|
||||
}
|
||||
}
|
||||
@ -146,7 +146,7 @@ impl ExtensionPermissions {
|
||||
ResourceType::Fs => FsAction::from_str(operation_str)
|
||||
.ok()
|
||||
.map(Action::Filesystem),
|
||||
ResourceType::Http => HttpAction::from_str(operation_str).ok().map(Action::Http),
|
||||
ResourceType::Web => WebAction::from_str(operation_str).ok().map(Action::Web),
|
||||
ResourceType::Shell => ShellAction::from_str(operation_str).ok().map(Action::Shell),
|
||||
};
|
||||
|
||||
|
||||
@ -1,10 +1,12 @@
|
||||
// src-tauri/src/extension/error.rs
|
||||
use thiserror::Error;
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::database::error::DatabaseError;
|
||||
|
||||
/// Error codes for frontend handling
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, TS)]
|
||||
#[ts(export)]
|
||||
pub enum ExtensionErrorCode {
|
||||
SecurityViolation = 1000,
|
||||
NotFound = 1001,
|
||||
@ -14,6 +16,7 @@ pub enum ExtensionErrorCode {
|
||||
Filesystem = 2001,
|
||||
FilesystemWithPath = 2004,
|
||||
Http = 2002,
|
||||
Web = 2005,
|
||||
Shell = 2003,
|
||||
Manifest = 3000,
|
||||
Validation = 3001,
|
||||
@ -25,6 +28,17 @@ pub enum ExtensionErrorCode {
|
||||
Installation = 5000,
|
||||
}
|
||||
|
||||
/// Serialized representation of ExtensionError for TypeScript
|
||||
#[derive(Debug, Clone, serde::Serialize, TS)]
|
||||
#[ts(export)]
|
||||
pub struct SerializedExtensionError {
|
||||
pub code: u16,
|
||||
#[serde(rename = "type")]
|
||||
pub error_type: String,
|
||||
pub message: String,
|
||||
pub extension_id: Option<String>,
|
||||
}
|
||||
|
||||
impl serde::Serialize for ExtensionErrorCode {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
@ -70,6 +84,9 @@ pub enum ExtensionError {
|
||||
#[error("HTTP request failed: {reason}")]
|
||||
Http { reason: String },
|
||||
|
||||
#[error("Web request failed: {reason}")]
|
||||
WebError { reason: String },
|
||||
|
||||
#[error("Shell command failed: {reason}")]
|
||||
Shell {
|
||||
reason: String,
|
||||
@ -118,6 +135,7 @@ impl ExtensionError {
|
||||
ExtensionError::Filesystem { .. } => ExtensionErrorCode::Filesystem,
|
||||
ExtensionError::FilesystemWithPath { .. } => ExtensionErrorCode::FilesystemWithPath,
|
||||
ExtensionError::Http { .. } => ExtensionErrorCode::Http,
|
||||
ExtensionError::WebError { .. } => ExtensionErrorCode::Web,
|
||||
ExtensionError::Shell { .. } => ExtensionErrorCode::Shell,
|
||||
ExtensionError::ManifestError { .. } => ExtensionErrorCode::Manifest,
|
||||
ExtensionError::ValidationError { .. } => ExtensionErrorCode::Validation,
|
||||
|
||||
@ -13,6 +13,7 @@ pub mod database;
|
||||
pub mod error;
|
||||
pub mod filesystem;
|
||||
pub mod permissions;
|
||||
pub mod web;
|
||||
|
||||
#[tauri::command]
|
||||
pub fn get_extension_info(
|
||||
|
||||
@ -4,7 +4,7 @@ use crate::database::core::with_connection;
|
||||
use crate::database::error::DatabaseError;
|
||||
use crate::extension::database::executor::SqlExecutor;
|
||||
use crate::extension::error::ExtensionError;
|
||||
use crate::extension::permissions::types::{Action, ExtensionPermission, PermissionStatus, ResourceType};
|
||||
use crate::extension::permissions::types::{Action, ExtensionPermission, PermissionConstraints, PermissionStatus, ResourceType};
|
||||
use tauri::State;
|
||||
use crate::database::generated::HaexExtensionPermissions;
|
||||
use rusqlite::params;
|
||||
@ -245,6 +245,74 @@ impl PermissionManager {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Prüft Web-Berechtigungen für Requests
|
||||
pub async fn check_web_permission(
|
||||
app_state: &State<'_, AppState>,
|
||||
extension_id: &str,
|
||||
method: &str,
|
||||
url: &str,
|
||||
) -> Result<(), ExtensionError> {
|
||||
// Optimiert: Lade nur Web-Permissions aus der Datenbank
|
||||
let permissions = with_connection(&app_state.db, |conn| {
|
||||
let sql = format!(
|
||||
"SELECT * FROM {TABLE_EXTENSION_PERMISSIONS} WHERE extension_id = ? AND resource_type = 'web'"
|
||||
);
|
||||
let mut stmt = conn.prepare(&sql).map_err(DatabaseError::from)?;
|
||||
|
||||
let perms_iter = stmt.query_map(params![extension_id], |row| {
|
||||
crate::database::generated::HaexExtensionPermissions::from_row(row)
|
||||
})?;
|
||||
|
||||
let permissions: Vec<ExtensionPermission> = perms_iter
|
||||
.filter_map(Result::ok)
|
||||
.map(Into::into)
|
||||
.collect();
|
||||
|
||||
Ok(permissions)
|
||||
})?;
|
||||
|
||||
let url_parsed = url::Url::parse(url).map_err(|e| ExtensionError::ValidationError {
|
||||
reason: format!("Invalid URL: {}", e),
|
||||
})?;
|
||||
|
||||
let domain = url_parsed.host_str().ok_or_else(|| ExtensionError::ValidationError {
|
||||
reason: "URL does not contain a valid host".to_string(),
|
||||
})?;
|
||||
|
||||
let has_permission = permissions
|
||||
.iter()
|
||||
.filter(|perm| perm.status == PermissionStatus::Granted)
|
||||
.any(|perm| {
|
||||
let domain_matches = perm.target == "*"
|
||||
|| perm.target == domain
|
||||
|| domain.ends_with(&format!(".{}", perm.target));
|
||||
|
||||
if !domain_matches {
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Some(PermissionConstraints::Web(constraints)) = &perm.constraints {
|
||||
if let Some(methods) = &constraints.methods {
|
||||
if !methods.iter().any(|m| m.eq_ignore_ascii_case(method)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
});
|
||||
|
||||
if !has_permission {
|
||||
return Err(ExtensionError::permission_denied(
|
||||
extension_id,
|
||||
method,
|
||||
&format!("web request to '{}'", url),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/* /// Prüft Dateisystem-Berechtigungen
|
||||
pub async fn check_filesystem_permission(
|
||||
app_state: &State<'_, AppState>,
|
||||
@ -293,56 +361,6 @@ impl PermissionManager {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Prüft HTTP-Berechtigungen
|
||||
pub async fn check_http_permission(
|
||||
app_state: &State<'_, AppState>,
|
||||
extension_id: &str,
|
||||
method: &str,
|
||||
url: &str,
|
||||
) -> Result<(), ExtensionError> {
|
||||
let permissions = Self::get_permissions(app_state, extension_id).await?;
|
||||
|
||||
let url_parsed = Url::parse(url).map_err(|e| ExtensionError::ValidationError {
|
||||
reason: format!("Invalid URL: {}", e),
|
||||
})?;
|
||||
|
||||
let domain = url_parsed.host_str().unwrap_or("");
|
||||
|
||||
let has_permission = permissions
|
||||
.iter()
|
||||
.filter(|perm| perm.status == PermissionStatus::Granted)
|
||||
.filter(|perm| perm.resource_type == ResourceType::Http)
|
||||
.any(|perm| {
|
||||
let domain_matches = perm.target == "*"
|
||||
|| perm.target == domain
|
||||
|| domain.ends_with(&format!(".{}", perm.target));
|
||||
|
||||
if !domain_matches {
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Some(PermissionConstraints::Http(constraints)) = &perm.constraints {
|
||||
if let Some(methods) = &constraints.methods {
|
||||
if !methods.iter().any(|m| m.eq_ignore_ascii_case(method)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
});
|
||||
|
||||
if !has_permission {
|
||||
return Err(ExtensionError::permission_denied(
|
||||
extension_id,
|
||||
method,
|
||||
&format!("HTTP request to '{}'", url),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Prüft Shell-Berechtigungen
|
||||
pub async fn check_shell_permission(
|
||||
app_state: &State<'_, AppState>,
|
||||
@ -410,7 +428,7 @@ impl PermissionManager {
|
||||
pub fn parse_resource_type(s: &str) -> Result<ResourceType, DatabaseError> {
|
||||
match s {
|
||||
"fs" => Ok(ResourceType::Fs),
|
||||
"http" => Ok(ResourceType::Http),
|
||||
"web" => Ok(ResourceType::Web),
|
||||
"db" => Ok(ResourceType::Db),
|
||||
"shell" => Ok(ResourceType::Shell),
|
||||
_ => Err(DatabaseError::SerializationError {
|
||||
|
||||
@ -86,11 +86,11 @@ impl FromStr for FsAction {
|
||||
}
|
||||
}
|
||||
|
||||
/// Definiert Aktionen (HTTP-Methoden), die auf HTTP-Anfragen angewendet werden können.
|
||||
/// Definiert Aktionen (HTTP-Methoden), die auf Web-Anfragen angewendet werden können.
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
|
||||
#[serde(rename_all = "UPPERCASE")]
|
||||
#[ts(export)]
|
||||
pub enum HttpAction {
|
||||
pub enum WebAction {
|
||||
Get,
|
||||
Post,
|
||||
Put,
|
||||
@ -100,20 +100,20 @@ pub enum HttpAction {
|
||||
All,
|
||||
}
|
||||
|
||||
impl FromStr for HttpAction {
|
||||
impl FromStr for WebAction {
|
||||
type Err = ExtensionError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_uppercase().as_str() {
|
||||
"GET" => Ok(HttpAction::Get),
|
||||
"POST" => Ok(HttpAction::Post),
|
||||
"PUT" => Ok(HttpAction::Put),
|
||||
"PATCH" => Ok(HttpAction::Patch),
|
||||
"DELETE" => Ok(HttpAction::Delete),
|
||||
"*" => Ok(HttpAction::All),
|
||||
"GET" => Ok(WebAction::Get),
|
||||
"POST" => Ok(WebAction::Post),
|
||||
"PUT" => Ok(WebAction::Put),
|
||||
"PATCH" => Ok(WebAction::Patch),
|
||||
"DELETE" => Ok(WebAction::Delete),
|
||||
"*" => Ok(WebAction::All),
|
||||
_ => Err(ExtensionError::InvalidActionString {
|
||||
input: s.to_string(),
|
||||
resource_type: "http".to_string(),
|
||||
resource_type: "web".to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
@ -149,7 +149,7 @@ impl FromStr for ShellAction {
|
||||
pub enum Action {
|
||||
Database(DbAction),
|
||||
Filesystem(FsAction),
|
||||
Http(HttpAction),
|
||||
Web(WebAction),
|
||||
Shell(ShellAction),
|
||||
}
|
||||
|
||||
@ -173,7 +173,7 @@ pub struct ExtensionPermission {
|
||||
#[ts(export)]
|
||||
pub enum ResourceType {
|
||||
Fs,
|
||||
Http,
|
||||
Web,
|
||||
Db,
|
||||
Shell,
|
||||
}
|
||||
@ -195,7 +195,7 @@ pub enum PermissionStatus {
|
||||
pub enum PermissionConstraints {
|
||||
Database(DbConstraints),
|
||||
Filesystem(FsConstraints),
|
||||
Http(HttpConstraints),
|
||||
Web(WebConstraints),
|
||||
Shell(ShellConstraints),
|
||||
}
|
||||
|
||||
@ -223,7 +223,7 @@ pub struct FsConstraints {
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, Default, TS)]
|
||||
#[ts(export)]
|
||||
pub struct HttpConstraints {
|
||||
pub struct WebConstraints {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub methods: Option<Vec<String>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
@ -254,7 +254,7 @@ impl ResourceType {
|
||||
pub fn as_str(&self) -> &str {
|
||||
match self {
|
||||
ResourceType::Fs => "fs",
|
||||
ResourceType::Http => "http",
|
||||
ResourceType::Web => "web",
|
||||
ResourceType::Db => "db",
|
||||
ResourceType::Shell => "shell",
|
||||
}
|
||||
@ -263,7 +263,7 @@ impl ResourceType {
|
||||
pub fn from_str(s: &str) -> Result<Self, ExtensionError> {
|
||||
match s {
|
||||
"fs" => Ok(ResourceType::Fs),
|
||||
"http" => Ok(ResourceType::Http),
|
||||
"web" => Ok(ResourceType::Web),
|
||||
"db" => Ok(ResourceType::Db),
|
||||
"shell" => Ok(ResourceType::Shell),
|
||||
_ => Err(ExtensionError::ValidationError {
|
||||
@ -284,7 +284,7 @@ impl Action {
|
||||
.unwrap_or_default()
|
||||
.trim_matches('"')
|
||||
.to_string(),
|
||||
Action::Http(action) => serde_json::to_string(action)
|
||||
Action::Web(action) => serde_json::to_string(action)
|
||||
.unwrap_or_default()
|
||||
.trim_matches('"')
|
||||
.to_string(),
|
||||
@ -299,15 +299,15 @@ impl Action {
|
||||
match resource_type {
|
||||
ResourceType::Db => Ok(Action::Database(DbAction::from_str(s)?)),
|
||||
ResourceType::Fs => Ok(Action::Filesystem(FsAction::from_str(s)?)),
|
||||
ResourceType::Http => {
|
||||
let action: HttpAction =
|
||||
ResourceType::Web => {
|
||||
let action: WebAction =
|
||||
serde_json::from_str(&format!("\"{s}\"")).map_err(|_| {
|
||||
ExtensionError::InvalidActionString {
|
||||
input: s.to_string(),
|
||||
resource_type: "http".to_string(),
|
||||
resource_type: "web".to_string(),
|
||||
}
|
||||
})?;
|
||||
Ok(Action::Http(action))
|
||||
Ok(Action::Web(action))
|
||||
}
|
||||
ResourceType::Shell => Ok(Action::Shell(ShellAction::from_str(s)?)),
|
||||
}
|
||||
|
||||
210
src-tauri/src/extension/web/mod.rs
Normal file
210
src-tauri/src/extension/web/mod.rs
Normal file
@ -0,0 +1,210 @@
|
||||
// src-tauri/src/extension/web/mod.rs
|
||||
|
||||
use crate::extension::error::ExtensionError;
|
||||
use crate::AppState;
|
||||
use base64::{engine::general_purpose::STANDARD, Engine as _};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::time::Duration;
|
||||
use tauri::State;
|
||||
use tauri_plugin_http::reqwest;
|
||||
|
||||
/// Request structure matching the SDK's WebRequestOptions
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct WebFetchRequest {
|
||||
pub url: String,
|
||||
#[serde(default)]
|
||||
pub method: Option<String>,
|
||||
#[serde(default)]
|
||||
pub headers: Option<HashMap<String, String>>,
|
||||
#[serde(default)]
|
||||
pub body: Option<String>, // Base64 encoded
|
||||
#[serde(default)]
|
||||
pub timeout: Option<u64>, // milliseconds
|
||||
}
|
||||
|
||||
/// Response structure matching the SDK's WebResponse
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct WebFetchResponse {
|
||||
pub status: u16,
|
||||
pub status_text: String,
|
||||
pub headers: HashMap<String, String>,
|
||||
pub body: String, // Base64 encoded
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn extension_web_open(
|
||||
url: String,
|
||||
public_key: String,
|
||||
name: String,
|
||||
state: State<'_, AppState>,
|
||||
) -> Result<(), ExtensionError> {
|
||||
// Get extension to validate it exists
|
||||
let extension = state
|
||||
.extension_manager
|
||||
.get_extension_by_public_key_and_name(&public_key, &name)?
|
||||
.ok_or_else(|| ExtensionError::NotFound {
|
||||
public_key: public_key.clone(),
|
||||
name: name.clone(),
|
||||
})?;
|
||||
|
||||
// Validate URL format
|
||||
let parsed_url = url::Url::parse(&url).map_err(|e| ExtensionError::WebError {
|
||||
reason: format!("Invalid URL: {}", e),
|
||||
})?;
|
||||
|
||||
// Only allow http and https URLs
|
||||
let scheme = parsed_url.scheme();
|
||||
if scheme != "http" && scheme != "https" {
|
||||
return Err(ExtensionError::WebError {
|
||||
reason: format!("Unsupported URL scheme: {}. Only http and https are allowed.", scheme),
|
||||
});
|
||||
}
|
||||
|
||||
// Check web permissions (open uses GET method for permission check)
|
||||
crate::extension::permissions::manager::PermissionManager::check_web_permission(
|
||||
&state,
|
||||
&extension.id,
|
||||
"GET",
|
||||
&url,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Open URL in default browser using tauri-plugin-opener
|
||||
tauri_plugin_opener::open_url(&url, None::<&str>).map_err(|e| ExtensionError::WebError {
|
||||
reason: format!("Failed to open URL in browser: {}", e),
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn extension_web_fetch(
|
||||
url: String,
|
||||
method: Option<String>,
|
||||
headers: Option<HashMap<String, String>>,
|
||||
body: Option<String>,
|
||||
timeout: Option<u64>,
|
||||
public_key: String,
|
||||
name: String,
|
||||
state: State<'_, AppState>,
|
||||
) -> Result<WebFetchResponse, ExtensionError> {
|
||||
// Get extension to validate it exists
|
||||
let extension = state
|
||||
.extension_manager
|
||||
.get_extension_by_public_key_and_name(&public_key, &name)?
|
||||
.ok_or_else(|| ExtensionError::NotFound {
|
||||
public_key: public_key.clone(),
|
||||
name: name.clone(),
|
||||
})?;
|
||||
|
||||
let method_str = method.as_deref().unwrap_or("GET");
|
||||
|
||||
// Check web permissions before making request
|
||||
crate::extension::permissions::manager::PermissionManager::check_web_permission(
|
||||
&state,
|
||||
&extension.id,
|
||||
method_str,
|
||||
&url,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let request = WebFetchRequest {
|
||||
url,
|
||||
method: Some(method_str.to_string()),
|
||||
headers,
|
||||
body,
|
||||
timeout,
|
||||
};
|
||||
|
||||
fetch_web_request(request).await
|
||||
}
|
||||
|
||||
/// Performs the actual HTTP request without CORS restrictions
|
||||
async fn fetch_web_request(request: WebFetchRequest) -> Result<WebFetchResponse, ExtensionError> {
|
||||
let method_str = request.method.as_deref().unwrap_or("GET");
|
||||
let timeout_ms = request.timeout.unwrap_or(30000);
|
||||
|
||||
// Build reqwest client with timeout
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(Duration::from_millis(timeout_ms))
|
||||
.build()
|
||||
.map_err(|e| ExtensionError::WebError {
|
||||
reason: format!("Failed to create HTTP client: {}", e),
|
||||
})?;
|
||||
|
||||
// Build request
|
||||
let mut req_builder = match method_str.to_uppercase().as_str() {
|
||||
"GET" => client.get(&request.url),
|
||||
"POST" => client.post(&request.url),
|
||||
"PUT" => client.put(&request.url),
|
||||
"DELETE" => client.delete(&request.url),
|
||||
"PATCH" => client.patch(&request.url),
|
||||
"HEAD" => client.head(&request.url),
|
||||
"OPTIONS" => client.request(reqwest::Method::OPTIONS, &request.url),
|
||||
_ => {
|
||||
return Err(ExtensionError::WebError {
|
||||
reason: format!("Unsupported HTTP method: {}", method_str),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
// Add headers
|
||||
if let Some(headers) = request.headers {
|
||||
for (key, value) in headers {
|
||||
req_builder = req_builder.header(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Add body if present (decode from base64)
|
||||
if let Some(body_base64) = request.body {
|
||||
let body_bytes = STANDARD.decode(&body_base64).map_err(|e| {
|
||||
ExtensionError::WebError {
|
||||
reason: format!("Failed to decode request body from base64: {}", e),
|
||||
}
|
||||
})?;
|
||||
req_builder = req_builder.body(body_bytes);
|
||||
}
|
||||
|
||||
// Execute request
|
||||
let response = req_builder.send().await.map_err(|e| {
|
||||
if e.is_timeout() {
|
||||
ExtensionError::WebError {
|
||||
reason: format!("Request timeout after {}ms", timeout_ms),
|
||||
}
|
||||
} else {
|
||||
ExtensionError::WebError {
|
||||
reason: format!("Request failed: {}", e),
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
// Extract response data
|
||||
let status = response.status().as_u16();
|
||||
let status_text = response.status().canonical_reason().unwrap_or("").to_string();
|
||||
let final_url = response.url().to_string();
|
||||
|
||||
// Extract headers
|
||||
let mut response_headers = HashMap::new();
|
||||
for (key, value) in response.headers() {
|
||||
if let Ok(value_str) = value.to_str() {
|
||||
response_headers.insert(key.to_string(), value_str.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Read body and encode to base64
|
||||
let body_bytes = response.bytes().await.map_err(|e| ExtensionError::WebError {
|
||||
reason: format!("Failed to read response body: {}", e),
|
||||
})?;
|
||||
|
||||
let body_base64 = STANDARD.encode(&body_bytes);
|
||||
|
||||
Ok(WebFetchResponse {
|
||||
status,
|
||||
status_text,
|
||||
headers: response_headers,
|
||||
body: body_base64,
|
||||
url: final_url,
|
||||
})
|
||||
}
|
||||
@ -78,6 +78,8 @@ pub fn run() {
|
||||
database::vault_exists,
|
||||
extension::database::extension_sql_execute,
|
||||
extension::database::extension_sql_select,
|
||||
extension::web::extension_web_fetch,
|
||||
extension::web::extension_web_open,
|
||||
extension::get_all_dev_extensions,
|
||||
extension::get_all_extensions,
|
||||
extension::get_extension_info,
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "haex-hub",
|
||||
"version": "0.1.4",
|
||||
"version": "0.1.13",
|
||||
"identifier": "space.haex.hub",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm dev",
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<UDrawer
|
||||
<UiDrawer
|
||||
v-model:open="open"
|
||||
direction="right"
|
||||
:title="t('launcher.title')"
|
||||
@ -7,9 +7,6 @@
|
||||
:overlay="false"
|
||||
:modal="false"
|
||||
:handle-only="true"
|
||||
:ui="{
|
||||
content: 'w-dvw max-w-md sm:max-w-fit',
|
||||
}"
|
||||
>
|
||||
<UButton
|
||||
icon="material-symbols:apps"
|
||||
@ -66,7 +63,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</UDrawer>
|
||||
</UiDrawer>
|
||||
|
||||
<!-- Uninstall Confirmation Dialog -->
|
||||
<UiDialogConfirm
|
||||
|
||||
@ -163,8 +163,9 @@ const loadDevExtensionAsync = async () => {
|
||||
extensionPath.value = ''
|
||||
} catch (error) {
|
||||
console.error('Failed to load dev extension:', error)
|
||||
const { getErrorMessage } = useExtensionError()
|
||||
add({
|
||||
description: t('add.errors.loadFailed') + error,
|
||||
description: `${t('add.errors.loadFailed')}: ${getErrorMessage(error)}`,
|
||||
color: 'error',
|
||||
})
|
||||
} finally {
|
||||
@ -196,8 +197,9 @@ const reloadDevExtensionAsync = async (extension: ExtensionInfoResponse) => {
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Failed to reload dev extension:', error)
|
||||
const { getErrorMessage } = useExtensionError()
|
||||
add({
|
||||
description: t('list.errors.reloadFailed') + error,
|
||||
description: `${t('list.errors.reloadFailed')}: ${getErrorMessage(error)}`,
|
||||
color: 'error',
|
||||
})
|
||||
}
|
||||
@ -223,8 +225,9 @@ const removeDevExtensionAsync = async (extension: ExtensionInfoResponse) => {
|
||||
await loadExtensionsAsync()
|
||||
} catch (error) {
|
||||
console.error('Failed to remove dev extension:', error)
|
||||
const { getErrorMessage } = useExtensionError()
|
||||
add({
|
||||
description: t('list.errors.removeFailed') + error,
|
||||
description: `${t('list.errors.removeFailed')}: ${getErrorMessage(error)}`,
|
||||
color: 'error',
|
||||
})
|
||||
}
|
||||
|
||||
@ -83,6 +83,7 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { getAvailableContentHeight } from '~/utils/viewport'
|
||||
const props = defineProps<{
|
||||
id: string
|
||||
title: string
|
||||
@ -329,31 +330,11 @@ const handleMaximize = () => {
|
||||
const bounds = getViewportBounds()
|
||||
|
||||
if (bounds && bounds.width > 0 && bounds.height > 0) {
|
||||
// Get safe-area-insets from CSS variables for debug
|
||||
const safeAreaTop = parseFloat(
|
||||
getComputedStyle(document.documentElement).getPropertyValue(
|
||||
'--safe-area-inset-top',
|
||||
) || '0',
|
||||
)
|
||||
const safeAreaBottom = parseFloat(
|
||||
getComputedStyle(document.documentElement).getPropertyValue(
|
||||
'--safe-area-inset-bottom',
|
||||
) || '0',
|
||||
)
|
||||
|
||||
// Desktop container uses 'absolute inset-0' which stretches over full viewport
|
||||
// bounds.height = full viewport height (includes header area + safe-areas)
|
||||
// We need to calculate available space properly
|
||||
|
||||
// Get header height from UI store (measured reactively in layout)
|
||||
const uiStore = useUiStore()
|
||||
const headerHeight = uiStore.headerHeight
|
||||
|
||||
x.value = 0
|
||||
y.value = 0 // Start below header and status bar
|
||||
y.value = 0
|
||||
width.value = bounds.width
|
||||
// Height: viewport - header - both safe-areas
|
||||
height.value = bounds.height - headerHeight - safeAreaTop - safeAreaBottom
|
||||
// Use helper function to calculate correct height with safe areas
|
||||
height.value = getAvailableContentHeight()
|
||||
isMaximized.value = true
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<UDrawer
|
||||
<UiDrawer
|
||||
v-model:open="localShowWindowOverview"
|
||||
direction="bottom"
|
||||
:title="t('modal.title')"
|
||||
@ -70,7 +70,7 @@
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</UDrawer>
|
||||
</UiDrawer>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<UDrawer
|
||||
<UiDrawer
|
||||
v-model:open="isOverviewMode"
|
||||
direction="left"
|
||||
:overlay="false"
|
||||
@ -8,7 +8,7 @@
|
||||
description="Workspaces"
|
||||
>
|
||||
<template #content>
|
||||
<div class="py-8 pl-8 pr-4 h-full overflow-y-auto">
|
||||
<div class="pl-8 pr-4 overflow-y-auto py-8">
|
||||
<!-- Workspace Cards -->
|
||||
<div class="flex flex-col gap-3">
|
||||
<HaexWorkspaceCard
|
||||
@ -29,7 +29,7 @@
|
||||
/>
|
||||
</div>
|
||||
</template>
|
||||
</UDrawer>
|
||||
</UiDrawer>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
|
||||
32
src/components/ui/Drawer.vue
Normal file
32
src/components/ui/Drawer.vue
Normal file
@ -0,0 +1,32 @@
|
||||
<template>
|
||||
<UDrawer
|
||||
v-bind="$attrs"
|
||||
:ui="{
|
||||
content:
|
||||
'pb-[env(safe-area-inset-bottom)] pt-[env(safe-area-inset-top)] ',
|
||||
...(ui || {}),
|
||||
}"
|
||||
>
|
||||
<template
|
||||
v-for="(_, name) in $slots"
|
||||
#[name]="slotData"
|
||||
>
|
||||
<slot
|
||||
:name="name"
|
||||
v-bind="slotData"
|
||||
/>
|
||||
</template>
|
||||
</UDrawer>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { DrawerProps } from '@nuxt/ui'
|
||||
|
||||
/**
|
||||
* Wrapper around UDrawer that automatically applies safe area insets for mobile devices.
|
||||
* Passes through all props and slots to UDrawer.
|
||||
*/
|
||||
const props = defineProps</* @vue-ignore */ DrawerProps>()
|
||||
|
||||
const { ui } = toRefs(props)
|
||||
</script>
|
||||
@ -83,8 +83,6 @@ const filteredSlots = computed(() => {
|
||||
Object.entries(useSlots()).filter(([name]) => name !== 'trailing'),
|
||||
)
|
||||
})
|
||||
|
||||
const { isSmallScreen } = storeToRefs(useUiStore())
|
||||
</script>
|
||||
|
||||
<i18n lang="yaml">
|
||||
|
||||
@ -1,38 +1,29 @@
|
||||
// composables/extensionMessageHandler.ts
|
||||
import { invoke } from '@tauri-apps/api/core'
|
||||
import type { IHaexHubExtension } from '~/types/haexhub'
|
||||
import {
|
||||
EXTENSION_PROTOCOL_NAME,
|
||||
EXTENSION_PROTOCOL_PREFIX,
|
||||
} from '~/config/constants'
|
||||
import type { Platform } from '@tauri-apps/plugin-os'
|
||||
|
||||
interface ExtensionRequest {
|
||||
id: string
|
||||
method: string
|
||||
params: Record<string, unknown>
|
||||
timestamp: number
|
||||
}
|
||||
import {
|
||||
handleDatabaseMethodAsync,
|
||||
handleFilesystemMethodAsync,
|
||||
handleWebMethodAsync,
|
||||
handlePermissionsMethodAsync,
|
||||
handleContextMethodAsync,
|
||||
handleStorageMethodAsync,
|
||||
setContextGetters,
|
||||
type ExtensionRequest,
|
||||
type ExtensionInstance,
|
||||
} from './handlers'
|
||||
|
||||
// Globaler Handler - nur einmal registriert
|
||||
let globalHandlerRegistered = false
|
||||
interface ExtensionInstance {
|
||||
extension: IHaexHubExtension
|
||||
windowId: string
|
||||
}
|
||||
const iframeRegistry = new Map<HTMLIFrameElement, ExtensionInstance>()
|
||||
// Map event.source (WindowProxy) to extension instance for sandbox-compatible matching
|
||||
const sourceRegistry = new Map<Window, ExtensionInstance>()
|
||||
// Reverse map: window ID to Window for broadcasting (supports multiple windows per extension)
|
||||
const windowIdToWindowMap = new Map<string, Window>()
|
||||
|
||||
// Store context values that need to be accessed outside setup
|
||||
let contextGetters: {
|
||||
getTheme: () => string
|
||||
getLocale: () => string
|
||||
getPlatform: () => Platform | undefined
|
||||
} | null = null
|
||||
|
||||
const registerGlobalMessageHandler = () => {
|
||||
if (globalHandlerRegistered) return
|
||||
|
||||
@ -174,8 +165,8 @@ const registerGlobalMessageHandler = () => {
|
||||
result = await handleDatabaseMethodAsync(request, instance.extension)
|
||||
} else if (request.method.startsWith('haextension.fs.')) {
|
||||
result = await handleFilesystemMethodAsync(request, instance.extension)
|
||||
} else if (request.method.startsWith('haextension.http.')) {
|
||||
result = await handleHttpMethodAsync(request, instance.extension)
|
||||
} else if (request.method.startsWith('haextension.web.')) {
|
||||
result = await handleWebMethodAsync(request, instance.extension)
|
||||
} else if (request.method.startsWith('haextension.permissions.')) {
|
||||
result = await handlePermissionsMethodAsync(request, instance.extension)
|
||||
} else {
|
||||
@ -227,13 +218,11 @@ export const useExtensionMessageHandler = (
|
||||
const { locale } = useI18n()
|
||||
const { platform } = useDeviceStore()
|
||||
// Store getters for use outside setup context
|
||||
if (!contextGetters) {
|
||||
contextGetters = {
|
||||
setContextGetters({
|
||||
getTheme: () => currentTheme.value?.value || 'system',
|
||||
getLocale: () => locale.value,
|
||||
getPlatform: () => platform,
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Registriere globalen Handler beim ersten Aufruf
|
||||
registerGlobalMessageHandler()
|
||||
@ -275,12 +264,7 @@ export const registerExtensionIFrame = (
|
||||
// Stelle sicher, dass der globale Handler registriert ist
|
||||
registerGlobalMessageHandler()
|
||||
|
||||
// Warnung wenn Context Getters nicht initialisiert wurden
|
||||
if (!contextGetters) {
|
||||
console.warn(
|
||||
'Context getters not initialized. Make sure useExtensionMessageHandler was called in setup context first.',
|
||||
)
|
||||
}
|
||||
// Note: Context getters should be initialized via useExtensionMessageHandler first
|
||||
|
||||
iframeRegistry.set(iframe, { extension, windowId })
|
||||
}
|
||||
@ -338,201 +322,21 @@ export const broadcastContextToAllExtensions = (context: {
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
|
||||
console.log('[ExtensionHandler] Broadcasting context to all extensions:', context)
|
||||
console.log(
|
||||
'[ExtensionHandler] Broadcasting context to all extensions:',
|
||||
context,
|
||||
)
|
||||
|
||||
// Send to all registered extension windows
|
||||
for (const [_, instance] of iframeRegistry.entries()) {
|
||||
const win = windowIdToWindowMap.get(instance.windowId)
|
||||
if (win) {
|
||||
console.log('[ExtensionHandler] Sending context to:', instance.extension.name, instance.windowId)
|
||||
console.log(
|
||||
'[ExtensionHandler] Sending context to:',
|
||||
instance.extension.name,
|
||||
instance.windowId,
|
||||
)
|
||||
win.postMessage(message, '*')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// Database Methods
|
||||
// ==========================================
|
||||
|
||||
async function handleDatabaseMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension, // Direkter Typ
|
||||
) {
|
||||
const params = request.params as {
|
||||
query?: string
|
||||
params?: unknown[]
|
||||
}
|
||||
|
||||
switch (request.method) {
|
||||
case 'haextension.db.query': {
|
||||
const rows = await invoke<unknown[]>('extension_sql_select', {
|
||||
sql: params.query || '',
|
||||
params: params.params || [],
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
|
||||
return {
|
||||
rows,
|
||||
rowsAffected: 0,
|
||||
lastInsertId: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
case 'haextension.db.execute': {
|
||||
const rows = await invoke<unknown[]>('extension_sql_execute', {
|
||||
sql: params.query || '',
|
||||
params: params.params || [],
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
|
||||
return {
|
||||
rows,
|
||||
rowsAffected: 1,
|
||||
lastInsertId: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
case 'haextension.db.transaction': {
|
||||
const statements =
|
||||
(request.params as { statements?: string[] }).statements || []
|
||||
|
||||
for (const stmt of statements) {
|
||||
await invoke('extension_sql_execute', {
|
||||
sql: stmt,
|
||||
params: [],
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
}
|
||||
|
||||
return { success: true }
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown database method: ${request.method}`)
|
||||
}
|
||||
}
|
||||
// ==========================================
|
||||
// Filesystem Methods (TODO)
|
||||
// ==========================================
|
||||
|
||||
async function handleFilesystemMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
if (!request || !extension) return
|
||||
// TODO: Implementiere Filesystem Commands im Backend
|
||||
throw new Error('Filesystem methods not yet implemented')
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// HTTP Methods (TODO)
|
||||
// ==========================================
|
||||
|
||||
async function handleHttpMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
if (!extension || !request) {
|
||||
throw new Error('Extension not found')
|
||||
}
|
||||
|
||||
// TODO: Implementiere HTTP Commands im Backend
|
||||
throw new Error('HTTP methods not yet implemented')
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// Permission Methods (TODO)
|
||||
// ==========================================
|
||||
|
||||
async function handlePermissionsMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
if (!extension || !request) {
|
||||
throw new Error('Extension not found')
|
||||
}
|
||||
|
||||
// TODO: Implementiere Permission Request UI
|
||||
throw new Error('Permission methods not yet implemented')
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// Context Methods
|
||||
// ==========================================
|
||||
|
||||
async function handleContextMethodAsync(request: ExtensionRequest) {
|
||||
switch (request.method) {
|
||||
case 'haextension.context.get':
|
||||
if (!contextGetters) {
|
||||
throw new Error(
|
||||
'Context not initialized. Make sure useExtensionMessageHandler is called in a component.',
|
||||
)
|
||||
}
|
||||
return {
|
||||
theme: contextGetters.getTheme(),
|
||||
locale: contextGetters.getLocale(),
|
||||
platform: contextGetters.getPlatform(),
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown context method: ${request.method}`)
|
||||
}
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// Storage Methods
|
||||
// ==========================================
|
||||
|
||||
async function handleStorageMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
instance: ExtensionInstance,
|
||||
) {
|
||||
// Storage is now per-window, not per-extension
|
||||
const storageKey = `ext_${instance.extension.id}_${instance.windowId}_`
|
||||
console.log(
|
||||
`[HaexHub Storage] ${request.method} for window ${instance.windowId}`,
|
||||
)
|
||||
|
||||
switch (request.method) {
|
||||
case 'haextension.storage.getItem': {
|
||||
const key = request.params.key as string
|
||||
return localStorage.getItem(storageKey + key)
|
||||
}
|
||||
|
||||
case 'haextension.storage.setItem': {
|
||||
const key = request.params.key as string
|
||||
const value = request.params.value as string
|
||||
localStorage.setItem(storageKey + key, value)
|
||||
return null
|
||||
}
|
||||
|
||||
case 'haextension.storage.removeItem': {
|
||||
const key = request.params.key as string
|
||||
localStorage.removeItem(storageKey + key)
|
||||
return null
|
||||
}
|
||||
|
||||
case 'haextension.storage.clear': {
|
||||
// Remove only instance-specific keys
|
||||
const keys = Object.keys(localStorage).filter((k) =>
|
||||
k.startsWith(storageKey),
|
||||
)
|
||||
keys.forEach((k) => localStorage.removeItem(k))
|
||||
return null
|
||||
}
|
||||
|
||||
case 'haextension.storage.keys': {
|
||||
// Return only instance-specific keys (without prefix)
|
||||
const keys = Object.keys(localStorage)
|
||||
.filter((k) => k.startsWith(storageKey))
|
||||
.map((k) => k.substring(storageKey.length))
|
||||
return keys
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown storage method: ${request.method}`)
|
||||
}
|
||||
}
|
||||
|
||||
36
src/composables/handlers/context.ts
Normal file
36
src/composables/handlers/context.ts
Normal file
@ -0,0 +1,36 @@
|
||||
import type { Platform } from '@tauri-apps/plugin-os'
|
||||
import type { ExtensionRequest } from './types'
|
||||
|
||||
// Context getters are set from the main handler during initialization
|
||||
let contextGetters: {
|
||||
getTheme: () => string
|
||||
getLocale: () => string
|
||||
getPlatform: () => Platform | undefined
|
||||
} | null = null
|
||||
|
||||
export function setContextGetters(getters: {
|
||||
getTheme: () => string
|
||||
getLocale: () => string
|
||||
getPlatform: () => Platform | undefined
|
||||
}) {
|
||||
contextGetters = getters
|
||||
}
|
||||
|
||||
export async function handleContextMethodAsync(request: ExtensionRequest) {
|
||||
switch (request.method) {
|
||||
case 'haextension.context.get':
|
||||
if (!contextGetters) {
|
||||
throw new Error(
|
||||
'Context not initialized. Make sure useExtensionMessageHandler is called in a component.',
|
||||
)
|
||||
}
|
||||
return {
|
||||
theme: contextGetters.getTheme(),
|
||||
locale: contextGetters.getLocale(),
|
||||
platform: contextGetters.getPlatform(),
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown context method: ${request.method}`)
|
||||
}
|
||||
}
|
||||
84
src/composables/handlers/database.ts
Normal file
84
src/composables/handlers/database.ts
Normal file
@ -0,0 +1,84 @@
|
||||
import { invoke } from '@tauri-apps/api/core'
|
||||
import type { IHaexHubExtension } from '~/types/haexhub'
|
||||
import type { ExtensionRequest } from './types'
|
||||
|
||||
export async function handleDatabaseMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
const params = request.params as {
|
||||
query?: string
|
||||
params?: unknown[]
|
||||
}
|
||||
|
||||
switch (request.method) {
|
||||
case 'haextension.db.query': {
|
||||
try {
|
||||
const rows = await invoke<unknown[]>('extension_sql_select', {
|
||||
sql: params.query || '',
|
||||
params: params.params || [],
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
|
||||
return {
|
||||
rows,
|
||||
rowsAffected: 0,
|
||||
lastInsertId: undefined,
|
||||
}
|
||||
} catch (error) {
|
||||
// If error is about non-SELECT statements (INSERT/UPDATE/DELETE with RETURNING),
|
||||
// automatically retry with execute
|
||||
if (error?.message?.includes('Only SELECT statements are allowed')) {
|
||||
const rows = await invoke<unknown[]>('extension_sql_execute', {
|
||||
sql: params.query || '',
|
||||
params: params.params || [],
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
|
||||
return {
|
||||
rows,
|
||||
rowsAffected: rows.length,
|
||||
lastInsertId: undefined,
|
||||
}
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
case 'haextension.db.execute': {
|
||||
const rows = await invoke<unknown[]>('extension_sql_execute', {
|
||||
sql: params.query || '',
|
||||
params: params.params || [],
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
|
||||
return {
|
||||
rows,
|
||||
rowsAffected: 1,
|
||||
lastInsertId: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
case 'haextension.db.transaction': {
|
||||
const statements =
|
||||
(request.params as { statements?: string[] }).statements || []
|
||||
|
||||
for (const stmt of statements) {
|
||||
await invoke('extension_sql_execute', {
|
||||
sql: stmt,
|
||||
params: [],
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
}
|
||||
|
||||
return { success: true }
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown database method: ${request.method}`)
|
||||
}
|
||||
}
|
||||
92
src/composables/handlers/filesystem.ts
Normal file
92
src/composables/handlers/filesystem.ts
Normal file
@ -0,0 +1,92 @@
|
||||
import { save } from '@tauri-apps/plugin-dialog'
|
||||
import { writeFile } from '@tauri-apps/plugin-fs'
|
||||
import { openPath } from '@tauri-apps/plugin-opener'
|
||||
import { tempDir, join } from '@tauri-apps/api/path'
|
||||
import type { IHaexHubExtension } from '~/types/haexhub'
|
||||
import type { ExtensionRequest } from './types'
|
||||
|
||||
export async function handleFilesystemMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
if (!request || !extension) return
|
||||
|
||||
switch (request.method) {
|
||||
case 'haextension.fs.saveFile': {
|
||||
const params = request.params as {
|
||||
data: number[]
|
||||
defaultPath?: string
|
||||
title?: string
|
||||
filters?: Array<{ name: string; extensions: string[] }>
|
||||
}
|
||||
|
||||
// Convert number array back to Uint8Array
|
||||
const data = new Uint8Array(params.data)
|
||||
|
||||
// Open save dialog
|
||||
const filePath = await save({
|
||||
defaultPath: params.defaultPath,
|
||||
title: params.title || 'Save File',
|
||||
filters: params.filters,
|
||||
})
|
||||
|
||||
// User cancelled
|
||||
if (!filePath) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Write file
|
||||
await writeFile(filePath, data)
|
||||
|
||||
return {
|
||||
path: filePath,
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
|
||||
case 'haextension.fs.showImage': {
|
||||
// This method is now handled by the frontend using PhotoSwipe
|
||||
// We keep it for backwards compatibility but it's a no-op
|
||||
return {
|
||||
success: true,
|
||||
useFrontend: true,
|
||||
}
|
||||
}
|
||||
|
||||
case 'haextension.fs.openFile': {
|
||||
const params = request.params as {
|
||||
data: number[]
|
||||
fileName: string
|
||||
mimeType?: string
|
||||
}
|
||||
|
||||
try {
|
||||
// Convert number array back to Uint8Array
|
||||
const data = new Uint8Array(params.data)
|
||||
|
||||
// Get temp directory and create file path
|
||||
const tempDirPath = await tempDir()
|
||||
const tempFilePath = await join(tempDirPath, params.fileName)
|
||||
|
||||
// Write file to temp directory
|
||||
await writeFile(tempFilePath, data)
|
||||
|
||||
// Open file with system's default viewer
|
||||
await openPath(tempFilePath)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.error('[Filesystem] Error opening file:', error)
|
||||
return {
|
||||
success: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown filesystem method: ${request.method}`)
|
||||
}
|
||||
}
|
||||
10
src/composables/handlers/index.ts
Normal file
10
src/composables/handlers/index.ts
Normal file
@ -0,0 +1,10 @@
|
||||
// Export all handler functions
|
||||
export { handleDatabaseMethodAsync } from './database'
|
||||
export { handleFilesystemMethodAsync } from './filesystem'
|
||||
export { handleWebMethodAsync } from './web'
|
||||
export { handlePermissionsMethodAsync } from './permissions'
|
||||
export { handleContextMethodAsync, setContextGetters } from './context'
|
||||
export { handleStorageMethodAsync } from './storage'
|
||||
|
||||
// Export shared types
|
||||
export type { ExtensionRequest, ExtensionInstance } from './types'
|
||||
14
src/composables/handlers/permissions.ts
Normal file
14
src/composables/handlers/permissions.ts
Normal file
@ -0,0 +1,14 @@
|
||||
import type { IHaexHubExtension } from '~/types/haexhub'
|
||||
import type { ExtensionRequest } from './types'
|
||||
|
||||
export async function handlePermissionsMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
if (!extension || !request) {
|
||||
throw new Error('Extension not found')
|
||||
}
|
||||
|
||||
// TODO: Implementiere Permission Request UI
|
||||
throw new Error('Permission methods not yet implemented')
|
||||
}
|
||||
52
src/composables/handlers/storage.ts
Normal file
52
src/composables/handlers/storage.ts
Normal file
@ -0,0 +1,52 @@
|
||||
import type { ExtensionRequest, ExtensionInstance } from './types'
|
||||
|
||||
export async function handleStorageMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
instance: ExtensionInstance,
|
||||
) {
|
||||
// Storage is now per-window, not per-extension
|
||||
const storageKey = `ext_${instance.extension.id}_${instance.windowId}_`
|
||||
console.log(
|
||||
`[HaexHub Storage] ${request.method} for window ${instance.windowId}`,
|
||||
)
|
||||
|
||||
switch (request.method) {
|
||||
case 'haextension.storage.getItem': {
|
||||
const key = request.params.key as string
|
||||
return localStorage.getItem(storageKey + key)
|
||||
}
|
||||
|
||||
case 'haextension.storage.setItem': {
|
||||
const key = request.params.key as string
|
||||
const value = request.params.value as string
|
||||
localStorage.setItem(storageKey + key, value)
|
||||
return null
|
||||
}
|
||||
|
||||
case 'haextension.storage.removeItem': {
|
||||
const key = request.params.key as string
|
||||
localStorage.removeItem(storageKey + key)
|
||||
return null
|
||||
}
|
||||
|
||||
case 'haextension.storage.clear': {
|
||||
// Remove only instance-specific keys
|
||||
const keys = Object.keys(localStorage).filter((k) =>
|
||||
k.startsWith(storageKey),
|
||||
)
|
||||
keys.forEach((k) => localStorage.removeItem(k))
|
||||
return null
|
||||
}
|
||||
|
||||
case 'haextension.storage.keys': {
|
||||
// Return only instance-specific keys (without prefix)
|
||||
const keys = Object.keys(localStorage)
|
||||
.filter((k) => k.startsWith(storageKey))
|
||||
.map((k) => k.substring(storageKey.length))
|
||||
return keys
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown storage method: ${request.method}`)
|
||||
}
|
||||
}
|
||||
14
src/composables/handlers/types.ts
Normal file
14
src/composables/handlers/types.ts
Normal file
@ -0,0 +1,14 @@
|
||||
// Shared types for extension message handlers
|
||||
import type { IHaexHubExtension } from '~/types/haexhub'
|
||||
|
||||
/** JSON-RPC-style message posted by an extension iframe to the host. */
export interface ExtensionRequest {
  // Correlation id echoed back in the response.
  id: string
  // Namespaced method name, e.g. "haextension.storage.getItem".
  method: string
  // Method-specific arguments; each handler narrows this shape as needed.
  params: Record<string, unknown>
  // Presumably sender-side creation time (epoch ms) — confirm with the SDK.
  timestamp: number
}

/** A single running extension window registered with the host. */
export interface ExtensionInstance {
  // Identity/metadata of the installed extension.
  extension: IHaexHubExtension
  // Unique id of the hosting window; per-window storage is namespaced by it.
  windowId: string
}
|
||||
96
src/composables/handlers/web.ts
Normal file
96
src/composables/handlers/web.ts
Normal file
@ -0,0 +1,96 @@
|
||||
import type { IHaexHubExtension } from '~/types/haexhub'
|
||||
import type { ExtensionRequest } from './types'
|
||||
import { invoke } from '@tauri-apps/api/core'
|
||||
|
||||
export async function handleWebMethodAsync(
|
||||
request: ExtensionRequest,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
if (!extension || !request) {
|
||||
throw new Error('Extension not found')
|
||||
}
|
||||
|
||||
const { method, params } = request
|
||||
|
||||
if (method === 'haextension.web.fetch') {
|
||||
return await handleWebFetchAsync(params, extension)
|
||||
}
|
||||
|
||||
if (method === 'haextension.web.open') {
|
||||
return await handleWebOpenAsync(params, extension)
|
||||
}
|
||||
|
||||
throw new Error(`Unknown web method: ${method}`)
|
||||
}
|
||||
|
||||
async function handleWebFetchAsync(
|
||||
params: Record<string, unknown>,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
const url = params.url as string
|
||||
const method = (params.method as string) || undefined
|
||||
const headers = (params.headers as Record<string, string>) || undefined
|
||||
const body = params.body as string | undefined
|
||||
const timeout = (params.timeout as number) || undefined
|
||||
|
||||
if (!url) {
|
||||
throw new Error('URL is required')
|
||||
}
|
||||
|
||||
try {
|
||||
// Call Rust backend through Tauri IPC to avoid CORS restrictions
|
||||
const response = await invoke<{
|
||||
status: number
|
||||
status_text: string
|
||||
headers: Record<string, string>
|
||||
body: string
|
||||
url: string
|
||||
}>('extension_web_fetch', {
|
||||
url,
|
||||
method,
|
||||
headers,
|
||||
body,
|
||||
timeout,
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
|
||||
return {
|
||||
status: response.status,
|
||||
statusText: response.status_text,
|
||||
headers: response.headers,
|
||||
body: response.body,
|
||||
url: response.url,
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
throw new Error(`Web request failed: ${error.message}`)
|
||||
}
|
||||
throw new Error('Web request failed with unknown error')
|
||||
}
|
||||
}
|
||||
|
||||
async function handleWebOpenAsync(
|
||||
params: Record<string, unknown>,
|
||||
extension: IHaexHubExtension,
|
||||
) {
|
||||
const url = params.url as string
|
||||
|
||||
if (!url) {
|
||||
throw new Error('URL is required')
|
||||
}
|
||||
|
||||
try {
|
||||
// Call Rust backend to open URL in default browser
|
||||
await invoke<void>('extension_web_open', {
|
||||
url,
|
||||
publicKey: extension.publicKey,
|
||||
name: extension.name,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
throw new Error(`Failed to open URL: ${error.message}`)
|
||||
}
|
||||
throw new Error('Failed to open URL with unknown error')
|
||||
}
|
||||
}
|
||||
43
src/composables/useExtensionError.ts
Normal file
43
src/composables/useExtensionError.ts
Normal file
@ -0,0 +1,43 @@
|
||||
import type { SerializedExtensionError } from '~~/src-tauri/bindings/SerializedExtensionError'
|
||||
|
||||
/**
|
||||
* Type guard to check if error is a SerializedExtensionError
|
||||
*/
|
||||
export function isSerializedExtensionError(error: unknown): error is SerializedExtensionError {
|
||||
return (
|
||||
typeof error === 'object' &&
|
||||
error !== null &&
|
||||
'code' in error &&
|
||||
'message' in error &&
|
||||
'type' in error
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract error message from unknown error type
|
||||
*/
|
||||
export function getErrorMessage(error: unknown): string {
|
||||
if (isSerializedExtensionError(error)) {
|
||||
return error.message
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
return error.message
|
||||
}
|
||||
|
||||
if (typeof error === 'string') {
|
||||
return error
|
||||
}
|
||||
|
||||
return String(error)
|
||||
}
|
||||
|
||||
/**
|
||||
* Composable for handling extension errors
|
||||
*/
|
||||
export function useExtensionError() {
|
||||
return {
|
||||
isSerializedExtensionError,
|
||||
getErrorMessage,
|
||||
}
|
||||
}
|
||||
@ -48,3 +48,27 @@ export const haexCrdtConfigs = sqliteTable(tableNames.haex.crdt.configs.name, {
|
||||
key: text().primaryKey(),
|
||||
value: text(),
|
||||
})
|
||||
|
||||
/**
|
||||
* Sync Status Table (WITHOUT CRDT - local-only metadata)
|
||||
* Tracks sync progress for each backend
|
||||
*/
|
||||
export const haexSyncStatus = sqliteTable(
|
||||
'haex_sync_status',
|
||||
{
|
||||
id: text('id')
|
||||
.$defaultFn(() => crypto.randomUUID())
|
||||
.primaryKey(),
|
||||
backendId: text('backend_id').notNull(),
|
||||
// Last server sequence number received from pull
|
||||
lastPullSequence: integer('last_pull_sequence'),
|
||||
// Last HLC timestamp pushed to server
|
||||
lastPushHlcTimestamp: text('last_push_hlc_timestamp'),
|
||||
// Last successful sync timestamp
|
||||
lastSyncAt: text('last_sync_at'),
|
||||
// Sync error message if any
|
||||
error: text('error'),
|
||||
},
|
||||
)
|
||||
export type InsertHaexSyncStatus = typeof haexSyncStatus.$inferInsert
|
||||
export type SelectHaexSyncStatus = typeof haexSyncStatus.$inferSelect
|
||||
|
||||
@ -205,3 +205,30 @@ export const haexDesktopItems = sqliteTable(
|
||||
)
|
||||
export type InsertHaexDesktopItems = typeof haexDesktopItems.$inferInsert
|
||||
export type SelectHaexDesktopItems = typeof haexDesktopItems.$inferSelect
|
||||
|
||||
export const haexSyncBackends = sqliteTable(
|
||||
tableNames.haex.sync_backends.name,
|
||||
withCrdtColumns({
|
||||
id: text(tableNames.haex.sync_backends.columns.id)
|
||||
.$defaultFn(() => crypto.randomUUID())
|
||||
.primaryKey(),
|
||||
name: text(tableNames.haex.sync_backends.columns.name).notNull(),
|
||||
serverUrl: text(tableNames.haex.sync_backends.columns.serverUrl).notNull(),
|
||||
enabled: integer(tableNames.haex.sync_backends.columns.enabled, {
|
||||
mode: 'boolean',
|
||||
})
|
||||
.default(true)
|
||||
.notNull(),
|
||||
priority: integer(tableNames.haex.sync_backends.columns.priority)
|
||||
.default(0)
|
||||
.notNull(),
|
||||
createdAt: text(tableNames.haex.sync_backends.columns.createdAt).default(
|
||||
sql`(CURRENT_TIMESTAMP)`,
|
||||
),
|
||||
updatedAt: integer(tableNames.haex.sync_backends.columns.updatedAt, {
|
||||
mode: 'timestamp',
|
||||
}).$onUpdate(() => new Date()),
|
||||
}),
|
||||
)
|
||||
export type InsertHaexSyncBackends = typeof haexSyncBackends.$inferInsert
|
||||
export type SelectHaexSyncBackends = typeof haexSyncBackends.$inferSelect
|
||||
|
||||
@ -102,6 +102,20 @@
|
||||
"haexTimestamp": "haex_timestamp"
|
||||
}
|
||||
},
|
||||
"sync_backends": {
|
||||
"name": "haex_sync_backends",
|
||||
"columns": {
|
||||
"id": "id",
|
||||
"name": "name",
|
||||
"serverUrl": "server_url",
|
||||
"enabled": "enabled",
|
||||
"priority": "priority",
|
||||
"createdAt": "created_at",
|
||||
"updatedAt": "updated_at",
|
||||
|
||||
"haexTimestamp": "haex_timestamp"
|
||||
}
|
||||
},
|
||||
|
||||
"crdt": {
|
||||
"logs": {
|
||||
|
||||
@ -91,31 +91,34 @@ export const useDesktopStore = defineStore('desktopStore', () => {
|
||||
iconHeight?: number,
|
||||
) => {
|
||||
const cellSize = gridCellSize.value
|
||||
const halfCell = cellSize / 2
|
||||
|
||||
// Adjust y for grid offset
|
||||
const adjustedY = Math.max(0, y + iconPadding)
|
||||
// Use provided dimensions or fall back to the effective icon size (not cell size!)
|
||||
const actualIconWidth = iconWidth || effectiveIconSize.value
|
||||
const actualIconHeight = iconHeight || effectiveIconSize.value
|
||||
|
||||
// Calculate which grid cell the position falls into
|
||||
const col = Math.floor(x / cellSize)
|
||||
const row = Math.floor(adjustedY / cellSize)
|
||||
// Add half the icon size to x/y to get the center point for snapping
|
||||
const centerX = x + actualIconWidth / 2
|
||||
const centerY = y + actualIconHeight / 2
|
||||
|
||||
// Use provided dimensions or fall back to cell size
|
||||
const actualIconWidth = iconWidth || cellSize
|
||||
const actualIconHeight = iconHeight || cellSize
|
||||
// Find nearest grid cell center
|
||||
// Grid cells are centered at: halfCell, halfCell + cellSize, halfCell + 2*cellSize, ...
|
||||
// Which is: halfCell + (n * cellSize) for n = 0, 1, 2, ...
|
||||
const col = Math.round((centerX - halfCell) / cellSize)
|
||||
const row = Math.round((centerY - halfCell) / cellSize)
|
||||
|
||||
// Center the icon in the cell(s) it occupies
|
||||
const cellsWide = Math.max(1, Math.ceil(actualIconWidth / cellSize))
|
||||
const cellsHigh = Math.max(1, Math.ceil(actualIconHeight / cellSize))
|
||||
// Calculate the center of the target grid cell
|
||||
const gridCenterX = halfCell + col * cellSize
|
||||
const gridCenterY = halfCell + row * cellSize
|
||||
|
||||
const totalWidth = cellsWide * cellSize
|
||||
const totalHeight = cellsHigh * cellSize
|
||||
|
||||
const paddingX = (totalWidth - actualIconWidth) / 2
|
||||
const paddingY = (totalHeight - actualIconHeight) / 2
|
||||
// Calculate the top-left position that centers the icon in the cell
|
||||
const snappedX = gridCenterX - actualIconWidth / 2
|
||||
const snappedY = gridCenterY - actualIconHeight / 2
|
||||
|
||||
return {
|
||||
x: col * cellSize + paddingX - iconPadding,
|
||||
y: row * cellSize + paddingY - iconPadding,
|
||||
x: snappedX,
|
||||
y: snappedY,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
import { defineAsyncComponent, type Component } from 'vue'
|
||||
import { getFullscreenDimensions } from '~/utils/viewport'
|
||||
|
||||
export interface IWindow {
|
||||
id: string
|
||||
@ -191,11 +192,30 @@ export const useWindowManagerStore = defineStore('windowManager', () => {
|
||||
const viewportHeight = window.innerHeight - 60
|
||||
|
||||
console.log('viewportHeight', window.innerHeight, viewportHeight)
|
||||
const windowHeight = Math.min(height, viewportHeight)
|
||||
|
||||
// Check if we're on a small screen
|
||||
const { isSmallScreen } = useUiStore()
|
||||
|
||||
let windowWidth: number
|
||||
let windowHeight: number
|
||||
let x: number
|
||||
let y: number
|
||||
|
||||
if (isSmallScreen) {
|
||||
// On small screens, make window fullscreen starting at 0,0
|
||||
// Use helper function to calculate correct dimensions with safe areas
|
||||
const fullscreen = getFullscreenDimensions()
|
||||
x = fullscreen.x
|
||||
y = fullscreen.y
|
||||
windowWidth = fullscreen.width
|
||||
windowHeight = fullscreen.height
|
||||
} else {
|
||||
// On larger screens, use normal sizing and positioning
|
||||
windowHeight = Math.min(height, viewportHeight)
|
||||
|
||||
// Adjust width proportionally if needed (optional)
|
||||
const aspectRatio = width / height
|
||||
const windowWidth = Math.min(
|
||||
windowWidth = Math.min(
|
||||
width,
|
||||
viewportWidth,
|
||||
windowHeight * aspectRatio,
|
||||
@ -205,8 +225,9 @@ export const useWindowManagerStore = defineStore('windowManager', () => {
|
||||
const offset = currentWorkspaceWindows.value.length * 30
|
||||
const centerX = Math.max(0, (viewportWidth - windowWidth) / 1 / 3)
|
||||
const centerY = Math.max(0, (viewportHeight - windowHeight) / 1 / 3)
|
||||
const x = Math.min(centerX + offset, viewportWidth - windowWidth)
|
||||
const y = Math.min(centerY + offset, viewportHeight - windowHeight)
|
||||
x = Math.min(centerX + offset, viewportWidth - windowWidth)
|
||||
y = Math.min(centerY + offset, viewportHeight - windowHeight)
|
||||
}
|
||||
|
||||
const newWindow: IWindow = {
|
||||
id: windowId,
|
||||
|
||||
130
src/stores/sync/backends.ts
Normal file
130
src/stores/sync/backends.ts
Normal file
@ -0,0 +1,130 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import {
|
||||
haexSyncBackends,
|
||||
type InsertHaexSyncBackends,
|
||||
type SelectHaexSyncBackends,
|
||||
} from '~/database/schemas'
|
||||
|
||||
/**
 * Pinia store managing the configured sync backends of the currently opened
 * vault. All CRUD operations go through the vault's drizzle connection and
 * mirror their results into the local `backends` ref.
 */
export const useSyncBackendsStore = defineStore('syncBackendsStore', () => {
  const { currentVault } = storeToRefs(useVaultStore())

  // In-memory mirror of the sync-backends table rows.
  const backends = ref<SelectHaexSyncBackends[]>([])

  // Backends whose `enabled` flag is set.
  const enabledBackends = computed(() =>
    backends.value.filter((b) => b.enabled),
  )

  // Backends ordered by descending priority (highest priority first).
  const sortedBackends = computed(() =>
    [...backends.value].sort((a, b) => (b.priority || 0) - (a.priority || 0)),
  )

  // Load all sync backends from database into local state.
  // Best-effort: logs and returns silently when no vault is opened.
  const loadBackendsAsync = async () => {
    if (!currentVault.value?.drizzle) {
      console.error('No vault opened')
      return
    }

    try {
      const result = await currentVault.value.drizzle
        .select()
        .from(haexSyncBackends)

      backends.value = result
    } catch (error) {
      console.error('Failed to load sync backends:', error)
      throw error
    }
  }

  // Add a new sync backend and append the inserted row to local state.
  // Returns the inserted row, or undefined if the insert returned nothing.
  const addBackendAsync = async (backend: InsertHaexSyncBackends) => {
    if (!currentVault.value?.drizzle) {
      throw new Error('No vault opened')
    }

    try {
      const result = await currentVault.value.drizzle
        .insert(haexSyncBackends)
        .values(backend)
        .returning()

      if (result.length > 0 && result[0]) {
        backends.value.push(result[0])
        return result[0]
      }
    } catch (error) {
      console.error('Failed to add sync backend:', error)
      throw error
    }
  }

  // Update an existing sync backend and sync the change into local state.
  // Returns the updated row, or undefined if no row matched the id.
  const updateBackendAsync = async (
    id: string,
    updates: Partial<InsertHaexSyncBackends>,
  ) => {
    if (!currentVault.value?.drizzle) {
      throw new Error('No vault opened')
    }

    try {
      const result = await currentVault.value.drizzle
        .update(haexSyncBackends)
        .set(updates)
        .where(eq(haexSyncBackends.id, id))
        .returning()

      if (result.length > 0 && result[0]) {
        const index = backends.value.findIndex((b) => b.id === id)
        if (index !== -1) {
          backends.value[index] = result[0]
        }
        return result[0]
      }
    } catch (error) {
      console.error('Failed to update sync backend:', error)
      throw error
    }
  }

  // Delete a sync backend from the database and from local state.
  const deleteBackendAsync = async (id: string) => {
    if (!currentVault.value?.drizzle) {
      throw new Error('No vault opened')
    }

    try {
      await currentVault.value.drizzle
        .delete(haexSyncBackends)
        .where(eq(haexSyncBackends.id, id))

      backends.value = backends.value.filter((b) => b.id !== id)
    } catch (error) {
      console.error('Failed to delete sync backend:', error)
      throw error
    }
  }

  // Enable/disable a backend (thin wrapper over updateBackendAsync).
  const toggleBackendAsync = async (id: string, enabled: boolean) => {
    return updateBackendAsync(id, { enabled })
  }

  // Update backend priority, which controls sync order (see sortedBackends).
  const updatePriorityAsync = async (id: string, priority: number) => {
    return updateBackendAsync(id, { priority })
  }

  return {
    backends,
    enabledBackends,
    sortedBackends,
    loadBackendsAsync,
    addBackendAsync,
    updateBackendAsync,
    deleteBackendAsync,
    toggleBackendAsync,
    updatePriorityAsync,
  }
})
|
||||
390
src/stores/sync/engine.ts
Normal file
390
src/stores/sync/engine.ts
Normal file
@ -0,0 +1,390 @@
|
||||
/**
|
||||
* Sync Engine Store - Executes sync operations with haex-sync-server backends
|
||||
* Handles vault key storage and CRDT log synchronization
|
||||
*/
|
||||
|
||||
import { createClient } from '@supabase/supabase-js'
|
||||
import type { SelectHaexCrdtLogs } from '~/database/schemas'
|
||||
import {
|
||||
encryptVaultKeyAsync,
|
||||
decryptVaultKeyAsync,
|
||||
encryptCrdtDataAsync,
|
||||
decryptCrdtDataAsync,
|
||||
generateVaultKey,
|
||||
} from '~/utils/crypto/vaultKey'
|
||||
|
||||
// In-memory map of decrypted vault keys, keyed by vault id.
// Cleared on logout / vault close; never persisted.
interface VaultKeyCache {
  [vaultId: string]: {
    vaultKey: Uint8Array // raw (decrypted) 32-byte vault key
    timestamp: number // Date.now() at the moment the key was cached
  }
}

// Shape of a single encrypted CRDT log entry as sent to the sync server.
interface SyncLogData {
  vaultId: string
  encryptedData: string // Base64 AES-GCM ciphertext of the log row
  nonce: string // Base64 AES-GCM nonce used for this entry
  haexTimestamp: string // HLC timestamp copied from the local log row
  sequence: number // client-assigned ordering hint for the batch
}

// Response payload of the server's /sync/pull endpoint.
interface PullLogsResponse {
  logs: Array<{
    id: string
    userId: string
    vaultId: string
    encryptedData: string
    nonce: string
    haexTimestamp: string
    sequence: number
    createdAt: string
  }>
  hasMore: boolean // true when more pages are available after this batch
}
|
||||
|
||||
export const useSyncEngineStore = defineStore('syncEngineStore', () => {
|
||||
const { currentVault, currentVaultId } = storeToRefs(useVaultStore())
|
||||
const syncBackendsStore = useSyncBackendsStore()
|
||||
|
||||
// In-memory cache for decrypted vault keys (cleared on logout/vault close)
|
||||
const vaultKeyCache = ref<VaultKeyCache>({})
|
||||
|
||||
// Supabase client (initialized with config from backend)
|
||||
const supabaseClient = ref<ReturnType<typeof createClient> | null>(null)
|
||||
|
||||
/**
|
||||
* Initializes Supabase client for a specific backend
|
||||
*/
|
||||
const initSupabaseClientAsync = async (backendId: string) => {
|
||||
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
|
||||
if (!backend) {
|
||||
throw new Error('Backend not found')
|
||||
}
|
||||
|
||||
// Get Supabase URL and anon key from server health check
|
||||
const response = await fetch(backend.serverUrl)
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to connect to sync server')
|
||||
}
|
||||
|
||||
const serverInfo = await response.json()
|
||||
const supabaseUrl = serverInfo.supabaseUrl
|
||||
|
||||
// For now, we need to configure the anon key somewhere
|
||||
// TODO: Store this in backend config or fetch from somewhere secure
|
||||
const supabaseAnonKey = 'YOUR_SUPABASE_ANON_KEY'
|
||||
|
||||
supabaseClient.value = createClient(supabaseUrl, supabaseAnonKey)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the current Supabase auth token
|
||||
*/
|
||||
const getAuthTokenAsync = async (): Promise<string | null> => {
|
||||
if (!supabaseClient.value) {
|
||||
return null
|
||||
}
|
||||
|
||||
const {
|
||||
data: { session },
|
||||
} = await supabaseClient.value.auth.getSession()
|
||||
return session?.access_token ?? null
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores encrypted vault key on the server
|
||||
*/
|
||||
const storeVaultKeyAsync = async (
|
||||
backendId: string,
|
||||
vaultId: string,
|
||||
password: string,
|
||||
): Promise<void> => {
|
||||
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
|
||||
if (!backend) {
|
||||
throw new Error('Backend not found')
|
||||
}
|
||||
|
||||
// Generate new vault key
|
||||
const vaultKey = generateVaultKey()
|
||||
|
||||
// Encrypt vault key with password
|
||||
const encryptedData = await encryptVaultKeyAsync(vaultKey, password)
|
||||
|
||||
// Get auth token
|
||||
const token = await getAuthTokenAsync()
|
||||
if (!token) {
|
||||
throw new Error('Not authenticated')
|
||||
}
|
||||
|
||||
// Send to server
|
||||
const response = await fetch(`${backend.serverUrl}/sync/vault-key`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': `Bearer ${token}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
vaultId,
|
||||
...encryptedData,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(
|
||||
`Failed to store vault key: ${error.error || response.statusText}`,
|
||||
)
|
||||
}
|
||||
|
||||
// Cache decrypted vault key
|
||||
vaultKeyCache.value[vaultId] = {
|
||||
vaultKey,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves and decrypts vault key from the server
|
||||
*/
|
||||
const getVaultKeyAsync = async (
|
||||
backendId: string,
|
||||
vaultId: string,
|
||||
password: string,
|
||||
): Promise<Uint8Array> => {
|
||||
// Check cache first
|
||||
const cached = vaultKeyCache.value[vaultId]
|
||||
if (cached) {
|
||||
return cached.vaultKey
|
||||
}
|
||||
|
||||
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
|
||||
if (!backend) {
|
||||
throw new Error('Backend not found')
|
||||
}
|
||||
|
||||
// Get auth token
|
||||
const token = await getAuthTokenAsync()
|
||||
if (!token) {
|
||||
throw new Error('Not authenticated')
|
||||
}
|
||||
|
||||
// Fetch from server
|
||||
const response = await fetch(
|
||||
`${backend.serverUrl}/sync/vault-key/${vaultId}`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${token}`,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (response.status === 404) {
|
||||
throw new Error('Vault key not found on server')
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(
|
||||
`Failed to get vault key: ${error.error || response.statusText}`,
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
// Decrypt vault key
|
||||
const vaultKey = await decryptVaultKeyAsync(
|
||||
data.encryptedVaultKey,
|
||||
data.salt,
|
||||
data.nonce,
|
||||
password,
|
||||
)
|
||||
|
||||
// Cache decrypted vault key
|
||||
vaultKeyCache.value[vaultId] = {
|
||||
vaultKey,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
|
||||
return vaultKey
|
||||
}
|
||||
|
||||
/**
|
||||
* Pushes CRDT logs to the server
|
||||
*/
|
||||
const pushLogsAsync = async (
|
||||
backendId: string,
|
||||
vaultId: string,
|
||||
logs: SelectHaexCrdtLogs[],
|
||||
): Promise<void> => {
|
||||
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
|
||||
if (!backend) {
|
||||
throw new Error('Backend not found')
|
||||
}
|
||||
|
||||
// Get vault key from cache
|
||||
const cached = vaultKeyCache.value[vaultId]
|
||||
if (!cached) {
|
||||
throw new Error('Vault key not available. Please unlock vault first.')
|
||||
}
|
||||
|
||||
const vaultKey = cached.vaultKey
|
||||
|
||||
// Get auth token
|
||||
const token = await getAuthTokenAsync()
|
||||
if (!token) {
|
||||
throw new Error('Not authenticated')
|
||||
}
|
||||
|
||||
// Encrypt each log entry
|
||||
const encryptedLogs: SyncLogData[] = []
|
||||
for (const log of logs) {
|
||||
const { encryptedData, nonce } = await encryptCrdtDataAsync(
|
||||
log,
|
||||
vaultKey,
|
||||
)
|
||||
|
||||
// Generate sequence number based on timestamp
|
||||
const sequence = Date.now()
|
||||
|
||||
encryptedLogs.push({
|
||||
vaultId,
|
||||
encryptedData,
|
||||
nonce,
|
||||
haexTimestamp: log.haexTimestamp!,
|
||||
sequence,
|
||||
})
|
||||
}
|
||||
|
||||
// Send to server
|
||||
const response = await fetch(`${backend.serverUrl}/sync/push`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': `Bearer ${token}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
vaultId,
|
||||
logs: encryptedLogs,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(
|
||||
`Failed to push logs: ${error.error || response.statusText}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pulls CRDT logs from the server
|
||||
*/
|
||||
const pullLogsAsync = async (
|
||||
backendId: string,
|
||||
vaultId: string,
|
||||
afterSequence?: number,
|
||||
limit?: number,
|
||||
): Promise<SelectHaexCrdtLogs[]> => {
|
||||
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
|
||||
if (!backend) {
|
||||
throw new Error('Backend not found')
|
||||
}
|
||||
|
||||
// Get vault key from cache
|
||||
const cached = vaultKeyCache.value[vaultId]
|
||||
if (!cached) {
|
||||
throw new Error('Vault key not available. Please unlock vault first.')
|
||||
}
|
||||
|
||||
const vaultKey = cached.vaultKey
|
||||
|
||||
// Get auth token
|
||||
const token = await getAuthTokenAsync()
|
||||
if (!token) {
|
||||
throw new Error('Not authenticated')
|
||||
}
|
||||
|
||||
// Fetch from server
|
||||
const response = await fetch(`${backend.serverUrl}/sync/pull`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': `Bearer ${token}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
vaultId,
|
||||
afterSequence,
|
||||
limit: limit ?? 100,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(
|
||||
`Failed to pull logs: ${error.error || response.statusText}`,
|
||||
)
|
||||
}
|
||||
|
||||
const data: PullLogsResponse = await response.json()
|
||||
|
||||
// Decrypt each log entry
|
||||
const decryptedLogs: SelectHaexCrdtLogs[] = []
|
||||
for (const log of data.logs) {
|
||||
try {
|
||||
const decrypted = await decryptCrdtDataAsync<SelectHaexCrdtLogs>(
|
||||
log.encryptedData,
|
||||
log.nonce,
|
||||
vaultKey,
|
||||
)
|
||||
decryptedLogs.push(decrypted)
|
||||
} catch (error) {
|
||||
console.error('Failed to decrypt log entry:', log.id, error)
|
||||
// Skip corrupted entries
|
||||
}
|
||||
}
|
||||
|
||||
return decryptedLogs
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears vault key from cache
|
||||
*/
|
||||
const clearVaultKeyCache = (vaultId?: string) => {
|
||||
if (vaultId) {
|
||||
delete vaultKeyCache.value[vaultId]
|
||||
} else {
|
||||
vaultKeyCache.value = {}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Health check - verifies server is reachable
|
||||
*/
|
||||
const healthCheckAsync = async (backendId: string): Promise<boolean> => {
|
||||
const backend = syncBackendsStore.backends.find((b) => b.id === backendId)
|
||||
if (!backend) {
|
||||
return false
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(backend.serverUrl)
|
||||
return response.ok
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
vaultKeyCache,
|
||||
supabaseClient,
|
||||
initSupabaseClientAsync,
|
||||
getAuthTokenAsync,
|
||||
storeVaultKeyAsync,
|
||||
getVaultKeyAsync,
|
||||
pushLogsAsync,
|
||||
pullLogsAsync,
|
||||
clearVaultKeyCache,
|
||||
healthCheckAsync,
|
||||
}
|
||||
})
|
||||
525
src/stores/sync/orchestrator.ts
Normal file
525
src/stores/sync/orchestrator.ts
Normal file
@ -0,0 +1,525 @@
|
||||
/**
|
||||
* Sync Orchestrator Store - Orchestrates sync operations across all backends
|
||||
* Uses Supabase Realtime subscriptions for instant sync
|
||||
*/
|
||||
|
||||
import { eq, gt } from 'drizzle-orm'
|
||||
import type { RealtimeChannel } from '@supabase/supabase-js'
|
||||
import {
|
||||
haexCrdtLogs,
|
||||
haexSyncStatus,
|
||||
type SelectHaexCrdtLogs,
|
||||
type SelectHaexSyncStatus,
|
||||
} from '~/database/schemas'
|
||||
|
||||
// Runtime sync state for a single backend (not persisted).
interface SyncState {
  isConnected: boolean // realtime subscription is live
  isSyncing: boolean // a push or pull is currently in flight
  error: string | null // last error message, cleared on each new attempt
  subscription: RealtimeChannel | null // active Supabase channel, if any
  status: SelectHaexSyncStatus | null // persisted sync status row, if loaded
}

// Per-backend sync state, keyed by backend id.
interface BackendSyncState {
  [backendId: string]: SyncState
}
|
||||
|
||||
export const useSyncOrchestratorStore = defineStore(
|
||||
'syncOrchestratorStore',
|
||||
() => {
|
||||
const { currentVault, currentVaultId } = storeToRefs(useVaultStore())
|
||||
const syncBackendsStore = useSyncBackendsStore()
|
||||
const syncEngineStore = useSyncEngineStore()
|
||||
|
||||
// Sync state per backend
|
||||
const syncStates = ref<BackendSyncState>({})
|
||||
|
||||
// Track if we're currently processing a local write
|
||||
const isProcessingLocalWrite = ref(false)
|
||||
|
||||
/**
|
||||
* Loads sync status from database for a backend
|
||||
*/
|
||||
const loadSyncStatusAsync = async (
|
||||
backendId: string,
|
||||
): Promise<SelectHaexSyncStatus | null> => {
|
||||
if (!currentVault.value?.drizzle) {
|
||||
throw new Error('No vault opened')
|
||||
}
|
||||
|
||||
try {
|
||||
const results = await currentVault.value.drizzle
|
||||
.select()
|
||||
.from(haexSyncStatus)
|
||||
.where(eq(haexSyncStatus.backendId, backendId))
|
||||
.limit(1)
|
||||
|
||||
return results[0] ?? null
|
||||
} catch (error) {
|
||||
console.error('Failed to load sync status:', error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Upserts the persisted sync status row for a backend and refreshes the
     * in-memory copy held in syncStates.
     *
     * Always stamps lastSyncAt with the current time, on insert and update
     * alike.
     *
     * @param backendId - backend whose status row is updated
     * @param updates - partial column values to merge into the row
     * @throws Error when no vault is opened or the write fails.
     */
    const updateSyncStatusAsync = async (
      backendId: string,
      updates: Partial<SelectHaexSyncStatus>,
    ): Promise<void> => {
      if (!currentVault.value?.drizzle) {
        throw new Error('No vault opened')
      }

      try {
        // Read-then-write upsert: check whether a row already exists
        const existing = await loadSyncStatusAsync(backendId)

        if (existing) {
          // Update existing
          await currentVault.value.drizzle
            .update(haexSyncStatus)
            .set({
              ...updates,
              lastSyncAt: new Date().toISOString(),
            })
            .where(eq(haexSyncStatus.backendId, backendId))
        } else {
          // Insert new
          await currentVault.value.drizzle.insert(haexSyncStatus).values({
            backendId,
            ...updates,
            lastSyncAt: new Date().toISOString(),
          })
        }

        // Update local state (re-read so we mirror exactly what was stored)
        if (syncStates.value[backendId]) {
          syncStates.value[backendId].status = await loadSyncStatusAsync(
            backendId,
          )
        }
      } catch (error) {
        console.error('Failed to update sync status:', error)
        throw error
      }
    }
|
||||
|
||||
/**
|
||||
* Gets logs that need to be pushed to server (after last push HLC)
|
||||
*/
|
||||
const getLogsToPushAsync = async (
|
||||
backendId: string,
|
||||
): Promise<SelectHaexCrdtLogs[]> => {
|
||||
if (!currentVault.value?.drizzle) {
|
||||
throw new Error('No vault opened')
|
||||
}
|
||||
|
||||
try {
|
||||
const status = await loadSyncStatusAsync(backendId)
|
||||
const lastPushHlc = status?.lastPushHlcTimestamp
|
||||
|
||||
const query = currentVault.value.drizzle
|
||||
.select()
|
||||
.from(haexCrdtLogs)
|
||||
.orderBy(haexCrdtLogs.haexTimestamp)
|
||||
|
||||
if (lastPushHlc) {
|
||||
return await query.where(
|
||||
gt(haexCrdtLogs.haexTimestamp, lastPushHlc),
|
||||
)
|
||||
}
|
||||
|
||||
return await query
|
||||
} catch (error) {
|
||||
console.error('Failed to get logs to push:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies remote logs to local database
|
||||
*/
|
||||
const applyRemoteLogsAsync = async (
|
||||
logs: SelectHaexCrdtLogs[],
|
||||
): Promise<void> => {
|
||||
if (!currentVault.value?.drizzle) {
|
||||
throw new Error('No vault opened')
|
||||
}
|
||||
|
||||
try {
|
||||
// Insert logs into local CRDT log table
|
||||
for (const log of logs) {
|
||||
await currentVault.value.drizzle
|
||||
.insert(haexCrdtLogs)
|
||||
.values(log)
|
||||
.onConflictDoNothing() // Skip if already exists
|
||||
}
|
||||
|
||||
// TODO: Apply CRDT log entries to actual data tables
|
||||
// This requires replaying the operations from the log
|
||||
console.log(`Applied ${logs.length} remote logs to local database`)
|
||||
} catch (error) {
|
||||
console.error('Failed to apply remote logs:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pushes local changes to a specific backend
|
||||
*/
|
||||
const pushToBackendAsync = async (backendId: string): Promise<void> => {
|
||||
if (!currentVaultId.value) {
|
||||
throw new Error('No vault opened')
|
||||
}
|
||||
|
||||
const state = syncStates.value[backendId]
|
||||
if (!state) {
|
||||
throw new Error('Backend not initialized')
|
||||
}
|
||||
|
||||
if (state.isSyncing) {
|
||||
console.log(`Already syncing with backend ${backendId}`)
|
||||
return
|
||||
}
|
||||
|
||||
state.isSyncing = true
|
||||
state.error = null
|
||||
|
||||
try {
|
||||
// Get logs that need to be pushed
|
||||
const logs = await getLogsToPushAsync(backendId)
|
||||
|
||||
if (logs.length === 0) {
|
||||
console.log(`No logs to push to backend ${backendId}`)
|
||||
return
|
||||
}
|
||||
|
||||
await syncEngineStore.pushLogsAsync(
|
||||
backendId,
|
||||
currentVaultId.value,
|
||||
logs,
|
||||
)
|
||||
|
||||
// Update sync status with last pushed HLC timestamp
|
||||
const lastHlc = logs[logs.length - 1]?.haexTimestamp
|
||||
if (lastHlc) {
|
||||
await updateSyncStatusAsync(backendId, {
|
||||
lastPushHlcTimestamp: lastHlc,
|
||||
})
|
||||
}
|
||||
|
||||
console.log(`Pushed ${logs.length} logs to backend ${backendId}`)
|
||||
} catch (error) {
|
||||
console.error(`Failed to push to backend ${backendId}:`, error)
|
||||
state.error = error instanceof Error ? error.message : 'Unknown error'
|
||||
await updateSyncStatusAsync(backendId, {
|
||||
error: state.error,
|
||||
})
|
||||
throw error
|
||||
} finally {
|
||||
state.isSyncing = false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pulls changes from a specific backend
|
||||
*/
|
||||
const pullFromBackendAsync = async (backendId: string): Promise<void> => {
|
||||
if (!currentVaultId.value) {
|
||||
throw new Error('No vault opened')
|
||||
}
|
||||
|
||||
const state = syncStates.value[backendId]
|
||||
if (!state) {
|
||||
throw new Error('Backend not initialized')
|
||||
}
|
||||
|
||||
if (state.isSyncing) {
|
||||
console.log(`Already syncing with backend ${backendId}`)
|
||||
return
|
||||
}
|
||||
|
||||
state.isSyncing = true
|
||||
state.error = null
|
||||
|
||||
try {
|
||||
const status = await loadSyncStatusAsync(backendId)
|
||||
const afterSequence = status?.lastPullSequence ?? undefined
|
||||
|
||||
const remoteLogs = await syncEngineStore.pullLogsAsync(
|
||||
backendId,
|
||||
currentVaultId.value,
|
||||
afterSequence,
|
||||
100,
|
||||
)
|
||||
|
||||
if (remoteLogs.length > 0) {
|
||||
await applyRemoteLogsAsync(remoteLogs)
|
||||
|
||||
// Update sync status with last pulled sequence
|
||||
// TODO: Get actual sequence from server response
|
||||
const lastSequence = Date.now()
|
||||
await updateSyncStatusAsync(backendId, {
|
||||
lastPullSequence: lastSequence,
|
||||
})
|
||||
|
||||
console.log(
|
||||
`Pulled ${remoteLogs.length} logs from backend ${backendId}`,
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to pull from backend ${backendId}:`, error)
|
||||
state.error = error instanceof Error ? error.message : 'Unknown error'
|
||||
await updateSyncStatusAsync(backendId, {
|
||||
error: state.error,
|
||||
})
|
||||
throw error
|
||||
} finally {
|
||||
state.isSyncing = false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles incoming realtime changes from Supabase
|
||||
*/
|
||||
const handleRealtimeChangeAsync = async (
|
||||
backendId: string,
|
||||
payload: any,
|
||||
) => {
|
||||
console.log(`Realtime change from backend ${backendId}:`, payload)
|
||||
|
||||
// Don't process if we're currently writing locally to avoid loops
|
||||
if (isProcessingLocalWrite.value) {
|
||||
console.log('Skipping realtime change - local write in progress')
|
||||
return
|
||||
}
|
||||
|
||||
// Pull latest changes from this backend
|
||||
try {
|
||||
await pullFromBackendAsync(backendId)
|
||||
} catch (error) {
|
||||
console.error('Failed to handle realtime change:', error)
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Subscribes to realtime INSERT events on the server's sync_logs table
     * for the current vault. Idempotent per backend: returns early when a
     * subscription already exists. Connection state is updated from the
     * channel's status callback, not from this function's return.
     *
     * @throws Error when no vault is opened, the backend is uninitialized,
     *         or the Supabase client has not been created yet.
     */
    const subscribeToBackendAsync = async (backendId: string): Promise<void> => {
      if (!currentVaultId.value) {
        throw new Error('No vault opened')
      }

      const state = syncStates.value[backendId]
      if (!state) {
        throw new Error('Backend not initialized')
      }

      if (state.subscription) {
        console.log(`Already subscribed to backend ${backendId}`)
        return
      }

      const client = syncEngineStore.supabaseClient
      if (!client) {
        throw new Error('Supabase client not initialized')
      }

      try {
        // Subscribe to sync_logs table for this vault
        const channel = client
          .channel(`sync_logs:${currentVaultId.value}`)
          .on(
            'postgres_changes',
            {
              event: 'INSERT',
              schema: 'public',
              table: 'sync_logs',
              // Server-side filter: only rows belonging to this vault
              filter: `vault_id=eq.${currentVaultId.value}`,
            },
            (payload) => {
              // Fire-and-forget: pull errors are logged, not propagated
              handleRealtimeChangeAsync(backendId, payload).catch(console.error)
            },
          )
          .subscribe((status) => {
            // Status callback fires asynchronously as the channel (re)connects
            if (status === 'SUBSCRIBED') {
              state.isConnected = true
              console.log(`Subscribed to backend ${backendId}`)
            } else if (status === 'CHANNEL_ERROR' || status === 'TIMED_OUT') {
              state.isConnected = false
              state.error = `Subscription error: ${status}`
              console.error(
                `Subscription to backend ${backendId} failed: ${status}`,
              )
            }
          })

        state.subscription = channel
      } catch (error) {
        console.error(`Failed to subscribe to backend ${backendId}:`, error)
        state.error = error instanceof Error ? error.message : 'Unknown error'
        throw error
      }
    }
|
||||
|
||||
/**
|
||||
* Unsubscribes from realtime changes
|
||||
*/
|
||||
const unsubscribeFromBackendAsync = async (
|
||||
backendId: string,
|
||||
): Promise<void> => {
|
||||
const state = syncStates.value[backendId]
|
||||
if (!state || !state.subscription) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
await state.subscription.unsubscribe()
|
||||
state.subscription = null
|
||||
state.isConnected = false
|
||||
console.log(`Unsubscribed from backend ${backendId}`)
|
||||
} catch (error) {
|
||||
console.error(`Failed to unsubscribe from backend ${backendId}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes sync for a backend
|
||||
*/
|
||||
const initBackendAsync = async (backendId: string): Promise<void> => {
|
||||
if (syncStates.value[backendId]) {
|
||||
console.log(`Backend ${backendId} already initialized`)
|
||||
return
|
||||
}
|
||||
|
||||
// Load sync status from database
|
||||
const status = await loadSyncStatusAsync(backendId)
|
||||
|
||||
// Initialize state
|
||||
syncStates.value[backendId] = {
|
||||
isConnected: false,
|
||||
isSyncing: false,
|
||||
error: null,
|
||||
subscription: null,
|
||||
status,
|
||||
}
|
||||
|
||||
try {
|
||||
// Initial pull to get all existing data
|
||||
await pullFromBackendAsync(backendId)
|
||||
|
||||
// Subscribe to realtime changes
|
||||
await subscribeToBackendAsync(backendId)
|
||||
} catch (error) {
|
||||
console.error(`Failed to initialize backend ${backendId}:`, error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Called after local write operations to push changes
|
||||
*/
|
||||
const onLocalWriteAsync = async (): Promise<void> => {
|
||||
isProcessingLocalWrite.value = true
|
||||
|
||||
try {
|
||||
// Push to all enabled backends in parallel
|
||||
const enabledBackends = syncBackendsStore.enabledBackends
|
||||
|
||||
await Promise.allSettled(
|
||||
enabledBackends.map((backend) => pushToBackendAsync(backend.id)),
|
||||
)
|
||||
} catch (error) {
|
||||
console.error('Failed to push local changes:', error)
|
||||
} finally {
|
||||
isProcessingLocalWrite.value = false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts sync for all enabled backends
|
||||
*/
|
||||
const startSyncAsync = async (): Promise<void> => {
|
||||
const enabledBackends = syncBackendsStore.enabledBackends
|
||||
|
||||
if (enabledBackends.length === 0) {
|
||||
console.log('No enabled backends to sync with')
|
||||
return
|
||||
}
|
||||
|
||||
console.log(`Starting sync with ${enabledBackends.length} backends`)
|
||||
|
||||
for (const backend of enabledBackends) {
|
||||
try {
|
||||
await initBackendAsync(backend.id)
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Failed to start sync with backend ${backend.id}:`,
|
||||
error,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops sync for all backends
|
||||
*/
|
||||
const stopSyncAsync = async (): Promise<void> => {
|
||||
console.log('Stopping sync for all backends')
|
||||
|
||||
for (const backendId of Object.keys(syncStates.value)) {
|
||||
await unsubscribeFromBackendAsync(backendId)
|
||||
}
|
||||
|
||||
syncStates.value = {}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets sync state for a specific backend
|
||||
*/
|
||||
const getSyncState = (backendId: string): SyncState | null => {
|
||||
return syncStates.value[backendId] ?? null
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if any backend is currently syncing
|
||||
*/
|
||||
const isAnySyncing = computed(() => {
|
||||
return Object.values(syncStates.value).some((state) => state.isSyncing)
|
||||
})
|
||||
|
||||
/**
|
||||
* Checks if all backends are connected
|
||||
*/
|
||||
const areAllConnected = computed(() => {
|
||||
const enabledBackends = syncBackendsStore.enabledBackends
|
||||
if (enabledBackends.length === 0) return false
|
||||
|
||||
return enabledBackends.every((backend) => {
|
||||
const state = syncStates.value[backend.id]
|
||||
return state?.isConnected ?? false
|
||||
})
|
||||
})
|
||||
|
||||
return {
|
||||
syncStates,
|
||||
isProcessingLocalWrite,
|
||||
isAnySyncing,
|
||||
areAllConnected,
|
||||
loadSyncStatusAsync,
|
||||
updateSyncStatusAsync,
|
||||
getLogsToPushAsync,
|
||||
applyRemoteLogsAsync,
|
||||
pushToBackendAsync,
|
||||
pullFromBackendAsync,
|
||||
subscribeToBackendAsync,
|
||||
unsubscribeFromBackendAsync,
|
||||
initBackendAsync,
|
||||
onLocalWriteAsync,
|
||||
startSyncAsync,
|
||||
stopSyncAsync,
|
||||
getSyncState,
|
||||
}
|
||||
},
|
||||
)
|
||||
250
src/utils/crypto/vaultKey.ts
Normal file
250
src/utils/crypto/vaultKey.ts
Normal file
@ -0,0 +1,250 @@
|
||||
/**
 * Crypto utilities for Vault Key Management
 * Implements the "Hybrid-Ansatz" for vault key encryption
 */

// PBKDF2 work factor for password-based key derivation (SHA-256 rounds).
const PBKDF2_ITERATIONS = 600_000
// Symmetric key size in bits.
const KEY_LENGTH = 256
// Authenticated cipher used for all encryption in this module.
const ALGORITHM = 'AES-GCM'
|
||||
|
||||
/**
|
||||
* Derives a cryptographic key from a password using PBKDF2
|
||||
*/
|
||||
export async function deriveKeyFromPasswordAsync(
|
||||
password: string,
|
||||
salt: Uint8Array,
|
||||
): Promise<CryptoKey> {
|
||||
const encoder = new TextEncoder()
|
||||
const passwordBuffer = encoder.encode(password)
|
||||
|
||||
// Ensure salt has a proper ArrayBuffer (not SharedArrayBuffer)
|
||||
const saltBuffer = new Uint8Array(salt)
|
||||
|
||||
// Import password as key material
|
||||
const keyMaterial = await crypto.subtle.importKey(
|
||||
'raw',
|
||||
passwordBuffer,
|
||||
'PBKDF2',
|
||||
false,
|
||||
['deriveKey'],
|
||||
)
|
||||
|
||||
// Derive key using PBKDF2
|
||||
return await crypto.subtle.deriveKey(
|
||||
{
|
||||
name: 'PBKDF2',
|
||||
salt: saltBuffer,
|
||||
iterations: PBKDF2_ITERATIONS,
|
||||
hash: 'SHA-256',
|
||||
},
|
||||
keyMaterial,
|
||||
{ name: ALGORITHM, length: KEY_LENGTH },
|
||||
false, // not extractable
|
||||
['encrypt', 'decrypt'],
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a random vault key (32 bytes)
|
||||
*/
|
||||
export function generateVaultKey(): Uint8Array {
|
||||
return crypto.getRandomValues(new Uint8Array(32))
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypts the vault key with a password-derived key
|
||||
* Returns: { encryptedVaultKey, salt, nonce } all as Base64 strings
|
||||
*/
|
||||
export async function encryptVaultKeyAsync(
|
||||
vaultKey: Uint8Array,
|
||||
password: string,
|
||||
): Promise<{
|
||||
encryptedVaultKey: string
|
||||
salt: string
|
||||
nonce: string
|
||||
}> {
|
||||
// Generate random salt for PBKDF2
|
||||
const salt = crypto.getRandomValues(new Uint8Array(32))
|
||||
|
||||
// Derive encryption key from password
|
||||
const derivedKey = await deriveKeyFromPasswordAsync(password, salt)
|
||||
|
||||
// Generate random nonce for AES-GCM
|
||||
const nonce = crypto.getRandomValues(new Uint8Array(12))
|
||||
|
||||
// Ensure vaultKey has proper ArrayBuffer
|
||||
const vaultKeyBuffer = new Uint8Array(vaultKey)
|
||||
|
||||
// Encrypt vault key
|
||||
const encryptedBuffer = await crypto.subtle.encrypt(
|
||||
{
|
||||
name: ALGORITHM,
|
||||
iv: nonce,
|
||||
},
|
||||
derivedKey,
|
||||
vaultKeyBuffer,
|
||||
)
|
||||
|
||||
// Convert to Base64 for storage
|
||||
return {
|
||||
encryptedVaultKey: arrayBufferToBase64(encryptedBuffer),
|
||||
salt: arrayBufferToBase64(salt),
|
||||
nonce: arrayBufferToBase64(nonce),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypts the vault key using the password
|
||||
*/
|
||||
export async function decryptVaultKeyAsync(
|
||||
encryptedVaultKey: string,
|
||||
salt: string,
|
||||
nonce: string,
|
||||
password: string,
|
||||
): Promise<Uint8Array> {
|
||||
// Convert Base64 to Uint8Array
|
||||
const encryptedBuffer = base64ToArrayBuffer(encryptedVaultKey)
|
||||
const saltBuffer = base64ToArrayBuffer(salt)
|
||||
const nonceBuffer = base64ToArrayBuffer(nonce)
|
||||
|
||||
// Derive decryption key from password
|
||||
const derivedKey = await deriveKeyFromPasswordAsync(password, saltBuffer)
|
||||
|
||||
// Ensure buffers have proper ArrayBuffer
|
||||
const encryptedData = new Uint8Array(encryptedBuffer)
|
||||
const iv = new Uint8Array(nonceBuffer)
|
||||
|
||||
// Decrypt vault key
|
||||
const decryptedBuffer = await crypto.subtle.decrypt(
|
||||
{
|
||||
name: ALGORITHM,
|
||||
iv,
|
||||
},
|
||||
derivedKey,
|
||||
encryptedData,
|
||||
)
|
||||
|
||||
return new Uint8Array(decryptedBuffer)
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypts CRDT log data with the vault key
|
||||
*/
|
||||
export async function encryptCrdtDataAsync(
|
||||
data: object,
|
||||
vaultKey: Uint8Array,
|
||||
): Promise<{
|
||||
encryptedData: string
|
||||
nonce: string
|
||||
}> {
|
||||
// Ensure vaultKey has proper ArrayBuffer
|
||||
const vaultKeyBuffer = new Uint8Array(vaultKey)
|
||||
|
||||
// Import vault key for encryption
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
'raw',
|
||||
vaultKeyBuffer,
|
||||
{ name: ALGORITHM },
|
||||
false,
|
||||
['encrypt'],
|
||||
)
|
||||
|
||||
// Generate random nonce
|
||||
const nonce = crypto.getRandomValues(new Uint8Array(12))
|
||||
|
||||
// Serialize data to JSON
|
||||
const encoder = new TextEncoder()
|
||||
const dataBuffer = encoder.encode(JSON.stringify(data))
|
||||
|
||||
// Encrypt data
|
||||
const encryptedBuffer = await crypto.subtle.encrypt(
|
||||
{
|
||||
name: ALGORITHM,
|
||||
iv: nonce,
|
||||
},
|
||||
cryptoKey,
|
||||
dataBuffer,
|
||||
)
|
||||
|
||||
return {
|
||||
encryptedData: arrayBufferToBase64(encryptedBuffer),
|
||||
nonce: arrayBufferToBase64(nonce),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypts CRDT log data with the vault key
|
||||
*/
|
||||
export async function decryptCrdtDataAsync<T = object>(
|
||||
encryptedData: string,
|
||||
nonce: string,
|
||||
vaultKey: Uint8Array,
|
||||
): Promise<T> {
|
||||
// Ensure vaultKey has proper ArrayBuffer
|
||||
const vaultKeyBuffer = new Uint8Array(vaultKey)
|
||||
|
||||
// Import vault key for decryption
|
||||
const cryptoKey = await crypto.subtle.importKey(
|
||||
'raw',
|
||||
vaultKeyBuffer,
|
||||
{ name: ALGORITHM },
|
||||
false,
|
||||
['decrypt'],
|
||||
)
|
||||
|
||||
// Convert Base64 to buffers
|
||||
const encryptedBuffer = base64ToArrayBuffer(encryptedData)
|
||||
const nonceBuffer = base64ToArrayBuffer(nonce)
|
||||
|
||||
// Ensure buffers have proper ArrayBuffer
|
||||
const encryptedDataBuffer = new Uint8Array(encryptedBuffer)
|
||||
const iv = new Uint8Array(nonceBuffer)
|
||||
|
||||
// Decrypt data
|
||||
const decryptedBuffer = await crypto.subtle.decrypt(
|
||||
{
|
||||
name: ALGORITHM,
|
||||
iv,
|
||||
},
|
||||
cryptoKey,
|
||||
encryptedDataBuffer,
|
||||
)
|
||||
|
||||
// Parse JSON
|
||||
const decoder = new TextDecoder()
|
||||
const jsonString = decoder.decode(decryptedBuffer)
|
||||
return JSON.parse(jsonString) as T
|
||||
}
|
||||
|
||||
// Utility functions for Base64 conversion
|
||||
|
||||
function arrayBufferToBase64(buffer: ArrayBuffer | Uint8Array): string {
|
||||
const bytes = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer)
|
||||
// Use Buffer for efficient base64 encoding (works in Node/Bun)
|
||||
if (typeof Buffer !== 'undefined') {
|
||||
return Buffer.from(bytes).toString('base64')
|
||||
}
|
||||
// Fallback to btoa for browser environments
|
||||
let binary = ''
|
||||
for (let i = 0; i < bytes.length; i++) {
|
||||
const byte = bytes[i]
|
||||
if (byte !== undefined) {
|
||||
binary += String.fromCharCode(byte)
|
||||
}
|
||||
}
|
||||
return btoa(binary)
|
||||
}
|
||||
|
||||
function base64ToArrayBuffer(base64: string): Uint8Array {
|
||||
// Use Buffer for efficient base64 decoding (works in Node/Bun)
|
||||
if (typeof Buffer !== 'undefined') {
|
||||
return new Uint8Array(Buffer.from(base64, 'base64'))
|
||||
}
|
||||
// Fallback to atob for browser environments
|
||||
const binary = atob(base64)
|
||||
const bytes = new Uint8Array(binary.length)
|
||||
for (let i = 0; i < binary.length; i++) {
|
||||
bytes[i] = binary.charCodeAt(i)
|
||||
}
|
||||
return bytes
|
||||
}
|
||||
63
src/utils/viewport.ts
Normal file
63
src/utils/viewport.ts
Normal file
@ -0,0 +1,63 @@
|
||||
// Viewport and safe area utilities
|
||||
|
||||
export interface ViewportDimensions {
|
||||
width: number
|
||||
height: number
|
||||
safeAreaTop: number
|
||||
safeAreaBottom: number
|
||||
headerHeight: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Get viewport dimensions with safe areas and header height
|
||||
*/
|
||||
export function getViewportDimensions(): ViewportDimensions {
|
||||
const viewportWidth = window.innerWidth
|
||||
const viewportHeight = window.innerHeight - 40 // Subtract header height
|
||||
|
||||
// Get safe-area-insets from CSS variables
|
||||
const safeAreaTop = parseFloat(
|
||||
getComputedStyle(document.documentElement).getPropertyValue(
|
||||
'--safe-area-inset-top',
|
||||
) || '0',
|
||||
)
|
||||
const safeAreaBottom = parseFloat(
|
||||
getComputedStyle(document.documentElement).getPropertyValue(
|
||||
'--safe-area-inset-bottom',
|
||||
) || '0',
|
||||
)
|
||||
|
||||
// Get header height from UI store
|
||||
const { headerHeight } = useUiStore()
|
||||
|
||||
return {
|
||||
width: viewportWidth,
|
||||
height: viewportHeight,
|
||||
safeAreaTop,
|
||||
safeAreaBottom,
|
||||
headerHeight,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate available content height (viewport height minus safe areas)
|
||||
* Note: viewport height already excludes header, so we only subtract safe areas
|
||||
*/
|
||||
export function getAvailableContentHeight(): number {
|
||||
const dimensions = getViewportDimensions()
|
||||
return dimensions.height - dimensions.safeAreaTop - dimensions.safeAreaBottom
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate fullscreen window dimensions (for small screens)
|
||||
*/
|
||||
export function getFullscreenDimensions() {
|
||||
const dimensions = getViewportDimensions()
|
||||
|
||||
return {
|
||||
x: 0,
|
||||
y: 0,
|
||||
width: dimensions.width,
|
||||
height: getAvailableContentHeight(),
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user