mirror of
https://github.com/haexhub/haex-hub.git
synced 2025-12-18 15:00:52 +01:00
Compare commits
44 Commits
ef225b281f
...
v0.1.6
| Author | SHA1 | Date | |
|---|---|---|---|
| 405cf25aab | |||
| b097bf211d | |||
| c71b8468df | |||
| 3a4f482021 | |||
| 88507410ed | |||
| f38cecc84b | |||
| 931d51a1e1 | |||
| c97afdee18 | |||
| 65d2770df3 | |||
| a52e1b43fa | |||
| 6ceb22f014 | |||
| 4833dee89a | |||
| a80c783576 | |||
| 4e1e4ae601 | |||
| 6a7f58a513 | |||
| 3ed8d6bc05 | |||
| 81a72da26c | |||
| 4fa3515e32 | |||
| c5c30fd4c4 | |||
| 8c7a02a019 | |||
| 465fe19542 | |||
| d2d0f8996b | |||
| f727d00639 | |||
| a946b14f69 | |||
| 471baec284 | |||
| 8298d807f3 | |||
| 42e6459fbf | |||
| 6ae87fc694 | |||
| f7867a5bde | |||
| d82599f588 | |||
| 72bb211a76 | |||
| f14ce0d6ad | |||
| af09f4524d | |||
| 102832675d | |||
| 3490de2f51 | |||
| 7c3af10938 | |||
| 5c5d0785b9 | |||
| 121dd9dd00 | |||
| 4ff6aee4d8 | |||
| dceb49ae90 | |||
| 5ea04a80e0 | |||
| 65cf2e2c3c | |||
| 68d542b4d7 | |||
| f97cd4ad97 |
228
.github/workflows/build.yml
vendored
Normal file
228
.github/workflows/build.yml
vendored
Normal file
@ -0,0 +1,228 @@
|
||||
name: Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- develop
|
||||
tags-ignore:
|
||||
- '**'
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- develop
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build-desktop:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: 'macos-latest'
|
||||
args: '--target aarch64-apple-darwin'
|
||||
- platform: 'macos-latest'
|
||||
args: '--target x86_64-apple-darwin'
|
||||
- platform: 'ubuntu-22.04'
|
||||
args: ''
|
||||
- platform: 'windows-latest'
|
||||
args: ''
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
|
||||
|
||||
- name: Install dependencies (Ubuntu)
|
||||
if: matrix.platform == 'ubuntu-22.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf libssl-dev
|
||||
|
||||
- name: Get pnpm store directory
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Setup Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: src-tauri
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
- name: Upload artifacts (macOS)
|
||||
if: matrix.platform == 'macos-latest'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: macos-${{ contains(matrix.args, 'aarch64') && 'aarch64' || 'x86_64' }}
|
||||
path: |
|
||||
src-tauri/target/*/release/bundle/dmg/*.dmg
|
||||
src-tauri/target/*/release/bundle/macos/*.app
|
||||
|
||||
- name: Upload artifacts (Ubuntu)
|
||||
if: matrix.platform == 'ubuntu-22.04'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: linux
|
||||
path: |
|
||||
src-tauri/target/release/bundle/deb/*.deb
|
||||
src-tauri/target/release/bundle/appimage/*.AppImage
|
||||
src-tauri/target/release/bundle/rpm/*.rpm
|
||||
|
||||
- name: Upload artifacts (Windows)
|
||||
if: matrix.platform == 'windows-latest'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: windows
|
||||
path: |
|
||||
src-tauri/target/release/bundle/msi/*.msi
|
||||
src-tauri/target/release/bundle/nsis/*.exe
|
||||
|
||||
build-android:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
distribution: 'temurin'
|
||||
java-version: '17'
|
||||
|
||||
- name: Setup Android SDK
|
||||
uses: android-actions/setup-android@v3
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Install Rust Android targets
|
||||
run: |
|
||||
rustup target add aarch64-linux-android
|
||||
rustup target add armv7-linux-androideabi
|
||||
rustup target add i686-linux-android
|
||||
rustup target add x86_64-linux-android
|
||||
|
||||
- name: Setup NDK
|
||||
uses: nttld/setup-ndk@v1
|
||||
with:
|
||||
ndk-version: r26d
|
||||
id: setup-ndk
|
||||
|
||||
- name: Setup Android NDK environment for OpenSSL
|
||||
run: |
|
||||
echo "ANDROID_NDK_HOME=${{ steps.setup-ndk.outputs.ndk-path }}" >> $GITHUB_ENV
|
||||
echo "NDK_HOME=${{ steps.setup-ndk.outputs.ndk-path }}" >> $GITHUB_ENV
|
||||
|
||||
# Add all Android toolchains to PATH for OpenSSL cross-compilation
|
||||
echo "${{ steps.setup-ndk.outputs.ndk-path }}/toolchains/llvm/prebuilt/linux-x86_64/bin" >> $GITHUB_PATH
|
||||
|
||||
# Set CC, AR, RANLIB for each target
|
||||
echo "CC_aarch64_linux_android=aarch64-linux-android24-clang" >> $GITHUB_ENV
|
||||
echo "AR_aarch64_linux_android=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_aarch64_linux_android=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
echo "CC_armv7_linux_androideabi=armv7a-linux-androideabi24-clang" >> $GITHUB_ENV
|
||||
echo "AR_armv7_linux_androideabi=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_armv7_linux_androideabi=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
echo "CC_i686_linux_android=i686-linux-android24-clang" >> $GITHUB_ENV
|
||||
echo "AR_i686_linux_android=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_i686_linux_android=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
echo "CC_x86_64_linux_android=x86_64-linux-android24-clang" >> $GITHUB_ENV
|
||||
echo "AR_x86_64_linux_android=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_x86_64_linux_android=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
- name: Install build dependencies for OpenSSL
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y perl make
|
||||
|
||||
- name: Get pnpm store directory
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Setup Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: src-tauri
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Setup Keystore (if secrets available)
|
||||
env:
|
||||
ANDROID_KEYSTORE: ${{ secrets.ANDROID_KEYSTORE }}
|
||||
ANDROID_KEYSTORE_PASSWORD: ${{ secrets.ANDROID_KEYSTORE_PASSWORD }}
|
||||
ANDROID_KEY_ALIAS: ${{ secrets.ANDROID_KEY_ALIAS }}
|
||||
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
|
||||
run: |
|
||||
if [ -n "$ANDROID_KEYSTORE" ]; then
|
||||
echo "$ANDROID_KEYSTORE" | base64 -d > $HOME/keystore.jks
|
||||
echo "ANDROID_KEYSTORE_PATH=$HOME/keystore.jks" >> $GITHUB_ENV
|
||||
echo "ANDROID_KEYSTORE_PASSWORD=$ANDROID_KEYSTORE_PASSWORD" >> $GITHUB_ENV
|
||||
echo "ANDROID_KEY_ALIAS=$ANDROID_KEY_ALIAS" >> $GITHUB_ENV
|
||||
echo "ANDROID_KEY_PASSWORD=$ANDROID_KEY_PASSWORD" >> $GITHUB_ENV
|
||||
echo "Keystore configured for signing"
|
||||
else
|
||||
echo "No keystore configured, building unsigned APK"
|
||||
fi
|
||||
|
||||
- name: Build Android APK and AAB (unsigned if no keystore)
|
||||
run: pnpm tauri android build
|
||||
|
||||
- name: Upload Android artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: android
|
||||
path: |
|
||||
src-tauri/gen/android/app/build/outputs/apk/**/*.apk
|
||||
src-tauri/gen/android/app/build/outputs/bundle/**/*.aab
|
||||
251
.github/workflows/release.yml
vendored
Normal file
251
.github/workflows/release.yml
vendored
Normal file
@ -0,0 +1,251 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
create-release:
|
||||
permissions:
|
||||
contents: write
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
release_id: ${{ steps.create-release.outputs.release_id }}
|
||||
upload_url: ${{ steps.create-release.outputs.upload_url }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Get version
|
||||
run: echo "PACKAGE_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
|
||||
|
||||
- name: Create release
|
||||
id: create-release
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const { data } = await github.rest.repos.createRelease({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
tag_name: `v${process.env.PACKAGE_VERSION}`,
|
||||
name: `haex-hub v${process.env.PACKAGE_VERSION}`,
|
||||
body: 'Take a look at the assets to download and install this app.',
|
||||
draft: true,
|
||||
prerelease: false
|
||||
})
|
||||
core.setOutput('release_id', data.id)
|
||||
core.setOutput('upload_url', data.upload_url)
|
||||
return data.id
|
||||
|
||||
build-desktop:
|
||||
needs: create-release
|
||||
permissions:
|
||||
contents: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: 'macos-latest'
|
||||
args: '--target aarch64-apple-darwin'
|
||||
- platform: 'macos-latest'
|
||||
args: '--target x86_64-apple-darwin'
|
||||
- platform: 'ubuntu-22.04'
|
||||
args: ''
|
||||
- platform: 'windows-latest'
|
||||
args: ''
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
|
||||
|
||||
- name: Install dependencies (Ubuntu)
|
||||
if: matrix.platform == 'ubuntu-22.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf libssl-dev
|
||||
|
||||
- name: Get pnpm store directory
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Setup Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: src-tauri
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build and release Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
releaseId: ${{ needs.create-release.outputs.release_id }}
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
build-android:
|
||||
needs: create-release
|
||||
permissions:
|
||||
contents: write
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
distribution: 'temurin'
|
||||
java-version: '17'
|
||||
|
||||
- name: Setup Android SDK
|
||||
uses: android-actions/setup-android@v3
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Install Rust Android targets
|
||||
run: |
|
||||
rustup target add aarch64-linux-android
|
||||
rustup target add armv7-linux-androideabi
|
||||
rustup target add i686-linux-android
|
||||
rustup target add x86_64-linux-android
|
||||
|
||||
- name: Setup NDK
|
||||
uses: nttld/setup-ndk@v1
|
||||
with:
|
||||
ndk-version: r26d
|
||||
id: setup-ndk
|
||||
|
||||
- name: Setup Android NDK environment for OpenSSL
|
||||
run: |
|
||||
echo "ANDROID_NDK_HOME=${{ steps.setup-ndk.outputs.ndk-path }}" >> $GITHUB_ENV
|
||||
echo "NDK_HOME=${{ steps.setup-ndk.outputs.ndk-path }}" >> $GITHUB_ENV
|
||||
|
||||
# Add all Android toolchains to PATH for OpenSSL cross-compilation
|
||||
echo "${{ steps.setup-ndk.outputs.ndk-path }}/toolchains/llvm/prebuilt/linux-x86_64/bin" >> $GITHUB_PATH
|
||||
|
||||
# Set CC, AR, RANLIB for each target
|
||||
echo "CC_aarch64_linux_android=aarch64-linux-android24-clang" >> $GITHUB_ENV
|
||||
echo "AR_aarch64_linux_android=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_aarch64_linux_android=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
echo "CC_armv7_linux_androideabi=armv7a-linux-androideabi24-clang" >> $GITHUB_ENV
|
||||
echo "AR_armv7_linux_androideabi=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_armv7_linux_androideabi=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
echo "CC_i686_linux_android=i686-linux-android24-clang" >> $GITHUB_ENV
|
||||
echo "AR_i686_linux_android=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_i686_linux_android=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
echo "CC_x86_64_linux_android=x86_64-linux-android24-clang" >> $GITHUB_ENV
|
||||
echo "AR_x86_64_linux_android=llvm-ar" >> $GITHUB_ENV
|
||||
echo "RANLIB_x86_64_linux_android=llvm-ranlib" >> $GITHUB_ENV
|
||||
|
||||
- name: Install build dependencies for OpenSSL
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y perl make
|
||||
|
||||
- name: Get pnpm store directory
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Setup Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: src-tauri
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Setup Keystore (required for release)
|
||||
run: |
|
||||
echo "${{ secrets.ANDROID_KEYSTORE }}" | base64 -d > $HOME/keystore.jks
|
||||
echo "ANDROID_KEYSTORE_PATH=$HOME/keystore.jks" >> $GITHUB_ENV
|
||||
echo "ANDROID_KEYSTORE_PASSWORD=${{ secrets.ANDROID_KEYSTORE_PASSWORD }}" >> $GITHUB_ENV
|
||||
echo "ANDROID_KEY_ALIAS=${{ secrets.ANDROID_KEY_ALIAS }}" >> $GITHUB_ENV
|
||||
echo "ANDROID_KEY_PASSWORD=${{ secrets.ANDROID_KEY_PASSWORD }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Build Android APK and AAB (signed)
|
||||
run: pnpm tauri android build
|
||||
|
||||
- name: Upload Android artifacts to Release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh release upload ${{ github.ref_name }} \
|
||||
src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk \
|
||||
src-tauri/gen/android/app/build/outputs/bundle/universalRelease/app-universal-release.aab \
|
||||
--clobber
|
||||
|
||||
publish-release:
|
||||
permissions:
|
||||
contents: write
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [create-release, build-desktop, build-android]
|
||||
|
||||
steps:
|
||||
- name: Publish release
|
||||
id: publish-release
|
||||
uses: actions/github-script@v7
|
||||
env:
|
||||
release_id: ${{ needs.create-release.outputs.release_id }}
|
||||
with:
|
||||
script: |
|
||||
github.rest.repos.updateRelease({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
release_id: process.env.release_id,
|
||||
draft: false,
|
||||
prerelease: false
|
||||
})
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@ -26,4 +26,6 @@ dist-ssr
|
||||
src-tauri/target
|
||||
nogit*
|
||||
.claude
|
||||
.output
|
||||
.output
|
||||
target
|
||||
CLAUDE.md
|
||||
@ -1,7 +1,7 @@
|
||||
import { defineConfig } from 'drizzle-kit'
|
||||
|
||||
export default defineConfig({
|
||||
schema: './src-tauri/database/schemas/**.ts',
|
||||
schema: './src/database/schemas/**.ts',
|
||||
out: './src-tauri/database/migrations',
|
||||
dialect: 'sqlite',
|
||||
dbCredentials: {
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
//import tailwindcss from '@tailwindcss/vite'
|
||||
|
||||
import { fileURLToPath } from 'node:url'
|
||||
|
||||
// https://nuxt.com/docs/api/configuration/nuxt-config
|
||||
@ -31,7 +29,6 @@ export default defineNuxtConfig({
|
||||
'@vueuse/nuxt',
|
||||
'@nuxt/icon',
|
||||
'@nuxt/eslint',
|
||||
//"@nuxt/image",
|
||||
'@nuxt/fonts',
|
||||
'@nuxt/ui',
|
||||
],
|
||||
@ -71,7 +68,7 @@ export default defineNuxtConfig({
|
||||
includeCustomCollections: true,
|
||||
},
|
||||
serverBundle: {
|
||||
collections: ['mdi', 'line-md', 'solar', 'gg', 'emojione'],
|
||||
collections: ['mdi', 'line-md', 'solar', 'gg', 'emojione', 'lucide', 'hugeicons'],
|
||||
},
|
||||
|
||||
customCollections: [
|
||||
@ -125,7 +122,6 @@ export default defineNuxtConfig({
|
||||
},
|
||||
|
||||
vite: {
|
||||
//plugins: [tailwindcss()],
|
||||
// Better support for Tauri CLI output
|
||||
clearScreen: false,
|
||||
// Enable environment variables
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "haex-hub",
|
||||
"private": true,
|
||||
"version": "0.1.0",
|
||||
"version": "0.1.6",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "nuxt build",
|
||||
@ -28,11 +28,9 @@
|
||||
"@tauri-apps/api": "^2.9.0",
|
||||
"@tauri-apps/plugin-dialog": "^2.4.2",
|
||||
"@tauri-apps/plugin-fs": "^2.4.4",
|
||||
"@tauri-apps/plugin-http": "2.5.2",
|
||||
"@tauri-apps/plugin-notification": "2.3.1",
|
||||
"@tauri-apps/plugin-opener": "^2.5.2",
|
||||
"@tauri-apps/plugin-os": "^2.3.2",
|
||||
"@tauri-apps/plugin-sql": "2.3.0",
|
||||
"@tauri-apps/plugin-store": "^2.4.1",
|
||||
"@vueuse/components": "^13.9.0",
|
||||
"@vueuse/core": "^13.9.0",
|
||||
@ -40,7 +38,6 @@
|
||||
"@vueuse/nuxt": "^13.9.0",
|
||||
"drizzle-orm": "^0.44.7",
|
||||
"eslint": "^9.38.0",
|
||||
"fuse.js": "^7.1.0",
|
||||
"nuxt-zod-i18n": "^1.12.1",
|
||||
"swiper": "^12.0.3",
|
||||
"tailwindcss": "^4.1.16",
|
||||
@ -65,7 +62,7 @@
|
||||
"tsx": "^4.20.6",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "^5.9.3",
|
||||
"vite": "7.1.3",
|
||||
"vite": "^7.1.3",
|
||||
"vue-tsc": "3.0.6"
|
||||
},
|
||||
"prettier": {
|
||||
|
||||
1025
pnpm-lock.yaml
generated
1025
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
1238
src-tauri/Cargo.lock
generated
1238
src-tauri/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "haex-hub"
|
||||
version = "0.1.0"
|
||||
version = "0.1.4"
|
||||
description = "A Tauri App"
|
||||
authors = ["you"]
|
||||
edition = "2021"
|
||||
@ -20,14 +20,7 @@ tauri-build = { version = "2.2", features = [] }
|
||||
serde = { version = "1.0.228", features = ["derive"] }
|
||||
|
||||
[dependencies]
|
||||
rusqlite = { version = "0.37.0", features = [
|
||||
"load_extension",
|
||||
"bundled-sqlcipher-vendored-openssl",
|
||||
"functions",
|
||||
] }
|
||||
|
||||
#tauri-plugin-sql = { version = "2", features = ["sqlite"] }tokio = { version = "1.47.1", features = ["macros", "rt-multi-thread"] }#libsqlite3-sys = { version = "0.31", features = ["bundled-sqlcipher"] }
|
||||
#sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "sqlite"] }
|
||||
tokio = { version = "1.47.1", features = ["macros", "rt-multi-thread"] }
|
||||
base64 = "0.22"
|
||||
ed25519-dalek = "2.1"
|
||||
fs_extra = "1.3.0"
|
||||
@ -54,3 +47,11 @@ uhlc = "0.8.2"
|
||||
url = "2.5.7"
|
||||
uuid = { version = "1.18.1", features = ["v4"] }
|
||||
zip = "6.0.0"
|
||||
rusqlite = { version = "0.37.0", features = [
|
||||
"load_extension",
|
||||
"bundled-sqlcipher-vendored-openssl",
|
||||
"functions",
|
||||
] }
|
||||
|
||||
[target.'cfg(not(target_os = "android"))'.dependencies]
|
||||
trash = "5.2.5"
|
||||
|
||||
@ -1,3 +1,3 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type ExtensionInfoResponse = { id: string, publicKey: string, name: string, version: string, author: string | null, enabled: boolean, description: string | null, homepage: string | null, icon: string | null, devServerUrl: string | null, };
|
||||
export type ExtensionInfoResponse = { id: string, publicKey: string, name: string, version: string, author: string | null, enabled: boolean, description: string | null, homepage: string | null, icon: string | null, entry: string | null, singleInstance: boolean | null, devServerUrl: string | null, };
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ExtensionPermissions } from "./ExtensionPermissions";
|
||||
|
||||
export type ExtensionManifest = { name: string, version: string, author: string | null, entry: string, icon: string | null, public_key: string, signature: string, permissions: ExtensionPermissions, homepage: string | null, description: string | null, };
|
||||
export type ExtensionManifest = { name: string, version: string, author: string | null, entry: string | null, icon: string | null, public_key: string, signature: string, permissions: ExtensionPermissions, homepage: string | null, description: string | null, single_instance: boolean | null, };
|
||||
|
||||
@ -18,8 +18,14 @@
|
||||
"fs:allow-appconfig-write-recursive",
|
||||
"fs:allow-appdata-read-recursive",
|
||||
"fs:allow-appdata-write-recursive",
|
||||
"fs:allow-applocaldata-read-recursive",
|
||||
"fs:allow-applocaldata-write-recursive",
|
||||
"fs:allow-read-file",
|
||||
"fs:allow-write-file",
|
||||
"fs:allow-read-dir",
|
||||
"fs:allow-mkdir",
|
||||
"fs:allow-exists",
|
||||
"fs:allow-remove",
|
||||
"fs:allow-resource-read-recursive",
|
||||
"fs:allow-resource-write-recursive",
|
||||
"fs:allow-download-read-recursive",
|
||||
@ -35,6 +41,7 @@
|
||||
"notification:allow-create-channel",
|
||||
"notification:allow-list-channels",
|
||||
"notification:allow-notify",
|
||||
"notification:allow-is-permission-granted",
|
||||
"notification:default",
|
||||
"opener:allow-open-url",
|
||||
"opener:default",
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
import { writeFileSync, mkdirSync } from 'node:fs'
|
||||
import { join, dirname } from 'node:path'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import tablesNames from './tableNames.json'
|
||||
import { schema } from './index'
|
||||
import tablesNames from '../../src/database/tableNames.json'
|
||||
import { schema } from '../../src/database/index'
|
||||
import { getTableColumns } from 'drizzle-orm'
|
||||
import type { AnySQLiteColumn, SQLiteTable } from 'drizzle-orm/sqlite-core'
|
||||
|
||||
|
||||
@ -1,21 +0,0 @@
|
||||
import { drizzle } from 'drizzle-orm/sqlite-proxy' // Adapter für Query Building ohne direkte Verbindung
|
||||
import * as schema from './schemas' // Importiere alles aus deiner Schema-Datei
|
||||
export * as schema from './schemas'
|
||||
// sqlite-proxy benötigt eine (dummy) Ausführungsfunktion als Argument.
|
||||
// Diese wird in unserem Tauri-Workflow nie aufgerufen, da wir nur .toSQL() verwenden.
|
||||
// Sie muss aber vorhanden sein, um drizzle() aufrufen zu können.
|
||||
const dummyExecutor = async (
|
||||
sql: string,
|
||||
params: unknown[],
|
||||
method: 'all' | 'run' | 'get' | 'values',
|
||||
) => {
|
||||
console.warn(
|
||||
`Frontend Drizzle Executor wurde aufgerufen (Methode: ${method}). Das sollte im Tauri-Invoke-Workflow nicht passieren!`,
|
||||
)
|
||||
// Wir geben leere Ergebnisse zurück, um die Typen zufriedenzustellen, falls es doch aufgerufen wird.
|
||||
return { rows: [] } // Für 'run' (z.B. bei INSERT/UPDATE)
|
||||
}
|
||||
|
||||
// Erstelle die Drizzle-Instanz für den SQLite-Dialekt
|
||||
// Übergib den dummyExecutor und das importierte Schema
|
||||
export const db = drizzle(dummyExecutor, { schema })
|
||||
@ -60,11 +60,12 @@ CREATE TABLE `haex_extensions` (
|
||||
`version` text NOT NULL,
|
||||
`author` text,
|
||||
`description` text,
|
||||
`entry` text DEFAULT 'index.html' NOT NULL,
|
||||
`entry` text DEFAULT 'index.html',
|
||||
`homepage` text,
|
||||
`enabled` integer DEFAULT true,
|
||||
`icon` text,
|
||||
`signature` text NOT NULL,
|
||||
`single_instance` integer DEFAULT false,
|
||||
`haex_timestamp` text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
@ -94,8 +95,10 @@ CREATE TABLE `haex_settings` (
|
||||
CREATE UNIQUE INDEX `haex_settings_key_type_value_unique` ON `haex_settings` (`key`,`type`,`value`);--> statement-breakpoint
|
||||
CREATE TABLE `haex_workspaces` (
|
||||
`id` text PRIMARY KEY NOT NULL,
|
||||
`device_id` text NOT NULL,
|
||||
`name` text NOT NULL,
|
||||
`position` integer DEFAULT 0 NOT NULL,
|
||||
`background` blob,
|
||||
`haex_timestamp` text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
15
src-tauri/database/migrations/0001_furry_brother_voodoo.sql
Normal file
15
src-tauri/database/migrations/0001_furry_brother_voodoo.sql
Normal file
@ -0,0 +1,15 @@
|
||||
PRAGMA foreign_keys=OFF;--> statement-breakpoint
|
||||
CREATE TABLE `__new_haex_workspaces` (
|
||||
`id` text PRIMARY KEY NOT NULL,
|
||||
`device_id` text NOT NULL,
|
||||
`name` text NOT NULL,
|
||||
`position` integer DEFAULT 0 NOT NULL,
|
||||
`background` text,
|
||||
`haex_timestamp` text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
INSERT INTO `__new_haex_workspaces`("id", "device_id", "name", "position", "background", "haex_timestamp") SELECT "id", "device_id", "name", "position", "background", "haex_timestamp" FROM `haex_workspaces`;--> statement-breakpoint
|
||||
DROP TABLE `haex_workspaces`;--> statement-breakpoint
|
||||
ALTER TABLE `__new_haex_workspaces` RENAME TO `haex_workspaces`;--> statement-breakpoint
|
||||
PRAGMA foreign_keys=ON;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX `haex_workspaces_position_unique` ON `haex_workspaces` (`position`);
|
||||
@ -1 +0,0 @@
|
||||
ALTER TABLE `haex_workspaces` ADD `device_id` text NOT NULL;
|
||||
@ -1,7 +1,7 @@
|
||||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "bcdd9ad3-a87a-4a43-9eba-673f94b10287",
|
||||
"id": "e3d61ad1-63be-41be-9243-41144e215f98",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"haex_crdt_configs": {
|
||||
@ -411,7 +411,7 @@
|
||||
"name": "entry",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "'index.html'"
|
||||
},
|
||||
@ -444,6 +444,14 @@
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"single_instance": {
|
||||
"name": "single_instance",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
@ -619,6 +627,13 @@
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"device_id": {
|
||||
"name": "device_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
@ -634,6 +649,13 @@
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"background": {
|
||||
"name": "background",
|
||||
"type": "blob",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "27735348-b9c5-4bc6-9cc5-dd707ad689b9",
|
||||
"prevId": "bcdd9ad3-a87a-4a43-9eba-673f94b10287",
|
||||
"id": "10bec43a-4227-483e-b1c1-fd50ae32bb96",
|
||||
"prevId": "e3d61ad1-63be-41be-9243-41144e215f98",
|
||||
"tables": {
|
||||
"haex_crdt_configs": {
|
||||
"name": "haex_crdt_configs",
|
||||
@ -411,7 +411,7 @@
|
||||
"name": "entry",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "'index.html'"
|
||||
},
|
||||
@ -444,6 +444,14 @@
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"single_instance": {
|
||||
"name": "single_instance",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
@ -641,6 +649,13 @@
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"background": {
|
||||
"name": "background",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"haex_timestamp": {
|
||||
"name": "haex_timestamp",
|
||||
"type": "text",
|
||||
|
||||
@ -5,15 +5,15 @@
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "6",
|
||||
"when": 1761430560028,
|
||||
"tag": "0000_secret_ender_wiggin",
|
||||
"when": 1762119713008,
|
||||
"tag": "0000_cynical_nicolaos",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"version": "6",
|
||||
"when": 1761581351395,
|
||||
"tag": "0001_late_the_renegades",
|
||||
"when": 1762122405562,
|
||||
"tag": "0001_furry_brother_voodoo",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
|
||||
@ -1,5 +0,0 @@
|
||||
export const crdtColumnNames = {
|
||||
haexTimestamp: 'haex_timestamp',
|
||||
}
|
||||
export * from './crdt'
|
||||
export * from './haex'
|
||||
Binary file not shown.
@ -24,6 +24,23 @@ android {
|
||||
versionCode = tauriProperties.getProperty("tauri.android.versionCode", "1").toInt()
|
||||
versionName = tauriProperties.getProperty("tauri.android.versionName", "1.0")
|
||||
}
|
||||
|
||||
signingConfigs {
|
||||
create("release") {
|
||||
val keystorePath = System.getenv("ANDROID_KEYSTORE_PATH")
|
||||
val keystorePassword = System.getenv("ANDROID_KEYSTORE_PASSWORD")
|
||||
val keyAlias = System.getenv("ANDROID_KEY_ALIAS")
|
||||
val keyPassword = System.getenv("ANDROID_KEY_PASSWORD")
|
||||
|
||||
if (keystorePath != null && keystorePassword != null && keyAlias != null && keyPassword != null) {
|
||||
storeFile = file(keystorePath)
|
||||
storePassword = keystorePassword
|
||||
this.keyAlias = keyAlias
|
||||
this.keyPassword = keyPassword
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
buildTypes {
|
||||
getByName("debug") {
|
||||
manifestPlaceholders["usesCleartextTraffic"] = "true"
|
||||
@ -43,6 +60,12 @@ android {
|
||||
.plus(getDefaultProguardFile("proguard-android-optimize.txt"))
|
||||
.toList().toTypedArray()
|
||||
)
|
||||
|
||||
// Sign with release config if available
|
||||
val releaseSigningConfig = signingConfigs.getByName("release")
|
||||
if (releaseSigningConfig.storeFile != null) {
|
||||
signingConfig = releaseSigningConfig
|
||||
}
|
||||
}
|
||||
}
|
||||
kotlinOptions {
|
||||
|
||||
Binary file not shown.
@ -1 +1 @@
|
||||
{"default":{"identifier":"default","description":"Capability for the main window","local":true,"windows":["main"],"permissions":["core:default","core:webview:allow-create-webview-window","core:webview:allow-create-webview","core:webview:allow-webview-show","core:webview:default","core:window:allow-create","core:window:allow-get-all-windows","core:window:allow-show","core:window:default","dialog:default","fs:allow-appconfig-read-recursive","fs:allow-appconfig-write-recursive","fs:allow-appdata-read-recursive","fs:allow-appdata-write-recursive","fs:allow-read-file","fs:allow-read-dir","fs:allow-resource-read-recursive","fs:allow-resource-write-recursive","fs:allow-download-read-recursive","fs:allow-download-write-recursive","fs:default",{"identifier":"fs:scope","allow":[{"path":"**"}]},"http:allow-fetch-send","http:allow-fetch","http:default","notification:allow-create-channel","notification:allow-list-channels","notification:allow-notify","notification:default","opener:allow-open-url","opener:default","os:allow-hostname","os:default","store:default"]}}
|
||||
{"default":{"identifier":"default","description":"Capability for the main window","local":true,"windows":["main"],"permissions":["core:default","core:webview:allow-create-webview-window","core:webview:allow-create-webview","core:webview:allow-webview-show","core:webview:default","core:window:allow-create","core:window:allow-get-all-windows","core:window:allow-show","core:window:default","dialog:default","fs:allow-appconfig-read-recursive","fs:allow-appconfig-write-recursive","fs:allow-appdata-read-recursive","fs:allow-appdata-write-recursive","fs:allow-applocaldata-read-recursive","fs:allow-applocaldata-write-recursive","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-mkdir","fs:allow-exists","fs:allow-remove","fs:allow-resource-read-recursive","fs:allow-resource-write-recursive","fs:allow-download-read-recursive","fs:allow-download-write-recursive","fs:default",{"identifier":"fs:scope","allow":[{"path":"**"}]},"http:allow-fetch-send","http:allow-fetch","http:default","notification:allow-create-channel","notification:allow-list-channels","notification:allow-notify","notification:allow-is-permission-granted","notification:default","opener:allow-open-url","opener:default","os:allow-hostname","os:default","store:default"]}}
|
||||
@ -20,11 +20,11 @@ struct TableDefinition {
|
||||
|
||||
pub fn generate_table_names() {
|
||||
let out_dir = env::var("OUT_DIR").expect("OUT_DIR ist nicht gesetzt.");
|
||||
println!("Generiere Tabellennamen nach {}", out_dir);
|
||||
let schema_path = Path::new("database/tableNames.json");
|
||||
println!("Generiere Tabellennamen nach {out_dir}");
|
||||
let schema_path = Path::new("../src/database/tableNames.json");
|
||||
let dest_path = Path::new(&out_dir).join("tableNames.rs");
|
||||
|
||||
let file = File::open(&schema_path).expect("Konnte tableNames.json nicht öffnen");
|
||||
let file = File::open(schema_path).expect("Konnte tableNames.json nicht öffnen");
|
||||
let reader = BufReader::new(file);
|
||||
let schema: Schema =
|
||||
serde_json::from_reader(reader).expect("Konnte tableNames.json nicht parsen");
|
||||
@ -66,7 +66,7 @@ pub fn generate_table_names() {
|
||||
f.write_all(code.as_bytes())
|
||||
.expect("Konnte nicht in Zieldatei schreiben");
|
||||
|
||||
println!("cargo:rerun-if-changed=database/tableNames.json");
|
||||
println!("cargo:rerun-if-changed=../src/database/tableNames.json");
|
||||
}
|
||||
|
||||
/// Konvertiert einen String zu SCREAMING_SNAKE_CASE
|
||||
@ -108,8 +108,7 @@ fn generate_table_constants(table: &TableDefinition, const_prefix: &str) -> Stri
|
||||
for (col_key, col_value) in &table.columns {
|
||||
let col_const_name = format!("COL_{}_{}", const_prefix, to_screaming_snake_case(col_key));
|
||||
code.push_str(&format!(
|
||||
"pub const {}: &str = \"{}\";\n",
|
||||
col_const_name, col_value
|
||||
"pub const {col_const_name}: &str = \"{col_value}\";\n"
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
@ -74,15 +74,14 @@ impl HlcService {
|
||||
// Parse den String in ein Uuid-Objekt.
|
||||
let uuid = Uuid::parse_str(&node_id_str).map_err(|e| {
|
||||
HlcError::ParseNodeId(format!(
|
||||
"Stored device ID is not a valid UUID: {}. Error: {}",
|
||||
node_id_str, e
|
||||
"Stored device ID is not a valid UUID: {node_id_str}. Error: {e}"
|
||||
))
|
||||
})?;
|
||||
|
||||
// Hol dir die rohen 16 Bytes und erstelle daraus die uhlc::ID.
|
||||
// Das `*` dereferenziert den `&[u8; 16]` zu `[u8; 16]`, was `try_from` erwartet.
|
||||
let node_id = ID::try_from(*uuid.as_bytes()).map_err(|e| {
|
||||
HlcError::ParseNodeId(format!("Invalid node ID format from device store: {:?}", e))
|
||||
HlcError::ParseNodeId(format!("Invalid node ID format from device store: {e:?}"))
|
||||
})?;
|
||||
|
||||
// 2. Erstelle eine HLC-Instanz mit stabiler Identität
|
||||
@ -95,8 +94,7 @@ impl HlcService {
|
||||
if let Some(last_timestamp) = Self::load_last_timestamp(conn)? {
|
||||
hlc.update_with_timestamp(&last_timestamp).map_err(|e| {
|
||||
HlcError::Parse(format!(
|
||||
"Failed to update HLC with persisted timestamp: {:?}",
|
||||
e
|
||||
"Failed to update HLC with persisted timestamp: {e:?}"
|
||||
))
|
||||
})?;
|
||||
}
|
||||
@ -119,7 +117,7 @@ impl HlcService {
|
||||
if let Some(s) = value.as_str() {
|
||||
// Das ist unser Erfolgsfall. Wir haben einen &str und können
|
||||
// eine Kopie davon zurückgeben.
|
||||
println!("Gefundene und validierte Geräte-ID: {}", s);
|
||||
println!("Gefundene und validierte Geräte-ID: {s}");
|
||||
if Uuid::parse_str(s).is_ok() {
|
||||
// Erfolgsfall: Der Wert ist ein String UND eine gültige UUID.
|
||||
// Wir können die Funktion direkt mit dem Wert verlassen.
|
||||
@ -183,19 +181,19 @@ impl HlcService {
|
||||
let hlc = hlc_guard.as_mut().ok_or(HlcError::NotInitialized)?;
|
||||
|
||||
hlc.update_with_timestamp(timestamp)
|
||||
.map_err(|e| HlcError::Parse(format!("Failed to update HLC: {:?}", e)))
|
||||
.map_err(|e| HlcError::Parse(format!("Failed to update HLC: {e:?}")))
|
||||
}
|
||||
|
||||
/// Lädt den letzten persistierten Zeitstempel aus der Datenbank.
|
||||
fn load_last_timestamp(conn: &Connection) -> Result<Option<Timestamp>, HlcError> {
|
||||
let query = format!("SELECT value FROM {} WHERE key = ?1", TABLE_CRDT_CONFIGS);
|
||||
let query = format!("SELECT value FROM {TABLE_CRDT_CONFIGS} WHERE key = ?1");
|
||||
|
||||
match conn.query_row(&query, params![HLC_TIMESTAMP_TYPE], |row| {
|
||||
row.get::<_, String>(0)
|
||||
}) {
|
||||
Ok(state_str) => {
|
||||
let timestamp = Timestamp::from_str(&state_str).map_err(|e| {
|
||||
HlcError::ParseTimestamp(format!("Invalid timestamp format: {:?}", e))
|
||||
HlcError::ParseTimestamp(format!("Invalid timestamp format: {e:?}"))
|
||||
})?;
|
||||
Ok(Some(timestamp))
|
||||
}
|
||||
@ -209,9 +207,8 @@ impl HlcService {
|
||||
let timestamp_str = timestamp.to_string();
|
||||
tx.execute(
|
||||
&format!(
|
||||
"INSERT INTO {} (key, value) VALUES (?1, ?2)
|
||||
ON CONFLICT(key) DO UPDATE SET value = excluded.value",
|
||||
TABLE_CRDT_CONFIGS
|
||||
"INSERT INTO {TABLE_CRDT_CONFIGS} (key, value) VALUES (?1, ?2)
|
||||
ON CONFLICT(key) DO UPDATE SET value = excluded.value"
|
||||
),
|
||||
params![HLC_TIMESTAMP_TYPE, timestamp_str],
|
||||
)?;
|
||||
|
||||
@ -11,8 +11,6 @@ const INSERT_TRIGGER_TPL: &str = "z_crdt_{TABLE_NAME}_insert";
|
||||
const UPDATE_TRIGGER_TPL: &str = "z_crdt_{TABLE_NAME}_update";
|
||||
const DELETE_TRIGGER_TPL: &str = "z_crdt_{TABLE_NAME}_delete";
|
||||
|
||||
//const SYNC_ACTIVE_KEY: &str = "sync_active";
|
||||
|
||||
pub const HLC_TIMESTAMP_COLUMN: &str = "haex_timestamp";
|
||||
|
||||
/// Name der custom UUID-Generierungs-Funktion (registriert in database::core::open_and_init_db)
|
||||
@ -34,17 +32,16 @@ pub enum CrdtSetupError {
|
||||
impl Display for CrdtSetupError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {}", e),
|
||||
CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {e}"),
|
||||
CrdtSetupError::HlcColumnMissing {
|
||||
table_name,
|
||||
column_name,
|
||||
} => write!(
|
||||
f,
|
||||
"Table '{}' is missing the required hlc column '{}'",
|
||||
table_name, column_name
|
||||
"Table '{table_name}' is missing the required hlc column '{column_name}'"
|
||||
),
|
||||
CrdtSetupError::PrimaryKeyMissing { table_name } => {
|
||||
write!(f, "Table '{}' has no primary key", table_name)
|
||||
write!(f, "Table '{table_name}' has no primary key")
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -85,7 +82,8 @@ impl ColumnInfo {
|
||||
}
|
||||
|
||||
fn is_safe_identifier(name: &str) -> bool {
|
||||
!name.is_empty() && name.chars().all(|c| c.is_alphanumeric() || c == '_')
|
||||
// Allow alphanumeric characters, underscores, and hyphens (for extension names like "nuxt-app")
|
||||
!name.is_empty() && name.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-')
|
||||
}
|
||||
|
||||
/// Richtet CRDT-Trigger für eine einzelne Tabelle ein.
|
||||
@ -130,7 +128,7 @@ pub fn setup_triggers_for_table(
|
||||
let delete_trigger_sql = generate_delete_trigger_sql(table_name, &pks, &cols_to_track);
|
||||
|
||||
if recreate {
|
||||
drop_triggers_for_table(&tx, table_name)?;
|
||||
drop_triggers_for_table(tx, table_name)?;
|
||||
}
|
||||
|
||||
tx.execute_batch(&insert_trigger_sql)?;
|
||||
@ -144,13 +142,11 @@ pub fn setup_triggers_for_table(
|
||||
pub fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<ColumnInfo>> {
|
||||
if !is_safe_identifier(table_name) {
|
||||
return Err(rusqlite::Error::InvalidParameterName(format!(
|
||||
"Invalid or unsafe table name provided: {}",
|
||||
table_name
|
||||
))
|
||||
.into());
|
||||
"Invalid or unsafe table name provided: {table_name}"
|
||||
)));
|
||||
}
|
||||
|
||||
let sql = format!("PRAGMA table_info(\"{}\");", table_name);
|
||||
let sql = format!("PRAGMA table_info(\"{table_name}\");");
|
||||
let mut stmt = conn.prepare(&sql)?;
|
||||
let rows = stmt.query_map([], ColumnInfo::from_row)?;
|
||||
rows.collect()
|
||||
@ -164,8 +160,7 @@ pub fn drop_triggers_for_table(
|
||||
) -> Result<(), CrdtSetupError> {
|
||||
if !is_safe_identifier(table_name) {
|
||||
return Err(rusqlite::Error::InvalidParameterName(format!(
|
||||
"Invalid or unsafe table name provided: {}",
|
||||
table_name
|
||||
"Invalid or unsafe table name provided: {table_name}"
|
||||
))
|
||||
.into());
|
||||
}
|
||||
@ -178,8 +173,7 @@ pub fn drop_triggers_for_table(
|
||||
drop_trigger_sql(DELETE_TRIGGER_TPL.replace("{TABLE_NAME}", table_name));
|
||||
|
||||
let sql_batch = format!(
|
||||
"{}\n{}\n{}",
|
||||
drop_insert_trigger_sql, drop_update_trigger_sql, drop_delete_trigger_sql
|
||||
"{drop_insert_trigger_sql}\n{drop_update_trigger_sql}\n{drop_delete_trigger_sql}"
|
||||
);
|
||||
|
||||
tx.execute_batch(&sql_batch)?;
|
||||
@ -245,33 +239,22 @@ pub fn drop_triggers_for_table(
|
||||
fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
|
||||
let pk_json_payload = pks
|
||||
.iter()
|
||||
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk))
|
||||
.map(|pk| format!("'{pk}', NEW.\"{pk}\""))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
let column_inserts = if cols.is_empty() {
|
||||
// Nur PKs -> einfacher Insert ins Log
|
||||
format!(
|
||||
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks)
|
||||
VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}));",
|
||||
log_table = TABLE_CRDT_LOGS,
|
||||
uuid_fn = UUID_FUNCTION_NAME,
|
||||
hlc_col = HLC_TIMESTAMP_COLUMN,
|
||||
table = table_name,
|
||||
pk_payload = pk_json_payload
|
||||
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks)
|
||||
VALUES ({UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'INSERT', '{table_name}', json_object({pk_json_payload}));"
|
||||
)
|
||||
} else {
|
||||
cols.iter().fold(String::new(), |mut acc, col| {
|
||||
writeln!(
|
||||
&mut acc,
|
||||
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
|
||||
VALUES ({uuid_fn}(), NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}), '{column}', json_object('value', NEW.\"{column}\"));",
|
||||
log_table = TABLE_CRDT_LOGS,
|
||||
uuid_fn = UUID_FUNCTION_NAME,
|
||||
hlc_col = HLC_TIMESTAMP_COLUMN,
|
||||
table = table_name,
|
||||
pk_payload = pk_json_payload,
|
||||
column = col
|
||||
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
|
||||
VALUES ({UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'INSERT', '{table_name}', json_object({pk_json_payload}), '{col}', json_object('value', NEW.\"{col}\"));"
|
||||
).unwrap();
|
||||
acc
|
||||
})
|
||||
@ -291,14 +274,14 @@ fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
|
||||
|
||||
/// Generiert das SQL zum Löschen eines Triggers.
|
||||
fn drop_trigger_sql(trigger_name: String) -> String {
|
||||
format!("DROP TRIGGER IF EXISTS \"{}\";", trigger_name)
|
||||
format!("DROP TRIGGER IF EXISTS \"{trigger_name}\";")
|
||||
}
|
||||
|
||||
/// Generiert das SQL für den UPDATE-Trigger.
|
||||
fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
|
||||
let pk_json_payload = pks
|
||||
.iter()
|
||||
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk))
|
||||
.map(|pk| format!("'{pk}', NEW.\"{pk}\""))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
@ -309,16 +292,10 @@ fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
|
||||
for col in cols {
|
||||
writeln!(
|
||||
&mut body,
|
||||
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
|
||||
SELECT {uuid_fn}(), NEW.\"{hlc_col}\", 'UPDATE', '{table}', json_object({pk_payload}), '{column}',
|
||||
json_object('value', NEW.\"{column}\"), json_object('value', OLD.\"{column}\")
|
||||
WHERE NEW.\"{column}\" IS NOT OLD.\"{column}\";",
|
||||
log_table = TABLE_CRDT_LOGS,
|
||||
uuid_fn = UUID_FUNCTION_NAME,
|
||||
hlc_col = HLC_TIMESTAMP_COLUMN,
|
||||
table = table_name,
|
||||
pk_payload = pk_json_payload,
|
||||
column = col
|
||||
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
|
||||
SELECT {UUID_FUNCTION_NAME}(), NEW.\"{HLC_TIMESTAMP_COLUMN}\", 'UPDATE', '{table_name}', json_object({pk_json_payload}), '{col}',
|
||||
json_object('value', NEW.\"{col}\"), json_object('value', OLD.\"{col}\")
|
||||
WHERE NEW.\"{col}\" IS NOT OLD.\"{col}\";"
|
||||
).unwrap();
|
||||
}
|
||||
}
|
||||
@ -342,7 +319,7 @@ fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
|
||||
fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
|
||||
let pk_json_payload = pks
|
||||
.iter()
|
||||
.map(|pk| format!("'{}', OLD.\"{}\"", pk, pk))
|
||||
.map(|pk| format!("'{pk}', OLD.\"{pk}\""))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
@ -353,28 +330,17 @@ fn generate_delete_trigger_sql(table_name: &str, pks: &[String], cols: &[String]
|
||||
for col in cols {
|
||||
writeln!(
|
||||
&mut body,
|
||||
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
|
||||
VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}), '{column}',
|
||||
json_object('value', OLD.\"{column}\"));",
|
||||
log_table = TABLE_CRDT_LOGS,
|
||||
uuid_fn = UUID_FUNCTION_NAME,
|
||||
hlc_col = HLC_TIMESTAMP_COLUMN,
|
||||
table = table_name,
|
||||
pk_payload = pk_json_payload,
|
||||
column = col
|
||||
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks, column_name, old_value)
|
||||
VALUES ({UUID_FUNCTION_NAME}(), OLD.\"{HLC_TIMESTAMP_COLUMN}\", 'DELETE', '{table_name}', json_object({pk_json_payload}), '{col}',
|
||||
json_object('value', OLD.\"{col}\"));"
|
||||
).unwrap();
|
||||
}
|
||||
} else {
|
||||
// Nur PKs -> minimales Delete Log
|
||||
writeln!(
|
||||
&mut body,
|
||||
"INSERT INTO {log_table} (id, haex_timestamp, op_type, table_name, row_pks)
|
||||
VALUES ({uuid_fn}(), OLD.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload}));",
|
||||
log_table = TABLE_CRDT_LOGS,
|
||||
uuid_fn = UUID_FUNCTION_NAME,
|
||||
hlc_col = HLC_TIMESTAMP_COLUMN,
|
||||
table = table_name,
|
||||
pk_payload = pk_json_payload
|
||||
"INSERT INTO {TABLE_CRDT_LOGS} (id, haex_timestamp, op_type, table_name, row_pks)
|
||||
VALUES ({UUID_FUNCTION_NAME}(), OLD.\"{HLC_TIMESTAMP_COLUMN}\", 'DELETE', '{table_name}', json_object({pk_json_payload}));"
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
@ -47,7 +47,7 @@ pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connectio
|
||||
},
|
||||
)
|
||||
.map_err(|e| DatabaseError::DatabaseError {
|
||||
reason: format!("Failed to register {} function: {}", UUID_FUNCTION_NAME, e),
|
||||
reason: format!("Failed to register {UUID_FUNCTION_NAME} function: {e}"),
|
||||
})?;
|
||||
|
||||
let journal_mode: String = conn
|
||||
@ -61,8 +61,7 @@ pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connectio
|
||||
println!("WAL mode successfully enabled.");
|
||||
} else {
|
||||
eprintln!(
|
||||
"Failed to enable WAL mode, journal_mode is '{}'.",
|
||||
journal_mode
|
||||
"Failed to enable WAL mode, journal_mode is '{journal_mode}'."
|
||||
);
|
||||
}
|
||||
|
||||
@ -89,8 +88,15 @@ pub fn parse_single_statement(sql: &str) -> Result<Statement, DatabaseError> {
|
||||
/// Utility für SQL-Parsing - parst mehrere SQL-Statements
|
||||
pub fn parse_sql_statements(sql: &str) -> Result<Vec<Statement>, DatabaseError> {
|
||||
let dialect = SQLiteDialect {};
|
||||
Parser::parse_sql(&dialect, sql).map_err(|e| DatabaseError::ParseError {
|
||||
reason: e.to_string(),
|
||||
|
||||
// Normalize whitespace: replace multiple whitespaces (including newlines, tabs) with single space
|
||||
let normalized_sql = sql
|
||||
.split_whitespace()
|
||||
.collect::<Vec<&str>>()
|
||||
.join(" ");
|
||||
|
||||
Parser::parse_sql(&dialect, &normalized_sql).map_err(|e| DatabaseError::ParseError {
|
||||
reason: format!("Failed to parse SQL: {e}"),
|
||||
sql: sql.to_string(),
|
||||
})
|
||||
}
|
||||
@ -131,7 +137,7 @@ impl ValueConverter {
|
||||
serde_json::to_string(json_val)
|
||||
.map(SqlValue::Text)
|
||||
.map_err(|e| DatabaseError::SerializationError {
|
||||
reason: format!("Failed to serialize JSON param: {}", e),
|
||||
reason: format!("Failed to serialize JSON param: {e}"),
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -251,7 +257,7 @@ pub fn select_with_crdt(
|
||||
params: Vec<JsonValue>,
|
||||
connection: &DbConnection,
|
||||
) -> Result<Vec<Vec<JsonValue>>, DatabaseError> {
|
||||
with_connection(&connection, |conn| {
|
||||
with_connection(connection, |conn| {
|
||||
SqlExecutor::query_select(conn, &sql, ¶ms)
|
||||
})
|
||||
}
|
||||
|
||||
@ -36,8 +36,7 @@ pub fn ensure_triggers_initialized(conn: &mut Connection) -> Result<bool, Databa
|
||||
|
||||
// Check if triggers already initialized
|
||||
let check_sql = format!(
|
||||
"SELECT value FROM {} WHERE key = ? AND type = ?",
|
||||
TABLE_SETTINGS
|
||||
"SELECT value FROM {TABLE_SETTINGS} WHERE key = ? AND type = ?"
|
||||
);
|
||||
let initialized: Option<String> = tx
|
||||
.query_row(
|
||||
@ -57,7 +56,7 @@ pub fn ensure_triggers_initialized(conn: &mut Connection) -> Result<bool, Databa
|
||||
|
||||
// Create triggers for all CRDT tables
|
||||
for table_name in CRDT_TABLES {
|
||||
eprintln!(" - Setting up triggers for: {}", table_name);
|
||||
eprintln!(" - Setting up triggers for: {table_name}");
|
||||
trigger::setup_triggers_for_table(&tx, table_name, false)?;
|
||||
}
|
||||
|
||||
|
||||
@ -20,6 +20,8 @@ use std::time::UNIX_EPOCH;
|
||||
use std::{fs, sync::Arc};
|
||||
use tauri::{path::BaseDirectory, AppHandle, Manager, State};
|
||||
use tauri_plugin_fs::FsExt;
|
||||
#[cfg(not(target_os = "android"))]
|
||||
use trash;
|
||||
use ts_rs::TS;
|
||||
|
||||
pub struct DbConnection(pub Arc<Mutex<Option<Connection>>>);
|
||||
@ -91,7 +93,7 @@ fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, Da
|
||||
let vault_file_name = if vault_name.ends_with(VAULT_EXTENSION) {
|
||||
vault_name.to_string()
|
||||
} else {
|
||||
format!("{}{VAULT_EXTENSION}", vault_name)
|
||||
format!("{vault_name}{VAULT_EXTENSION}")
|
||||
};
|
||||
|
||||
let vault_directory = get_vaults_directory(app_handle)?;
|
||||
@ -99,13 +101,12 @@ fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, Da
|
||||
let vault_path = app_handle
|
||||
.path()
|
||||
.resolve(
|
||||
format!("{vault_directory}/{}", vault_file_name),
|
||||
format!("{vault_directory}/{vault_file_name}"),
|
||||
BaseDirectory::AppLocalData,
|
||||
)
|
||||
.map_err(|e| DatabaseError::PathResolutionError {
|
||||
reason: format!(
|
||||
"Failed to resolve vault path for '{}': {}",
|
||||
vault_file_name, e
|
||||
"Failed to resolve vault path for '{vault_file_name}': {e}"
|
||||
),
|
||||
})?;
|
||||
|
||||
@ -113,7 +114,7 @@ fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, Da
|
||||
if let Some(parent) = vault_path.parent() {
|
||||
fs::create_dir_all(parent).map_err(|e| DatabaseError::IoError {
|
||||
path: parent.display().to_string(),
|
||||
reason: format!("Failed to create vaults directory: {}", e),
|
||||
reason: format!("Failed to create vaults directory: {e}"),
|
||||
})?;
|
||||
}
|
||||
|
||||
@ -133,7 +134,6 @@ pub fn get_vaults_directory(app_handle: &AppHandle) -> Result<String, DatabaseEr
|
||||
Ok(vaults_dir.to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
//#[serde(tag = "type", content = "details")]
|
||||
#[derive(Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
@ -173,18 +173,18 @@ pub fn list_vaults(app_handle: AppHandle) -> Result<Vec<VaultInfo>, DatabaseErro
|
||||
if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
|
||||
if filename.ends_with(VAULT_EXTENSION) {
|
||||
// Entferne .db Endung für die Rückgabe
|
||||
println!("Vault gefunden {}", filename.to_string());
|
||||
println!("Vault gefunden {filename}");
|
||||
|
||||
let metadata = fs::metadata(&path).map_err(|e| DatabaseError::IoError {
|
||||
path: path.to_string_lossy().to_string(),
|
||||
reason: format!("Metadaten konnten nicht gelesen werden: {}", e),
|
||||
reason: format!("Metadaten konnten nicht gelesen werden: {e}"),
|
||||
})?;
|
||||
|
||||
let last_access_timestamp = metadata
|
||||
.accessed()
|
||||
.map_err(|e| DatabaseError::IoError {
|
||||
path: path.to_string_lossy().to_string(),
|
||||
reason: format!("Zugriffszeit konnte nicht gelesen werden: {}", e),
|
||||
reason: format!("Zugriffszeit konnte nicht gelesen werden: {e}"),
|
||||
})?
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap_or_default() // Fallback für den seltenen Fall einer Zeit vor 1970
|
||||
@@ -212,12 +212,63 @@ pub fn vault_exists(app_handle: AppHandle, vault_name: String) -> Result<bool, D
    Ok(Path::new(&vault_path).exists())
}

/// Deletes a vault database file
/// Moves a vault database file to trash (or deletes permanently if trash is unavailable)
#[tauri::command]
pub fn move_vault_to_trash(
    app_handle: AppHandle,
    vault_name: String,
) -> Result<String, DatabaseError> {
    // On Android, trash is not available, so delete permanently
    #[cfg(target_os = "android")]
    {
        println!(
            "Android platform detected, permanently deleting vault '{}'",
            vault_name
        );
        return delete_vault(app_handle, vault_name);
    }

    // On non-Android platforms, try to use trash
    #[cfg(not(target_os = "android"))]
    {
        let vault_path = get_vault_path(&app_handle, &vault_name)?;
        let vault_shm_path = format!("{vault_path}-shm");
        let vault_wal_path = format!("{vault_path}-wal");

        if !Path::new(&vault_path).exists() {
            return Err(DatabaseError::IoError {
                path: vault_path,
                reason: "Vault does not exist".to_string(),
            });
        }

        // Try to move to trash first (works on desktop systems)
        let moved_to_trash = trash::delete(&vault_path).is_ok();

        if moved_to_trash {
            // Also try to move auxiliary files to trash (ignore errors as they might not exist)
            let _ = trash::delete(&vault_shm_path);
            let _ = trash::delete(&vault_wal_path);

            Ok(format!(
                "Vault '{vault_name}' successfully moved to trash"
            ))
        } else {
            // Fallback: Permanent deletion if trash fails
            println!(
                "Trash not available, falling back to permanent deletion for vault '{vault_name}'"
            );
            delete_vault(app_handle, vault_name)
        }
    }
}
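
A minimal sketch (not from the repository) of the try-trash-then-fall-back shape used above, with the same `trash` crate the command relies on; the demo file path and function name are illustrative only:

use std::fs;
use std::path::Path;

/// Sketch of the trash-with-fallback pattern: prefer the OS trash, delete
/// permanently if trash is unavailable. Everything here besides `trash::delete`
/// is illustrative, not the real Tauri command.
fn remove_with_trash_fallback(path: &Path) -> std::io::Result<String> {
    if !path.exists() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::NotFound,
            "file does not exist",
        ));
    }

    // Prefer the OS trash; fall back to permanent deletion if that fails
    // (headless sessions or platforms without trash support, for example).
    if trash::delete(path).is_ok() {
        Ok(format!("'{}' moved to trash", path.display()))
    } else {
        fs::remove_file(path)?;
        Ok(format!("'{}' permanently deleted", path.display()))
    }
}

fn main() -> std::io::Result<()> {
    let tmp = std::env::temp_dir().join("trash_fallback_demo.txt");
    fs::write(&tmp, b"demo")?;
    println!("{}", remove_with_trash_fallback(&tmp)?);
    Ok(())
}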

/// Deletes a vault database file permanently (bypasses trash)
#[tauri::command]
pub fn delete_vault(app_handle: AppHandle, vault_name: String) -> Result<String, DatabaseError> {
    let vault_path = get_vault_path(&app_handle, &vault_name)?;
    let vault_shm_path = format!("{}-shm", vault_path);
    let vault_wal_path = format!("{}-wal", vault_path);
    let vault_shm_path = format!("{vault_path}-shm");
    let vault_wal_path = format!("{vault_path}-wal");

    if !Path::new(&vault_path).exists() {
        return Err(DatabaseError::IoError {
@@ -229,23 +280,23 @@ pub fn delete_vault(app_handle: AppHandle, vault_name: String) -> Result<String,
|
||||
if Path::new(&vault_shm_path).exists() {
|
||||
fs::remove_file(&vault_shm_path).map_err(|e| DatabaseError::IoError {
|
||||
path: vault_shm_path.clone(),
|
||||
reason: format!("Failed to delete vault: {}", e),
|
||||
reason: format!("Failed to delete vault: {e}"),
|
||||
})?;
|
||||
}
|
||||
|
||||
if Path::new(&vault_wal_path).exists() {
|
||||
fs::remove_file(&vault_wal_path).map_err(|e| DatabaseError::IoError {
|
||||
path: vault_wal_path.clone(),
|
||||
reason: format!("Failed to delete vault: {}", e),
|
||||
reason: format!("Failed to delete vault: {e}"),
|
||||
})?;
|
||||
}
|
||||
|
||||
fs::remove_file(&vault_path).map_err(|e| DatabaseError::IoError {
|
||||
path: vault_path.clone(),
|
||||
reason: format!("Failed to delete vault: {}", e),
|
||||
reason: format!("Failed to delete vault: {e}"),
|
||||
})?;
|
||||
|
||||
Ok(format!("Vault '{}' successfully deleted", vault_name))
|
||||
Ok(format!("Vault '{vault_name}' successfully deleted"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@ -255,16 +306,16 @@ pub fn create_encrypted_database(
|
||||
key: String,
|
||||
state: State<'_, AppState>,
|
||||
) -> Result<String, DatabaseError> {
|
||||
println!("Creating encrypted vault with name: {}", vault_name);
|
||||
println!("Creating encrypted vault with name: {vault_name}");
|
||||
|
||||
let vault_path = get_vault_path(&app_handle, &vault_name)?;
|
||||
println!("Resolved vault path: {}", vault_path);
|
||||
println!("Resolved vault path: {vault_path}");
|
||||
|
||||
// Prüfen, ob bereits eine Vault mit diesem Namen existiert
|
||||
if Path::new(&vault_path).exists() {
|
||||
return Err(DatabaseError::IoError {
|
||||
path: vault_path,
|
||||
reason: format!("A vault with the name '{}' already exists", vault_name),
|
||||
reason: format!("A vault with the name '{vault_name}' already exists"),
|
||||
});
|
||||
}
|
||||
/* let resource_path = app_handle
|
||||
@ -276,7 +327,7 @@ pub fn create_encrypted_database(
|
||||
.path()
|
||||
.resolve("database/vault.db", BaseDirectory::Resource)
|
||||
.map_err(|e| DatabaseError::PathResolutionError {
|
||||
reason: format!("Failed to resolve template database: {}", e),
|
||||
reason: format!("Failed to resolve template database: {e}"),
|
||||
})?;
|
||||
|
||||
let template_content =
|
||||
@ -285,20 +336,20 @@ pub fn create_encrypted_database(
|
||||
.read(&template_path)
|
||||
.map_err(|e| DatabaseError::IoError {
|
||||
path: template_path.display().to_string(),
|
||||
reason: format!("Failed to read template database from resources: {}", e),
|
||||
reason: format!("Failed to read template database from resources: {e}"),
|
||||
})?;
|
||||
|
||||
let temp_path = app_handle
|
||||
.path()
|
||||
.resolve("temp_vault.db", BaseDirectory::AppLocalData)
|
||||
.map_err(|e| DatabaseError::PathResolutionError {
|
||||
reason: format!("Failed to resolve temp database: {}", e),
|
||||
reason: format!("Failed to resolve temp database: {e}"),
|
||||
})?;
|
||||
|
||||
let temp_path_clone = temp_path.to_owned();
|
||||
fs::write(temp_path, template_content).map_err(|e| DatabaseError::IoError {
|
||||
path: vault_path.to_string(),
|
||||
reason: format!("Failed to write temporary template database: {}", e),
|
||||
reason: format!("Failed to write temporary template database: {e}"),
|
||||
})?;
|
||||
/* if !template_path.exists() {
|
||||
return Err(DatabaseError::IoError {
|
||||
@ -311,8 +362,7 @@ pub fn create_encrypted_database(
|
||||
let conn = Connection::open(&temp_path_clone).map_err(|e| DatabaseError::ConnectionFailed {
|
||||
path: temp_path_clone.display().to_string(),
|
||||
reason: format!(
|
||||
"Fehler beim Öffnen der unverschlüsselten Quelldatenbank: {}",
|
||||
e
|
||||
"Fehler beim Öffnen der unverschlüsselten Quelldatenbank: {e}"
|
||||
),
|
||||
})?;
|
||||
|
||||
@ -340,7 +390,7 @@ pub fn create_encrypted_database(
|
||||
let _ = fs::remove_file(&vault_path);
|
||||
let _ = fs::remove_file(&temp_path_clone);
|
||||
return Err(DatabaseError::QueryError {
|
||||
reason: format!("Fehler während sqlcipher_export: {}", e),
|
||||
reason: format!("Fehler während sqlcipher_export: {e}"),
|
||||
});
|
||||
}
|
||||
|
||||
@ -365,11 +415,11 @@ pub fn create_encrypted_database(
|
||||
Ok(version)
|
||||
}) {
|
||||
Ok(version) => {
|
||||
println!("SQLCipher ist aktiv! Version: {}", version);
|
||||
println!("SQLCipher ist aktiv! Version: {version}");
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("FEHLER: SQLCipher scheint NICHT aktiv zu sein!");
|
||||
eprintln!("Der Befehl 'PRAGMA cipher_version;' schlug fehl: {}", e);
|
||||
eprintln!("Der Befehl 'PRAGMA cipher_version;' schlug fehl: {e}");
|
||||
eprintln!("Die Datenbank wurde wahrscheinlich NICHT verschlüsselt.");
|
||||
}
|
||||
}
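
The `PRAGMA cipher_version;` probe above only returns a row when SQLCipher is actually linked into SQLite. A minimal sketch of the same check (assuming a rusqlite build with a SQLCipher-enabled SQLite, e.g. the `bundled-sqlcipher` feature; not the repository's own code):

use rusqlite::Connection;

/// Returns the SQLCipher version string, or an error if SQLCipher is not active.
fn check_sqlcipher(conn: &Connection) -> rusqlite::Result<String> {
    // On a stock SQLite build this PRAGMA yields no row, so the query fails —
    // the same signal used above to warn that the database was not encrypted.
    conn.query_row("PRAGMA cipher_version;", [], |row| row.get::<_, String>(0))
}

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    match check_sqlcipher(&conn) {
        Ok(version) => println!("SQLCipher active, version {version}"),
        Err(e) => eprintln!("SQLCipher not active: {e}"),
    }
    Ok(())
}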
|
||||
@@ -377,7 +427,7 @@ pub fn create_encrypted_database(
|
||||
conn.close()
|
||||
.map_err(|(_, e)| DatabaseError::ConnectionFailed {
|
||||
path: template_path.display().to_string(),
|
||||
reason: format!("Fehler beim Schließen der Quelldatenbank: {}", e),
|
||||
reason: format!("Fehler beim Schließen der Quelldatenbank: {e}"),
|
||||
})?;
|
||||
|
||||
let _ = fs::remove_file(&temp_path_clone);
|
||||
@ -394,22 +444,19 @@ pub fn open_encrypted_database(
|
||||
key: String,
|
||||
state: State<'_, AppState>,
|
||||
) -> Result<String, DatabaseError> {
|
||||
println!("Opening encrypted database vault_path: {}", vault_path);
|
||||
|
||||
// Vault-Pfad aus dem Namen ableiten
|
||||
//let vault_path = get_vault_path(&app_handle, &vault_name)?;
|
||||
println!("Resolved vault path: {}", vault_path);
|
||||
println!("Opening encrypted database vault_path: {vault_path}");
|
||||
println!("Resolved vault path: {vault_path}");
|
||||
|
||||
if !Path::new(&vault_path).exists() {
|
||||
return Err(DatabaseError::IoError {
|
||||
path: vault_path.to_string(),
|
||||
reason: format!("Vault '{}' does not exist", vault_path),
|
||||
reason: format!("Vault '{vault_path}' does not exist"),
|
||||
});
|
||||
}
|
||||
|
||||
initialize_session(&app_handle, &vault_path, &key, &state)?;
|
||||
|
||||
Ok(format!("Vault '{}' opened successfully", vault_path))
|
||||
Ok(format!("Vault '{vault_path}' opened successfully"))
|
||||
}
|
||||
|
||||
/// Opens the DB, initializes the HLC service, and stores both in the AppState.
|
||||
@ -461,8 +508,7 @@ fn initialize_session(
|
||||
eprintln!("INFO: Setting 'triggers_initialized' flag via CRDT...");
|
||||
|
||||
let insert_sql = format!(
|
||||
"INSERT INTO {} (id, key, type, value) VALUES (?, ?, ?, ?)",
|
||||
TABLE_SETTINGS
|
||||
"INSERT INTO {TABLE_SETTINGS} (id, key, type, value) VALUES (?, ?, ?, ?)"
|
||||
);
|
||||
|
||||
// execute_with_crdt erwartet Vec<JsonValue>, kein params!-Makro
|
||||
|
||||
@ -10,10 +10,8 @@ use crate::extension::permissions::manager::PermissionManager;
|
||||
use crate::extension::permissions::types::ExtensionPermission;
|
||||
use crate::table_names::{TABLE_EXTENSIONS, TABLE_EXTENSION_PERMISSIONS};
|
||||
use crate::AppState;
|
||||
use serde_json::Value as JsonValue;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::io::Cursor;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
use std::time::{Duration, SystemTime};
|
||||
@@ -66,39 +64,149 @@ impl ExtensionManager {
        Self::default()
    }

    /// Helper function to validate path and check for path traversal
    /// Returns the cleaned path if valid, or None if invalid/not found
    /// If require_exists is true, returns None if path doesn't exist
    pub fn validate_path_in_directory(
        base_dir: &PathBuf,
        relative_path: &str,
        require_exists: bool,
    ) -> Result<Option<PathBuf>, ExtensionError> {
        // Check for path traversal patterns
        if relative_path.contains("..") {
            return Err(ExtensionError::SecurityViolation {
                reason: format!("Path traversal attempt: {relative_path}"),
            });
        }

        // Clean the path (same logic as in protocol.rs)
        let clean_path = relative_path
            .replace('\\', "/")
            .trim_start_matches('/')
            .split('/')
            .filter(|&part| !part.is_empty() && part != "." && part != "..")
            .collect::<PathBuf>();

        let full_path = base_dir.join(&clean_path);

        // Check if file/directory exists (if required)
        if require_exists && !full_path.exists() {
            return Ok(None);
        }

        // Verify path is within base directory
        let canonical_base = base_dir
            .canonicalize()
            .map_err(|e| ExtensionError::Filesystem { source: e })?;

        if let Ok(canonical_path) = full_path.canonicalize() {
            if !canonical_path.starts_with(&canonical_base) {
                return Err(ExtensionError::SecurityViolation {
                    reason: format!("Path outside base directory: {relative_path}"),
                });
            }
            Ok(Some(canonical_path))
        } else {
            // Path doesn't exist yet - still validate it would be within base
            if full_path.starts_with(&canonical_base) {
                Ok(Some(full_path))
            } else {
                Err(ExtensionError::SecurityViolation {
                    reason: format!("Path outside base directory: {relative_path}"),
                })
            }
        }
    }
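
The helper above rejects any `..` outright and then re-cleans the path anyway. A small sketch of just that cleaning step (illustrative paths and function name, not the real `ExtensionManager` API) showing why the joined result cannot climb out of the base directory:

use std::path::{Path, PathBuf};

/// Strip empty, "." and ".." segments, as the helper above does, so the joined
/// path can never escape `base`. The real helper additionally errors on ".."
/// before it ever reaches this step.
fn clean_relative(relative: &str) -> PathBuf {
    relative
        .replace('\\', "/")
        .trim_start_matches('/')
        .split('/')
        .filter(|&part| !part.is_empty() && part != "." && part != "..")
        .collect()
}

fn main() {
    let base = Path::new("/opt/haex/extensions");
    for candidate in ["icons/app.svg", "../../etc/passwd", "./public//favicon.ico"] {
        let joined = base.join(clean_relative(candidate));
        // Every cleaned path stays under the base directory by construction.
        assert!(joined.starts_with(base));
        println!("{candidate:>24} -> {}", joined.display());
    }
}
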
/// Validates icon path and falls back to favicon.ico if not specified
|
||||
fn validate_and_resolve_icon_path(
|
||||
extension_dir: &PathBuf,
|
||||
haextension_dir: &str,
|
||||
icon_path: Option<&str>,
|
||||
) -> Result<Option<String>, ExtensionError> {
|
||||
// If icon is specified in manifest, validate it
|
||||
if let Some(icon) = icon_path {
|
||||
if let Some(clean_path) = Self::validate_path_in_directory(extension_dir, icon, true)? {
|
||||
return Ok(Some(clean_path.to_string_lossy().to_string()));
|
||||
} else {
|
||||
eprintln!("WARNING: Icon path specified in manifest not found: {icon}");
|
||||
// Continue to fallback logic
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback 1: Check haextension/favicon.ico
|
||||
let haextension_favicon = format!("{haextension_dir}/favicon.ico");
|
||||
if let Some(clean_path) = Self::validate_path_in_directory(extension_dir, &haextension_favicon, true)? {
|
||||
return Ok(Some(clean_path.to_string_lossy().to_string()));
|
||||
}
|
||||
|
||||
// Fallback 2: Check public/favicon.ico
|
||||
if let Some(clean_path) = Self::validate_path_in_directory(extension_dir, "public/favicon.ico", true)? {
|
||||
return Ok(Some(clean_path.to_string_lossy().to_string()));
|
||||
}
|
||||
|
||||
// No icon found
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Extrahiert eine Extension-ZIP-Datei und validiert das Manifest
|
||||
fn extract_and_validate_extension(
|
||||
bytes: Vec<u8>,
|
||||
temp_prefix: &str,
|
||||
app_handle: &AppHandle,
|
||||
) -> Result<ExtractedExtension, ExtensionError> {
|
||||
let temp = std::env::temp_dir().join(format!("{}_{}", temp_prefix, uuid::Uuid::new_v4()));
|
||||
// Use app_cache_dir for better Android compatibility
|
||||
let cache_dir = app_handle
|
||||
.path()
|
||||
.app_cache_dir()
|
||||
.map_err(|e| ExtensionError::InstallationFailed {
|
||||
reason: format!("Cannot get app cache dir: {e}"),
|
||||
})?;
|
||||
|
||||
let temp_id = uuid::Uuid::new_v4();
|
||||
let temp = cache_dir.join(format!("{temp_prefix}_{temp_id}"));
|
||||
let zip_file_path = cache_dir.join(format!("{}_{}_{}.haextension", temp_prefix, temp_id, "temp"));
|
||||
|
||||
// Write bytes to a temporary ZIP file first (important for Android file system)
|
||||
fs::write(&zip_file_path, &bytes).map_err(|e| {
|
||||
ExtensionError::filesystem_with_path(zip_file_path.display().to_string(), e)
|
||||
})?;
|
||||
|
||||
// Create extraction directory
|
||||
fs::create_dir_all(&temp)
|
||||
.map_err(|e| ExtensionError::filesystem_with_path(temp.display().to_string(), e))?;
|
||||
|
||||
let mut archive = ZipArchive::new(Cursor::new(bytes)).map_err(|e| {
|
||||
// Open ZIP file from disk (more reliable on Android than from memory)
|
||||
let zip_file = fs::File::open(&zip_file_path).map_err(|e| {
|
||||
ExtensionError::filesystem_with_path(zip_file_path.display().to_string(), e)
|
||||
})?;
|
||||
|
||||
let mut archive = ZipArchive::new(zip_file).map_err(|e| {
|
||||
ExtensionError::InstallationFailed {
|
||||
reason: format!("Invalid ZIP: {}", e),
|
||||
reason: format!("Invalid ZIP: {e}"),
|
||||
}
|
||||
})?;
|
||||
|
||||
archive
|
||||
.extract(&temp)
|
||||
.map_err(|e| ExtensionError::InstallationFailed {
|
||||
reason: format!("Cannot extract ZIP: {}", e),
|
||||
reason: format!("Cannot extract ZIP: {e}"),
|
||||
})?;
|
||||
|
||||
// Clean up temporary ZIP file
|
||||
let _ = fs::remove_file(&zip_file_path);
|
||||
|
||||
// Read haextension_dir from config if it exists, otherwise use default
|
||||
let config_path = temp.join("haextension.config.json");
|
||||
let haextension_dir = if config_path.exists() {
|
||||
let config_content = std::fs::read_to_string(&config_path)
|
||||
.map_err(|e| ExtensionError::ManifestError {
|
||||
reason: format!("Cannot read haextension.config.json: {}", e),
|
||||
reason: format!("Cannot read haextension.config.json: {e}"),
|
||||
})?;
|
||||
|
||||
let config: serde_json::Value = serde_json::from_str(&config_content)
|
||||
.map_err(|e| ExtensionError::ManifestError {
|
||||
reason: format!("Invalid haextension.config.json: {}", e),
|
||||
reason: format!("Invalid haextension.config.json: {e}"),
|
||||
})?;
|
||||
|
||||
let dir = config
|
||||
@ -108,50 +216,29 @@ impl ExtensionManager {
|
||||
.unwrap_or("haextension")
|
||||
.to_string();
|
||||
|
||||
// Security: Validate that haextension_dir doesn't contain ".." for path traversal
|
||||
if dir.contains("..") {
|
||||
return Err(ExtensionError::ManifestError {
|
||||
reason: "Invalid haextension_dir: path traversal with '..' not allowed".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
dir
|
||||
} else {
|
||||
"haextension".to_string()
|
||||
};
|
||||
|
||||
// Build the manifest path
|
||||
let manifest_path = temp.join(&haextension_dir).join("manifest.json");
|
||||
|
||||
// Ensure the resolved path is still within temp directory (safety check against path traversal)
|
||||
let canonical_temp = temp.canonicalize()
|
||||
.map_err(|e| ExtensionError::Filesystem { source: e })?;
|
||||
|
||||
// Only check if manifest_path parent exists to avoid errors
|
||||
if let Some(parent) = manifest_path.parent() {
|
||||
if let Ok(canonical_manifest_dir) = parent.canonicalize() {
|
||||
if !canonical_manifest_dir.starts_with(&canonical_temp) {
|
||||
return Err(ExtensionError::ManifestError {
|
||||
reason: "Security violation: manifest path outside extension directory".to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if manifest exists
|
||||
if !manifest_path.exists() {
|
||||
return Err(ExtensionError::ManifestError {
|
||||
reason: format!("manifest.json not found at {}/manifest.json", haextension_dir),
|
||||
});
|
||||
}
|
||||
// Validate manifest path using helper function
|
||||
let manifest_relative_path = format!("{haextension_dir}/manifest.json");
|
||||
let manifest_path = Self::validate_path_in_directory(&temp, &manifest_relative_path, true)?
|
||||
.ok_or_else(|| ExtensionError::ManifestError {
|
||||
reason: format!("manifest.json not found at {haextension_dir}/manifest.json"),
|
||||
})?;
|
||||
|
||||
let actual_dir = temp.clone();
|
||||
let manifest_content =
|
||||
std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError {
|
||||
reason: format!("Cannot read manifest: {}", e),
|
||||
reason: format!("Cannot read manifest: {e}"),
|
||||
})?;
|
||||
|
||||
let manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?;
|
||||
let mut manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?;
|
||||
|
||||
// Validate and resolve icon path with fallback logic
|
||||
let validated_icon = Self::validate_and_resolve_icon_path(&actual_dir, &haextension_dir, manifest.icon.as_deref())?;
|
||||
manifest.icon = validated_icon;
|
||||
|
||||
let content_hash = ExtensionCrypto::hash_directory(&actual_dir, &manifest_path).map_err(|e| {
|
||||
ExtensionError::SignatureVerificationFailed {
|
||||
@ -351,8 +438,7 @@ impl ExtensionManager {
|
||||
|
||||
eprintln!("DEBUG: Removing extension with ID: {}", extension.id);
|
||||
eprintln!(
|
||||
"DEBUG: Extension name: {}, version: {}",
|
||||
extension_name, extension_version
|
||||
"DEBUG: Extension name: {extension_name}, version: {extension_version}"
|
||||
);
|
||||
|
||||
// Lösche Permissions und Extension-Eintrag in einer Transaktion
|
||||
@ -371,7 +457,7 @@ impl ExtensionManager {
|
||||
PermissionManager::delete_permissions_in_transaction(&tx, &hlc_service, &extension.id)?;
|
||||
|
||||
// Lösche Extension-Eintrag mit extension_id
|
||||
let sql = format!("DELETE FROM {} WHERE id = ?", TABLE_EXTENSIONS);
|
||||
let sql = format!("DELETE FROM {TABLE_EXTENSIONS} WHERE id = ?");
|
||||
eprintln!("DEBUG: Executing SQL: {} with id = {}", sql, extension.id);
|
||||
SqlExecutor::execute_internal_typed(
|
||||
&tx,
|
||||
@ -427,9 +513,10 @@ impl ExtensionManager {
|
||||
|
||||
pub async fn preview_extension_internal(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
file_bytes: Vec<u8>,
|
||||
) -> Result<ExtensionPreview, ExtensionError> {
|
||||
let extracted = Self::extract_and_validate_extension(file_bytes, "haexhub_preview")?;
|
||||
let extracted = Self::extract_and_validate_extension(file_bytes, "haexhub_preview", app_handle)?;
|
||||
|
||||
let is_valid_signature = ExtensionCrypto::verify_signature(
|
||||
&extracted.manifest.public_key,
|
||||
@ -454,7 +541,7 @@ impl ExtensionManager {
|
||||
custom_permissions: EditablePermissions,
|
||||
state: &State<'_, AppState>,
|
||||
) -> Result<String, ExtensionError> {
|
||||
let extracted = Self::extract_and_validate_extension(file_bytes, "haexhub_ext")?;
|
||||
let extracted = Self::extract_and_validate_extension(file_bytes, "haexhub_ext", &app_handle)?;
|
||||
|
||||
// Signatur verifizieren (bei Installation wird ein Fehler geworfen, nicht nur geprüft)
|
||||
ExtensionCrypto::verify_signature(
|
||||
@ -525,8 +612,7 @@ impl ExtensionManager {
|
||||
|
||||
// 1. Extension-Eintrag erstellen mit generierter UUID
|
||||
let insert_ext_sql = format!(
|
||||
"INSERT INTO {} (id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
TABLE_EXTENSIONS
|
||||
"INSERT INTO {TABLE_EXTENSIONS} (id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled, single_instance) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
|
||||
);
|
||||
|
||||
SqlExecutor::execute_internal_typed(
|
||||
@ -545,13 +631,13 @@ impl ExtensionManager {
|
||||
extracted.manifest.homepage,
|
||||
extracted.manifest.description,
|
||||
true, // enabled
|
||||
extracted.manifest.single_instance.unwrap_or(false),
|
||||
],
|
||||
)?;
|
||||
|
||||
// 2. Permissions speichern
|
||||
let insert_perm_sql = format!(
|
||||
"INSERT INTO {} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)",
|
||||
TABLE_EXTENSION_PERMISSIONS
|
||||
"INSERT INTO {TABLE_EXTENSION_PERMISSIONS} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)"
|
||||
);
|
||||
|
||||
for perm in &permissions {
|
||||
@ -623,10 +709,9 @@ impl ExtensionManager {
|
||||
// Lade alle Daten aus der Datenbank
|
||||
let extensions = with_connection(&state.db, |conn| {
|
||||
let sql = format!(
|
||||
"SELECT id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled FROM {}",
|
||||
TABLE_EXTENSIONS
|
||||
"SELECT id, name, version, author, entry, icon, public_key, signature, homepage, description, enabled, single_instance FROM {TABLE_EXTENSIONS}"
|
||||
);
|
||||
eprintln!("DEBUG: SQL Query before transformation: {}", sql);
|
||||
eprintln!("DEBUG: SQL Query before transformation: {sql}");
|
||||
|
||||
let results = SqlExecutor::query_select(conn, &sql, &[])?;
|
||||
eprintln!("DEBUG: Query returned {} results", results.len());
|
||||
@ -655,13 +740,16 @@ impl ExtensionManager {
|
||||
})?
|
||||
.to_string(),
|
||||
author: row[3].as_str().map(String::from),
|
||||
entry: row[4].as_str().unwrap_or("index.html").to_string(),
|
||||
entry: row[4].as_str().map(String::from),
|
||||
icon: row[5].as_str().map(String::from),
|
||||
public_key: row[6].as_str().unwrap_or("").to_string(),
|
||||
signature: row[7].as_str().unwrap_or("").to_string(),
|
||||
permissions: ExtensionPermissions::default(),
|
||||
homepage: row[8].as_str().map(String::from),
|
||||
description: row[9].as_str().map(String::from),
|
||||
single_instance: row[11]
|
||||
.as_bool()
|
||||
.or_else(|| row[11].as_i64().map(|v| v != 0)),
|
||||
};
|
||||
|
||||
let enabled = row[10]
|
||||
@ -685,7 +773,7 @@ impl ExtensionManager {
|
||||
|
||||
for extension_data in extensions {
|
||||
let extension_id = extension_data.id;
|
||||
eprintln!("DEBUG: Processing extension: {}", extension_id);
|
||||
eprintln!("DEBUG: Processing extension: {extension_id}");
|
||||
|
||||
// Use public_key/name/version path structure
|
||||
let extension_path = self.get_extension_dir(
|
||||
@ -698,8 +786,7 @@ impl ExtensionManager {
|
||||
// Check if extension directory exists
|
||||
if !extension_path.exists() {
|
||||
eprintln!(
|
||||
"DEBUG: Extension directory missing for: {} at {:?}",
|
||||
extension_id, extension_path
|
||||
"DEBUG: Extension directory missing for: {extension_id} at {extension_path:?}"
|
||||
);
|
||||
self.missing_extensions
|
||||
.lock()
|
||||
@ -722,33 +809,12 @@ impl ExtensionManager {
|
||||
Ok(config_content) => {
|
||||
match serde_json::from_str::<serde_json::Value>(&config_content) {
|
||||
Ok(config) => {
|
||||
let dir = config
|
||||
config
|
||||
.get("dev")
|
||||
.and_then(|dev| dev.get("haextension_dir"))
|
||||
.and_then(|dir| dir.as_str())
|
||||
.unwrap_or("haextension")
|
||||
.to_string();
|
||||
|
||||
// Security: Validate that haextension_dir doesn't contain ".."
|
||||
if dir.contains("..") {
|
||||
eprintln!(
|
||||
"DEBUG: Invalid haextension_dir for: {}, contains '..'",
|
||||
extension_id
|
||||
);
|
||||
self.missing_extensions
|
||||
.lock()
|
||||
.map_err(|e| ExtensionError::MutexPoisoned {
|
||||
reason: e.to_string(),
|
||||
})?
|
||||
.push(MissingExtension {
|
||||
id: extension_id.clone(),
|
||||
public_key: extension_data.manifest.public_key.clone(),
|
||||
name: extension_data.manifest.name.clone(),
|
||||
version: extension_data.manifest.version.clone(),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
dir
|
||||
.to_string()
|
||||
}
|
||||
Err(_) => "haextension".to_string(),
|
||||
}
|
||||
@ -759,12 +825,13 @@ impl ExtensionManager {
|
||||
"haextension".to_string()
|
||||
};
|
||||
|
||||
// Check if manifest.json exists in the haextension_dir
|
||||
let manifest_path = extension_path.join(&haextension_dir).join("manifest.json");
|
||||
if !manifest_path.exists() {
|
||||
// Validate manifest.json path using helper function
|
||||
let manifest_relative_path = format!("{haextension_dir}/manifest.json");
|
||||
if Self::validate_path_in_directory(&extension_path, &manifest_relative_path, true)?
|
||||
.is_none()
|
||||
{
|
||||
eprintln!(
|
||||
"DEBUG: manifest.json missing for: {} at {:?}",
|
||||
extension_id, manifest_path
|
||||
"DEBUG: manifest.json missing or invalid for: {extension_id} at {haextension_dir}/manifest.json"
|
||||
);
|
||||
self.missing_extensions
|
||||
.lock()
|
||||
@ -780,7 +847,7 @@ impl ExtensionManager {
|
||||
continue;
|
||||
}
|
||||
|
||||
eprintln!("DEBUG: Extension loaded successfully: {}", extension_id);
|
||||
eprintln!("DEBUG: Extension loaded successfully: {extension_id}");
|
||||
|
||||
let extension = Extension {
|
||||
id: extension_id.clone(),
|
||||
|
||||
@@ -57,13 +57,20 @@ pub struct ExtensionManifest {
    pub name: String,
    pub version: String,
    pub author: Option<String>,
    pub entry: String,
    #[serde(default = "default_entry_value")]
    pub entry: Option<String>,
    pub icon: Option<String>,
    pub public_key: String,
    pub signature: String,
    pub permissions: ExtensionPermissions,
    pub homepage: Option<String>,
    pub description: Option<String>,
    #[serde(default)]
    pub single_instance: Option<bool>,
}

fn default_entry_value() -> Option<String> {
    Some("index.html".to_string())
}
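
A sketch of how the two serde defaults above behave when a manifest omits `entry` and `single_instance`; the struct here is trimmed to the relevant fields and is not the real `ExtensionManifest`:

use serde::Deserialize;

/// Trimmed-down manifest with only the fields relevant to the defaults above.
#[derive(Debug, Deserialize)]
struct ManifestSketch {
    name: String,
    version: String,
    #[serde(default = "default_entry_value")]
    entry: Option<String>,
    #[serde(default)]
    single_instance: Option<bool>,
}

fn default_entry_value() -> Option<String> {
    Some("index.html".to_string())
}

fn main() -> serde_json::Result<()> {
    // Neither `entry` nor `single_instance` is present in the JSON.
    let manifest: ManifestSketch =
        serde_json::from_str(r#"{ "name": "demo", "version": "0.1.0" }"#)?;
    assert_eq!(manifest.entry.as_deref(), Some("index.html")); // custom default fn
    assert_eq!(manifest.single_instance, None); // plain #[serde(default)]
    println!("{manifest:?}");
    Ok(())
}
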
|
||||
impl ExtensionManifest {
|
||||
@ -172,6 +179,8 @@ pub struct ExtensionInfoResponse {
|
||||
pub description: Option<String>,
|
||||
pub homepage: Option<String>,
|
||||
pub icon: Option<String>,
|
||||
pub entry: Option<String>,
|
||||
pub single_instance: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub dev_server_url: Option<String>,
|
||||
}
|
||||
@ -197,6 +206,8 @@ impl ExtensionInfoResponse {
|
||||
description: extension.manifest.description.clone(),
|
||||
homepage: extension.manifest.homepage.clone(),
|
||||
icon: extension.manifest.icon.clone(),
|
||||
entry: extension.manifest.entry.clone(),
|
||||
single_instance: extension.manifest.single_instance,
|
||||
dev_server_url,
|
||||
})
|
||||
}
|
||||
|
||||
@ -42,12 +42,12 @@ enum DataProcessingError {
|
||||
impl fmt::Display for DataProcessingError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
DataProcessingError::HexDecoding(e) => write!(f, "Hex-Dekodierungsfehler: {}", e),
|
||||
DataProcessingError::HexDecoding(e) => write!(f, "Hex-Dekodierungsfehler: {e}"),
|
||||
DataProcessingError::Utf8Conversion(e) => {
|
||||
write!(f, "UTF-8-Konvertierungsfehler: {}", e)
|
||||
write!(f, "UTF-8-Konvertierungsfehler: {e}")
|
||||
}
|
||||
DataProcessingError::JsonParsing(e) => write!(f, "JSON-Parsing-Fehler: {}", e),
|
||||
DataProcessingError::Custom(msg) => write!(f, "Datenverarbeitungsfehler: {}", msg),
|
||||
DataProcessingError::JsonParsing(e) => write!(f, "JSON-Parsing-Fehler: {e}"),
|
||||
DataProcessingError::Custom(msg) => write!(f, "Datenverarbeitungsfehler: {msg}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -101,7 +101,7 @@ pub fn resolve_secure_extension_asset_path(
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '-')
|
||||
{
|
||||
return Err(ExtensionError::ValidationError {
|
||||
reason: format!("Invalid extension name: {}", extension_name),
|
||||
reason: format!("Invalid extension name: {extension_name}"),
|
||||
});
|
||||
}
|
||||
|
||||
@ -111,7 +111,7 @@ pub fn resolve_secure_extension_asset_path(
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '.')
|
||||
{
|
||||
return Err(ExtensionError::ValidationError {
|
||||
reason: format!("Invalid extension version: {}", extension_version),
|
||||
reason: format!("Invalid extension version: {extension_version}"),
|
||||
});
|
||||
}
|
||||
|
||||
@ -146,11 +146,10 @@ pub fn resolve_secure_extension_asset_path(
|
||||
Ok(canonical_path)
|
||||
} else {
|
||||
eprintln!(
|
||||
"SECURITY WARNING: Path traversal attempt blocked: {}",
|
||||
requested_asset_path
|
||||
"SECURITY WARNING: Path traversal attempt blocked: {requested_asset_path}"
|
||||
);
|
||||
Err(ExtensionError::SecurityViolation {
|
||||
reason: format!("Path traversal attempt: {}", requested_asset_path),
|
||||
reason: format!("Path traversal attempt: {requested_asset_path}"),
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -159,11 +158,10 @@ pub fn resolve_secure_extension_asset_path(
|
||||
Ok(final_path)
|
||||
} else {
|
||||
eprintln!(
|
||||
"SECURITY WARNING: Invalid asset path: {}",
|
||||
requested_asset_path
|
||||
"SECURITY WARNING: Invalid asset path: {requested_asset_path}"
|
||||
);
|
||||
Err(ExtensionError::SecurityViolation {
|
||||
reason: format!("Invalid asset path: {}", requested_asset_path),
|
||||
reason: format!("Invalid asset path: {requested_asset_path}"),
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -184,7 +182,7 @@ pub fn extension_protocol_handler(
|
||||
|
||||
// Only allow same-protocol requests or tauri origin
|
||||
// For null/empty origin (initial load), use wildcard
|
||||
let protocol_prefix = format!("{}://", EXTENSION_PROTOCOL_NAME);
|
||||
let protocol_prefix = format!("{EXTENSION_PROTOCOL_NAME}://");
|
||||
let allowed_origin = if origin.starts_with(&protocol_prefix) || origin == get_tauri_origin() {
|
||||
origin
|
||||
} else if origin.is_empty() || origin == "null" {
|
||||
@ -216,9 +214,9 @@ pub fn extension_protocol_handler(
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
println!("Protokoll Handler für: {}", uri_ref);
|
||||
println!("Origin: {}", origin);
|
||||
println!("Referer: {}", referer);
|
||||
println!("Protokoll Handler für: {uri_ref}");
|
||||
println!("Origin: {origin}");
|
||||
println!("Referer: {referer}");
|
||||
|
||||
let path_str = uri_ref.path();
|
||||
|
||||
@ -227,16 +225,16 @@ pub fn extension_protocol_handler(
|
||||
// - Desktop: haex-extension://<base64>/{assetPath}
|
||||
// - Android: http://localhost/{base64}/{assetPath}
|
||||
let host = uri_ref.host().unwrap_or("");
|
||||
println!("URI Host: {}", host);
|
||||
println!("URI Host: {host}");
|
||||
|
||||
let (info, segments_after_version) = if host == "localhost" || host == format!("{}.localhost", EXTENSION_PROTOCOL_NAME).as_str() {
|
||||
let (info, segments_after_version) = if host == "localhost" || host == format!("{EXTENSION_PROTOCOL_NAME}.localhost").as_str() {
|
||||
// Android format: http://haex-extension.localhost/{base64}/{assetPath}
|
||||
// Extract base64 from first path segment
|
||||
println!("Android format detected: http://{}/...", host);
|
||||
println!("Android format detected: http://{host}/...");
|
||||
let mut segments_iter = path_str.split('/').filter(|s| !s.is_empty());
|
||||
|
||||
if let Some(first_segment) = segments_iter.next() {
|
||||
println!("First path segment (base64): {}", first_segment);
|
||||
println!("First path segment (base64): {first_segment}");
|
||||
match BASE64_STANDARD.decode(first_segment) {
|
||||
Ok(decoded_bytes) => match String::from_utf8(decoded_bytes) {
|
||||
Ok(json_str) => match serde_json::from_str::<ExtensionInfo>(&json_str) {
|
||||
@ -252,29 +250,29 @@ pub fn extension_protocol_handler(
|
||||
(info, remaining)
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to parse JSON from base64 path: {}", e);
|
||||
eprintln!("Failed to parse JSON from base64 path: {e}");
|
||||
return Response::builder()
|
||||
.status(400)
|
||||
.header("Access-Control-Allow-Origin", allowed_origin)
|
||||
.body(Vec::from(format!("Invalid extension info in base64 path: {}", e)))
|
||||
.body(Vec::from(format!("Invalid extension info in base64 path: {e}")))
|
||||
.map_err(|e| e.into());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode UTF-8 from base64 path: {}", e);
|
||||
eprintln!("Failed to decode UTF-8 from base64 path: {e}");
|
||||
return Response::builder()
|
||||
.status(400)
|
||||
.header("Access-Control-Allow-Origin", allowed_origin)
|
||||
.body(Vec::from(format!("Invalid UTF-8 in base64 path: {}", e)))
|
||||
.body(Vec::from(format!("Invalid UTF-8 in base64 path: {e}")))
|
||||
.map_err(|e| e.into());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode base64 from path: {}", e);
|
||||
eprintln!("Failed to decode base64 from path: {e}");
|
||||
return Response::builder()
|
||||
.status(400)
|
||||
.header("Access-Control-Allow-Origin", allowed_origin)
|
||||
.body(Vec::from(format!("Invalid base64 in path: {}", e)))
|
||||
.body(Vec::from(format!("Invalid base64 in path: {e}")))
|
||||
.map_err(|e| e.into());
|
||||
}
|
||||
}
|
||||
@ -311,35 +309,35 @@ pub fn extension_protocol_handler(
|
||||
(info, segments)
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to parse JSON from base64 host: {}", e);
|
||||
eprintln!("Failed to parse JSON from base64 host: {e}");
|
||||
return Response::builder()
|
||||
.status(400)
|
||||
.header("Access-Control-Allow-Origin", allowed_origin)
|
||||
.body(Vec::from(format!("Invalid extension info in base64 host: {}", e)))
|
||||
.body(Vec::from(format!("Invalid extension info in base64 host: {e}")))
|
||||
.map_err(|e| e.into());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode UTF-8 from base64 host: {}", e);
|
||||
eprintln!("Failed to decode UTF-8 from base64 host: {e}");
|
||||
return Response::builder()
|
||||
.status(400)
|
||||
.header("Access-Control-Allow-Origin", allowed_origin)
|
||||
.body(Vec::from(format!("Invalid UTF-8 in base64 host: {}", e)))
|
||||
.body(Vec::from(format!("Invalid UTF-8 in base64 host: {e}")))
|
||||
.map_err(|e| e.into());
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode base64 host: {}", e);
|
||||
eprintln!("Failed to decode base64 host: {e}");
|
||||
return Response::builder()
|
||||
.status(400)
|
||||
.header("Access-Control-Allow-Origin", allowed_origin)
|
||||
.body(Vec::from(format!("Invalid base64 in host: {}", e)))
|
||||
.body(Vec::from(format!("Invalid base64 in host: {e}")))
|
||||
.map_err(|e| e.into());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// No base64 host - use path-based parsing (for localhost/Android/Windows)
|
||||
parse_extension_info_from_path(path_str, origin, uri_ref, referer, &allowed_origin)?
|
||||
parse_extension_info_from_path(path_str, origin, uri_ref, referer)?
|
||||
};
|
||||
|
||||
// Construct asset path from remaining segments
|
||||
@ -353,8 +351,8 @@ pub fn extension_protocol_handler(
|
||||
&raw_asset_path
|
||||
};
|
||||
|
||||
println!("Path: {}", path_str);
|
||||
println!("Asset to load: {}", asset_to_load);
|
||||
println!("Path: {path_str}");
|
||||
println!("Asset to load: {asset_to_load}");
|
||||
|
||||
let absolute_secure_path = resolve_secure_extension_asset_path(
|
||||
app_handle,
|
||||
@ -362,7 +360,7 @@ pub fn extension_protocol_handler(
|
||||
&info.public_key,
|
||||
&info.name,
|
||||
&info.version,
|
||||
&asset_to_load,
|
||||
asset_to_load,
|
||||
)?;
|
||||
|
||||
println!("Resolved path: {}", absolute_secure_path.display());
|
||||
@ -497,7 +495,7 @@ fn parse_encoded_info_from_origin_or_uri_or_referer_or_cache(
|
||||
if let Ok(hex) = parse_from_origin(origin) {
|
||||
if let Ok(info) = process_hex_encoded_json(&hex) {
|
||||
cache_extension_info(&info); // Cache setzen
|
||||
println!("Parsed und gecached aus Origin: {}", hex);
|
||||
println!("Parsed und gecached aus Origin: {hex}");
|
||||
return Ok(info);
|
||||
}
|
||||
}
|
||||
@ -507,17 +505,17 @@ fn parse_encoded_info_from_origin_or_uri_or_referer_or_cache(
|
||||
if let Ok(hex) = parse_from_uri_path(uri_ref) {
|
||||
if let Ok(info) = process_hex_encoded_json(&hex) {
|
||||
cache_extension_info(&info); // Cache setzen
|
||||
println!("Parsed und gecached aus URI: {}", hex);
|
||||
println!("Parsed und gecached aus URI: {hex}");
|
||||
return Ok(info);
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fallback zu Referer-Parsing: {}", referer);
|
||||
println!("Fallback zu Referer-Parsing: {referer}");
|
||||
if !referer.is_empty() && referer != "null" {
|
||||
if let Ok(hex) = parse_from_uri_string(referer) {
|
||||
if let Ok(info) = process_hex_encoded_json(&hex) {
|
||||
cache_extension_info(&info); // Cache setzen
|
||||
println!("Parsed und gecached aus Referer: {}", hex);
|
||||
println!("Parsed und gecached aus Referer: {hex}");
|
||||
return Ok(info);
|
||||
}
|
||||
}
|
||||
@ -609,29 +607,23 @@ fn validate_and_return_hex(segment: &str) -> Result<String, DataProcessingError>
|
||||
Ok(segment.to_string())
|
||||
}
|
||||
|
||||
fn encode_hex_for_log(info: &ExtensionInfo) -> String {
|
||||
let json_str = serde_json::to_string(info).unwrap_or_default();
|
||||
hex::encode(json_str.as_bytes())
|
||||
}
|
||||
|
||||
// Helper function to parse extension info from path segments
|
||||
fn parse_extension_info_from_path(
|
||||
path_str: &str,
|
||||
origin: &str,
|
||||
uri_ref: &Uri,
|
||||
referer: &str,
|
||||
allowed_origin: &str,
|
||||
) -> Result<(ExtensionInfo, Vec<String>), Box<dyn std::error::Error>> {
|
||||
let mut segments_iter = path_str.split('/').filter(|s| !s.is_empty());
|
||||
|
||||
match (segments_iter.next(), segments_iter.next(), segments_iter.next()) {
|
||||
(Some(public_key), Some(name), Some(version)) => {
|
||||
println!("=== Extension Protocol Handler (path-based) ===");
|
||||
println!("Full URI: {}", uri_ref);
|
||||
println!("Full URI: {uri_ref}");
|
||||
println!("Parsed from path segments:");
|
||||
println!(" PublicKey: {}", public_key);
|
||||
println!(" Name: {}", name);
|
||||
println!(" Version: {}", version);
|
||||
println!(" PublicKey: {public_key}");
|
||||
println!(" Name: {name}");
|
||||
println!(" Version: {version}");
|
||||
|
||||
let info = ExtensionInfo {
|
||||
public_key: public_key.to_string(),
|
||||
@ -653,7 +645,7 @@ fn parse_extension_info_from_path(
|
||||
) {
|
||||
Ok(decoded) => {
|
||||
println!("=== Extension Protocol Handler (legacy hex format) ===");
|
||||
println!("Full URI: {}", uri_ref);
|
||||
println!("Full URI: {uri_ref}");
|
||||
println!("Decoded info:");
|
||||
println!(" PublicKey: {}", decoded.public_key);
|
||||
println!(" Name: {}", decoded.name);
|
||||
@ -670,8 +662,8 @@ fn parse_extension_info_from_path(
|
||||
Ok((decoded, segments))
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Fehler beim Parsen (alle Fallbacks): {}", e);
|
||||
Err(format!("Ungültige Anfrage: {}", e).into())
|
||||
eprintln!("Fehler beim Parsen (alle Fallbacks): {e}");
|
||||
Err(format!("Ungültige Anfrage: {e}").into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -70,8 +70,7 @@ pub fn copy_directory(
|
||||
use std::path::PathBuf;
|
||||
|
||||
println!(
|
||||
"Kopiere Verzeichnis von '{}' nach '{}'",
|
||||
source, destination
|
||||
"Kopiere Verzeichnis von '{source}' nach '{destination}'"
|
||||
);
|
||||
|
||||
let source_path = PathBuf::from(&source);
|
||||
@ -81,7 +80,7 @@ pub fn copy_directory(
|
||||
return Err(ExtensionError::Filesystem {
|
||||
source: std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
format!("Source directory '{}' not found", source),
|
||||
format!("Source directory '{source}' not found"),
|
||||
),
|
||||
});
|
||||
}
|
||||
@ -93,7 +92,7 @@ pub fn copy_directory(
|
||||
|
||||
fs_extra::dir::copy(&source_path, &destination_path, &options).map_err(|e| {
|
||||
ExtensionError::Filesystem {
|
||||
source: std::io::Error::new(std::io::ErrorKind::Other, e.to_string()),
|
||||
source: std::io::Error::other(e.to_string()),
|
||||
}
|
||||
})?;
|
||||
Ok(())
|
||||
|
||||
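
The `options` value passed to `fs_extra::dir::copy` above is built outside this hunk, so the flags in the following sketch are assumptions; it only illustrates a plausible shape for such a copy helper:

use fs_extra::dir::{copy, CopyOptions};
use std::io;

/// Plausible sketch of the directory copy above; the real `options` are not
/// visible in this diff, so `overwrite` and `copy_inside` are assumed here.
fn copy_dir_contents(source: &str, destination: &str) -> io::Result<()> {
    let mut options = CopyOptions::new();
    options.overwrite = true; // assumption: replace an existing extension version
    options.copy_inside = true; // assumption: copy contents rather than nesting

    copy(source, destination, &options)
        .map(|_| ())
        .map_err(|e| io::Error::other(e.to_string()))
}

fn main() -> io::Result<()> {
    let src = std::env::temp_dir().join("haex_copy_demo_src");
    let dst = std::env::temp_dir().join("haex_copy_demo_dst");
    std::fs::create_dir_all(&src)?;
    std::fs::write(src.join("file.txt"), "hello")?;
    copy_dir_contents(src.to_str().unwrap(), dst.to_str().unwrap())?;
    println!("copied to {}", dst.display());
    Ok(())
}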
@@ -18,20 +18,20 @@ impl ExtensionCrypto {
|
||||
signature_hex: &str,
|
||||
) -> Result<(), String> {
|
||||
let public_key_bytes =
|
||||
hex::decode(public_key_hex).map_err(|e| format!("Invalid public key: {}", e))?;
|
||||
hex::decode(public_key_hex).map_err(|e| format!("Invalid public key: {e}"))?;
|
||||
let public_key = VerifyingKey::from_bytes(&public_key_bytes.try_into().unwrap())
|
||||
.map_err(|e| format!("Invalid public key: {}", e))?;
|
||||
.map_err(|e| format!("Invalid public key: {e}"))?;
|
||||
|
||||
let signature_bytes =
|
||||
hex::decode(signature_hex).map_err(|e| format!("Invalid signature: {}", e))?;
|
||||
hex::decode(signature_hex).map_err(|e| format!("Invalid signature: {e}"))?;
|
||||
let signature = Signature::from_bytes(&signature_bytes.try_into().unwrap());
|
||||
|
||||
let content_hash =
|
||||
hex::decode(content_hash_hex).map_err(|e| format!("Invalid content hash: {}", e))?;
|
||||
hex::decode(content_hash_hex).map_err(|e| format!("Invalid content hash: {e}"))?;
|
||||
|
||||
public_key
|
||||
.verify(&content_hash, &signature)
|
||||
.map_err(|e| format!("Signature verification failed: {}", e))
|
||||
.map_err(|e| format!("Signature verification failed: {e}"))
|
||||
}
|
||||
|
||||
/// Berechnet Hash eines Verzeichnisses (für Verifikation)
|
||||
@ -48,7 +48,9 @@ impl ExtensionCrypto {
|
||||
let relative = path.strip_prefix(dir)
|
||||
.unwrap_or(&path)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
.to_string()
|
||||
// Normalisiere Pfad-Separatoren zu Unix-Style (/) für plattformübergreifende Konsistenz
|
||||
.replace('\\', "/");
|
||||
(relative, path)
|
||||
})
|
||||
.collect();
|
||||
@ -56,16 +58,30 @@ impl ExtensionCrypto {
|
||||
// 3. Sortiere nach relativen Pfaden
|
||||
relative_files.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
|
||||
println!("=== Files to hash ({}): ===", relative_files.len());
|
||||
for (rel, _) in &relative_files {
|
||||
println!(" - {}", rel);
|
||||
}
|
||||
|
||||
let mut hasher = Sha256::new();
|
||||
|
||||
// Canonicalize manifest path for comparison (important on Android where symlinks may differ)
|
||||
// Also ensure the canonical path is still within the allowed directory (security check)
|
||||
let canonical_manifest_path = manifest_path.canonicalize()
|
||||
.unwrap_or_else(|_| manifest_path.to_path_buf());
|
||||
|
||||
// Security: Verify canonical manifest path is still within dir
|
||||
let canonical_dir = dir.canonicalize()
|
||||
.unwrap_or_else(|_| dir.to_path_buf());
|
||||
|
||||
if !canonical_manifest_path.starts_with(&canonical_dir) {
|
||||
return Err(ExtensionError::ManifestError {
|
||||
reason: "Manifest path resolves outside of extension directory (potential path traversal)".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
// 4. Inhalte der sortierten Dateien hashen
|
||||
for (_relative, file_path) in relative_files {
|
||||
if file_path == manifest_path {
|
||||
// Canonicalize file_path for comparison
|
||||
let canonical_file_path = file_path.canonicalize()
|
||||
.unwrap_or_else(|_| file_path.clone());
|
||||
|
||||
if canonical_file_path == canonical_manifest_path {
|
||||
// FÜR DIE MANIFEST.JSON:
|
||||
let content_str = fs::read_to_string(&file_path)
|
||||
.map_err(|e| ExtensionError::Filesystem { source: e })?;
|
||||
@ -74,7 +90,7 @@ impl ExtensionCrypto {
|
||||
let mut manifest: serde_json::Value =
|
||||
serde_json::from_str(&content_str).map_err(|e| {
|
||||
ExtensionError::ManifestError {
|
||||
reason: format!("Cannot parse manifest JSON: {}", e),
|
||||
reason: format!("Cannot parse manifest JSON: {e}"),
|
||||
}
|
||||
})?;
|
||||
|
||||
@ -91,11 +107,15 @@ impl ExtensionCrypto {
|
||||
let canonical_manifest_content =
|
||||
serde_json::to_string_pretty(&manifest).map_err(|e| {
|
||||
ExtensionError::ManifestError {
|
||||
reason: format!("Failed to serialize manifest: {}", e),
|
||||
reason: format!("Failed to serialize manifest: {e}"),
|
||||
}
|
||||
})?;
|
||||
println!("canonical_manifest_content: {}", canonical_manifest_content);
|
||||
hasher.update(canonical_manifest_content.as_bytes());
|
||||
|
||||
// Normalisiere Zeilenenden zu Unix-Style (\n), wie Node.js JSON.stringify es macht
|
||||
// Dies ist wichtig für plattformübergreifende Konsistenz (Desktop vs Android)
|
||||
let normalized_content = canonical_manifest_content.replace("\r\n", "\n");
|
||||
|
||||
hasher.update(normalized_content.as_bytes());
|
||||
} else {
|
||||
// FÜR ALLE ANDEREN DATEIEN:
|
||||
let content =
|
||||
|
||||
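
The hunks above normalize path separators to `/` and line endings to `\n` before hashing so desktop (Windows) and Android builds agree on the content hash. A minimal sketch of why that matters; whether the relative path itself is fed to the hasher is not visible in this hunk, so it is included here purely for illustration:

use sha2::{Digest, Sha256};

/// Hash one entry after the same normalizations used above: Unix path
/// separators and Unix line endings.
fn normalized_entry_hash(relative_path: &str, manifest_json: &str) -> String {
    let path = relative_path.replace('\\', "/");
    let content = manifest_json.replace("\r\n", "\n");

    let mut hasher = Sha256::new();
    hasher.update(path.as_bytes());
    hasher.update(content.as_bytes());
    hex::encode(hasher.finalize())
}

fn main() {
    let windows = normalized_entry_hash(
        "haextension\\manifest.json",
        "{\r\n  \"name\": \"demo\"\r\n}",
    );
    let unix = normalized_entry_hash("haextension/manifest.json", "{\n  \"name\": \"demo\"\n}");
    assert_eq!(windows, unix); // both platforms agree after normalization
    println!("{unix}");
}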
@@ -3,7 +3,7 @@
|
||||
use crate::crdt::hlc::HlcService;
|
||||
use crate::crdt::transformer::CrdtTransformer;
|
||||
use crate::crdt::trigger;
|
||||
use crate::database::core::{convert_value_ref_to_json, parse_sql_statements, ValueConverter};
|
||||
use crate::database::core::{convert_value_ref_to_json, parse_sql_statements};
|
||||
use crate::database::error::DatabaseError;
|
||||
use rusqlite::{params_from_iter, types::Value as SqliteValue, ToSql, Transaction};
|
||||
use serde_json::Value as JsonValue;
|
||||
@@ -52,19 +52,25 @@ impl SqlExecutor {
        }

        let sql_str = statement.to_string();
        eprintln!("DEBUG: Transformed execute SQL: {}", sql_str);
        eprintln!("DEBUG: Transformed execute SQL: {sql_str}");

        // Führe Statement aus
        tx.execute(&sql_str, params)
            .map_err(|e| DatabaseError::ExecutionError {
                sql: sql_str.clone(),
                table: None,
                reason: format!("Execute failed: {}", e),
                reason: format!("Execute failed: {e}"),
            })?;

        // Trigger-Logik für CREATE TABLE
        if let Statement::CreateTable(create_table_details) = statement {
            let table_name_str = create_table_details.name.to_string();
            let raw_name = create_table_details.name.to_string();
            // Remove quotes from table name
            let table_name_str = raw_name
                .trim_matches('"')
                .trim_matches('`')
                .to_string();
            eprintln!("DEBUG: Setting up triggers for table: {table_name_str}");
            trigger::setup_triggers_for_table(tx, &table_name_str, false)?;
        }
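
`sqlparser` renders quoted identifiers with their quotes, which is exactly what the trimming above strips before trigger setup. A tiny standalone sketch of the same stripping:

/// Strip the double quotes or backticks that sqlparser keeps on quoted table
/// names, so they do not leak into generated trigger names.
fn strip_ident_quotes(raw: &str) -> String {
    raw.trim_matches('"').trim_matches('`').to_string()
}

fn main() {
    assert_eq!(strip_ident_quotes("\"passwords\""), "passwords");
    assert_eq!(strip_ident_quotes("`passwords`"), "passwords");
    assert_eq!(strip_ident_quotes("passwords"), "passwords");
    println!("ok");
}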
|
||||
@@ -109,7 +115,7 @@ impl SqlExecutor {
|
||||
}
|
||||
|
||||
let sql_str = statement.to_string();
|
||||
eprintln!("DEBUG: Transformed SQL (with RETURNING): {}", sql_str);
|
||||
eprintln!("DEBUG: Transformed SQL (with RETURNING): {sql_str}");
|
||||
|
||||
// Prepare und query ausführen
|
||||
let mut stmt = tx
|
||||
@ -158,7 +164,13 @@ impl SqlExecutor {
|
||||
|
||||
// Trigger-Logik für CREATE TABLE
|
||||
if let Statement::CreateTable(create_table_details) = statement {
|
||||
let table_name_str = create_table_details.name.to_string();
|
||||
let raw_name = create_table_details.name.to_string();
|
||||
// Remove quotes from table name
|
||||
let table_name_str = raw_name
|
||||
.trim_matches('"')
|
||||
.trim_matches('`')
|
||||
.to_string();
|
||||
eprintln!("DEBUG: Setting up triggers for table (RETURNING): {table_name_str}");
|
||||
trigger::setup_triggers_for_table(tx, &table_name_str, false)?;
|
||||
}
|
||||
|
||||
@ -174,7 +186,7 @@ impl SqlExecutor {
|
||||
) -> Result<HashSet<String>, DatabaseError> {
|
||||
let sql_params: Vec<SqliteValue> = params
|
||||
.iter()
|
||||
.map(|v| crate::database::core::ValueConverter::json_to_rusqlite_value(v))
|
||||
.map(crate::database::core::ValueConverter::json_to_rusqlite_value)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let param_refs: Vec<&dyn ToSql> = sql_params.iter().map(|p| p as &dyn ToSql).collect();
|
||||
Self::execute_internal_typed(tx, hlc_service, sql, ¶m_refs)
|
||||
@ -189,7 +201,7 @@ impl SqlExecutor {
|
||||
) -> Result<(HashSet<String>, Vec<Vec<JsonValue>>), DatabaseError> {
|
||||
let sql_params: Vec<SqliteValue> = params
|
||||
.iter()
|
||||
.map(|v| crate::database::core::ValueConverter::json_to_rusqlite_value(v))
|
||||
.map(crate::database::core::ValueConverter::json_to_rusqlite_value)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let param_refs: Vec<&dyn ToSql> = sql_params.iter().map(|p| p as &dyn ToSql).collect();
|
||||
Self::query_internal_typed(tx, hlc_service, sql, ¶m_refs)
|
||||
@ -240,12 +252,12 @@ impl SqlExecutor {
|
||||
let stmt_to_execute = ast_vec.pop().unwrap();
|
||||
let transformed_sql = stmt_to_execute.to_string();
|
||||
|
||||
eprintln!("DEBUG: SELECT (no transformation): {}", transformed_sql);
|
||||
eprintln!("DEBUG: SELECT (no transformation): {transformed_sql}");
|
||||
|
||||
// Convert JSON params to SQLite values
|
||||
let sql_params: Vec<SqliteValue> = params
|
||||
.iter()
|
||||
.map(|v| crate::database::core::ValueConverter::json_to_rusqlite_value(v))
|
||||
.map(crate::database::core::ValueConverter::json_to_rusqlite_value)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let mut prepared_stmt = conn.prepare(&transformed_sql)?;
|
||||
|
||||
@ -5,6 +5,7 @@ use crate::crdt::transformer::CrdtTransformer;
|
||||
use crate::crdt::trigger;
|
||||
use crate::database::core::{parse_sql_statements, with_connection, ValueConverter};
|
||||
use crate::database::error::DatabaseError;
|
||||
use crate::extension::database::executor::SqlExecutor;
|
||||
use crate::extension::error::ExtensionError;
|
||||
use crate::extension::permissions::validator::SqlPermissionValidator;
|
||||
use crate::AppState;
|
||||
@ -12,10 +13,8 @@ use crate::AppState;
|
||||
use rusqlite::params_from_iter;
|
||||
use rusqlite::types::Value as SqlValue;
|
||||
use rusqlite::Transaction;
|
||||
use serde_json::json;
|
||||
use serde_json::Value as JsonValue;
|
||||
use sqlparser::ast::{Statement, TableFactor, TableObject};
|
||||
use std::collections::HashSet;
|
||||
use tauri::State;
|
||||
|
||||
/// Führt Statements mit korrekter Parameter-Bindung aus
|
||||
@ -110,7 +109,7 @@ pub async fn extension_sql_execute(
|
||||
public_key: String,
|
||||
name: String,
|
||||
state: State<'_, AppState>,
|
||||
) -> Result<Vec<String>, ExtensionError> {
|
||||
) -> Result<Vec<Vec<JsonValue>>, ExtensionError> {
|
||||
// Get extension to retrieve its ID
|
||||
let extension = state
|
||||
.extension_manager
|
||||
@ -129,58 +128,98 @@ pub async fn extension_sql_execute(
|
||||
// SQL parsing
|
||||
let mut ast_vec = parse_sql_statements(sql)?;
|
||||
|
||||
if ast_vec.len() != 1 {
|
||||
return Err(ExtensionError::Database {
|
||||
source: DatabaseError::ExecutionError {
|
||||
sql: sql.to_string(),
|
||||
reason: "extension_sql_execute should only receive a single SQL statement"
|
||||
.to_string(),
|
||||
table: None,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
let mut statement = ast_vec.pop().unwrap();
|
||||
|
||||
// Check if statement has RETURNING clause
|
||||
let has_returning = crate::database::core::statement_has_returning(&statement);
|
||||
|
||||
// Database operation
|
||||
with_connection(&state.db, |conn| {
|
||||
let tx = conn.transaction().map_err(DatabaseError::from)?;
|
||||
|
||||
let transformer = CrdtTransformer::new();
|
||||
let executor = StatementExecutor::new(&tx);
|
||||
|
||||
// Get HLC service reference
|
||||
let hlc_service = state.hlc.lock().map_err(|_| DatabaseError::MutexPoisoned {
|
||||
reason: "Failed to lock HLC service".to_string(),
|
||||
})?;
|
||||
|
||||
// Generate HLC timestamp
|
||||
let hlc_timestamp = state
|
||||
.hlc
|
||||
.lock()
|
||||
.unwrap()
|
||||
.new_timestamp_and_persist(&tx)
|
||||
.map_err(|e| DatabaseError::HlcError {
|
||||
reason: e.to_string(),
|
||||
})?;
|
||||
let hlc_timestamp =
|
||||
hlc_service
|
||||
.new_timestamp_and_persist(&tx)
|
||||
.map_err(|e| DatabaseError::HlcError {
|
||||
reason: e.to_string(),
|
||||
})?;
|
||||
|
||||
// Transform statements
|
||||
let mut modified_schema_tables = HashSet::new();
|
||||
for statement in &mut ast_vec {
|
||||
if let Some(table_name) =
|
||||
transformer.transform_execute_statement(statement, &hlc_timestamp)?
|
||||
{
|
||||
modified_schema_tables.insert(table_name);
|
||||
}
|
||||
}
|
||||
// Transform statement
|
||||
transformer.transform_execute_statement(&mut statement, &hlc_timestamp)?;
|
||||
|
||||
// Convert parameters
|
||||
// Convert parameters to references
|
||||
let sql_values = ValueConverter::convert_params(¶ms)?;
|
||||
let param_refs: Vec<&dyn rusqlite::ToSql> = sql_values
|
||||
.iter()
|
||||
.map(|v| v as &dyn rusqlite::ToSql)
|
||||
.collect();
|
||||
|
||||
// Execute statements
|
||||
for statement in ast_vec {
|
||||
executor.execute_statement_with_params(&statement, &sql_values)?;
|
||||
let result = if has_returning {
|
||||
// Use query_internal for statements with RETURNING
|
||||
let (_, rows) = SqlExecutor::query_internal_typed(
|
||||
&tx,
|
||||
&hlc_service,
|
||||
&statement.to_string(),
|
||||
¶m_refs,
|
||||
)?;
|
||||
rows
|
||||
} else {
|
||||
// Use execute_internal for statements without RETURNING
|
||||
SqlExecutor::execute_internal_typed(
|
||||
&tx,
|
||||
&hlc_service,
|
||||
&statement.to_string(),
|
||||
¶m_refs,
|
||||
)?;
|
||||
vec![]
|
||||
};
|
||||
|
||||
if let Statement::CreateTable(create_table_details) = statement {
|
||||
let table_name_str = create_table_details.name.to_string();
|
||||
println!(
|
||||
"Table '{}' created by extension, setting up CRDT triggers...",
|
||||
table_name_str
|
||||
);
|
||||
trigger::setup_triggers_for_table(&tx, &table_name_str, false)?;
|
||||
println!(
|
||||
"Triggers for table '{}' successfully created.",
|
||||
table_name_str
|
||||
);
|
}
// Handle CREATE TABLE trigger setup
if let Statement::CreateTable(ref create_table_details) = statement {
// Extract table name and remove quotes (both " and `)
let raw_name = create_table_details.name.to_string();
println!("DEBUG: Raw table name from AST: {raw_name:?}");
println!(
"DEBUG: Raw table name chars: {:?}",
raw_name.chars().collect::<Vec<_>>()
);

let table_name_str = raw_name.trim_matches('"').trim_matches('`').to_string();

println!("DEBUG: Cleaned table name: {table_name_str:?}");
println!(
"DEBUG: Cleaned table name chars: {:?}",
table_name_str.chars().collect::<Vec<_>>()
);

println!("Table '{table_name_str}' created by extension, setting up CRDT triggers...");
trigger::setup_triggers_for_table(&tx, &table_name_str, false)?;
println!("Triggers for table '{table_name_str}' successfully created.");
}

// Commit transaction
tx.commit().map_err(DatabaseError::from)?;

Ok(modified_schema_tables.into_iter().collect())
Ok(result)
})
.map_err(ExtensionError::from)
}
@@ -192,7 +231,7 @@ pub async fn extension_sql_select(
public_key: String,
name: String,
state: State<'_, AppState>,
) -> Result<Vec<JsonValue>, ExtensionError> {
) -> Result<Vec<Vec<JsonValue>>, ExtensionError> {
// Get extension to retrieve its ID
let extension = state
.extension_manager
@@ -229,10 +268,9 @@ pub async fn extension_sql_select(
}
}

// Database operation
// Database operation - return Vec<Vec<JsonValue>> like sql_select_with_crdt
with_connection(&state.db, |conn| {
let sql_params = ValueConverter::convert_params(&params)?;
// Hard Delete: Keine SELECT-Transformation mehr nötig
let stmt_to_execute = ast_vec.pop().unwrap();
let transformed_sql = stmt_to_execute.to_string();

@@ -245,52 +283,34 @@ pub async fn extension_sql_select(
table: None,
})?;

let column_names: Vec<String> = prepared_stmt
.column_names()
.into_iter()
.map(|s| s.to_string())
.collect();

let rows = prepared_stmt
.query_map(params_from_iter(sql_params.iter()), |row| {
row_to_json_value(row, &column_names)
})
let num_columns = prepared_stmt.column_count();
let mut rows = prepared_stmt
.query(params_from_iter(sql_params.iter()))
.map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;

let mut results = Vec::new();
for row_result in rows {
results.push(row_result.map_err(|e| DatabaseError::RowProcessingError {
reason: e.to_string(),
})?);
let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();

while let Some(row) = rows.next().map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})? {
let mut row_values: Vec<JsonValue> = Vec::new();
for i in 0..num_columns {
let value_ref = row.get_ref(i).map_err(|e| DatabaseError::QueryError {
reason: e.to_string(),
})?;
let json_value = crate::database::core::convert_value_ref_to_json(value_ref)?;
row_values.push(json_value);
}
result_vec.push(row_values);
}

Ok(results)
Ok(result_vec)
})
.map_err(ExtensionError::from)
}
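The new code path above relies on `crate::database::core::convert_value_ref_to_json`, which is not part of this diff. As a rough sketch of what such a helper typically does with rusqlite's `ValueRef` (an assumption based on the call site; the real function returns the crate's `DatabaseError` rather than a plain `String`):

```rust
use rusqlite::types::ValueRef;
use serde_json::{json, Value as JsonValue};

// Hypothetical sketch of the helper referenced above, not the project's code.
fn convert_value_ref_to_json(value_ref: ValueRef<'_>) -> Result<JsonValue, String> {
    Ok(match value_ref {
        ValueRef::Null => JsonValue::Null,
        ValueRef::Integer(i) => json!(i),
        ValueRef::Real(f) => json!(f),
        // rusqlite exposes TEXT as raw bytes; decode as UTF-8
        ValueRef::Text(bytes) => json!(String::from_utf8_lossy(bytes)),
        // BLOBs become JSON arrays of bytes, mirroring row_to_json_value below
        ValueRef::Blob(bytes) => json!(bytes.to_vec()),
    })
}
```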

/// Konvertiert eine SQLite-Zeile zu JSON
fn row_to_json_value(
row: &rusqlite::Row,
columns: &[String],
) -> Result<JsonValue, rusqlite::Error> {
let mut map = serde_json::Map::new();
for (i, col_name) in columns.iter().enumerate() {
let value = row.get::<usize, rusqlite::types::Value>(i)?;
let json_value = match value {
rusqlite::types::Value::Null => JsonValue::Null,
rusqlite::types::Value::Integer(i) => json!(i),
rusqlite::types::Value::Real(f) => json!(f),
rusqlite::types::Value::Text(s) => json!(s),
rusqlite::types::Value::Blob(blob) => json!(blob.to_vec()),
};
map.insert(col_name.clone(), json_value);
}
Ok(JsonValue::Object(map))
}

/// Validiert Parameter gegen SQL-Platzhalter
fn validate_params(sql: &str, params: &[JsonValue]) -> Result<(), DatabaseError> {
let total_placeholders = count_sql_placeholders(sql);
@@ -327,20 +347,4 @@ mod tests {
);
assert_eq!(count_sql_placeholders("SELECT * FROM users"), 0);
}

/* #[test]
fn test_truncate_sql() {
let sql = "SELECT * FROM very_long_table_name";
assert_eq!(truncate_sql(sql, 10), "SELECT * F...");
assert_eq!(truncate_sql(sql, 50), sql);
} */

#[test]
fn test_validate_params() {
let params = vec![json!(1), json!("test")];

assert!(validate_params("SELECT * FROM users WHERE id = ? AND name = ?", &params).is_ok());
assert!(validate_params("SELECT * FROM users WHERE id = ?", &params).is_err());
assert!(validate_params("SELECT * FROM users", &params).is_err());
}
}
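`count_sql_placeholders` itself is outside this hunk; the assertions above only pin down its expected behaviour. A minimal sketch consistent with those tests (skipping `?` inside single-quoted literals is an assumption, not something the diff confirms):

```rust
// Hypothetical sketch; the project's helper may handle more cases
// (e.g. numbered placeholders like ?1 or named parameters).
fn count_sql_placeholders(sql: &str) -> usize {
    let mut in_string = false;
    sql.chars()
        .filter(|&c| {
            if c == '\'' {
                in_string = !in_string;
            }
            c == '?' && !in_string
        })
        .count()
}
```

`validate_params` (shown above) would then compare this count against `params.len()` and fail on any mismatch, which is exactly what `test_validate_params` exercises.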

@@ -174,7 +174,7 @@ impl serde::Serialize for ExtensionError {
let mut state = serializer.serialize_struct("ExtensionError", 4)?;

state.serialize_field("code", &self.code())?;
state.serialize_field("type", &format!("{:?}", self))?;
state.serialize_field("type", &format!("{self:?}"))?;
state.serialize_field("message", &self.to_string())?;

if let Some(ext_id) = self.extension_id() {

@@ -133,7 +133,7 @@ fn validate_path_pattern(pattern: &str) -> Result<(), ExtensionError> {
// Check for path traversal attempts
if pattern.contains("../") || pattern.contains("..\\") {
return Err(ExtensionError::SecurityViolation {
reason: format!("Path traversal detected in pattern: {}", pattern),
reason: format!("Path traversal detected in pattern: {pattern}"),
});
}

@@ -143,7 +143,6 @@ fn validate_path_pattern(pattern: &str) -> Result<(), ExtensionError> {
/// Resolves a path pattern to actual filesystem paths using Tauri's BaseDirectory
pub fn resolve_path_pattern(
pattern: &str,
app_handle: &tauri::AppHandle,
) -> Result<(String, String), ExtensionError> {
let (base_var, relative_path) = if let Some(slash_pos) = pattern.find('/') {
(&pattern[..slash_pos], &pattern[slash_pos + 1..])
@@ -177,7 +176,7 @@ pub fn resolve_path_pattern(
"$TEMP" => "Temp",
_ => {
return Err(ExtensionError::ValidationError {
reason: format!("Unknown base directory variable: {}", base_var),
reason: format!("Unknown base directory variable: {base_var}"),
});
}
};

@@ -1,7 +1,7 @@
/// src-tauri/src/extension/mod.rs
use crate::{
extension::{
core::{EditablePermissions, ExtensionInfoResponse, ExtensionPreview},
core::{manager::ExtensionManager, EditablePermissions, ExtensionInfoResponse, ExtensionPreview},
error::ExtensionError,
},
AppState,
@@ -52,7 +52,7 @@ pub async fn get_all_extensions(
.extension_manager
.load_installed_extensions(&app_handle, &state)
.await
.map_err(|e| format!("Failed to load extensions: {:?}", e))?;
.map_err(|e| format!("Failed to load extensions: {e:?}"))?;
/* } */

let mut extensions = Vec::new();
@@ -82,12 +82,13 @@ pub async fn get_all_extensions(

#[tauri::command]
pub async fn preview_extension(
app_handle: AppHandle,
state: State<'_, AppState>,
file_bytes: Vec<u8>,
) -> Result<ExtensionPreview, ExtensionError> {
state
.extension_manager
.preview_extension_internal(file_bytes)
.preview_extension_internal(&app_handle, file_bytes)
.await
}

@@ -291,12 +292,12 @@ pub async fn load_dev_extension(
let (host, port, haextension_dir) = if config_path.exists() {
let config_content =
std::fs::read_to_string(&config_path).map_err(|e| ExtensionError::ValidationError {
reason: format!("Failed to read haextension.config.json: {}", e),
reason: format!("Failed to read haextension.config.json: {e}"),
})?;

let config: HaextensionConfig =
serde_json::from_str(&config_content).map_err(|e| ExtensionError::ValidationError {
reason: format!("Failed to parse haextension.config.json: {}", e),
reason: format!("Failed to parse haextension.config.json: {e}"),
})?;

(config.dev.host, config.dev.port, config.dev.haextension_dir)
@@ -305,47 +306,43 @@ pub async fn load_dev_extension(
(default_host(), default_port(), default_haextension_dir())
};

let dev_server_url = format!("http://{}:{}", host, port);
eprintln!("📡 Dev server URL: {}", dev_server_url);
eprintln!("📁 Haextension directory: {}", haextension_dir);
let dev_server_url = format!("http://{host}:{port}");
eprintln!("📡 Dev server URL: {dev_server_url}");
eprintln!("📁 Haextension directory: {haextension_dir}");

// 1.5. Check if dev server is running
if !check_dev_server_health(&dev_server_url).await {
return Err(ExtensionError::ValidationError {
reason: format!(
"Dev server at {} is not reachable. Please start your dev server first (e.g., 'npm run dev')",
dev_server_url
"Dev server at {dev_server_url} is not reachable. Please start your dev server first (e.g., 'npm run dev')"
),
});
}
eprintln!("✅ Dev server is reachable");

// 2. Build path to manifest: <extension_path>/<haextension_dir>/manifest.json
let manifest_path = extension_path_buf
.join(&haextension_dir)
.join("manifest.json");

// Check if manifest exists
if !manifest_path.exists() {
return Err(ExtensionError::ManifestError {
reason: format!(
"Manifest not found at: {}. Make sure you run 'npx @haexhub/sdk init' first.",
manifest_path.display()
),
});
}
// 2. Validate and build path to manifest: <extension_path>/<haextension_dir>/manifest.json
let manifest_relative_path = format!("{haextension_dir}/manifest.json");
let manifest_path = ExtensionManager::validate_path_in_directory(
&extension_path_buf,
&manifest_relative_path,
true,
)?
.ok_or_else(|| ExtensionError::ManifestError {
reason: format!(
"Manifest not found at: {haextension_dir}/manifest.json. Make sure you run 'npx @haexhub/sdk init' first."
),
})?;

// 3. Read and parse manifest
let manifest_content =
std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError {
reason: format!("Failed to read manifest: {}", e),
reason: format!("Failed to read manifest: {e}"),
})?;

let manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?;

// 4. Generate a unique ID for dev extension: dev_<public_key_first_8>_<name>
let key_prefix = manifest.public_key.chars().take(8).collect::<String>();
let extension_id = format!("dev_{}_{}", key_prefix, manifest.name);
// 4. Generate a unique ID for dev extension: dev_<public_key>_<name>
let extension_id = format!("dev_{}_{}", manifest.public_key, manifest.name);

// 5. Check if dev extension already exists (allow reload)
if let Some(existing) = state
@@ -407,7 +404,7 @@ pub fn remove_dev_extension(

if let Some(id) = to_remove {
dev_exts.remove(&id);
eprintln!("✅ Dev extension removed: {}", name);
eprintln!("✅ Dev extension removed: {name}");
Ok(())
} else {
Err(ExtensionError::NotFound { public_key, name })
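The manifest lookup now goes through `ExtensionManager::validate_path_in_directory`, whose body is not part of this diff. Judging from the call site (base directory, relative path, a flag for whether the file must exist), it is presumably a canonicalize-and-prefix containment check; the sketch below is an assumption, not the project's actual implementation:

```rust
use std::path::{Path, PathBuf};

// Hypothetical sketch of a path-containment check like the one called above.
// Simplification: a missing path with must_exist == false would need a lexical
// check instead; the error type is a plain String here.
fn validate_path_in_directory(
    base_dir: &Path,
    relative: &str,
    must_exist: bool,
) -> Result<Option<PathBuf>, String> {
    let candidate = base_dir.join(relative);
    if must_exist && !candidate.exists() {
        return Ok(None);
    }
    // Canonicalize both sides so `..` segments and symlinks cannot escape the base.
    let base = base_dir
        .canonicalize()
        .map_err(|e| format!("cannot canonicalize base dir: {e}"))?;
    let resolved = candidate
        .canonicalize()
        .map_err(|e| format!("cannot canonicalize path: {e}"))?;
    if resolved.starts_with(&base) {
        Ok(Some(resolved))
    } else {
        Err(format!("path escapes extension directory: {relative}"))
    }
}
```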

@@ -28,8 +28,7 @@ impl PermissionManager {
})?;

let sql = format!(
"INSERT INTO {} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)",
TABLE_EXTENSION_PERMISSIONS
"INSERT INTO {TABLE_EXTENSION_PERMISSIONS} (id, extension_id, resource_type, action, target, constraints, status) VALUES (?, ?, ?, ?, ?, ?, ?)"
);

for perm in permissions {
@@ -76,8 +75,7 @@ impl PermissionManager {
let db_perm: HaexExtensionPermissions = permission.into();

let sql = format!(
"UPDATE {} SET resource_type = ?, action = ?, target = ?, constraints = ?, status = ? WHERE id = ?",
TABLE_EXTENSION_PERMISSIONS
"UPDATE {TABLE_EXTENSION_PERMISSIONS} SET resource_type = ?, action = ?, target = ?, constraints = ?, status = ? WHERE id = ?"
);

let params = params![
@@ -111,7 +109,7 @@ impl PermissionManager {
reason: "Failed to lock HLC service".to_string(),
})?;

let sql = format!("UPDATE {} SET status = ? WHERE id = ?", TABLE_EXTENSION_PERMISSIONS);
let sql = format!("UPDATE {TABLE_EXTENSION_PERMISSIONS} SET status = ? WHERE id = ?");
let params = params![new_status.as_str(), permission_id];
SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params)?;
tx.commit().map_err(DatabaseError::from)
@@ -133,7 +131,7 @@ impl PermissionManager {
})?;

// Echtes DELETE - wird vom CrdtTransformer zu UPDATE umgewandelt
let sql = format!("DELETE FROM {} WHERE id = ?", TABLE_EXTENSION_PERMISSIONS);
let sql = format!("DELETE FROM {TABLE_EXTENSION_PERMISSIONS} WHERE id = ?");
SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params![permission_id])?;
tx.commit().map_err(DatabaseError::from)
}).map_err(ExtensionError::from)
@@ -152,7 +150,7 @@ impl PermissionManager {
reason: "Failed to lock HLC service".to_string(),
})?;

let sql = format!("DELETE FROM {} WHERE extension_id = ?", TABLE_EXTENSION_PERMISSIONS);
let sql = format!("DELETE FROM {TABLE_EXTENSION_PERMISSIONS} WHERE extension_id = ?");
SqlExecutor::execute_internal_typed(&tx, &hlc_service, &sql, params![extension_id])?;
tx.commit().map_err(DatabaseError::from)
}).map_err(ExtensionError::from)
@@ -164,7 +162,7 @@ impl PermissionManager {
hlc_service: &crate::crdt::hlc::HlcService,
extension_id: &str,
) -> Result<(), DatabaseError> {
let sql = format!("DELETE FROM {} WHERE extension_id = ?", TABLE_EXTENSION_PERMISSIONS);
let sql = format!("DELETE FROM {TABLE_EXTENSION_PERMISSIONS} WHERE extension_id = ?");
SqlExecutor::execute_internal_typed(tx, hlc_service, &sql, params![extension_id])?;
Ok(())
}
@@ -174,7 +172,7 @@ impl PermissionManager {
extension_id: &str,
) -> Result<Vec<ExtensionPermission>, ExtensionError> {
with_connection(&app_state.db, |conn| {
let sql = format!("SELECT * FROM {} WHERE extension_id = ?", TABLE_EXTENSION_PERMISSIONS);
let sql = format!("SELECT * FROM {TABLE_EXTENSION_PERMISSIONS} WHERE extension_id = ?");
let mut stmt = conn.prepare(&sql).map_err(DatabaseError::from)?;

let perms_iter = stmt.query_map(params![extension_id], |row| {
@@ -197,6 +195,30 @@ impl PermissionManager {
action: Action,
table_name: &str,
) -> Result<(), ExtensionError> {
// Remove quotes from table name if present (from SDK's getTableName())
let clean_table_name = table_name.trim_matches('"');

// Auto-allow: Extensions have full access to their own tables
// Table format: {publicKey}__{extensionName}__{tableName}
// Extension ID format: dev_{publicKey}_{extensionName} or {publicKey}_{extensionName}

// Get the extension to check if this is its own table
let extension = app_state
.extension_manager
.get_extension(extension_id)
.ok_or_else(|| ExtensionError::ValidationError {
reason: format!("Extension with ID {extension_id} not found"),
})?;

// Build expected table prefix: {publicKey}__{extensionName}__
let expected_prefix = format!("{}__{}__", extension.manifest.public_key, extension.manifest.name);

if clean_table_name.starts_with(&expected_prefix) {
// This is the extension's own table - auto-allow
return Ok(());
}

// Not own table - check explicit permissions
let permissions = Self::get_permissions(app_state, extension_id).await?;

let has_permission = permissions
@@ -205,7 +227,7 @@ impl PermissionManager {
.filter(|perm| perm.resource_type == ResourceType::Db)
.filter(|perm| perm.action == action) // action ist nicht mehr Option
.any(|perm| {
if perm.target != "*" && perm.target != table_name {
if perm.target != "*" && perm.target != clean_table_name {
return false;
}
true
@@ -214,8 +236,8 @@ impl PermissionManager {
if !has_permission {
return Err(ExtensionError::permission_denied(
extension_id,
&format!("{:?}", action),
&format!("database table '{}'", table_name),
&format!("{action:?}"),
&format!("database table '{table_name}'"),
));
}

@@ -391,7 +413,7 @@ impl PermissionManager {
"db" => Ok(ResourceType::Db),
"shell" => Ok(ResourceType::Shell),
_ => Err(DatabaseError::SerializationError {
reason: format!("Unknown resource type: {}", s),
reason: format!("Unknown resource type: {s}"),
}),
}
}
@@ -399,8 +421,7 @@ impl PermissionManager {


fn matches_path_pattern(pattern: &str, path: &str) -> bool {
if pattern.ends_with("/*") {
let prefix = &pattern[..pattern.len() - 2];
if let Some(prefix) = pattern.strip_suffix("/*") {
return path.starts_with(prefix);
}
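Looking back at `check_table_permission` above: the auto-allow rule reduces to a pure prefix check on the table name. A tiny illustration with made-up values (the key and extension name are hypothetical, not taken from the project):

```rust
fn main() {
    // Hypothetical values, for illustration only.
    let public_key = "a1b2c3";
    let name = "notes";
    let expected_prefix = format!("{public_key}__{name}__"); // "a1b2c3__notes__"

    // Own table: auto-allowed without an explicit permission entry.
    assert!("a1b2c3__notes__todos".starts_with(&expected_prefix));
    // Foreign table: falls through to the explicit permission lookup.
    assert!(!"ffeedd__calendar__events".starts_with(&expected_prefix));
}
```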
|
||||
|
||||
|
||||
@ -267,7 +267,7 @@ impl ResourceType {
|
||||
"db" => Ok(ResourceType::Db),
|
||||
"shell" => Ok(ResourceType::Shell),
|
||||
_ => Err(ExtensionError::ValidationError {
|
||||
reason: format!("Unknown resource type: {}", s),
|
||||
reason: format!("Unknown resource type: {s}"),
|
||||
}),
|
||||
}
|
||||
}
|
||||
@ -301,7 +301,7 @@ impl Action {
|
||||
ResourceType::Fs => Ok(Action::Filesystem(FsAction::from_str(s)?)),
|
||||
ResourceType::Http => {
|
||||
let action: HttpAction =
|
||||
serde_json::from_str(&format!("\"{}\"", s)).map_err(|_| {
|
||||
serde_json::from_str(&format!("\"{s}\"")).map_err(|_| {
|
||||
ExtensionError::InvalidActionString {
|
||||
input: s.to_string(),
|
||||
resource_type: "http".to_string(),
|
||||
@ -329,7 +329,7 @@ impl PermissionStatus {
|
||||
"granted" => Ok(PermissionStatus::Granted),
|
||||
"denied" => Ok(PermissionStatus::Denied),
|
||||
_ => Err(ExtensionError::ValidationError {
|
||||
reason: format!("Unknown permission status: {}", s),
|
||||
reason: format!("Unknown permission status: {s}"),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@ -17,7 +17,7 @@ impl SqlPermissionValidator {
|
||||
fn is_own_table(extension_id: &str, table_name: &str) -> bool {
|
||||
// Tabellennamen sind im Format: {keyHash}_{extensionName}_{tableName}
|
||||
// extension_id ist der keyHash der Extension
|
||||
table_name.starts_with(&format!("{}_", extension_id))
|
||||
table_name.starts_with(&format!("{extension_id}_"))
|
||||
}
|
||||
|
||||
/// Validiert ein SQL-Statement gegen die Permissions einer Extension
|
||||
@ -45,7 +45,7 @@ impl SqlPermissionValidator {
|
||||
Self::validate_schema_statement(app_state, extension_id, &statement).await
|
||||
}
|
||||
_ => Err(ExtensionError::ValidationError {
|
||||
reason: format!("Statement type not allowed: {}", sql),
|
||||
reason: format!("Statement type not allowed: {sql}"),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@ -26,7 +26,7 @@ pub fn run() {
|
||||
let state = app_handle.state::<AppState>();
|
||||
|
||||
// Rufe den Handler mit allen benötigten Parametern auf
|
||||
match extension::core::extension_protocol_handler(state, &app_handle, &request) {
|
||||
match extension::core::extension_protocol_handler(state, app_handle, &request) {
|
||||
Ok(response) => response,
|
||||
Err(e) => {
|
||||
eprintln!(
|
||||
@ -38,11 +38,10 @@ pub fn run() {
|
||||
.status(500)
|
||||
.header("Content-Type", "text/plain")
|
||||
.body(Vec::from(format!(
|
||||
"Interner Serverfehler im Protokollhandler: {}",
|
||||
e
|
||||
"Interner Serverfehler im Protokollhandler: {e}"
|
||||
)))
|
||||
.unwrap_or_else(|build_err| {
|
||||
eprintln!("Konnte Fehler-Response nicht erstellen: {}", build_err);
|
||||
eprintln!("Konnte Fehler-Response nicht erstellen: {build_err}");
|
||||
tauri::http::Response::builder()
|
||||
.status(500)
|
||||
.body(Vec::new())
|
||||
@ -68,6 +67,7 @@ pub fn run() {
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
database::create_encrypted_database,
|
||||
database::delete_vault,
|
||||
database::move_vault_to_trash,
|
||||
database::list_vaults,
|
||||
database::open_encrypted_database,
|
||||
database::sql_execute_with_crdt,
|
||||
|
||||
@ -1,13 +1,13 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "haex-hub",
|
||||
"version": "0.1.0",
|
||||
"version": "0.1.4",
|
||||
"identifier": "space.haex.hub",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm dev",
|
||||
"devUrl": "http://localhost:3003",
|
||||
"beforeBuildCommand": "pnpm generate",
|
||||
"frontendDist": "../dist"
|
||||
"frontendDist": "../.output/public"
|
||||
},
|
||||
|
||||
"app": {
|
||||
@ -20,16 +20,21 @@
|
||||
],
|
||||
"security": {
|
||||
"csp": {
|
||||
"default-src": ["'self'", "http://tauri.localhost", "haex-extension:"],
|
||||
"default-src": ["'self'", "http://tauri.localhost", "https://tauri.localhost", "asset:", "haex-extension:"],
|
||||
"script-src": [
|
||||
"'self'",
|
||||
"http://tauri.localhost",
|
||||
"https://tauri.localhost",
|
||||
"asset:",
|
||||
"haex-extension:",
|
||||
"'wasm-unsafe-eval'"
|
||||
"'wasm-unsafe-eval'",
|
||||
"'unsafe-inline'"
|
||||
],
|
||||
"style-src": [
|
||||
"'self'",
|
||||
"http://tauri.localhost",
|
||||
"https://tauri.localhost",
|
||||
"asset:",
|
||||
"haex-extension:",
|
||||
"'unsafe-inline'"
|
||||
],
|
||||
@ -44,20 +49,22 @@
|
||||
"img-src": [
|
||||
"'self'",
|
||||
"http://tauri.localhost",
|
||||
"https://tauri.localhost",
|
||||
"asset:",
|
||||
"haex-extension:",
|
||||
"data:",
|
||||
"blob:"
|
||||
],
|
||||
"font-src": ["'self'", "http://tauri.localhost", "haex-extension:"],
|
||||
"font-src": ["'self'", "http://tauri.localhost", "https://tauri.localhost", "asset:", "haex-extension:"],
|
||||
"object-src": ["'none'"],
|
||||
"media-src": ["'self'", "http://tauri.localhost", "haex-extension:"],
|
||||
"media-src": ["'self'", "http://tauri.localhost", "https://tauri.localhost", "asset:", "haex-extension:"],
|
||||
"frame-src": ["haex-extension:"],
|
||||
"frame-ancestors": ["'none'"],
|
||||
"base-uri": ["'self'"]
|
||||
},
|
||||
"assetProtocol": {
|
||||
"enable": true,
|
||||
"scope": ["$APPDATA", "$RESOURCE"]
|
||||
"scope": ["$APPDATA", "$RESOURCE", "$APPLOCALDATA/**"]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@ -3,6 +3,8 @@ export default defineAppConfig({
|
||||
colors: {
|
||||
primary: 'sky',
|
||||
secondary: 'fuchsia',
|
||||
warning: 'yellow',
|
||||
danger: 'red',
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
@ -14,18 +14,44 @@
|
||||
@apply cursor-not-allowed;
|
||||
}
|
||||
|
||||
/* Define safe-area-insets as CSS custom properties for JavaScript access */
|
||||
:root {
|
||||
--safe-area-inset-top: env(safe-area-inset-top, 0px);
|
||||
--safe-area-inset-bottom: env(safe-area-inset-bottom, 0px);
|
||||
--safe-area-inset-left: env(safe-area-inset-left, 0px);
|
||||
--safe-area-inset-right: env(safe-area-inset-right, 0px);
|
||||
}
|
||||
|
||||
/* Verhindere Scrolling auf html und body */
|
||||
html {
|
||||
overflow: hidden;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
height: 100dvh;
|
||||
height: 100vh; /* Fallback */
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
overflow: hidden;
|
||||
margin: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
#__nuxt {
|
||||
/* Stellt sicher, dass die App immer die volle Höhe hat */
|
||||
min-height: 100vh;
|
||||
/* Volle Höhe des body */
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
|
||||
/* Sorgt dafür, dass Padding die Höhe nicht sprengt */
|
||||
@apply box-border;
|
||||
/* Safe-Area Paddings auf root element - damit ALLES davon profitiert */
|
||||
padding-top: var(--safe-area-inset-top);
|
||||
padding-bottom: var(--safe-area-inset-bottom);
|
||||
padding-left: var(--safe-area-inset-left);
|
||||
padding-right: var(--safe-area-inset-right);
|
||||
|
||||
/* Die Safe-Area Paddings */
|
||||
padding-top: env(safe-area-inset-top);
|
||||
padding-bottom: env(safe-area-inset-bottom);
|
||||
padding-left: env(safe-area-inset-left);
|
||||
padding-right: env(safe-area-inset-right);
|
||||
box-sizing: border-box;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
src/components/haex/debug/overlay.vue (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
<template>
|
||||
<div
|
||||
v-if="data"
|
||||
class="fixed top-2 right-2 bg-black/90 text-white text-xs p-3 rounded-lg shadow-2xl max-w-sm z-[9999] backdrop-blur-sm"
|
||||
>
|
||||
<div class="flex justify-between items-start gap-3 mb-2">
|
||||
<span class="font-bold text-sm">{{ title }}</span>
|
||||
<div class="flex gap-1">
|
||||
<button
|
||||
class="bg-white/20 hover:bg-white/30 px-2 py-1 rounded text-xs transition-colors"
|
||||
@click="copyToClipboardAsync"
|
||||
>
|
||||
Copy
|
||||
</button>
|
||||
<button
|
||||
v-if="dismissible"
|
||||
class="bg-white/20 hover:bg-white/30 px-2 py-1 rounded text-xs transition-colors"
|
||||
@click="handleDismiss"
|
||||
>
|
||||
✕
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<pre class="text-xs whitespace-pre-wrap font-mono overflow-auto max-h-96">{{ formattedData }}</pre>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
const props = withDefaults(
|
||||
defineProps<{
|
||||
data: Record<string, any> | null
|
||||
title?: string
|
||||
dismissible?: boolean
|
||||
}>(),
|
||||
{
|
||||
title: 'Debug Info',
|
||||
dismissible: false,
|
||||
},
|
||||
)
|
||||
|
||||
const emit = defineEmits<{
|
||||
dismiss: []
|
||||
}>()
|
||||
|
||||
const formattedData = computed(() => {
|
||||
if (!props.data) return ''
|
||||
return JSON.stringify(props.data, null, 2)
|
||||
})
|
||||
|
||||
const copyToClipboardAsync = async () => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(formattedData.value)
|
||||
} catch (err) {
|
||||
console.error('Failed to copy debug info:', err)
|
||||
}
|
||||
}
|
||||
|
||||
const handleDismiss = () => {
|
||||
emit('dismiss')
|
||||
}
|
||||
</script>
|
||||
@ -1,7 +1,7 @@
|
||||
<template>
|
||||
<div
|
||||
ref="desktopEl"
|
||||
class="w-full h-full relative overflow-hidden"
|
||||
class="absolute inset-0 overflow-hidden"
|
||||
>
|
||||
<Swiper
|
||||
:modules="[SwiperNavigation]"
|
||||
@ -13,7 +13,7 @@
|
||||
:no-swiping="true"
|
||||
no-swiping-class="no-swipe"
|
||||
:allow-touch-move="allowSwipe"
|
||||
class="w-full h-full"
|
||||
class="h-full w-full"
|
||||
direction="vertical"
|
||||
@swiper="onSwiperInit"
|
||||
@slide-change="onSlideChange"
|
||||
@ -23,176 +23,198 @@
|
||||
:key="workspace.id"
|
||||
class="w-full h-full"
|
||||
>
|
||||
<div
|
||||
class="w-full h-full relative"
|
||||
@click.self.stop="handleDesktopClick"
|
||||
@mousedown.left.self="handleAreaSelectStart"
|
||||
@dragover.prevent="handleDragOver"
|
||||
@drop.prevent="handleDrop($event, workspace.id)"
|
||||
>
|
||||
<!-- Grid Pattern Background -->
|
||||
<UContextMenu :items="getWorkspaceContextMenuItems(workspace.id)">
|
||||
<div
|
||||
class="absolute inset-0 pointer-events-none opacity-30"
|
||||
:style="{
|
||||
backgroundImage:
|
||||
'linear-gradient(rgba(0, 0, 0, 0.1) 1px, transparent 1px), linear-gradient(90deg, rgba(0, 0, 0, 0.1) 1px, transparent 1px)',
|
||||
backgroundSize: '32px 32px',
|
||||
}"
|
||||
/>
|
||||
|
||||
<!-- Snap Dropzones (only visible when window drag near edge) -->
|
||||
|
||||
<div
|
||||
class="absolute left-0 top-0 bottom-0 border-blue-500 pointer-events-none backdrop-blur-sm z-50 transition-all duration-500 ease-in-out"
|
||||
:class="showLeftSnapZone ? 'w-1/2 bg-blue-500/20 border-2' : 'w-0'"
|
||||
/>
|
||||
|
||||
<div
|
||||
class="absolute right-0 top-0 bottom-0 border-blue-500 pointer-events-none backdrop-blur-sm z-50 transition-all duration-500 ease-in-out"
|
||||
:class="showRightSnapZone ? 'w-1/2 bg-blue-500/20 border-2' : 'w-0'"
|
||||
/>
|
||||
|
||||
<!-- Area Selection Box -->
|
||||
<div
|
||||
v-if="isAreaSelecting"
|
||||
class="absolute bg-blue-500/20 border-2 border-blue-500 pointer-events-none z-30"
|
||||
:style="selectionBoxStyle"
|
||||
/>
|
||||
|
||||
<!-- Icons for this workspace -->
|
||||
<HaexDesktopIcon
|
||||
v-for="item in getWorkspaceIcons(workspace.id)"
|
||||
:id="item.id"
|
||||
:key="item.id"
|
||||
:item-type="item.itemType"
|
||||
:reference-id="item.referenceId"
|
||||
:initial-x="item.positionX"
|
||||
:initial-y="item.positionY"
|
||||
:label="item.label"
|
||||
:icon="item.icon"
|
||||
class="no-swipe"
|
||||
@position-changed="handlePositionChanged"
|
||||
@drag-start="handleDragStart"
|
||||
@drag-end="handleDragEnd"
|
||||
/>
|
||||
|
||||
<!-- Windows for this workspace -->
|
||||
<template
|
||||
v-for="window in getWorkspaceWindows(workspace.id)"
|
||||
:key="window.id"
|
||||
class="w-full h-full relative"
|
||||
:style="getWorkspaceBackgroundStyle(workspace)"
|
||||
@click.self.stop="handleDesktopClick"
|
||||
@mousedown.left.self="handleAreaSelectStart"
|
||||
@dragover.prevent="handleDragOver"
|
||||
@drop.prevent="handleDrop($event, workspace.id)"
|
||||
>
|
||||
<!-- Overview Mode: Teleport to window preview -->
|
||||
<Teleport
|
||||
v-if="
|
||||
windowManager.showWindowOverview &&
|
||||
overviewWindowState.has(window.id)
|
||||
<!-- Grid Pattern Background -->
|
||||
<div
|
||||
class="absolute inset-0 pointer-events-none opacity-30"
|
||||
:style="{
|
||||
backgroundImage:
|
||||
'linear-gradient(rgba(0, 0, 0, 0.1) 1px, transparent 1px), linear-gradient(90deg, rgba(0, 0, 0, 0.1) 1px, transparent 1px)',
|
||||
backgroundSize: '32px 32px',
|
||||
}"
|
||||
/>
|
||||
|
||||
<!-- Snap Dropzones (only visible when window drag near edge) -->
|
||||
|
||||
<div
|
||||
class="absolute left-0 top-0 bottom-0 border-blue-500 pointer-events-none backdrop-blur-sm z-50 transition-all duration-500 ease-in-out"
|
||||
:class="
|
||||
showLeftSnapZone ? 'w-1/2 bg-blue-500/20 border-2' : 'w-0'
|
||||
"
|
||||
:to="`#window-preview-${window.id}`"
|
||||
>
|
||||
<div
|
||||
class="absolute origin-top-left"
|
||||
:style="{
|
||||
transform: `scale(${overviewWindowState.get(window.id)!.scale})`,
|
||||
width: `${overviewWindowState.get(window.id)!.width}px`,
|
||||
height: `${overviewWindowState.get(window.id)!.height}px`,
|
||||
}"
|
||||
>
|
||||
<HaexWindow
|
||||
v-show="
|
||||
windowManager.showWindowOverview || !window.isMinimized
|
||||
"
|
||||
:id="window.id"
|
||||
v-model:x="overviewWindowState.get(window.id)!.x"
|
||||
v-model:y="overviewWindowState.get(window.id)!.y"
|
||||
v-model:width="overviewWindowState.get(window.id)!.width"
|
||||
v-model:height="overviewWindowState.get(window.id)!.height"
|
||||
:title="window.title"
|
||||
:icon="window.icon"
|
||||
:is-active="windowManager.isWindowActive(window.id)"
|
||||
:source-x="window.sourceX"
|
||||
:source-y="window.sourceY"
|
||||
:source-width="window.sourceWidth"
|
||||
:source-height="window.sourceHeight"
|
||||
:is-opening="window.isOpening"
|
||||
:is-closing="window.isClosing"
|
||||
class="no-swipe"
|
||||
@close="windowManager.closeWindow(window.id)"
|
||||
@minimize="windowManager.minimizeWindow(window.id)"
|
||||
@activate="windowManager.activateWindow(window.id)"
|
||||
@position-changed="
|
||||
(x, y) =>
|
||||
windowManager.updateWindowPosition(window.id, x, y)
|
||||
"
|
||||
@size-changed="
|
||||
(width, height) =>
|
||||
windowManager.updateWindowSize(window.id, width, height)
|
||||
"
|
||||
@drag-start="handleWindowDragStart(window.id)"
|
||||
@drag-end="handleWindowDragEnd"
|
||||
>
|
||||
<!-- System Window: Render Vue Component -->
|
||||
<component
|
||||
:is="getSystemWindowComponent(window.sourceId)"
|
||||
v-if="window.type === 'system'"
|
||||
/>
|
||||
/>
|
||||
|
||||
<!-- Extension Window: Render iFrame -->
|
||||
<HaexDesktopExtensionFrame
|
||||
v-else
|
||||
:extension-id="window.sourceId"
|
||||
:window-id="window.id"
|
||||
/>
|
||||
</HaexWindow>
|
||||
</div>
|
||||
</Teleport>
|
||||
<div
|
||||
class="absolute right-0 top-0 bottom-0 border-blue-500 pointer-events-none backdrop-blur-sm z-50 transition-all duration-500 ease-in-out"
|
||||
:class="
|
||||
showRightSnapZone ? 'w-1/2 bg-blue-500/20 border-2' : 'w-0'
|
||||
"
|
||||
/>
|
||||
|
||||
<!-- Desktop Mode: Render directly in workspace -->
|
||||
<HaexWindow
|
||||
v-else
|
||||
v-show="windowManager.showWindowOverview || !window.isMinimized"
|
||||
:id="window.id"
|
||||
v-model:x="window.x"
|
||||
v-model:y="window.y"
|
||||
v-model:width="window.width"
|
||||
v-model:height="window.height"
|
||||
:title="window.title"
|
||||
:icon="window.icon"
|
||||
:is-active="windowManager.isWindowActive(window.id)"
|
||||
:source-x="window.sourceX"
|
||||
:source-y="window.sourceY"
|
||||
:source-width="window.sourceWidth"
|
||||
:source-height="window.sourceHeight"
|
||||
:is-opening="window.isOpening"
|
||||
:is-closing="window.isClosing"
|
||||
<!-- Area Selection Box -->
|
||||
<div
|
||||
v-if="isAreaSelecting"
|
||||
class="absolute bg-blue-500/20 border-2 border-blue-500 pointer-events-none z-30"
|
||||
:style="selectionBoxStyle"
|
||||
/>
|
||||
|
||||
<!-- Icons for this workspace -->
|
||||
<HaexDesktopIcon
|
||||
v-for="item in getWorkspaceIcons(workspace.id)"
|
||||
:id="item.id"
|
||||
:key="item.id"
|
||||
:item-type="item.itemType"
|
||||
:reference-id="item.referenceId"
|
||||
:initial-x="item.positionX"
|
||||
:initial-y="item.positionY"
|
||||
:label="item.label"
|
||||
:icon="item.icon"
|
||||
class="no-swipe"
|
||||
@close="windowManager.closeWindow(window.id)"
|
||||
@minimize="windowManager.minimizeWindow(window.id)"
|
||||
@activate="windowManager.activateWindow(window.id)"
|
||||
@position-changed="
|
||||
(x, y) => windowManager.updateWindowPosition(window.id, x, y)
|
||||
"
|
||||
@size-changed="
|
||||
(width, height) =>
|
||||
windowManager.updateWindowSize(window.id, width, height)
|
||||
"
|
||||
@drag-start="handleWindowDragStart(window.id)"
|
||||
@drag-end="handleWindowDragEnd"
|
||||
>
|
||||
<!-- System Window: Render Vue Component -->
|
||||
<component
|
||||
:is="getSystemWindowComponent(window.sourceId)"
|
||||
v-if="window.type === 'system'"
|
||||
/>
|
||||
@position-changed="handlePositionChanged"
|
||||
@drag-start="handleDragStart"
|
||||
@drag-end="handleDragEnd"
|
||||
/>
|
||||
|
||||
<!-- Extension Window: Render iFrame -->
|
||||
<HaexDesktopExtensionFrame
|
||||
<!-- Windows for this workspace -->
|
||||
<template
|
||||
v-for="window in getWorkspaceWindows(workspace.id)"
|
||||
:key="window.id"
|
||||
>
|
||||
<!-- Overview Mode: Teleport to window preview -->
|
||||
<Teleport
|
||||
v-if="
|
||||
windowManager.showWindowOverview &&
|
||||
overviewWindowState.has(window.id)
|
||||
"
|
||||
:to="`#window-preview-${window.id}`"
|
||||
>
|
||||
<div
|
||||
class="absolute origin-top-left"
|
||||
:style="{
|
||||
transform: `scale(${overviewWindowState.get(window.id)!.scale})`,
|
||||
width: `${overviewWindowState.get(window.id)!.width}px`,
|
||||
height: `${overviewWindowState.get(window.id)!.height}px`,
|
||||
}"
|
||||
>
|
||||
<HaexWindow
|
||||
v-show="
|
||||
windowManager.showWindowOverview || !window.isMinimized
|
||||
"
|
||||
:id="window.id"
|
||||
v-model:x="overviewWindowState.get(window.id)!.x"
|
||||
v-model:y="overviewWindowState.get(window.id)!.y"
|
||||
v-model:width="overviewWindowState.get(window.id)!.width"
|
||||
v-model:height="overviewWindowState.get(window.id)!.height"
|
||||
:title="window.title"
|
||||
:icon="window.icon"
|
||||
:is-active="windowManager.isWindowActive(window.id)"
|
||||
:source-x="window.sourceX"
|
||||
:source-y="window.sourceY"
|
||||
:source-width="window.sourceWidth"
|
||||
:source-height="window.sourceHeight"
|
||||
:is-opening="window.isOpening"
|
||||
:is-closing="window.isClosing"
|
||||
:warning-level="
|
||||
window.type === 'extension' &&
|
||||
availableExtensions.find(
|
||||
(ext) => ext.id === window.sourceId,
|
||||
)?.devServerUrl
|
||||
? 'warning'
|
||||
: undefined
|
||||
"
|
||||
class="no-swipe"
|
||||
@close="windowManager.closeWindow(window.id)"
|
||||
@minimize="windowManager.minimizeWindow(window.id)"
|
||||
@activate="windowManager.activateWindow(window.id)"
|
||||
@position-changed="
|
||||
(x, y) =>
|
||||
windowManager.updateWindowPosition(window.id, x, y)
|
||||
"
|
||||
@size-changed="
|
||||
(width, height) =>
|
||||
windowManager.updateWindowSize(window.id, width, height)
|
||||
"
|
||||
@drag-start="handleWindowDragStart(window.id)"
|
||||
@drag-end="handleWindowDragEnd"
|
||||
>
|
||||
<!-- System Window: Render Vue Component -->
|
||||
<component
|
||||
:is="getSystemWindowComponent(window.sourceId)"
|
||||
v-if="window.type === 'system'"
|
||||
/>
|
||||
|
||||
<!-- Extension Window: Render iFrame -->
|
||||
<HaexDesktopExtensionFrame
|
||||
v-else
|
||||
:extension-id="window.sourceId"
|
||||
:window-id="window.id"
|
||||
/>
|
||||
</HaexWindow>
|
||||
</div>
|
||||
</Teleport>
|
||||
|
||||
<!-- Desktop Mode: Render directly in workspace -->
|
||||
<HaexWindow
|
||||
v-else
|
||||
:extension-id="window.sourceId"
|
||||
:window-id="window.id"
|
||||
/>
|
||||
</HaexWindow>
|
||||
</template>
|
||||
</div>
|
||||
v-show="windowManager.showWindowOverview || !window.isMinimized"
|
||||
:id="window.id"
|
||||
v-model:x="window.x"
|
||||
v-model:y="window.y"
|
||||
v-model:width="window.width"
|
||||
v-model:height="window.height"
|
||||
:title="window.title"
|
||||
:icon="window.icon"
|
||||
:is-active="windowManager.isWindowActive(window.id)"
|
||||
:source-x="window.sourceX"
|
||||
:source-y="window.sourceY"
|
||||
:source-width="window.sourceWidth"
|
||||
:source-height="window.sourceHeight"
|
||||
:is-opening="window.isOpening"
|
||||
:is-closing="window.isClosing"
|
||||
:warning-level="
|
||||
window.type === 'extension' &&
|
||||
availableExtensions.find((ext) => ext.id === window.sourceId)
|
||||
?.devServerUrl
|
||||
? 'warning'
|
||||
: undefined
|
||||
"
|
||||
class="no-swipe"
|
||||
@close="windowManager.closeWindow(window.id)"
|
||||
@minimize="windowManager.minimizeWindow(window.id)"
|
||||
@activate="windowManager.activateWindow(window.id)"
|
||||
@position-changed="
|
||||
(x, y) => windowManager.updateWindowPosition(window.id, x, y)
|
||||
"
|
||||
@size-changed="
|
||||
(width, height) =>
|
||||
windowManager.updateWindowSize(window.id, width, height)
|
||||
"
|
||||
@drag-start="handleWindowDragStart(window.id)"
|
||||
@drag-end="handleWindowDragEnd"
|
||||
>
|
||||
<!-- System Window: Render Vue Component -->
|
||||
<component
|
||||
:is="getSystemWindowComponent(window.sourceId)"
|
||||
v-if="window.type === 'system'"
|
||||
/>
|
||||
|
||||
<!-- Extension Window: Render iFrame -->
|
||||
<HaexDesktopExtensionFrame
|
||||
v-else
|
||||
:extension-id="window.sourceId"
|
||||
:window-id="window.id"
|
||||
/>
|
||||
</HaexWindow>
|
||||
</template>
|
||||
</div>
|
||||
</UContextMenu>
|
||||
</SwiperSlide>
|
||||
</Swiper>
|
||||
|
||||
@ -224,6 +246,8 @@ const {
|
||||
allowSwipe,
|
||||
isOverviewMode,
|
||||
} = storeToRefs(workspaceStore)
|
||||
const { getWorkspaceBackgroundStyle, getWorkspaceContextMenuItems } =
|
||||
workspaceStore
|
||||
|
||||
const { x: mouseX } = useMouse()
|
||||
|
||||
|
||||
@ -15,7 +15,7 @@
|
||||
<div class="flex items-start gap-4">
|
||||
<div
|
||||
v-if="preview?.manifest.icon"
|
||||
class="w-16 h-16 flex-shrink-0"
|
||||
class="w-16 h-16 shrink-0"
|
||||
>
|
||||
<UIcon
|
||||
:name="preview.manifest.icon"
|
||||
@ -184,7 +184,6 @@ const shellPermissions = computed({
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
const permissionAccordionItems = computed(() => {
|
||||
const items = []
|
||||
|
||||
|
||||
@ -1,5 +1,16 @@
|
||||
<template>
|
||||
<UPopover v-model:open="open">
|
||||
<UDrawer
|
||||
v-model:open="open"
|
||||
direction="right"
|
||||
:title="t('launcher.title')"
|
||||
:description="t('launcher.description')"
|
||||
:overlay="false"
|
||||
:modal="false"
|
||||
:handle-only="true"
|
||||
:ui="{
|
||||
content: 'w-dvw max-w-md sm:max-w-fit',
|
||||
}"
|
||||
>
|
||||
<UButton
|
||||
icon="material-symbols:apps"
|
||||
color="neutral"
|
||||
@ -9,58 +20,63 @@
|
||||
/>
|
||||
|
||||
<template #content>
|
||||
<ul class="p-4 max-h-96 grid grid-cols-3 gap-2 overflow-scroll">
|
||||
<!-- All launcher items (system windows + enabled extensions, alphabetically sorted) -->
|
||||
<UContextMenu
|
||||
v-for="item in launcherItems"
|
||||
:key="item.id"
|
||||
:items="getContextMenuItems(item)"
|
||||
>
|
||||
<div class="p-4 h-full overflow-y-auto">
|
||||
<div class="flex flex-wrap">
|
||||
<!-- All launcher items (system windows + enabled extensions, alphabetically sorted) -->
|
||||
<UContextMenu
|
||||
v-for="item in launcherItems"
|
||||
:key="item.id"
|
||||
:items="getContextMenuItems(item)"
|
||||
>
|
||||
<UiButton
|
||||
square
|
||||
size="lg"
|
||||
variant="ghost"
|
||||
:ui="{
|
||||
base: 'size-24 flex flex-wrap text-sm items-center justify-center overflow-visible cursor-grab',
|
||||
leadingIcon: 'size-10',
|
||||
label: 'w-full',
|
||||
}"
|
||||
:icon="item.icon"
|
||||
:label="item.name"
|
||||
:tooltip="item.name"
|
||||
draggable="true"
|
||||
@click="openItem(item)"
|
||||
@dragstart="handleDragStart($event, item)"
|
||||
/>
|
||||
</UContextMenu>
|
||||
|
||||
<!-- Disabled Extensions (grayed out) -->
|
||||
<UiButton
|
||||
v-for="extension in disabledExtensions"
|
||||
:key="extension.id"
|
||||
square
|
||||
size="lg"
|
||||
size="xl"
|
||||
variant="ghost"
|
||||
:disabled="true"
|
||||
:ui="{
|
||||
base: 'size-24 flex flex-wrap text-sm items-center justify-center overflow-visible cursor-grab active:cursor-grabbing',
|
||||
base: 'size-24 flex flex-wrap text-sm items-center justify-center overflow-visible opacity-40',
|
||||
leadingIcon: 'size-10',
|
||||
label: 'w-full',
|
||||
}"
|
||||
:icon="item.icon"
|
||||
:label="item.name"
|
||||
:tooltip="item.name"
|
||||
draggable="true"
|
||||
@click="openItem(item)"
|
||||
@dragstart="handleDragStart($event, item)"
|
||||
@dragend="handleDragEnd"
|
||||
:icon="extension.icon || 'i-heroicons-puzzle-piece-solid'"
|
||||
:label="extension.name"
|
||||
:tooltip="`${extension.name} (${t('disabled')})`"
|
||||
/>
|
||||
</UContextMenu>
|
||||
|
||||
<!-- Disabled Extensions (grayed out) -->
|
||||
<UiButton
|
||||
v-for="extension in disabledExtensions"
|
||||
:key="extension.id"
|
||||
square
|
||||
size="xl"
|
||||
variant="ghost"
|
||||
:disabled="true"
|
||||
:ui="{
|
||||
base: 'size-24 flex flex-wrap text-sm items-center justify-center overflow-visible opacity-40',
|
||||
leadingIcon: 'size-10',
|
||||
label: 'w-full',
|
||||
}"
|
||||
:icon="extension.icon || 'i-heroicons-puzzle-piece-solid'"
|
||||
:label="extension.name"
|
||||
:tooltip="`${extension.name} (${t('disabled')})`"
|
||||
/>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
</UPopover>
|
||||
</UDrawer>
|
||||
|
||||
<!-- Uninstall Confirmation Dialog -->
|
||||
<UiDialogConfirm
|
||||
v-model:open="showUninstallDialog"
|
||||
:title="t('uninstall.confirm.title')"
|
||||
:description="t('uninstall.confirm.description', { name: extensionToUninstall?.name || '' })"
|
||||
:description="
|
||||
t('uninstall.confirm.description', {
|
||||
name: extensionToUninstall?.name || '',
|
||||
})
|
||||
"
|
||||
:confirm-label="t('uninstall.confirm.button')"
|
||||
confirm-icon="i-heroicons-trash"
|
||||
@confirm="confirmUninstall"
|
||||
@ -74,11 +90,14 @@ defineOptions({
|
||||
|
||||
const extensionStore = useExtensionsStore()
|
||||
const windowManagerStore = useWindowManagerStore()
|
||||
const uiStore = useUiStore()
|
||||
|
||||
const { t } = useI18n()
|
||||
|
||||
const open = ref(false)
|
||||
|
||||
const { isSmallScreen } = storeToRefs(uiStore)
|
||||
|
||||
// Uninstall dialog state
|
||||
const showUninstallDialog = ref(false)
|
||||
const extensionToUninstall = ref<LauncherItem | null>(null)
|
||||
@ -226,10 +245,11 @@ const handleDragStart = (event: DragEvent, item: LauncherItem) => {
|
||||
if (dragImage) {
|
||||
event.dataTransfer.setDragImage(dragImage, 20, 20)
|
||||
}
|
||||
}
|
||||
|
||||
const handleDragEnd = () => {
|
||||
// Cleanup if needed
|
||||
// Close drawer on small screens to reveal workspace for drop
|
||||
if (isSmallScreen.value) {
|
||||
open.value = false
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
@ -237,6 +257,9 @@ const handleDragEnd = () => {
|
||||
de:
|
||||
disabled: Deaktiviert
|
||||
marketplace: Marketplace
|
||||
launcher:
|
||||
title: App Launcher
|
||||
description: Wähle eine App zum Öffnen
|
||||
contextMenu:
|
||||
open: Öffnen
|
||||
uninstall: Deinstallieren
|
||||
@ -249,6 +272,9 @@ de:
|
||||
en:
|
||||
disabled: Disabled
|
||||
marketplace: Marketplace
|
||||
launcher:
|
||||
title: App Launcher
|
||||
description: Select an app to open
|
||||
contextMenu:
|
||||
open: Open
|
||||
uninstall: Uninstall
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<div class="p-4 mx-auto space-y-6 bg-default/90 backdrop-blur-2xl">
|
||||
<div class="p-4 mx-auto space-y-6 bg-default">
|
||||
<div class="space-y-2">
|
||||
<h1 class="text-2xl font-bold">{{ t('title') }}</h1>
|
||||
<p class="text-sm opacity-70">{{ t('description') }}</p>
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<div class="w-full h-full bg-default">
|
||||
<div class="w-full h-full bg-default overflow-scroll">
|
||||
<div class="grid grid-cols-2 p-2">
|
||||
<div class="p-2">{{ t('language') }}</div>
|
||||
<div><UiDropdownLocale @select="onSelectLocaleAsync" /></div>
|
||||
@ -33,13 +33,36 @@
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="h-full"/>
|
||||
<div class="p-2">{{ t('workspaceBackground.label') }}</div>
|
||||
<div class="flex gap-2">
|
||||
<UiButton
|
||||
:label="t('workspaceBackground.choose')"
|
||||
@click="selectBackgroundImage"
|
||||
/>
|
||||
<UiButton
|
||||
v-if="currentWorkspace?.background"
|
||||
:label="t('workspaceBackground.remove.label')"
|
||||
color="error"
|
||||
@click="removeBackgroundImage"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="h-full" />
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { Locale } from 'vue-i18n'
|
||||
import { open } from '@tauri-apps/plugin-dialog'
|
||||
import {
|
||||
readFile,
|
||||
writeFile,
|
||||
mkdir,
|
||||
exists,
|
||||
remove,
|
||||
} from '@tauri-apps/plugin-fs'
|
||||
import { appLocalDataDir } from '@tauri-apps/api/path'
|
||||
|
||||
const { t, setLocale } = useI18n()
|
||||
|
||||
@ -77,6 +100,10 @@ const { requestNotificationPermissionAsync } = useNotificationStore()
|
||||
const { deviceName } = storeToRefs(useDeviceStore())
|
||||
const { updateDeviceNameAsync, readDeviceNameAsync } = useDeviceStore()
|
||||
|
||||
const workspaceStore = useWorkspaceStore()
|
||||
const { currentWorkspace } = storeToRefs(workspaceStore)
|
||||
const { updateWorkspaceBackgroundAsync } = workspaceStore
|
||||
|
||||
onMounted(async () => {
|
||||
await readDeviceNameAsync()
|
||||
})
|
||||
@ -92,6 +119,152 @@ const onUpdateDeviceNameAsync = async () => {
|
||||
add({ description: t('deviceName.update.error'), color: 'error' })
|
||||
}
|
||||
}
|
||||
|
||||
const selectBackgroundImage = async () => {
|
||||
if (!currentWorkspace.value) return
|
||||
|
||||
try {
|
||||
const selected = await open({
|
||||
multiple: false,
|
||||
filters: [
|
||||
{
|
||||
name: 'Images',
|
||||
extensions: ['png', 'jpg', 'jpeg', 'webp'],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
if (!selected || typeof selected !== 'string') {
|
||||
return
|
||||
}
|
||||
|
||||
// Read the selected file (works with Android photo picker URIs)
|
||||
let fileData: Uint8Array
|
||||
try {
|
||||
fileData = await readFile(selected)
|
||||
} catch (readError) {
|
||||
add({
|
||||
description: `Fehler beim Lesen: ${readError instanceof Error ? readError.message : String(readError)}`,
|
||||
color: 'error',
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Detect file type from file signature
|
||||
let ext = 'jpg' // default
|
||||
if (fileData.length > 4) {
|
||||
// PNG signature: 89 50 4E 47
|
||||
if (
|
||||
fileData[0] === 0x89 &&
|
||||
fileData[1] === 0x50 &&
|
||||
fileData[2] === 0x4e &&
|
||||
fileData[3] === 0x47
|
||||
) {
|
||||
ext = 'png'
|
||||
}
|
||||
// JPEG signature: FF D8 FF
|
||||
else if (
|
||||
fileData[0] === 0xff &&
|
||||
fileData[1] === 0xd8 &&
|
||||
fileData[2] === 0xff
|
||||
) {
|
||||
ext = 'jpg'
|
||||
}
|
||||
// WebP signature: RIFF xxxx WEBP
|
||||
else if (
|
||||
fileData[0] === 0x52 &&
|
||||
fileData[1] === 0x49 &&
|
||||
fileData[2] === 0x46 &&
|
||||
fileData[3] === 0x46
|
||||
) {
|
||||
ext = 'webp'
|
||||
}
|
||||
}
|
||||
|
||||
// Get app local data directory
|
||||
const appDataPath = await appLocalDataDir()
|
||||
|
||||
// Construct target path manually to avoid path joining issues
|
||||
const fileName = `workspace-${currentWorkspace.value.id}-background.${ext}`
|
||||
const targetPath = `${appDataPath}/files/${fileName}`
|
||||
|
||||
// Create parent directory if it doesn't exist
|
||||
const parentDir = `${appDataPath}/files`
|
||||
try {
|
||||
if (!(await exists(parentDir))) {
|
||||
await mkdir(parentDir, { recursive: true })
|
||||
}
|
||||
} catch (mkdirError) {
|
||||
add({
|
||||
description: `Fehler beim Erstellen des Ordners: ${mkdirError instanceof Error ? mkdirError.message : String(mkdirError)}`,
|
||||
color: 'error',
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Write file to app data directory
|
||||
try {
|
||||
await writeFile(targetPath, fileData)
|
||||
} catch (writeError) {
|
||||
add({
|
||||
description: `Fehler beim Schreiben: ${writeError instanceof Error ? writeError.message : String(writeError)}`,
|
||||
color: 'error',
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Store the absolute file path in database
|
||||
try {
|
||||
await updateWorkspaceBackgroundAsync(
|
||||
currentWorkspace.value.id,
|
||||
targetPath,
|
||||
)
|
||||
add({
|
||||
description: t('workspaceBackground.update.success'),
|
||||
color: 'success',
|
||||
})
|
||||
} catch (dbError) {
|
||||
add({
|
||||
description: `Fehler beim DB-Update: ${dbError instanceof Error ? dbError.message : String(dbError)}`,
|
||||
color: 'error',
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error selecting background:', error)
|
||||
add({
|
||||
description: `${t('workspaceBackground.update.error')}: ${error instanceof Error ? error.message : String(error)}`,
|
||||
color: 'error',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const removeBackgroundImage = async () => {
|
||||
if (!currentWorkspace.value) return
|
||||
|
||||
try {
|
||||
// Delete the background file if it exists
|
||||
if (currentWorkspace.value.background) {
|
||||
try {
|
||||
// The background field contains the absolute file path
|
||||
if (await exists(currentWorkspace.value.background)) {
|
||||
await remove(currentWorkspace.value.background)
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn('Could not delete background file:', err)
|
||||
// Continue anyway to clear the database entry
|
||||
}
|
||||
}
|
||||
|
||||
await updateWorkspaceBackgroundAsync(currentWorkspace.value.id, null)
|
||||
add({
|
||||
description: t('workspaceBackground.remove.success'),
|
||||
color: 'success',
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error removing background:', error)
|
||||
add({ description: t('workspaceBackground.remove.error'), color: 'error' })
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<i18n lang="yaml">
|
||||
@ -112,6 +285,16 @@ de:
|
||||
update:
|
||||
success: Gerätename wurde erfolgreich aktualisiert
|
||||
error: Gerätename konnte nich aktualisiert werden
|
||||
workspaceBackground:
|
||||
label: Workspace-Hintergrund
|
||||
choose: Bild auswählen
|
||||
update:
|
||||
success: Hintergrund erfolgreich aktualisiert
|
||||
error: Fehler beim Aktualisieren des Hintergrunds
|
||||
remove:
|
||||
label: Hintergrund entfernen
|
||||
success: Hintergrund erfolgreich entfernt
|
||||
error: Fehler beim Entfernen des Hintergrunds
|
||||
en:
|
||||
language: Language
|
||||
design: Design
|
||||
@ -129,4 +312,14 @@ en:
|
||||
update:
|
||||
success: Device name has been successfully updated
|
||||
error: Device name could not be updated
|
||||
workspaceBackground:
|
||||
label: Workspace Background
|
||||
choose: Choose Image
|
||||
update:
|
||||
success: Background successfully updated
|
||||
error: Error updating background
|
||||
remove:
|
||||
label: Remove Background
|
||||
success: Background successfully removed
|
||||
error: Error removing background
|
||||
</i18n>
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
<UiDialogConfirm
|
||||
:confirm-label="t('create')"
|
||||
@confirm="onCreateAsync"
|
||||
:description="t('description')"
|
||||
>
|
||||
<UiButton
|
||||
:label="t('vault.create')"
|
||||
@ -55,7 +56,9 @@
|
||||
<script setup lang="ts">
|
||||
import { vaultSchema } from './schema'
|
||||
|
||||
const { t } = useI18n()
|
||||
const { t } = useI18n({
|
||||
useScope: 'local',
|
||||
})
|
||||
|
||||
const vault = reactive<{
|
||||
name: string
|
||||
@ -118,6 +121,7 @@ de:
|
||||
name: HaexVault
|
||||
title: Neue {haexvault} erstellen
|
||||
create: Erstellen
|
||||
description: Erstelle eine neue Vault für deine Daten
|
||||
|
||||
en:
|
||||
vault:
|
||||
@ -127,4 +131,5 @@ en:
|
||||
name: HaexVault
|
||||
title: Create new {haexvault}
|
||||
create: Create
|
||||
description: Create a new vault for your data
|
||||
</i18n>
|
||||
|
||||
@ -58,7 +58,9 @@ const props = defineProps<{
|
||||
path?: string
|
||||
}>()
|
||||
|
||||
const { t } = useI18n()
|
||||
const { t } = useI18n({
|
||||
useScope: 'local',
|
||||
})
|
||||
|
||||
const vault = reactive<{
|
||||
name: string
|
||||
|
||||
@ -3,13 +3,20 @@
|
||||
ref="windowEl"
|
||||
:style="windowStyle"
|
||||
:class="[
|
||||
'absolute bg-default/80 backdrop-blur-xl rounded-lg shadow-xl overflow-hidden isolate',
|
||||
'border border-gray-200 dark:border-gray-700 transition-all ease-out duration-600 ',
|
||||
'absolute bg-default/80 backdrop-blur-xl rounded-lg shadow-xl overflow-hidden',
|
||||
'transition-all ease-out duration-600',
|
||||
'flex flex-col @container',
|
||||
{ 'select-none': isResizingOrDragging },
|
||||
isActive ? 'z-20' : 'z-10',
|
||||
// Border colors based on warning level
|
||||
warningLevel === 'warning'
|
||||
? 'border-2 border-warning-500'
|
||||
: warningLevel === 'danger'
|
||||
? 'border-2 border-danger-500'
|
||||
: 'border border-gray-200 dark:border-gray-700',
|
||||
]"
|
||||
@mousedown="handleActivate"
|
||||
@contextmenu.stop.prevent
|
||||
>
|
||||
<!-- Window Titlebar -->
|
||||
<div
|
||||
@ -44,6 +51,7 @@
|
||||
/>
|
||||
|
||||
<HaexWindowButton
|
||||
v-if="!isSmallScreen"
|
||||
:is-maximized
|
||||
variant="maximize"
|
||||
@click.stop="handleMaximize"
|
||||
@ -68,7 +76,7 @@
|
||||
|
||||
<!-- Resize Handles -->
|
||||
<HaexWindowResizeHandles
|
||||
:disabled="isMaximized"
|
||||
:disabled="isMaximized || isSmallScreen"
|
||||
@resize-start="handleResizeStart"
|
||||
/>
|
||||
</div>
|
||||
@ -86,6 +94,7 @@ const props = defineProps<{
|
||||
sourceHeight?: number
|
||||
isOpening?: boolean
|
||||
isClosing?: boolean
|
||||
warningLevel?: 'warning' | 'danger' // Warning indicator (e.g., dev extension, dangerous permissions)
|
||||
}>()
|
||||
|
||||
const emit = defineEmits<{
|
||||
@ -107,12 +116,16 @@ const height = defineModel<number>('height', { default: 600 })
|
||||
const windowEl = useTemplateRef('windowEl')
|
||||
const titlebarEl = useTemplateRef('titlebarEl')
|
||||
|
||||
const uiStore = useUiStore()
|
||||
const { isSmallScreen } = storeToRefs(uiStore)
|
||||
|
||||
// Inject viewport size from parent desktop
|
||||
const viewportSize = inject<{
|
||||
width: Ref<number>
|
||||
height: Ref<number>
|
||||
}>('viewportSize')
|
||||
const isMaximized = ref(false) // Don't start maximized
|
||||
// Start maximized on small screens
|
||||
const isMaximized = ref(isSmallScreen.value)
|
||||
|
||||
// Store initial position/size for restore
|
||||
const preMaximizeState = ref({
|
||||
@ -144,7 +157,8 @@ const isResizingOrDragging = computed(
|
||||
// Setup drag with useDrag composable (supports mouse + touch)
|
||||
useDrag(
|
||||
({ movement: [mx, my], first, last }) => {
|
||||
if (isMaximized.value) return
|
||||
// Disable dragging on small screens (always fullscreen)
|
||||
if (isMaximized.value || isSmallScreen.value) return
|
||||
|
||||
if (first) {
|
||||
// Drag started - save initial position
|
||||
@ -315,13 +329,33 @@ const handleMaximize = () => {
|
||||
const bounds = getViewportBounds()
|
||||
|
||||
if (bounds && bounds.width > 0 && bounds.height > 0) {
|
||||
// Get safe-area-insets from CSS variables for debug
|
||||
const safeAreaTop = parseFloat(
|
||||
getComputedStyle(document.documentElement).getPropertyValue(
|
||||
'--safe-area-inset-top',
|
||||
) || '0',
|
||||
)
|
||||
const safeAreaBottom = parseFloat(
|
||||
getComputedStyle(document.documentElement).getPropertyValue(
|
||||
'--safe-area-inset-bottom',
|
||||
) || '0',
|
||||
)
|
||||
|
||||
// Desktop container uses 'absolute inset-0' which stretches over full viewport
|
||||
// bounds.height = full viewport height (includes header area + safe-areas)
|
||||
// We need to calculate available space properly
|
||||
|
||||
// Get header height from UI store (measured reactively in layout)
|
||||
const uiStore = useUiStore()
|
||||
const headerHeight = uiStore.headerHeight
|
||||
|
||||
x.value = 0
|
||||
y.value = 0
|
||||
y.value = 0 // Start below header and status bar
|
||||
width.value = bounds.width
|
||||
height.value = bounds.height
|
||||
// Height: viewport - header - both safe-areas
|
||||
height.value = bounds.height - headerHeight - safeAreaTop - safeAreaBottom
|
||||
isMaximized.value = true
|
||||
}
|
||||
console.log('handleMaximize', preMaximizeState, bounds)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,90 +1,76 @@
<template>
<UModal
<UDrawer
v-model:open="localShowWindowOverview"
direction="bottom"
:title="t('modal.title')"
:description="t('modal.description')"
fullscreen
>
<template #content>
<div class="flex flex-col h-full">
<!-- Header -->
<div class="h-full overflow-y-auto p-6 justify-center flex">
<!-- Window Thumbnails Flex Layout -->

<div
class="flex items-center justify-end border-b p-2 border-gray-200 dark:border-gray-700"
v-if="windows.length > 0"
class="flex flex-wrap gap-6 justify-center-safe items-start"
>
<UButton
icon="i-heroicons-x-mark"
color="error"
variant="soft"
@click="localShowWindowOverview = false"
/>
</div>

<!-- Scrollable Content -->
<div class="flex-1 overflow-y-auto p-6 justify-center flex">
<!-- Window Thumbnails Flex Layout -->
<div
v-if="windows.length > 0"
class="flex flex-wrap gap-6 justify-center-safe items-start"
v-for="window in windows"
:key="window.id"
class="relative group cursor-pointer"
>
<div
v-for="window in windows"
:key="window.id"
class="relative group cursor-pointer"
>
<!-- Window Title Bar -->
<div class="flex items-center gap-3 mb-2 px-2">
<UIcon
v-if="window.icon"
:name="window.icon"
class="size-5 shrink-0"
/>
<div class="flex-1 min-w-0">
<p class="font-semibold text-sm truncate">
{{ window.title }}
</p>
</div>
<!-- Minimized Badge -->
<UBadge
v-if="window.isMinimized"
color="info"
size="xs"
:title="t('minimized')"
/>
<!-- Window Title Bar -->
<div class="flex items-center gap-3 mb-2 px-2">
<UIcon
v-if="window.icon"
:name="window.icon"
class="size-5 shrink-0"
/>
<div class="flex-1 min-w-0">
<p class="font-semibold text-sm truncate">
{{ window.title }}
</p>
</div>
<!-- Minimized Badge -->
<UBadge
v-if="window.isMinimized"
color="info"
size="xs"
:title="t('minimized')"
/>
</div>

<!-- Scaled Window Preview Container / Teleport Target -->
<!-- Scaled Window Preview Container / Teleport Target -->
<div
:id="`window-preview-${window.id}`"
class="relative bg-gray-100 dark:bg-gray-900 rounded-xl overflow-hidden border-2 border-gray-200 dark:border-gray-700 group-hover:border-primary-500 transition-all shadow-lg"
:style="getCardStyle(window)"
@click="handleRestoreAndActivateWindow(window.id)"
>
<!-- Hover Overlay -->
<div
:id="`window-preview-${window.id}`"
class="relative bg-gray-100 dark:bg-gray-900 rounded-xl overflow-hidden border-2 border-gray-200 dark:border-gray-700 group-hover:border-primary-500 transition-all shadow-lg"
:style="getCardStyle(window)"
@click="handleRestoreAndActivateWindow(window.id)"
>
<!-- Hover Overlay -->
<div
class="absolute inset-0 bg-primary-500/10 opacity-0 group-hover:opacity-100 transition-opacity pointer-events-none z-40"
/>
</div>
class="absolute inset-0 bg-primary-500/10 opacity-0 group-hover:opacity-100 transition-opacity pointer-events-none z-40"
/>
</div>
</div>
</div>

<!-- Empty State -->
<div
v-else
class="flex flex-col items-center justify-center py-12 text-gray-500 dark:text-gray-400"
>
<UIcon
name="i-heroicons-window"
class="size-16 mb-4 shrink-0"
/>
<p class="text-lg font-medium">No windows open</p>
<p class="text-sm">
Open an extension or system window to see it here
</p>
</div>
<!-- Empty State -->
<div
v-else
class="flex flex-col items-center justify-center py-12 text-gray-500 dark:text-gray-400"
>
<UIcon
name="i-heroicons-window"
class="size-16 mb-4 shrink-0"
/>
<p class="text-lg font-medium">No windows open</p>
<p class="text-sm">
Open an extension or system window to see it here
</p>
</div>
</div>
</template>
</UModal>
</UDrawer>
</template>

<script setup lang="ts">
28
src/components/ui/button/context.vue
Normal file
@@ -0,0 +1,28 @@
<template>
<UContextMenu :items="contextMenuItems">
<UiButton
v-bind="$attrs"
@click="$emit('click', $event)"
>
<template
v-for="(_, slotName) in $slots"
#[slotName]="slotProps"
>
<slot
:name="slotName"
v-bind="slotProps"
/>
</template>
</UiButton>
</UContextMenu>
</template>

<script setup lang="ts">
import type { ContextMenuItem } from '@nuxt/ui'

defineProps<{
contextMenuItems: ContextMenuItem[]
}>()

defineEmits<{ click: [Event] }>()
</script>
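A short, hedged usage sketch for the new UiButtonContext wrapper; the items follow the @nuxt/ui ContextMenuItem shape (label, icon, color, onSelect) that the vault list later in this compare also uses:

```vue
<!-- Sketch only: a button that opens a context menu on right-click and still emits click. -->
<UiButtonContext
  variant="ghost"
  :context-menu-items="[
    {
      label: 'Delete',
      icon: 'mdi:trash-can-outline',
      color: 'error',
      onSelect: () => console.log('delete selected'),
    },
  ]"
  @click="() => console.log('clicked')"
>
  My Vault
</UiButtonContext>
```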
@@ -4,11 +4,11 @@
<UButton
class="pointer-events-auto"
v-bind="{
...{ size: isSmallScreen ? 'lg' : 'md' },
...buttonProps,
...$attrs,
}"
@click="(e) => $emit('click', e)"
size="lg"
@click="$emit('click', $event)"
>
<template
v-for="(_, slotName) in $slots"

@@ -5,7 +5,7 @@
:readonly="props.readOnly"
:leading-icon="props.leadingIcon"
:ui="{ base: 'peer' }"
:size="isSmallScreen ? 'lg' : 'md'"
size="lg"
@change="(e) => $emit('change', e)"
@blur="(e) => $emit('blur', e)"
@keyup="(e: KeyboardEvent) => $emit('keyup', e)"
@@ -368,7 +368,8 @@ async function handleDatabaseMethodAsync(
const rows = await invoke<unknown[]>('extension_sql_select', {
sql: params.query || '',
params: params.params || [],
extensionId: extension.id,
publicKey: extension.publicKey,
name: extension.name,
})

return {
@@ -379,14 +380,15 @@ async function handleDatabaseMethodAsync(
}

case 'haextension.db.execute': {
await invoke<string[]>('extension_sql_execute', {
const rows = await invoke<unknown[]>('extension_sql_execute', {
sql: params.query || '',
params: params.params || [],
extensionId: extension.id,
publicKey: extension.publicKey,
name: extension.name,
})

return {
rows: [],
rows,
rowsAffected: 1,
lastInsertId: undefined,
}
@@ -400,7 +402,8 @@ async function handleDatabaseMethodAsync(
await invoke('extension_sql_execute', {
sql: stmt,
params: [],
extensionId: extension.id,
publicKey: extension.publicKey,
name: extension.name,
})
}
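The execute hunk above now forwards the rows returned by the extension_sql_execute command instead of always answering with an empty array. A hedged sketch of the response shape an extension caller sees after this change (field names are taken from the diff; the surrounding bridge types are assumptions):

```ts
// Sketch only: illustrates the shape returned for 'haextension.db.execute'.
type DbExecuteResponse = {
  rows: unknown[]       // now filled from extension_sql_execute
  rowsAffected: number  // still hard-coded to 1 in this hunk
  lastInsertId?: number // still undefined in this hunk
}
```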
1
src/database/index.ts
Normal file
@@ -0,0 +1 @@
export * as schema from './schemas'
@@ -1,12 +1,12 @@
import { integer, sqliteTable, text, index } from 'drizzle-orm/sqlite-core'
import tableNames from '../tableNames.json'
import tableNames from '@/database/tableNames.json'

export const haexCrdtLogs = sqliteTable(
tableNames.haex.crdt.logs.name,
{
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
haexTimestamp: text(tableNames.haex.crdt.logs.columns.haexTimestamp),
tableName: text(tableNames.haex.crdt.logs.columns.tableName),
rowPks: text(tableNames.haex.crdt.logs.columns.rowPks, { mode: 'json' }),
@@ -33,8 +33,8 @@ export const haexCrdtSnapshots = sqliteTable(
tableNames.haex.crdt.snapshots.name,
{
snapshotId: text(tableNames.haex.crdt.snapshots.columns.snapshotId)
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
created: text(),
epochHlc: text(tableNames.haex.crdt.snapshots.columns.epochHlc),
locationUrl: text(tableNames.haex.crdt.snapshots.columns.locationUrl),
@@ -45,8 +45,6 @@ export const haexCrdtSnapshots = sqliteTable(
)

export const haexCrdtConfigs = sqliteTable(tableNames.haex.crdt.configs.name, {
key: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
key: text().primaryKey(),
value: text(),
})
@@ -8,8 +8,11 @@ import {
type AnySQLiteColumn,
type SQLiteColumnBuilderBase,
} from 'drizzle-orm/sqlite-core'
import tableNames from '../tableNames.json'
import { crdtColumnNames } from '.'
import tableNames from '@/database/tableNames.json'

const crdtColumnNames = {
haexTimestamp: 'haex_timestamp',
}

// Helper function to add common CRDT columns ( haexTimestamp)
export const withCrdtColumns = <
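The hunk above moves crdtColumnNames into haex.ts and keeps the withCrdtColumns helper that appends the shared CRDT column to each table definition. A minimal sketch of such a helper, assuming Drizzle's sqlite-core builders; the repository's actual generic signature is only partially visible in this diff:

```ts
import { text, type SQLiteColumnBuilderBase } from 'drizzle-orm/sqlite-core'

const crdtColumnNames = {
  haexTimestamp: 'haex_timestamp',
}

// Spread the table's own columns and append the shared haexTimestamp column.
export const withCrdtColumns = <
  T extends Record<string, SQLiteColumnBuilderBase>,
>(
  columns: T,
) => ({
  ...columns,
  haexTimestamp: text(crdtColumnNames.haexTimestamp),
})
```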
@@ -25,8 +28,8 @@ export const haexSettings = sqliteTable(
tableNames.haex.settings.name,
withCrdtColumns({
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
key: text(),
type: text(),
value: text(),
@@ -40,18 +43,19 @@ export const haexExtensions = sqliteTable(
tableNames.haex.extensions.name,
withCrdtColumns({
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
public_key: text().notNull(),
name: text().notNull(),
version: text().notNull(),
author: text(),
description: text(),
entry: text().notNull().default('index.html'),
entry: text().default('index.html'),
homepage: text(),
enabled: integer({ mode: 'boolean' }).default(true),
icon: text(),
signature: text().notNull(),
single_instance: integer({ mode: 'boolean' }).default(false),
}),
(table) => [
// UNIQUE constraint: Pro Developer (public_key) kann nur eine Extension mit diesem Namen existieren
@@ -65,8 +69,8 @@ export const haexExtensionPermissions = sqliteTable(
tableNames.haex.extension_permissions.name,
withCrdtColumns({
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
extensionId: text(tableNames.haex.extension_permissions.columns.extensionId)
.notNull()
.references((): AnySQLiteColumn => haexExtensions.id, {
@@ -103,7 +107,9 @@ export type SelecthaexExtensionPermissions =
export const haexNotifications = sqliteTable(
tableNames.haex.notifications.name,
withCrdtColumns({
id: text().primaryKey(),
id: text()
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
alt: text(),
date: text(),
icon: text(),
@@ -124,13 +130,14 @@ export const haexWorkspaces = sqliteTable(
tableNames.haex.workspaces.name,
withCrdtColumns({
id: text(tableNames.haex.workspaces.columns.id)
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
deviceId: text(tableNames.haex.workspaces.columns.deviceId).notNull(),
name: text(tableNames.haex.workspaces.columns.name).notNull(),
position: integer(tableNames.haex.workspaces.columns.position)
.notNull()
.default(0),
background: text(),
}),
(table) => [unique().on(table.position)],
)
@@ -141,8 +148,8 @@ export const haexDesktopItems = sqliteTable(
tableNames.haex.desktop_items.name,
withCrdtColumns({
id: text(tableNames.haex.desktop_items.columns.id)
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
.$defaultFn(() => crypto.randomUUID())
.primaryKey(),
workspaceId: text(tableNames.haex.desktop_items.columns.workspaceId)
.notNull()
.references(() => haexWorkspaces.id, { onDelete: 'cascade' }),

2
src/database/schemas/index.ts
Normal file
@@ -0,0 +1,2 @@
export * from './crdt'
export * from './haex'
@@ -1,6 +1,7 @@
<template>
<div class="w-dvw h-dvh flex flex-col">
<div class="w-full h-dvh flex flex-col">
<UPageHeader
ref="headerEl"
as="header"
:ui="{
root: ['px-8 py-0'],
@@ -24,7 +25,7 @@
variant="outline"
icon="i-bi-person-workspace"
size="lg"
:tooltip="t('header.workspaces')"
:tooltip="t('workspaces.label')"
@click="isOverviewMode = !isOverviewMode"
/>
</div>
@@ -53,7 +54,7 @@
</template>
</UPageHeader>

<main class="flex-1 overflow-hidden bg-elevated flex flex-col">
<main class="overflow-hidden relative bg-elevated h-full">
<slot />
</main>

@@ -93,12 +94,9 @@
variant="outline"
class="mt-6"
@click="handleAddWorkspace"
>
<template #leading>
<UIcon name="i-heroicons-plus" />
</template>
New Workspace
</UButton>
icon="i-heroicons-plus"
:label="t('workspaces.add')"
/>
</div>
</template>
</UDrawer>
@@ -127,6 +125,15 @@ const handleAddWorkspace = async () => {
workspaceStore.slideToWorkspace(workspace?.id)
})
}

// Measure header height and store it in UI store
const headerEl = useTemplateRef('headerEl')
const { height } = useElementSize(headerEl)
const uiStore = useUiStore()

watch(height, (newHeight) => {
uiStore.headerHeight = newHeight
})
</script>

<i18n lang="yaml">
@@ -134,12 +141,14 @@ de:
search:
label: Suche

header:
workspaces: Workspaces
workspaces:
label: Workspaces
add: Workspace hinzufügen
en:
search:
label: Search

header:
workspaces: Workspaces
workspaces:
label: Workspaces
add: Add Workspace
</i18n>
@@ -1,112 +1,101 @@
<template>
<div>
<div class="h-full">
<NuxtLayout>
<UDashboardPanel
id="inbox-1"
resizable
class=""
<div
class="flex flex-col justify-center items-center gap-5 mx-auto h-full overflow-scroll"
>
<template #body>
<div class="items-center justify-center flex relative flex-1">
<!-- <div class="absolute top-0 right-0">
<UiDropdownLocale @select="onSelectLocale" />
</div> -->
<UiLogoHaexhub class="bg-primary p-3 size-16 rounded-full shrink-0" />
<span
class="flex flex-wrap font-bold text-pretty text-xl gap-2 justify-center"
>
<p class="whitespace-nowrap">
{{ t('welcome') }}
</p>
<UiTextGradient>Haex Hub</UiTextGradient>
</span>

<div
class="flex flex-col justify-center items-center gap-5 max-w-3xl"
>
<UiLogoHaexhub
class="bg-primary p-3 size-16 rounded-full shrink-0"
/>
<span
class="flex flex-wrap font-bold text-pretty text-xl gap-2 justify-center"
>
<p class="whitespace-nowrap">
{{ t('welcome') }}
</p>
<UiTextGradient>Haex Hub</UiTextGradient>
</span>
<div class="flex flex-col gap-4 h-24 items-stretch justify-center">
<HaexVaultCreate />

<div
class="flex flex-col md:flex-row gap-4 w-full h-24 md:h-auto"
>
<HaexVaultCreate />
<HaexVaultOpen
v-model:open="passwordPromptOpen"
:path="selectedVault?.path"
/>
</div>

<HaexVaultOpen
v-model:open="passwordPromptOpen"
:path="selectedVault?.path"
/>
</div>

<div
v-show="lastVaults.length"
class="w-full"
>
<div class="font-thin text-sm justify-start px-2 pb-1">
{{ t('lastUsed') }}
</div>

<div
class="relative border-base-content/25 divide-base-content/25 flex w-full flex-col divide-y rounded-md border overflow-scroll"
>
<div
v-for="vault in lastVaults"
:key="vault.name"
class="flex items-center justify-between group overflow-x-scroll"
>
<UButton
variant="ghost"
color="neutral"
class="flex items-center no-underline justify-between text-nowrap text-sm md:text-base shrink w-full px-3"
@click="
() => {
passwordPromptOpen = true
selectedVault = vault
}
"
>
<span class="block">
{{ vault.name }}
</span>
</UButton>
<UButton
color="error"
square
class="absolute right-2 hidden group-hover:flex min-w-6"
>
<Icon
name="mdi:trash-can-outline"
@click="prepareRemoveVault(vault.name)"
/>
</UButton>
</div>
</div>
</div>

<div class="flex flex-col items-center gap-2">
<h4>{{ t('sponsors') }}</h4>
<div>
<UButton
variant="link"
@click="openUrl('https://itemis.com')"
>
<UiLogoItemis class="text-[#00457C]" />
</UButton>
</div>
</div>
</div>

<UiDialogConfirm
v-model:open="showRemoveDialog"
:title="t('remove.title')"
:description="
t('remove.description', { vaultName: vaultToBeRemoved })
"
@confirm="onConfirmRemoveAsync"
/>
<div
v-show="lastVaults.length"
class="w-56"
>
<div class="font-thin text-sm pb-1 w-full">
{{ t('lastUsed') }}
</div>
</template>
</UDashboardPanel>

<div
class="relative border-base-content/25 divide-base-content/25 flex w-full flex-col divide-y rounded-md border overflow-scroll"
>
<div
v-for="vault in lastVaults"
:key="vault.name"
class="flex items-center justify-between group overflow-x-scroll"
>
<UiButtonContext
variant="ghost"
color="neutral"
size="xl"
class="flex items-center no-underline justify-between text-nowrap text-sm md:text-base shrink w-full hover:bg-default"
:context-menu-items="[
{
icon: 'mdi:trash-can-outline',
label: t('remove.button'),
onSelect: () => prepareRemoveVault(vault.name),
color: 'error',
},
]"
@click="
() => {
passwordPromptOpen = true
selectedVault = vault
}
"
>
<span class="block">
{{ vault.name }}
</span>
</UiButtonContext>
<UButton
color="error"
square
class="absolute right-2 hidden group-hover:flex min-w-6"
>
<Icon
name="mdi:trash-can-outline"
@click="prepareRemoveVault(vault.name)"
/>
</UButton>
</div>
</div>
</div>

<div class="flex flex-col items-center gap-2">
<h4>{{ t('sponsors') }}</h4>
<div>
<UButton
variant="link"
@click="openUrl('https://itemis.com')"
>
<UiLogoItemis class="text-[#00457C]" />
</UButton>
</div>
</div>
</div>

<UiDialogConfirm
v-model:open="showRemoveDialog"
:title="t('remove.title')"
:description="t('remove.description', { vaultName: vaultToBeRemoved })"
@confirm="onConfirmRemoveAsync"
/>
</NuxtLayout>
</div>
</template>
@@ -129,6 +118,9 @@ const showRemoveDialog = ref(false)

const { lastVaults } = storeToRefs(useLastVaultStore())

const { syncLastVaultsAsync, moveVaultToTrashAsync } = useLastVaultStore()
const { syncDeviceIdAsync } = useDeviceStore()

const vaultToBeRemoved = ref('')
const prepareRemoveVault = (vaultName: string) => {
vaultToBeRemoved.value = vaultName
@@ -138,7 +130,7 @@ const prepareRemoveVault = (vaultName: string) => {
const toast = useToast()
const onConfirmRemoveAsync = async () => {
try {
await removeVaultAsync(vaultToBeRemoved.value)
await moveVaultToTrashAsync(vaultToBeRemoved.value)
showRemoveDialog.value = false
await syncLastVaultsAsync()
} catch (error) {
@@ -149,9 +141,6 @@ const onConfirmRemoveAsync = async () => {
}
}

const { syncLastVaultsAsync, removeVaultAsync } = useLastVaultStore()
const { syncDeviceIdAsync } = useDeviceStore()

onMounted(async () => {
try {
await syncLastVaultsAsync()
@@ -168,6 +157,7 @@ de:
lastUsed: 'Zuletzt verwendete Vaults'
sponsors: Supported by
remove:
button: Löschen
title: Vault löschen
description: Möchtest du die Vault {vaultName} wirklich löschen?

@@ -176,6 +166,7 @@ en:
lastUsed: 'Last used Vaults'
sponsors: 'Supported by'
remove:
button: Delete
title: Delete Vault
description: Are you sure you really want to delete {vaultName}?
</i18n>
@@ -9,6 +9,7 @@
v-model:open="showNewDeviceDialog"
:confirm-label="t('newDevice.save')"
:title="t('newDevice.title')"
:description="t('newDevice.setName')"
confirm-icon="mdi:content-save-outline"
@abort="showNewDeviceDialog = false"
@confirm="onSetDeviceNameAsync"
@@ -1,6 +1,8 @@
<template>
<div class="w-full h-full flex items-center justify-center">
<HaexDesktop />
<div>
<UDashboardPanel resizable>
<HaexDesktop />
</UDashboardPanel>
</div>
</template>
25
src/plugins/init-logger.ts
Normal file
@@ -0,0 +1,25 @@
export default defineNuxtPlugin({
name: 'init-logger',
enforce: 'pre',
parallel: false,
setup() {
// Add global error handler for better debugging
window.addEventListener('error', (event) => {
console.error('[HaexHub] Global error caught:', {
message: event.message,
filename: event.filename,
lineno: event.lineno,
colno: event.colno,
error: event.error,
stack: event.error?.stack,
})
})

window.addEventListener('unhandledrejection', (event) => {
console.error('[HaexHub] Unhandled rejection:', {
reason: event.reason,
promise: event.promise,
})
})
},
})
@@ -1,9 +1,9 @@
import { eq } from 'drizzle-orm'
import { haexDesktopItems } from '~~/src-tauri/database/schemas'
import { haexDesktopItems } from '~/database/schemas'
import type {
InsertHaexDesktopItems,
SelectHaexDesktopItems,
} from '~~/src-tauri/database/schemas'
} from '~/database/schemas'
import de from './de.json'
import en from './en.json'

@@ -298,6 +298,28 @@ export const useDesktopStore = defineStore('desktopStore', () => {
openDesktopItem(itemType, referenceId)
}

// Build second menu group based on item type
const secondGroup = [
{
label: $i18n.t('desktop.contextMenu.removeFromDesktop'),
icon: 'i-heroicons-x-mark',
onSelect: async () => {
await removeDesktopItemAsync(id)
},
},
]

// Only show uninstall option for extensions
if (itemType === 'extension') {
secondGroup.push({
label: $i18n.t('desktop.contextMenu.uninstall'),
icon: 'i-heroicons-trash',
onSelect: async () => {
onUninstall()
},
})
}

return [
[
{
@@ -306,20 +328,7 @@ export const useDesktopStore = defineStore('desktopStore', () => {
onSelect: handleOpen,
},
],
[
{
label: $i18n.t('desktop.contextMenu.removeFromDesktop'),
icon: 'i-heroicons-x-mark',
onSelect: async () => {
await removeDesktopItemAsync(id)
},
},
{
label: $i18n.t('desktop.contextMenu.uninstall'),
icon: 'i-heroicons-trash',
onSelect: onUninstall,
},
],
secondGroup,
]
}
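After the change above, the desktop context menu is built from two groups, and only extension items receive the uninstall entry. A hedged sketch of the two resulting layouts (labels are resolved through $i18n in the store; the handlers below are placeholder stubs, not the store's functions):

```ts
// Sketch only: the two menu shapes produced after this change.
const handleOpen = () => {}
const removeFromDesktop = async () => {}
const onUninstall = () => {}

const extensionItemMenu = [
  [{ label: 'Open', onSelect: handleOpen }],
  [
    { label: 'Remove from desktop', icon: 'i-heroicons-x-mark', onSelect: removeFromDesktop },
    { label: 'Uninstall', icon: 'i-heroicons-trash', onSelect: onUninstall },
  ],
]

const otherItemMenu = [
  [{ label: 'Open', onSelect: handleOpen }],
  [{ label: 'Remove from desktop', icon: 'i-heroicons-x-mark', onSelect: removeFromDesktop }],
]
```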
@@ -300,6 +300,7 @@ export const useWindowManagerStore = defineStore('windowManager', () => {
const window = windows.value.find((w) => w.id === windowId)
if (window) {
window.zIndex = nextZIndex.value++
window.isMinimized = false
activeWindowId.value = windowId
}
}
@@ -2,8 +2,9 @@ import { asc, eq } from 'drizzle-orm'
import {
haexWorkspaces,
type SelectHaexWorkspaces,
} from '~~/src-tauri/database/schemas'
} from '~/database/schemas'
import type { Swiper } from 'swiper/types'
import { convertFileSrc } from '@tauri-apps/api/core'

export type IWorkspace = SelectHaexWorkspaces

@@ -203,12 +204,86 @@ export const useWorkspaceStore = defineStore('workspaceStore', () => {
isOverviewMode.value = false
}

const updateWorkspaceBackgroundAsync = async (
workspaceId: string,
base64Image: string | null,
) => {
if (!currentVault.value?.drizzle) {
throw new Error('Kein Vault geöffnet')
}

try {
const result = await currentVault.value.drizzle
.update(haexWorkspaces)
.set({ background: base64Image })
.where(eq(haexWorkspaces.id, workspaceId))
.returning()

if (result.length > 0 && result[0]) {
const index = workspaces.value.findIndex((ws) => ws.id === workspaceId)
if (index !== -1) {
workspaces.value[index] = result[0]
}
}
} catch (error) {
console.error('Fehler beim Aktualisieren des Workspace-Hintergrunds:', error)
throw error
}
}

const getWorkspaceBackgroundStyle = (workspace: IWorkspace) => {
if (!workspace.background) return {}

// The background field contains the absolute file path
// Convert it to an asset URL
const assetUrl = convertFileSrc(workspace.background)

return {
backgroundImage: `url(${assetUrl})`,
backgroundSize: 'cover',
backgroundPosition: 'center',
backgroundRepeat: 'no-repeat',
}
}

const getWorkspaceContextMenuItems = (workspaceId: string) => {
const windowManager = useWindowManagerStore()

return [[
{
label: 'Hintergrund ändern',
icon: 'i-mdi-image',
onSelect: async () => {
// Store the workspace ID for settings to use
currentWorkspaceIndex.value = workspaces.value.findIndex(
(ws) => ws.id === workspaceId,
)
// Get settings window info
const settingsWindow = windowManager.getAllSystemWindows()
.find((win) => win.id === 'settings')

if (settingsWindow) {
await windowManager.openWindowAsync({
type: 'system',
sourceId: settingsWindow.id,
title: settingsWindow.name,
icon: settingsWindow.icon || undefined,
workspaceId,
})
}
},
},
]]
}

return {
addWorkspaceAsync,
allowSwipe,
closeWorkspaceAsync,
currentWorkspace,
currentWorkspaceIndex,
getWorkspaceBackgroundStyle,
getWorkspaceContextMenuItems,
isOverviewMode,
slideToWorkspace,
loadWorkspacesAsync,
@@ -218,6 +293,7 @@ export const useWorkspaceStore = defineStore('workspaceStore', () => {
switchToNext,
switchToPrevious,
switchToWorkspace,
updateWorkspaceBackgroundAsync,
workspaces,
}
})
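A small usage sketch for the new background helpers (hedged; this is not the actual workspace component): the store turns the stored file path into an asset URL via convertFileSrc, so a component only needs to bind the returned style object.

```vue
<script setup lang="ts">
// Sketch only: assumes Nuxt auto-imports the Pinia store defined above.
const workspaceStore = useWorkspaceStore()
</script>

<template>
  <div
    v-for="workspace in workspaceStore.workspaces"
    :key="workspace.id"
    class="h-full w-full"
    :style="workspaceStore.getWorkspaceBackgroundStyle(workspace)"
  />
</template>
```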
@@ -50,7 +50,7 @@ export const useExtensionsStore = defineStore('extensionsStore', () => {
currentExtension.value.publicKey,
currentExtension.value.name,
currentExtension.value.version,
'index.html',
currentExtension.value.entry ?? 'index.html',
currentExtension.value.devServerUrl ?? undefined,
)
})
@@ -1,5 +1,6 @@
import { breakpointsTailwind } from '@vueuse/core'
import { broadcastContextToAllExtensions } from '~/composables/extensionMessageHandler'

import de from './de.json'
import en from './en.json'

@@ -10,8 +11,9 @@ export const useUiStore = defineStore('uiStore', () => {
const isSmallScreen = breakpoints.smaller('sm')

const { $i18n } = useNuxtApp()
const { locale } = useI18n()
const { platform } = useDeviceStore()
const { locale } = useI18n({
useScope: 'global',
})

$i18n.setLocaleMessage('de', {
ui: de,
@@ -60,19 +62,23 @@ export const useUiStore = defineStore('uiStore', () => {
})

// Broadcast theme and locale changes to extensions
watch([currentThemeName, locale], () => {
watch([currentThemeName, locale], async () => {
const deviceStore = useDeviceStore()
const platformValue = await deviceStore.platform
broadcastContextToAllExtensions({
theme: currentThemeName.value,
locale: locale.value,
platform,
platform: platformValue,
})
})

const viewportHeightWithoutHeader = ref(0)
const headerHeight = ref(0)

return {
availableThemes,
viewportHeightWithoutHeader,
headerHeight,
currentTheme,
currentThemeName,
defaultTheme,
@@ -2,7 +2,7 @@

import { drizzle } from 'drizzle-orm/sqlite-proxy'
import { invoke } from '@tauri-apps/api/core'
import { schema } from '@/../src-tauri/database/index'
import { schema } from '~/database'
import type {
AsyncRemoteCallback,
SqliteRemoteDatabase,
@@ -21,11 +21,12 @@ export const useVaultStore = defineStore('vaultStore', () => {
public: { haexVault },
} = useRuntimeConfig()

const router = useRouter()
const currentVaultId = computed<string | undefined>({
get: () =>
getSingleRouteParam(useRouter().currentRoute.value.params.vaultId),
getSingleRouteParam(router.currentRoute.value.params.vaultId),
set: (newVaultId) => {
useRouter().currentRoute.value.params.vaultId = newVaultId ?? ''
router.currentRoute.value.params.vaultId = newVaultId ?? ''
},
})
@@ -22,9 +22,14 @@ export const useLastVaultStore = defineStore('lastVaultStore', () => {
return await invoke('delete_vault', { vaultName })
}

const moveVaultToTrashAsync = async (vaultName: string) => {
return await invoke('move_vault_to_trash', { vaultName })
}

return {
syncLastVaultsAsync,
lastVaults,
removeVaultAsync,
moveVaultToTrashAsync,
}
})
@@ -2,7 +2,7 @@ import { and, eq, or, type SQLWrapper } from 'drizzle-orm'
import {
haexNotifications,
type InsertHaexNotifications,
} from '~~/src-tauri/database/schemas/haex'
} from '~/database/schemas/haex'
import {
isPermissionGranted,
requestPermission,
@@ -31,7 +31,12 @@ export const useNotificationStore = defineStore('notificationStore', () => {
}

const checkNotificationAsync = async () => {
isNotificationAllowed.value = await isPermissionGranted()
try {
isNotificationAllowed.value = await isPermissionGranted()
} catch (error) {
console.warn('Notification permission check failed:', error)
isNotificationAllowed.value = false
}
return isNotificationAllowed.value
}
@@ -1,6 +1,6 @@
import { and, eq } from 'drizzle-orm'
import { z } from 'zod'
import * as schema from '@/../src-tauri/database/schemas/haex'
import * as schema from '~/database/schemas/haex'
import type { Locale } from 'vue-i18n'

export enum VaultSettingsTypeEnum {
@@ -1,14 +0,0 @@
Blocking waiting for file lock on build directory
23.313630402s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex_hub_lib"}: cargo::core::compiler::fingerprint: stale: missing "/home/haex/Projekte/haex-hub/src-tauri/src/database/schemas/crdt.ts"
23.319711685s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex_hub_lib"}: cargo::core::compiler::fingerprint: fingerprint dirty for haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri)/Check { test: false }/TargetInner { ..: lib_target("haex_hub_lib", ["staticlib", "cdylib", "rlib"], "/home/haex/Projekte/haex-hub/src-tauri/src/lib.rs", Edition2021) }
23.319734303s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex_hub_lib"}: cargo::core::compiler::fingerprint: dirty: FsStatusOutdated(StaleDepFingerprint { name: "build_script_build" })
23.322781234s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="build-script-build"}: cargo::core::compiler::fingerprint: fingerprint dirty for haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri)/RunCustomBuild/TargetInner { ..: custom_build_target("build-script-build", "/home/haex/Projekte/haex-hub/src-tauri/build.rs", Edition2021) }
23.322837026s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="build-script-build"}: cargo::core::compiler::fingerprint: dirty: FsStatusOutdated(StaleItem(MissingFile("/home/haex/Projekte/haex-hub/src-tauri/src/database/schemas/crdt.ts")))
23.335082427s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex_hub_lib"}: cargo::core::compiler::fingerprint: fingerprint dirty for haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri)/Check { test: true }/TargetInner { ..: lib_target("haex_hub_lib", ["staticlib", "cdylib", "rlib"], "/home/haex/Projekte/haex-hub/src-tauri/src/lib.rs", Edition2021) }
23.335103454s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex_hub_lib"}: cargo::core::compiler::fingerprint: dirty: FsStatusOutdated(StaleDepFingerprint { name: "build_script_build" })
23.336844253s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex-hub"}: cargo::core::compiler::fingerprint: fingerprint dirty for haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri)/Check { test: false }/TargetInner { name: "haex-hub", doc: true, ..: with_path("/home/haex/Projekte/haex-hub/src-tauri/src/main.rs", Edition2021) }
23.336854407s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex-hub"}: cargo::core::compiler::fingerprint: dirty: FsStatusOutdated(StaleDepFingerprint { name: "haex_hub_lib" })
23.338162602s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex-hub"}: cargo::core::compiler::fingerprint: fingerprint dirty for haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri)/Check { test: true }/TargetInner { name: "haex-hub", doc: true, ..: with_path("/home/haex/Projekte/haex-hub/src-tauri/src/main.rs", Edition2021) }
23.338170106s INFO prepare_target{force=false package_id=haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri) target="haex-hub"}: cargo::core::compiler::fingerprint: dirty: FsStatusOutdated(StaleDepFingerprint { name: "haex_hub_lib" })
Compiling haex-hub v0.1.0 (/home/haex/Projekte/haex-hub/src-tauri)
Finished `dev` profile [unoptimized + debuginfo] target(s) in 25.44s
File diff suppressed because one or more lines are too long