1 Commit

Author SHA1 Message Date
5960613357 test 2025-05-07 11:32:09 +02:00
245 changed files with 11625 additions and 50453 deletions

4
.gitignore vendored

@@ -22,6 +22,6 @@ dist-ssr
 *.njsproj
 *.sln
 *.sw?
 .nuxt
-src-tauri/target
+.output
 nogit*

1
.npmrc

@@ -1 +0,0 @@
shamefully-hoist=true

10
.prettierrc.toml Normal file

@@ -0,0 +1,10 @@
# .prettierrc.toml
useTabs = false
tabWidth = 2
printWidth = 100
endOfLine = "lf"
# Not supported yet
# trailingComma = "es5"
# embeddedLanguageFormatting = "auto"

148
tailwind.config.ts.old Normal file

@@ -0,0 +1,148 @@
import { defineConfig } from '@nuxtjs/tailwindcss/config';
//import { iconsPlugin, dynamicIconsPlugin } from '@egoist/tailwindcss-icons';
//import colors from 'tailwindcss/colors';
import themes from 'flyonui/src/theming/themes';
//import * as tailwindMotion from 'tailwindcss-motion';
import { addDynamicIconSelectors } from '@iconify/tailwind';
export default defineConfig({
content: ['./src/**/*.{vue,ts,svg}', './node_modules/flyonui/dist/js/*.js'],
darkMode: 'selector',
plugins: [
/* iconsPlugin(),
dynamicIconsPlugin(), */
addDynamicIconSelectors(),
require('flyonui'),
require('flyonui/plugin'),
//tailwindMotion,
],
flyonui: {
themes: [
{
light: {
...themes.light,
/* primary: colors.teal[500],
secondary: colors.purple[500], */
},
soft: {
...themes.soft,
/* primary: colors.teal[500],
secondary: colors.purple[500], */
},
dark: {
...themes.dark,
/* primary: colors.cyan[700], //colors.teal[600],
secondary: colors.purple[500], */
/* 'primary-content': '#000516',
'secondary': '#008f9c',
'secondary-content': '#000709',
'accent': '#007f7a',
'accent-content': '#d3e5e3',
'neutral': '#321a15',
'neutral-content': '#d3ccca',
'base-100': '#002732',
'base-200': '#00202a',
'base-300': '#001a22',
'base-content': '#c8cfd2',
'info': '#0086b2',
'info-content': '#00060c',
'success': '#a5da00',
'success-content': '#0a1100',
'warning': '#ff8d00',
'warning-content': '#160700',
'error': '#c83849',
'error-content': '#f9d9d9', */
},
},
],
/* themes: [
{
dark: {
'primary': colors.teal[500],
'primary-content': '#010811',
'secondary': colors.purple[500],
'secondary-content': '#130201',
'accent': '#9b59b6',
'accent-content': '#ebddf1',
'neutral': '#95a5a6',
'neutral-content': '#080a0a',
'base-100': colors.slate[100],
'base-200': colors.slate[400],
'base-300': colors.slate[900],
'base-content': colors.slate[800],
'info': '#1abc9c',
'info-content': '#000d09',
'success': '#2ecc71',
'success-content': '#010f04',
'warning': '#f1c40f',
'warning-content': '#140e00',
'error': '#e74c3c',
'error-content': '#130201',
},
light: {
'primary': colors.teal[500],
'primary-content': '#010811',
'secondary': colors.purple[500],
'secondary-content': '#130201',
'accent': '#9b59b6',
'accent-content': '#ebddf1',
'neutral': '#95a5a6',
'neutral-content': '#080a0a',
'base-100': '#ecf0f1',
'base-200': '#cdd1d2',
'base-300': '#afb2b3',
'base-content': '#131414',
'info': '#1abc9c',
'info-content': '#000d09',
'success': '#2ecc71',
'success-content': '#010f04',
'warning': '#f1c40f',
'warning-content': '#140e00',
'error': '#e74c3c',
'error-content': '#130201',
},
},
], */
},
/* theme: {
extend: {
colors: {
'primary-active': colors.teal[600],
'primary-focus': colors.teal[400],
'primary-hover': colors.teal[400],
'primary': colors.teal[500],
'dark': {
'primary-active': colors.teal[700],
'primary-focus': colors.teal[600],
'primary-hover': colors.teal[600],
'primary': colors.teal[500],
},
'secondary': colors.sky[500],
},
fontFamily: {
sans: [
'Adelle',
'Roboto Slab',
'DejaVu Serif',
'Georgia',
'Graphik',
'sans-serif',
],
serif: ['Merriweather', 'serif'],
},
screens: {
xs: '360px',
},
transitionProperty: {
height: 'height',
},
},
}, */
}); // satisfies Config;

README.md

@@ -4,9 +4,9 @@
 Today, we undoubtedly find ourselves in the computer age. Almost everyone owns at least one computer, often even more. Most probably have at least a smartphone and a standard PC. On each of these devices (Desktop PC, Laptop, Tablet, Smartphone) and systems (Windows, macOS, Linux (all flavors), Android, iOS), there are various programs and data, which can be highly individual and sensitive. Unfortunately, interoperability between these devices and systems often proves difficult, sometimes even impossible, for a multitude of reasons. On one hand, there are the system providers themselves (like Microsoft, Apple, Google), who often design their systems to make it as easy as possible for users to enter their ecosystems, but place many hurdles in the way when users wish to leave again. The golden cage, as we say in Germany, or walled garden. However, it's not just the system providers per se who make cross-device and cross-system work difficult. Another problem lies with the software manufacturers/providers. Since it is already challenging and above all resource-intensive (time, money, and technical know-how) to provide a good and "secure" product for one device class and/or system, it's not uncommon for a program to be developed (initially) for only one platform. So, there might be a program for Windows or Apple, but not for Linux, or only in one distribution/package format. Or there might be an app for iOS and/or Android, but not for the PC. This is partly due to the fact that it would simply be too complex to develop and, especially, maintain a product for multiple systems and devices (simultaneously).
+This effort is almost insurmountable, particularly for startups, small businesses, and individual open-source developers working on their passion projects in their spare time.
 Let's not even start talking about application distribution. For each platform, you end up with a separate build pipeline that builds, tests, signs, packages the application into the appropriate format (msi, exe, deb, flatpak, snap, AppImage, Apk, etc.), and delivers it to the corresponding store (AppStore, PlayStore, Windows Store, and the various repositories of Linux distributions). This is a huge cascade of tasks that especially causes problems for small companies (at least if you want to serve ALL platforms simultaneously).
-Wouldn't it be nice if there were a simple(r) way for developers to develop and build their application just once and then be able to serve ALL\* devices and systems? PWAs were already on the right track, but there is often a lack of more in-depth access to system resources, such as file or console access.
+Wouldn't it be nice if there were a simple way for developers to develop and build their application just once and then be able to serve ALL\* devices and systems? To have your "entire computer" on your USB stick, everywhere, at any time? And no matter which computer in the world you're currently using, you have everything with you? All programs, files, passwords, etc., on every device (Desktop PC, Laptop, Tablet, Smartphone), every system (Windows, macOS, Linux (all flavors), Android, iOS), anytime. Yes, this might sound confusing and unreal at first, but the idea is fantastic. It would give users back more digital self-empowerment. Only when users can no longer be held captive in golden cages can they emancipate themselves from the tech giants. Only when users can decide for themselves at any time which data they want to share with whom, for what purpose, and for what period, are they truly masters and not just commodities of their data.
+HaexHub gives any web application/PWA superpowers.
+Extensions can be used to add any functions to HaexHub, whereby almost any access to the underlying system is possible, provided that the necessary authorizations have been granted by the user beforehand. And HaexHub would be the path to achieve this.
 \*In principle, the approach presented here allows an application to run on all devices and systems. However, some applications might still only be usable on certain devices or systems. For example, if an application absolutely requires an NFC device, which is typically not found on a desktop PC, then this application will probably only work on mobile devices. Or if an application requires system-specific interfaces or programs, such as the Registry on Windows or systemd on Linux, then this application will naturally only work where these dependencies are found. However, developers who create their applications without such dependencies can immediately serve all devices and systems.
@@ -31,7 +31,7 @@ But first things first.
 The technical foundation of the project is Tauri. This framework makes it possible to provide native applications for all common devices (Desktops, Laptops, Tablets, Smartphones) and systems (Windows, Linux, macOS, Android, iOS) with the same codebase. Tauri is comparable to Electron (the technical basis for Visual Studio Code, for example), but the applications created with it are significantly smaller because Tauri uses the native rendering engine of the respective platform (WebView2 (Windows), WKWebView (macOS), WebKitGTK (Linux)) and does not bundle a (customized Chromium) browser, as is the case with Electron. Furthermore, Tauri offers significant advantages over Electron in terms of security and resource efficiency. There is also a sophisticated permission system, which effectively shields the frontend from the host. All access to the host system is only possible with the appropriate permission. This permission concept is also used for the (HaexHub) extensions, thereby ensuring the security of third-party extensions as well.
 The project follows a strict local-first approach. This means that HaexHub can fundamentally be used without any form of online account or internet access. The extensions are also stored locally and can be used offline, provided, of course, that the extension itself can function without the internet. A messenger extension will likely make limited sense without internet access. An image viewer or text editor, however, should work fine without the internet.
 All user data can be persistently stored and used in a locally encrypted SQLite database, even across extensions, with the appropriate permissions, of course. Unlike many other applications that call themselves local-first, this project implements this approach more consistently. Most applications claiming to be local-first often aren't truly so. The data usually resides (unencrypted) on a backend server and is merely "cached" to varying degrees in the frontend. While this allows these applications to be used offline for a while, the usage is either restricted (read-only in Bitwarden, for example) or the persistence is temporary at best. Most approaches, like this project, use an SQLite (or similar) database in the frontend to achieve offline capability, but this is usually implemented in a browser via IndexedDB or OPFS. Examples include [powersync](https://www.powersync.com/), [evolu](https://www.evolu.dev/), or [electricSql](https://electric-sql.com/). The problem here is that such persistence is never truly permanent, as the operating system and/or browser can decide when to free up storage. For instance, it's common for Apple to clear the storage of web applications that haven't been used for over a week. As long as the user's data is still present in the backend, this is only moderately tragic, as the "source of truth" residing there can be synchronized back to the frontend at any time. However, this always requires an online account and internet access. Furthermore, with these approaches, the user cannot simply copy their data onto a USB stick and take it with them to use on a completely different computer (perhaps where only intranet is available).
 Moreover, all these approaches are subject to the limitations of the respective browser. The limitation on persistent storage is particularly noteworthy here. All browsers have strict limits, which is why this approach is not suitable for all requirements. Since HaexHub stores data not in the browser, but in a real SQLite database on the hard drive, it is only subject to the hardware limitations of the host system (or USB stick/storage medium).
 With HaexHub, all user and extension data can be permanently stored in the local and encrypted database without requiring an online account. However, to make the user's data conveniently and securely available on multiple devices, there will be a synchronization service to synchronize the database state across the user's various devices and systems. The user can, of course, also host this service themselves on their (local) systems or servers. The database state is thus temporarily stored on a (third-party) server and can be synchronized from there with other instances of the local SQLite database. To further enhance data security, the user can also encrypt the data before sending it to the backend, making it unreadable by third parties. This will likely be enabled by default, but it can also be turned off, as there are legitimate use cases where it might be disadvantageous or undesirable. Particularly in corporate or government environments, it could be problematic if all user (employee) data were stored encrypted on the company servers. If the employee becomes unavailable (resignation, accident, death) and their database password (or the encryption key stored in the database) is unknown, there would be no way to access this data.
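
The encrypted, on-disk vault described above can be sketched from the frontend with `@tauri-apps/plugin-sql`, which is among this commit's dependencies. A minimal sketch, assuming the `sqlite:database/vault.db` path from `src-tauri/.env` and assuming the SQLCipher key is applied via `PRAGMA key` (the `bundled-sqlcipher` feature in Cargo.toml suggests, but does not confirm, exactly this wiring):
```ts
// Minimal sketch, not HaexHub's actual code.
import Database from '@tauri-apps/plugin-sql'

export async function openVault(passphrase: string) {
  // A real file on disk (see DATABASE_URL in src-tauri/.env), so persistence
  // is bounded only by the host's storage, not by browser quotas.
  const db = await Database.load('sqlite:database/vault.db')
  // Assumption: SQLCipher derives the database key from this pragma. PRAGMAs
  // cannot take bound parameters, so the passphrase must be escaped manually.
  await db.execute(`PRAGMA key = '${passphrase.replaceAll("'", "''")}'`)
  return db
}
```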
@@ -68,30 +68,9 @@ Further examples of extensions include calendars, (collaborative) document manag
 install:
-- [nodejs/nvm](https://nodejs.org/en/download)
+- [nodejs](https://nodejs.org/en/download)
 - [tauri](https://v2.tauri.app/start/prerequisites/)
 - [rust](https://v2.tauri.app/start/prerequisites/#rust)
-- [android-studio](https://developer.android.com/studio?hl=de)
-- webkit2gtk + GTK3
-```bash
-# debian/ubuntu
-sudo apt update
-sudo apt install \
-  libwebkit2gtk-4.1-dev \
-  libgtk-3-dev \
-  libayatana-appindicator3-dev \
-  librsvg2-dev
-```
-```bash
-# fedora
-sudo dnf install \
-  webkit2gtk4.1-devel \
-  gtk3-devel \
-  libappindicator-gtk3 \
-  librsvg2-devel
-```
 - port 3003 needs to be open/free or you need to adjust it in `nuxt.config.ts` AND `src-tauri/tauri.conf.json`
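
The permission gate the README sketches (extensions may only touch the system after the user grants access) lines up with the `haex_extensions_permissions` table added later in this commit. A hypothetical TypeScript sketch of such a check; the command name `check_extension_permission` is an illustrative assumption, not an API this commit defines:
```ts
import { invoke } from '@tauri-apps/api/core'

type Operation = 'read' | 'write'

// Hypothetical helper: ask the Rust backend whether an extension may perform
// an operation. The backend would run the prepared statement cached in .sqlx:
//   SELECT ... FROM haex_extensions_permissions
//   WHERE extension_id = ? AND resource = ? AND operation = ?
export async function hasPermission(
  extensionId: string,
  resource: string,
  operation: Operation,
): Promise<boolean> {
  return await invoke<boolean>('check_extension_permission', {
    extensionId,
    resource,
    operation,
  })
}
```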

drizzle.config.ts

@ -1,4 +1,4 @@
import { defineConfig } from 'drizzle-kit' import { defineConfig } from 'drizzle-kit';
export default defineConfig({ export default defineConfig({
schema: './src-tauri/database/schemas/**.ts', schema: './src-tauri/database/schemas/**.ts',
@ -7,4 +7,4 @@ export default defineConfig({
dbCredentials: { dbCredentials: {
url: './src-tauri/database/vault.db', url: './src-tauri/database/vault.db',
}, },
}) });

eslint.config.mjs

@@ -1,4 +0,0 @@
import withNuxt from './.nuxt/eslint.config.mjs'
export default withNuxt()
// Your custom configs here

nuxt.config.ts

@@ -1,115 +1,95 @@
-//import tailwindcss from '@tailwindcss/vite'
 // https://nuxt.com/docs/api/configuration/nuxt-config
+import tailwindcss from "@tailwindcss/vite";
 export default defineNuxtConfig({
-  compatibilityDate: '2025-07-15',
-  devtools: { enabled: true },
-  srcDir: './src',
-  app: {
-    pageTransition: {
-      name: 'fade',
-    },
-  },
+  compatibilityDate: "2024-11-01",
   modules: [
-    'nuxt-zod-i18n',
-    '@nuxtjs/i18n',
-    '@pinia/nuxt',
-    '@vueuse/nuxt',
-    '@nuxt/icon',
-    '@nuxt/eslint',
-    //"@nuxt/image",
-    '@nuxt/fonts',
-    '@nuxt/ui',
+    "nuxt-zod-i18n",
+    "@nuxtjs/i18n",
+    "@nuxtjs/tailwindcss",
+    "@pinia/nuxt",
+    "@vueuse/nuxt",
+    "@nuxt/icon",
+    "nuxt-snackbar",
+    "@nuxt/image",
   ],
   imports: {
-    dirs: [
-      'composables/**',
-      'stores/**',
-      'components/**',
-      'pages/**',
-      'types/**',
-    ],
+    dirs: ["composables/**", "stores/**", "components/**", "pages/**"],
   },
-  css: ['./assets/css/main.css'],
-  icon: {
-    provider: 'server',
-    mode: 'svg',
-    clientBundle: {
-      icons: ['solar:global-outline', 'gg:extension', 'hugeicons:corporate'],
-      scan: true,
-      includeCustomCollections: true,
-    },
-    serverBundle: {
-      collections: ['mdi', 'line-md', 'solar', 'gg', 'emojione'],
-    },
-    customCollections: [
-      {
-        prefix: 'my-icon',
-        dir: './src/assets/icons/',
-      },
-    ],
-  },
   i18n: {
-    strategy: 'prefix_and_default',
-    defaultLocale: 'de',
+    strategy: "prefix_and_default",
+    defaultLocale: "de",
+    vueI18n: "../src/i18n/i18n.config.ts",
     locales: [
-      { code: 'de', language: 'de-DE', isCatchallLocale: true },
-      { code: 'en', language: 'en-EN' },
+      { code: "de", language: "de-DE", isCatchallLocale: true },
+      { code: "en", language: "en-EN" },
     ],
     detectBrowserLanguage: {
       useCookie: true,
-      cookieKey: 'i18n_redirected',
-      redirectOn: 'root', // recommended
+      cookieKey: "i18n_redirected",
+      redirectOn: "root", // recommended
     },
-    types: 'composition',
+    types: "composition",
+    bundle: {
+      optimizeTranslationDirective: false,
+    },
   },
   zodI18n: {
     localeCodesMapping: {
-      'en-GB': 'en',
-      'de-DE': 'de',
+      "en-GB": "en",
+      "de-DE": "de",
     },
   },
   runtimeConfig: {
     public: {
       haexVault: {
-        lastVaultFileName: 'lastVaults.json',
-        instanceFileName: 'instance.json',
-        defaultVaultName: 'HaexHub',
+        lastVaultFileName: "lastVaults.json",
+        //defaultDatabase: 'src/database/default.db',
       },
     },
   },
+  /* tailwindcss: {
+    cssPath: [`assets/css/main.css`, { injectPosition: "first" }],
+    config: {},
+    viewer: true,
+    exposeConfig: false,
+  },
+  */
+  css: ["~/assets/css/main.css"],
+  devtools: { enabled: true },
+  srcDir: "./src",
+  // Enable SSG
   ssr: false,
   // Enables the development server to be discoverable by other devices when running on iOS physical devices
-  devServer: {
-    host: '0',
-    port: 3003,
-  },
+  devServer: { host: process.env.TAURI_DEV_HOST || "localhost", port: 3003 },
   vite: {
-    //plugins: [tailwindcss()],
     // Better support for Tauri CLI output
     clearScreen: false,
     // Enable environment variables
     // Additional environment variables can be found at
     // https://v2.tauri.app/reference/environment-variables/
-    envPrefix: ['VITE_', 'TAURI_'],
+    envPrefix: ["VITE_", "TAURI_"],
     server: {
       // Tauri requires a consistent port
       strictPort: true,
     },
-    /* plugins: [wasm(), topLevelAwait()],
-    worker: {
-      format: 'es',
-      plugins: () => [wasm(), topLevelAwait()],
-    }, */
+    plugins: [tailwindcss()],
   },
-  ignore: ['**/src-tauri/**'],
-})
+});

package.json

@@ -1,5 +1,5 @@
 {
-  "name": "tauri-app",
+  "name": "haex-hub",
   "private": true,
   "version": "0.1.0",
   "type": "module",
@@ -10,70 +10,52 @@
     "preview": "nuxt preview",
     "postinstall": "nuxt prepare",
     "tauri": "tauri",
-    "tauri:build:debug": "tauri build --debug",
-    "generate:rust-types": "tsx ./src-tauri/database/generate-rust-types.ts",
     "drizzle:generate": "drizzle-kit generate",
-    "drizzle:migrate": "drizzle-kit migrate",
-    "eslint:fix": "eslint --fix"
+    "drizzle:migrate": "drizzle-kit migrate"
   },
   "dependencies": {
-    "@nuxt/eslint": "1.9.0",
-    "@nuxt/fonts": "0.11.4",
-    "@nuxt/icon": "2.0.0",
-    "@nuxt/ui": "4.0.0",
-    "@nuxtjs/i18n": "10.0.6",
-    "@pinia/nuxt": "^0.11.1",
-    "@tailwindcss/vite": "^4.1.10",
+    "@libsql/client": "^0.15.4",
+    "@nuxt/icon": "1.11.0",
+    "@nuxt/image": "1.10.0",
+    "@nuxtjs/i18n": "^9.5.4",
+    "@pinia/nuxt": "^0.11.0",
+    "@tailwindcss/vite": "^4.1.5",
     "@tauri-apps/api": "^2.5.0",
-    "@tauri-apps/plugin-dialog": "^2.2.2",
-    "@tauri-apps/plugin-fs": "^2.3.0",
-    "@tauri-apps/plugin-http": "2.5.2",
-    "@tauri-apps/plugin-notification": "2.3.1",
-    "@tauri-apps/plugin-opener": "^2.3.0",
-    "@tauri-apps/plugin-os": "^2.2.2",
-    "@tauri-apps/plugin-sql": "2.3.0",
-    "@tauri-apps/plugin-store": "^2.2.1",
-    "@vueuse/components": "^13.9.0",
-    "@vueuse/core": "^13.4.0",
-    "@vueuse/nuxt": "^13.4.0",
-    "drizzle-orm": "^0.44.2",
-    "eslint": "^9.34.0",
-    "fuse.js": "^7.1.0",
-    "nuxt": "^4.0.3",
-    "nuxt-zod-i18n": "^1.12.0",
-    "tailwindcss": "^4.1.10",
-    "vue": "^3.5.20",
-    "vue-router": "^4.5.1",
-    "zod": "4.1.5"
+    "@tauri-apps/plugin-dialog": "^2.2.1",
+    "@tauri-apps/plugin-fs": "^2.2.1",
+    "@tauri-apps/plugin-http": "~2.4.3",
+    "@tauri-apps/plugin-opener": "^2.2.6",
+    "@tauri-apps/plugin-os": "^2.2.1",
+    "@tauri-apps/plugin-sql": "~2.2.0",
+    "@tauri-apps/plugin-store": "^2.2.0",
+    "@vueuse/core": "^13.1.0",
+    "@vueuse/nuxt": "^13.1.0",
+    "drizzle-orm": "^0.43.0",
+    "nuxt": "^3.17.2",
+    "nuxt-snackbar": "1.3.0",
+    "nuxt-zod-i18n": "^1.11.5",
+    "tailwindcss": "^4.1.5",
+    "vue": "^3.5.13",
+    "zod": "^3.24.4"
   },
   "devDependencies": {
-    "@iconify/json": "^2.2.351",
+    "@iconify/json": "^2.2.336",
     "@iconify/tailwind4": "^1.0.6",
-    "@libsql/client": "^0.15.15",
+    "@nuxtjs/tailwindcss": "^6.14.0",
     "@tauri-apps/cli": "^2.5.0",
-    "@types/node": "^24.6.2",
-    "@vitejs/plugin-vue": "6.0.1",
-    "@vue/compiler-sfc": "^3.5.17",
-    "drizzle-kit": "^0.31.2",
-    "globals": "^16.2.0",
-    "prettier": "3.6.2",
-    "tsx": "^4.20.6",
-    "tw-animate-css": "^1.3.8",
-    "typescript": "^5.8.3",
-    "vite": "7.1.3",
-    "vue-tsc": "3.0.6"
-  },
-  "prettier": {
-    "trailingComma": "all",
-    "tabWidth": 2,
-    "semi": false,
-    "singleQuote": true,
-    "singleAttributePerLine": true
-  },
-  "packageManager": "pnpm@10.10.0+sha512.d615db246fe70f25dcfea6d8d73dee782ce23e2245e3c4f6f888249fb568149318637dca73c2c5c8ef2a4ca0d5657fb9567188bfab47f566d1ee6ce987815c39",
+    "@vitejs/plugin-vue": "^5.2.3",
+    "drizzle-kit": "^0.31.1",
+    "flyonui": "^2.1.0",
+    "typescript": "~5.8.3",
+    "vite": "^6.3.5",
+    "vue-tsc": "^2.2.10"
+  },
   "pnpm": {
-    "overrides": {
-      "zod": "^3.22.4"
-    }
-  }
+    "ignoredBuiltDependencies": [
+      "@parcel/watcher",
+      "esbuild",
+      "vue-demi"
+    ]
+  },
+  "packageManager": "pnpm@10.15.1+sha512.34e538c329b5553014ca8e8f4535997f96180a1d0f614339357449935350d924e22f8614682191264ec33d1462ac21561aff97f6bb18065351c162c7e8f6de67"
 }

9987
pnpm-lock.yaml generated

File diff suppressed because it is too large.

Binary file not shown (removed image, 1.4 MiB).

src-tauri/.cargo/config.toml

@@ -1,9 +0,0 @@
# Only this content in src-tauri/.cargo/config.toml
[target.aarch64-linux-android]
# Replace the paths with your actual NDK paths
# Your NDK base path: /home/haex/Android/Sdk/ndk/29.0.13113456
# Make sure the clang name (with API level, e.g. ...24-clang) is correct.
linker = "/home/haex/Android/Sdk/ndk/29.0.13113456/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android33-clang"
#ar = "/home/haex/Android/Sdk/ndk/29.0.13113456/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar"
#ranlib = "/home/haex/Android/Sdk/ndk/29.0.13113456/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ranlib"

2
src-tauri/.env Normal file

@@ -0,0 +1,2 @@
DATABASE_URL=sqlite:database/vault.db
SQLX_OFFLINE=true

7
src-tauri/.gitignore vendored Normal file

@@ -0,0 +1,7 @@
# Generated by Cargo
# will have compiled files and executables
/target/
# Generated by Tauri
# will have schema files for capabilities auto-completion
/gen/schemas

src-tauri/.sqlx/query-a73e92ff12dca9b046a6440b9a68b002662b594f7f569ee71de11e00c23ca625.json

@@ -0,0 +1,44 @@
{
"db_name": "SQLite",
"query": "\n SELECT id, extension_id, resource, operation, path \n FROM haex_extensions_permissions \n WHERE extension_id = ? AND resource = ? AND operation = ?\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "extension_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "resource",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "operation",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "path",
"ordinal": 4,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
true,
true,
true,
true
]
},
"hash": "a73e92ff12dca9b046a6440b9a68b002662b594f7f569ee71de11e00c23ca625"
}

1166
src-tauri/Cargo.lock generated

File diff suppressed because it is too large.

src-tauri/Cargo.toml

@@ -15,41 +15,28 @@ name = "haex_hub_lib"
 crate-type = ["staticlib", "cdylib", "rlib"]

 [build-dependencies]
-serde_json = "1.0.145"
 tauri-build = { version = "2.2", features = [] }
-serde = { version = "1.0.228", features = ["derive"] }

 [dependencies]
-rusqlite = { version = "0.37.0", features = [
+rusqlite = { version = "0.35.0", features = [
     "load_extension",
-    "bundled-sqlcipher-vendored-openssl",
-    "functions",
+    "bundled-sqlcipher",
 ] }
-#libsqlite3-sys = { version = "0.28", features = ["bundled-sqlcipher"] }
-#tauri-plugin-sql = { version = "2", features = ["sqlite"] }
+#libsqlite3-sys = { version = "0.31", features = ["bundled-sqlcipher"] }
 #sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "sqlite"] }
-tokio = { version = "1.47.1", features = ["macros", "rt-multi-thread"] }
-base64 = "0.22"
-ed25519-dalek = "2.1"
-fs_extra = "1.3.0"
-hex = "0.4"
-mime = "0.3"
-mime_guess = "2.0"
+tokio = { version = "1.44", features = ["macros", "rt-multi-thread"] }
 serde = { version = "1", features = ["derive"] }
-serde_json = "1.0.143"
-sha2 = "0.10.9"
-sqlparser = { version = "0.59.0", features = ["visitor"] }
-tauri = { version = "2.8.5", features = ["protocol-asset", "devtools"] }
-tauri-plugin-dialog = "2.4.0"
-tauri-plugin-fs = "2.4.0"
-tauri-plugin-http = "2.5.2"
-tauri-plugin-notification = "2.3.1"
-tauri-plugin-opener = "2.5.0"
-tauri-plugin-os = "2.3"
-tauri-plugin-persisted-scope = "2.3.2"
-tauri-plugin-store = "2.4.0"
-thiserror = "2.0.17"
-ts-rs = "11.0.1"
-uhlc = "0.8"
-uuid = { version = "1.18.1", features = ["v4"] }
-zip = "5.1.1"
-url = "2.5.7"
+serde_json = "1"
+base64 = "0.22"
+mime_guess = "2.0"
+mime = "0.3"
+fs_extra = "1.3.0"
+sqlparser = { version = "0.56.0", features = [] }
+tauri = { version = "2.5", features = ["protocol-asset", "custom-protocol"] }
+tauri-plugin-dialog = "2.2"
+tauri-plugin-fs = "2.2.0"
+tauri-plugin-opener = "2.2"
+tauri-plugin-os = "2"
+tauri-plugin-store = "2"
+tauri-plugin-http = "2.4"
+#tauri-plugin-sql = { version = "2", features = ["sqlite"] }

View File

@@ -1,3 +0,0 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type TriggerSetupResult = "Success" | "TableNotFound";

src-tauri/build.rs

@@ -1,7 +1,3 @@
-mod generator;
 fn main() {
-    generator::table_names::generate_table_names();
-    generator::rust_types::generate_rust_types();
-    tauri_build::build();
+    tauri_build::build()
 }

View File

@@ -5,41 +5,25 @@
   "windows": ["main"],
   "permissions": [
     "core:default",
-    "core:webview:allow-create-webview-window",
-    "core:webview:allow-create-webview",
-    "core:webview:allow-webview-show",
-    "core:webview:default",
-    "core:window:allow-create",
-    "core:window:allow-get-all-windows",
-    "core:window:allow-show",
-    "core:window:default",
     "dialog:default",
-    "fs:allow-appconfig-read-recursive",
-    "fs:allow-appconfig-write-recursive",
-    "fs:allow-appdata-read-recursive",
-    "fs:allow-appdata-write-recursive",
     "fs:allow-read-file",
-    "fs:allow-read-dir",
     "fs:allow-resource-read-recursive",
-    "fs:allow-resource-write-recursive",
-    "fs:allow-download-read-recursive",
-    "fs:allow-download-write-recursive",
     "fs:default",
-    {
-      "identifier": "fs:scope",
-      "allow": [{ "path": "**" }]
-    },
+    "fs:allow-resource-write-recursive",
     "http:allow-fetch-send",
     "http:allow-fetch",
     "http:default",
-    "notification:allow-create-channel",
-    "notification:allow-list-channels",
-    "notification:allow-notify",
-    "notification:default",
     "opener:allow-open-url",
     "opener:default",
-    "os:allow-hostname",
     "os:default",
-    "store:default"
+    "store:default",
+    "core:window:allow-create",
+    "core:window:default",
+    "core:window:allow-get-all-windows",
+    "core:window:allow-show",
+    "core:webview:allow-create-webview",
+    "core:webview:allow-create-webview-window",
+    "core:webview:default",
+    "core:webview:allow-webview-show"
   ]
 }
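
For orientation, the capability set above is what gates frontend plugin calls. A small sketch with `@tauri-apps/plugin-fs` (a dependency of this commit): the call below only resolves because `fs:allow-read-file` remains in the permission list; drop it and Tauri's core rejects the invoke before it ever reaches the filesystem.
```ts
import { readTextFile } from '@tauri-apps/plugin-fs'

// Succeeds only while the active capability grants "fs:allow-read-file".
export async function readConfig(path: string): Promise<string> {
  return await readTextFile(path)
}
```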

src-tauri/database/generate-rust-types.ts

@@ -1,192 +0,0 @@
import { writeFileSync, mkdirSync } from 'node:fs'
import { join, dirname } from 'node:path'
import { fileURLToPath } from 'node:url'
import tablesNames from './tableNames.json'
import { schema } from './index'
import { getTableColumns } from 'drizzle-orm'
import type { AnySQLiteColumn, SQLiteTable } from 'drizzle-orm/sqlite-core'
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)
interface Column {
name: string
rustType: string
isOptional: boolean
}
function drizzleToRustType(colDef: AnySQLiteColumn): {
rustType: string
isOptional: boolean
} {
let baseType = 'String'
let isOptional = !colDef.notNull
if (colDef.columnType === 'SQLiteText') {
if ('mode' in colDef && colDef.mode === 'json') {
baseType = 'serde_json::Value'
} else {
baseType = 'String'
}
} else if (colDef.columnType === 'SQLiteInteger') {
baseType = 'i64'
} else if (colDef.columnType === 'SQLiteBoolean') {
baseType = 'bool'
} else if (colDef.columnType === 'SQLiteReal') {
baseType = 'f64'
} else if (colDef.columnType === 'SQLiteBlob') {
baseType = 'Vec<u8>'
}
// Drizzle uses 'primary' for the primary-key status
if (colDef.primary) {
isOptional = false
}
return { rustType: baseType, isOptional }
}
function extractColumns(table: SQLiteTable): Column[] {
const columns: Column[] = []
// getTableColumns returns a Record<string, AnySQLiteColumn>
const tableColumns = getTableColumns(table)
// Object.values gives us an array of type AnySQLiteColumn[]
for (const colDef of Object.values(tableColumns)) {
// The relevant info lives in the column's 'config' property.
// TypeScript already knows the type of 'config'!
const { rustType, isOptional } = drizzleToRustType(colDef)
columns.push({
name: colDef.name,
rustType: isOptional ? `Option<${rustType}>` : rustType,
isOptional,
})
}
return columns
}
function toSnakeCase(str: string): string {
return str.replace(/[A-Z]/g, (letter, index) =>
index === 0 ? letter.toLowerCase() : `_${letter.toLowerCase()}`,
)
}
function toPascalCase(str: string): string {
console.log('toPascalCase:', str)
return str
.split('_')
.map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
.join('')
}
const RUST_KEYWORDS = new Set([
'type',
'struct',
'enum',
'pub',
'use',
'as',
'crate',
'super',
'self',
'let',
'mut',
])
function generateStruct(name: string, columns: Column[]): string {
let structName = toPascalCase(name)
if (RUST_KEYWORDS.has(structName.toLowerCase())) {
structName = `r#${structName}`
}
// --- Part 1: struct definition ---
let code = `#[derive(Debug, Clone, Serialize, Deserialize)]\n`
code += `#[serde(rename_all = "camelCase")]\n`
code += `pub struct ${structName} {\n`
for (const col of columns) {
let fieldName = toSnakeCase(col.name)
// Check whether the name is a Rust keyword
if (RUST_KEYWORDS.has(fieldName)) {
fieldName = `r#${fieldName}`
}
if (col.isOptional) {
code += ` #[serde(skip_serializing_if = "Option::is_none")]\n`
}
// Important: add #[serde(rename = "...")] if the field name was changed!
if (fieldName.startsWith('r#')) {
const originalName = fieldName.substring(2)
code += ` #[serde(rename = "${originalName}")]\n`
}
code += ` pub ${fieldName}: ${col.rustType},\n`
}
code += `}\n\n`
// --- Part 2: impl block ---
code += `impl ${structName} {\n`
code += ` pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {\n`
code += ` Ok(Self {\n`
columns.forEach((col, idx) => {
let fieldName = toSnakeCase(col.name)
if (RUST_KEYWORDS.has(fieldName)) {
fieldName = `r#${fieldName}`
}
code += ` ${fieldName}: row.get(${idx})?,\n`
})
code += ` })\n`
code += ` }\n`
code += `}\n\n`
return code
}
function main() {
let output = `// Auto-generated from Drizzle schema
// DO NOT EDIT MANUALLY
// Run 'pnpm generate:rust-types' to regenerate
use serde::{Deserialize, Serialize};
`
const schemas = [
{ name: tablesNames.haex.settings.name, table: schema.haexSettings },
{ name: tablesNames.haex.extensions.name, table: schema.haexExtensions },
{
name: tablesNames.haex.extension_permissions.name,
table: schema.haexExtensionPermissions,
},
{ name: tablesNames.haex.crdt.logs.name, table: schema.haexCrdtLogs },
{
name: tablesNames.haex.crdt.snapshots.name,
table: schema.haexCrdtSnapshots,
},
{ name: tablesNames.haex.crdt.configs.name, table: schema.haexCrdtConfigs },
]
for (const { name, table } of schemas) {
console.log(`\n=== Processing table: ${name} ===`)
const columns = extractColumns(table)
console.log(`Found ${columns.length} columns`)
if (columns.length > 0) {
output += generateStruct(name, columns)
}
}
const outputPath = join(__dirname, '../src/database/generated.rs')
mkdirSync(dirname(outputPath), { recursive: true })
writeFileSync(outputPath, output, 'utf-8')
console.log('\n✅ Rust types generated:', outputPath)
}
main()

View File

@@ -1,23 +1,23 @@
-import { drizzle } from 'drizzle-orm/sqlite-proxy' // adapter for query building without a direct connection
-import * as schema from './schemas' // import everything from your schema file
-export * as schema from './schemas'
+import { drizzle } from "drizzle-orm/sqlite-proxy"; // adapter for query building without a direct connection
+import * as schema from "./schemas/vault"; // import everything from your schema file
 // sqlite-proxy requires a (dummy) executor function as an argument.
 // It is never called in our Tauri workflow, since we only use .toSQL().
 // It just has to exist so that drizzle() can be called.
 const dummyExecutor = async (
   sql: string,
-  params: unknown[],
-  method: 'all' | 'run' | 'get' | 'values',
+  params: any[],
+  method: "all" | "run" | "get" | "values"
 ) => {
   console.warn(
-    `Frontend Drizzle executor was called (method: ${method}). This should not happen in the Tauri invoke workflow!`,
+    `Frontend Drizzle executor was called (method: ${method}). This should not happen in the Tauri invoke workflow!`
   );
   // Return empty results to satisfy the types in case it does get called.
-  return { rows: [] } // for 'run' (e.g. INSERT/UPDATE)
-}
+  return { rows: [] }; // for 'run' (e.g. INSERT/UPDATE)
+};
 // Create the Drizzle instance for the SQLite dialect,
 // passing the dummyExecutor and the imported schema
-export const db = drizzle(dummyExecutor, { schema })
+export const db = drizzle(dummyExecutor, { schema });
-// Also re-export all schema definitions so everything can be imported from one file
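
A sketch of the `.toSQL()` workflow the comments in this file describe: Drizzle only builds the SQL string and parameters in the frontend, and a Tauri command executes them in Rust. The command name `sql_select` and the `{ db, schema }` export shape are assumptions for illustration (the pre-change variant of this file re-exported the schema).
```ts
import { invoke } from '@tauri-apps/api/core'
import { db, schema } from './index' // export shape assumed

export async function listExtensions() {
  // Build the statement without touching a database connection...
  const { sql, params } = db.select().from(schema.haexExtensions).toSQL()
  // ...and let the Rust side run it against the encrypted vault.
  return await invoke('sql_select', { sql, params })
}
```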

View File

@@ -1,139 +0,0 @@
CREATE TABLE `haex_crdt_configs` (
`key` text PRIMARY KEY NOT NULL,
`value` text
);
--> statement-breakpoint
CREATE TABLE `haex_crdt_logs` (
`id` text PRIMARY KEY NOT NULL,
`haex_timestamp` text,
`table_name` text,
`row_pks` text,
`op_type` text,
`column_name` text,
`new_value` text,
`old_value` text
);
--> statement-breakpoint
CREATE INDEX `idx_haex_timestamp` ON `haex_crdt_logs` (`haex_timestamp`);--> statement-breakpoint
CREATE INDEX `idx_table_row` ON `haex_crdt_logs` (`table_name`,`row_pks`);--> statement-breakpoint
CREATE TABLE `haex_crdt_snapshots` (
`snapshot_id` text PRIMARY KEY NOT NULL,
`created` text,
`epoch_hlc` text,
`location_url` text,
`file_size_bytes` integer
);
--> statement-breakpoint
CREATE TABLE `haex_extension_permissions` (
`id` text PRIMARY KEY NOT NULL,
`extension_id` text,
`resource_type` text,
`action` text,
`target` text,
`constraints` text,
`status` text DEFAULT 'denied' NOT NULL,
`created_at` text DEFAULT (CURRENT_TIMESTAMP),
`updated_at` integer,
`haex_tombstone` integer,
`haex_timestamp` text,
FOREIGN KEY (`extension_id`) REFERENCES `haex_extensions`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
CREATE UNIQUE INDEX `haex_extension_permissions_extension_id_resource_type_action_target_unique` ON `haex_extension_permissions` (`extension_id`,`resource_type`,`action`,`target`);--> statement-breakpoint
CREATE TABLE `haex_extensions` (
`id` text PRIMARY KEY NOT NULL,
`author` text,
`description` text,
`entry` text,
`homepage` text,
`enabled` integer,
`icon` text,
`name` text,
`public_key` text,
`signature` text,
`url` text,
`version` text,
`haex_tombstone` integer,
`haex_timestamp` text
);
--> statement-breakpoint
CREATE TABLE `haex_settings` (
`id` text PRIMARY KEY NOT NULL,
`key` text,
`type` text,
`value` text,
`haex_tombstone` integer,
`haex_timestamp` text
);
--> statement-breakpoint
CREATE TABLE `haex_notifications` (
`id` text PRIMARY KEY NOT NULL,
`alt` text,
`date` text,
`icon` text,
`image` text,
`read` integer,
`source` text,
`text` text,
`title` text,
`type` text NOT NULL,
`haex_tombstone` integer
);
--> statement-breakpoint
CREATE TABLE `haex_passwords_group_items` (
`group_id` text,
`item_id` text,
`haex_tombstone` integer,
PRIMARY KEY(`item_id`, `group_id`),
FOREIGN KEY (`group_id`) REFERENCES `haex_passwords_groups`(`id`) ON UPDATE no action ON DELETE no action,
FOREIGN KEY (`item_id`) REFERENCES `haex_passwords_item_details`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
CREATE TABLE `haex_passwords_groups` (
`id` text PRIMARY KEY NOT NULL,
`name` text,
`description` text,
`icon` text,
`order` integer,
`color` text,
`parent_id` text,
`created_at` text DEFAULT (CURRENT_TIMESTAMP),
`updated_at` integer,
`haex_tombstone` integer,
FOREIGN KEY (`parent_id`) REFERENCES `haex_passwords_groups`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
CREATE TABLE `haex_passwords_item_details` (
`id` text PRIMARY KEY NOT NULL,
`title` text,
`username` text,
`password` text,
`note` text,
`icon` text,
`tags` text,
`url` text,
`created_at` text DEFAULT (CURRENT_TIMESTAMP),
`updated_at` integer,
`haex_tombstone` integer
);
--> statement-breakpoint
CREATE TABLE `haex_passwords_item_history` (
`id` text PRIMARY KEY NOT NULL,
`item_id` text,
`changed_property` text,
`old_value` text,
`new_value` text,
`created_at` text DEFAULT (CURRENT_TIMESTAMP),
`haex_tombstone` integer,
FOREIGN KEY (`item_id`) REFERENCES `haex_passwords_item_details`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
CREATE TABLE `haex_passwords_item_key_values` (
`id` text PRIMARY KEY NOT NULL,
`item_id` text,
`key` text,
`value` text,
`updated_at` integer,
`haex_tombstone` integer,
FOREIGN KEY (`item_id`) REFERENCES `haex_passwords_item_details`(`id`) ON UPDATE no action ON DELETE no action
);

View File

@@ -0,0 +1,26 @@
CREATE TABLE `haex_extensions` (
`id` text PRIMARY KEY NOT NULL,
`author` text,
`enabled` integer,
`name` text,
`url` text,
`version` text
);
--> statement-breakpoint
CREATE TABLE `haex_extensions_permissions` (
`id` text PRIMARY KEY NOT NULL,
`extension_id` text,
`resource` text,
`operation` text,
`path` text,
FOREIGN KEY (`extension_id`) REFERENCES `haex_extensions`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
CREATE UNIQUE INDEX `haex_extensions_permissions_extension_id_resource_operation_path_unique` ON `haex_extensions_permissions` (`extension_id`,`resource`,`operation`,`path`);--> statement-breakpoint
CREATE TABLE `haex_settings` (
`id` text PRIMARY KEY NOT NULL,
`key` text,
`value_text` text,
`value_json` text,
`value_number` numeric
);

View File

@@ -1 +0,0 @@
ALTER TABLE `haex_notifications` ADD `haex_timestamp` text;

View File

@@ -0,0 +1,7 @@
CREATE TABLE `testTable` (
`id` text PRIMARY KEY NOT NULL,
`author` text,
`test` text
);
--> statement-breakpoint
ALTER TABLE `haex_extensions` ADD `icon` text;

View File

@@ -1,274 +1,9 @@
 {
 "version": "6",
 "dialect": "sqlite",
-"id": "3bbe52b8-5933-4b21-8b24-de3927a2f9b0",
+"id": "fc5a7c9d-4846-4120-a762-cc2ea00504b9",
 "prevId": "00000000-0000-0000-0000-000000000000",
 "tables": {
"haex_crdt_configs": {
"name": "haex_crdt_configs",
"columns": {
"key": {
"name": "key",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"value": {
"name": "value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_crdt_logs": {
"name": "haex_crdt_logs",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"table_name": {
"name": "table_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"row_pks": {
"name": "row_pks",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"op_type": {
"name": "op_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"column_name": {
"name": "column_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"new_value": {
"name": "new_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"old_value": {
"name": "old_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"idx_haex_timestamp": {
"name": "idx_haex_timestamp",
"columns": [
"haex_timestamp"
],
"isUnique": false
},
"idx_table_row": {
"name": "idx_table_row",
"columns": [
"table_name",
"row_pks"
],
"isUnique": false
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_crdt_snapshots": {
"name": "haex_crdt_snapshots",
"columns": {
"snapshot_id": {
"name": "snapshot_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"created": {
"name": "created",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"epoch_hlc": {
"name": "epoch_hlc",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"location_url": {
"name": "location_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"file_size_bytes": {
"name": "file_size_bytes",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_extension_permissions": {
"name": "haex_extension_permissions",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"extension_id": {
"name": "extension_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"resource_type": {
"name": "resource_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"action": {
"name": "action",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"target": {
"name": "target",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"constraints": {
"name": "constraints",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "'denied'"
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"haex_extension_permissions_extension_id_resource_type_action_target_unique": {
"name": "haex_extension_permissions_extension_id_resource_type_action_target_unique",
"columns": [
"extension_id",
"resource_type",
"action",
"target"
],
"isUnique": true
}
},
"foreignKeys": {
"haex_extension_permissions_extension_id_haex_extensions_id_fk": {
"name": "haex_extension_permissions_extension_id_haex_extensions_id_fk",
"tableFrom": "haex_extension_permissions",
"tableTo": "haex_extensions",
"columnsFrom": [
"extension_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_extensions": { "haex_extensions": {
"name": "haex_extensions", "name": "haex_extensions",
"columns": { "columns": {
@@ -286,27 +21,6 @@
"notNull": false,
"autoincrement": false
},
"description": {
"name": "description",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"entry": {
"name": "entry",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"homepage": {
"name": "homepage",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"enabled": { "enabled": {
"name": "enabled", "name": "enabled",
"type": "integer", "type": "integer",
@ -314,13 +28,6 @@
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
}, },
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"name": { "name": {
"name": "name", "name": "name",
"type": "text", "type": "text",
@ -328,20 +35,6 @@
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
}, },
"public_key": {
"name": "public_key",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"signature": {
"name": "signature",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"url": { "url": {
"name": "url", "name": "url",
"type": "text", "type": "text",
@ -355,24 +48,80 @@
"primaryKey": false, "primaryKey": false,
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
}
}, },
"haex_tombstone": { "indexes": {},
"name": "haex_tombstone", "foreignKeys": {},
"type": "integer", "compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_extensions_permissions": {
"name": "haex_extensions_permissions",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"extension_id": {
"name": "extension_id",
"type": "text",
"primaryKey": false, "primaryKey": false,
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
}, },
"haex_timestamp": { "resource": {
"name": "haex_timestamp", "name": "resource",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"operation": {
"name": "operation",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"path": {
"name": "path",
"type": "text", "type": "text",
"primaryKey": false, "primaryKey": false,
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
} }
}, },
"indexes": {}, "indexes": {
"foreignKeys": {}, "haex_extensions_permissions_extension_id_resource_operation_path_unique": {
"name": "haex_extensions_permissions_extension_id_resource_operation_path_unique",
"columns": [
"extension_id",
"resource",
"operation",
"path"
],
"isUnique": true
}
},
"foreignKeys": {
"haex_extensions_permissions_extension_id_haex_extensions_id_fk": {
"name": "haex_extensions_permissions_extension_id_haex_extensions_id_fk",
"tableFrom": "haex_extensions_permissions",
"tableTo": "haex_extensions",
"columnsFrom": [
"extension_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {}, "compositePrimaryKeys": {},
"uniqueConstraints": {}, "uniqueConstraints": {},
"checkConstraints": {} "checkConstraints": {}
@@ -394,30 +143,23 @@
"notNull": false,
"autoincrement": false
},
-"type": {
-"name": "type",
+"value_text": {
+"name": "value_text",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
-"value": {
-"name": "value",
+"value_json": {
+"name": "value_json",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
-"haex_tombstone": {
-"name": "haex_tombstone",
-"type": "integer",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"haex_timestamp": {
-"name": "haex_timestamp",
-"type": "text",
+"value_number": {
+"name": "value_number",
+"type": "numeric",
"primaryKey": false,
"notNull": false,
"autoincrement": false
@@ -428,482 +170,6 @@
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_notifications": {
"name": "haex_notifications",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"alt": {
"name": "alt",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"date": {
"name": "date",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"image": {
"name": "image",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"read": {
"name": "read",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"source": {
"name": "source",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"text": {
"name": "text",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"type": {
"name": "type",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_group_items": {
"name": "haex_passwords_group_items",
"columns": {
"group_id": {
"name": "group_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"item_id": {
"name": "item_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_group_items_group_id_haex_passwords_groups_id_fk": {
"name": "haex_passwords_group_items_group_id_haex_passwords_groups_id_fk",
"tableFrom": "haex_passwords_group_items",
"tableTo": "haex_passwords_groups",
"columnsFrom": [
"group_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
},
"haex_passwords_group_items_item_id_haex_passwords_item_details_id_fk": {
"name": "haex_passwords_group_items_item_id_haex_passwords_item_details_id_fk",
"tableFrom": "haex_passwords_group_items",
"tableTo": "haex_passwords_item_details",
"columnsFrom": [
"item_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"haex_passwords_group_items_item_id_group_id_pk": {
"columns": [
"item_id",
"group_id"
],
"name": "haex_passwords_group_items_item_id_group_id_pk"
}
},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_groups": {
"name": "haex_passwords_groups",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"description": {
"name": "description",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"order": {
"name": "order",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"color": {
"name": "color",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"parent_id": {
"name": "parent_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_groups_parent_id_haex_passwords_groups_id_fk": {
"name": "haex_passwords_groups_parent_id_haex_passwords_groups_id_fk",
"tableFrom": "haex_passwords_groups",
"tableTo": "haex_passwords_groups",
"columnsFrom": [
"parent_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_item_details": {
"name": "haex_passwords_item_details",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"username": {
"name": "username",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"password": {
"name": "password",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"note": {
"name": "note",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"tags": {
"name": "tags",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"url": {
"name": "url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_item_history": {
"name": "haex_passwords_item_history",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"item_id": {
"name": "item_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"changed_property": {
"name": "changed_property",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"old_value": {
"name": "old_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"new_value": {
"name": "new_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_item_history_item_id_haex_passwords_item_details_id_fk": {
"name": "haex_passwords_item_history_item_id_haex_passwords_item_details_id_fk",
"tableFrom": "haex_passwords_item_history",
"tableTo": "haex_passwords_item_details",
"columnsFrom": [
"item_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_item_key_values": {
"name": "haex_passwords_item_key_values",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"item_id": {
"name": "item_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"key": {
"name": "key",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"value": {
"name": "value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_item_key_values_item_id_haex_passwords_item_details_id_fk": {
"name": "haex_passwords_item_key_values_item_id_haex_passwords_item_details_id_fk",
"tableFrom": "haex_passwords_item_key_values",
"tableTo": "haex_passwords_item_details",
"columnsFrom": [
"item_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
} }
},
"views": {},

View File

@@ -1,274 +1,9 @@
{
"version": "6",
"dialect": "sqlite",
-"id": "862ac1d5-3065-4244-8652-2b6782254862",
-"prevId": "3bbe52b8-5933-4b21-8b24-de3927a2f9b0",
+"id": "6fb5396b-9f87-4fb5-87a2-22d4eecaa11e",
+"prevId": "fc5a7c9d-4846-4120-a762-cc2ea00504b9",
"tables": {
"haex_crdt_configs": {
"name": "haex_crdt_configs",
"columns": {
"key": {
"name": "key",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"value": {
"name": "value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_crdt_logs": {
"name": "haex_crdt_logs",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"table_name": {
"name": "table_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"row_pks": {
"name": "row_pks",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"op_type": {
"name": "op_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"column_name": {
"name": "column_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"new_value": {
"name": "new_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"old_value": {
"name": "old_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"idx_haex_timestamp": {
"name": "idx_haex_timestamp",
"columns": [
"haex_timestamp"
],
"isUnique": false
},
"idx_table_row": {
"name": "idx_table_row",
"columns": [
"table_name",
"row_pks"
],
"isUnique": false
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_crdt_snapshots": {
"name": "haex_crdt_snapshots",
"columns": {
"snapshot_id": {
"name": "snapshot_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"created": {
"name": "created",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"epoch_hlc": {
"name": "epoch_hlc",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"location_url": {
"name": "location_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"file_size_bytes": {
"name": "file_size_bytes",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_extension_permissions": {
"name": "haex_extension_permissions",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"extension_id": {
"name": "extension_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"resource_type": {
"name": "resource_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"action": {
"name": "action",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"target": {
"name": "target",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"constraints": {
"name": "constraints",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "'denied'"
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"haex_extension_permissions_extension_id_resource_type_action_target_unique": {
"name": "haex_extension_permissions_extension_id_resource_type_action_target_unique",
"columns": [
"extension_id",
"resource_type",
"action",
"target"
],
"isUnique": true
}
},
"foreignKeys": {
"haex_extension_permissions_extension_id_haex_extensions_id_fk": {
"name": "haex_extension_permissions_extension_id_haex_extensions_id_fk",
"tableFrom": "haex_extension_permissions",
"tableTo": "haex_extensions",
"columnsFrom": [
"extension_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_extensions": { "haex_extensions": {
"name": "haex_extensions", "name": "haex_extensions",
"columns": { "columns": {
@ -286,27 +21,6 @@
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
}, },
"description": {
"name": "description",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"entry": {
"name": "entry",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"homepage": {
"name": "homepage",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"enabled": { "enabled": {
"name": "enabled", "name": "enabled",
"type": "integer", "type": "integer",
@ -328,20 +42,6 @@
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
}, },
"public_key": {
"name": "public_key",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"signature": {
"name": "signature",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"url": { "url": {
"name": "url", "name": "url",
"type": "text", "type": "text",
@ -355,20 +55,6 @@
"primaryKey": false, "primaryKey": false,
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_timestamp": {
"name": "haex_timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
@@ -377,8 +63,8 @@
"uniqueConstraints": {},
"checkConstraints": {}
},
-"haex_notifications": {
-"name": "haex_notifications",
+"haex_extensions_permissions": {
+"name": "haex_extensions_permissions",
"columns": {
"id": {
"name": "id",
@@ -387,86 +73,62 @@
"notNull": true,
"autoincrement": false
},
-"alt": {
-"name": "alt",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"date": {
-"name": "date",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"icon": {
-"name": "icon",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"image": {
-"name": "image",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"read": {
-"name": "read",
-"type": "integer",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"source": {
-"name": "source",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"text": {
-"name": "text",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"title": {
-"name": "title",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"type": {
-"name": "type",
-"type": "text",
-"primaryKey": false,
-"notNull": true,
-"autoincrement": false
-},
-"haex_tombstone": {
-"name": "haex_tombstone",
-"type": "integer",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"haex_timestamp": {
-"name": "haex_timestamp",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-}
-},
-"indexes": {},
-"foreignKeys": {},
+"extension_id": {
+"name": "extension_id",
+"type": "text",
+"primaryKey": false,
+"notNull": false,
+"autoincrement": false
+},
+"resource": {
+"name": "resource",
+"type": "text",
+"primaryKey": false,
+"notNull": false,
+"autoincrement": false
+},
+"operation": {
+"name": "operation",
+"type": "text",
+"primaryKey": false,
+"notNull": false,
+"autoincrement": false
+},
+"path": {
+"name": "path",
+"type": "text",
+"primaryKey": false,
+"notNull": false,
+"autoincrement": false
+}
+},
+"indexes": {
+"haex_extensions_permissions_extension_id_resource_operation_path_unique": {
+"name": "haex_extensions_permissions_extension_id_resource_operation_path_unique",
+"columns": [
+"extension_id",
+"resource",
+"operation",
+"path"
+],
+"isUnique": true
+}
+},
+"foreignKeys": {
+"haex_extensions_permissions_extension_id_haex_extensions_id_fk": {
+"name": "haex_extensions_permissions_extension_id_haex_extensions_id_fk",
+"tableFrom": "haex_extensions_permissions",
+"tableTo": "haex_extensions",
+"columnsFrom": [
+"extension_id"
+],
+"columnsTo": [
+"id"
+],
+"onDelete": "no action",
+"onUpdate": "no action"
+}
+},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
@@ -488,30 +150,23 @@
"notNull": false,
"autoincrement": false
},
-"type": {
-"name": "type",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"value": {
-"name": "value",
-"type": "text",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"haex_tombstone": {
-"name": "haex_tombstone",
-"type": "integer",
-"primaryKey": false,
-"notNull": false,
-"autoincrement": false
-},
-"haex_timestamp": {
-"name": "haex_timestamp",
-"type": "text",
+"value_text": {
+"name": "value_text",
+"type": "text",
+"primaryKey": false,
+"notNull": false,
+"autoincrement": false
+},
+"value_json": {
+"name": "value_json",
+"type": "text",
+"primaryKey": false,
+"notNull": false,
+"autoincrement": false
+},
+"value_number": {
+"name": "value_number",
+"type": "numeric",
"primaryKey": false,
"notNull": false,
"autoincrement": false
@@ -523,74 +178,8 @@
"uniqueConstraints": {},
"checkConstraints": {}
},
-"haex_passwords_group_items": {
-"name": "haex_passwords_group_items",
"columns": {
"group_id": {
"name": "group_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"item_id": {
"name": "item_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_group_items_group_id_haex_passwords_groups_id_fk": {
"name": "haex_passwords_group_items_group_id_haex_passwords_groups_id_fk",
"tableFrom": "haex_passwords_group_items",
"tableTo": "haex_passwords_groups",
"columnsFrom": [
"group_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
},
"haex_passwords_group_items_item_id_haex_passwords_item_details_id_fk": {
"name": "haex_passwords_group_items_item_id_haex_passwords_item_details_id_fk",
"tableFrom": "haex_passwords_group_items",
"tableTo": "haex_passwords_item_details",
"columnsFrom": [
"item_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {
"haex_passwords_group_items_item_id_group_id_pk": {
"columns": [
"item_id",
"group_id"
],
"name": "haex_passwords_group_items_item_id_group_id_pk"
}
},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_groups": {
"name": "haex_passwords_groups",
"columns": { "columns": {
"id": { "id": {
"name": "id", "name": "id",
@ -599,171 +188,19 @@
"notNull": true, "notNull": true,
"autoincrement": false "autoincrement": false
}, },
"name": { "author": {
"name": "name", "name": "author",
"type": "text", "type": "text",
"primaryKey": false, "primaryKey": false,
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
}, },
"description": { "test": {
"name": "description", "name": "test",
"type": "text", "type": "text",
"primaryKey": false, "primaryKey": false,
"notNull": false, "notNull": false,
"autoincrement": false "autoincrement": false
},
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"order": {
"name": "order",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"color": {
"name": "color",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"parent_id": {
"name": "parent_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_groups_parent_id_haex_passwords_groups_id_fk": {
"name": "haex_passwords_groups_parent_id_haex_passwords_groups_id_fk",
"tableFrom": "haex_passwords_groups",
"tableTo": "haex_passwords_groups",
"columnsFrom": [
"parent_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_item_details": {
"name": "haex_passwords_item_details",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"username": {
"name": "username",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"password": {
"name": "password",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"note": {
"name": "note",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"icon": {
"name": "icon",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"tags": {
"name": "tags",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"url": {
"name": "url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
@@ -771,146 +208,6 @@
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_item_history": {
"name": "haex_passwords_item_history",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"item_id": {
"name": "item_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"changed_property": {
"name": "changed_property",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"old_value": {
"name": "old_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"new_value": {
"name": "new_value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(CURRENT_TIMESTAMP)"
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_item_history_item_id_haex_passwords_item_details_id_fk": {
"name": "haex_passwords_item_history_item_id_haex_passwords_item_details_id_fk",
"tableFrom": "haex_passwords_item_history",
"tableTo": "haex_passwords_item_details",
"columnsFrom": [
"item_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"haex_passwords_item_key_values": {
"name": "haex_passwords_item_key_values",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"item_id": {
"name": "item_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"key": {
"name": "key",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"value": {
"name": "value",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"haex_tombstone": {
"name": "haex_tombstone",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"haex_passwords_item_key_values_item_id_haex_passwords_item_details_id_fk": {
"name": "haex_passwords_item_key_values_item_id_haex_passwords_item_details_id_fk",
"tableFrom": "haex_passwords_item_key_values",
"tableTo": "haex_passwords_item_details",
"columnsFrom": [
"item_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
} }
},
"views": {},

View File

@@ -5,15 +5,15 @@
{
"idx": 0,
"version": "6",
-"when": 1759402321133,
-"tag": "0000_glamorous_hulk",
+"when": 1742903332283,
+"tag": "0000_zippy_scourge",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
-"when": 1759418087677,
-"tag": "0001_green_stark_industries",
+"when": 1746281577722,
+"tag": "0001_wealthy_thaddeus_ross",
"breakpoints": true
}
]
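Each tag in this journal names the SQL migration generated for that step (conventionally stored as <tag>.sql beside _journal.json), and drizzle-kit applies the entries in idx order; the when values are millisecond Unix timestamps recorded at generation time.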

View File

@@ -1,52 +0,0 @@
import { integer, sqliteTable, text, index } from 'drizzle-orm/sqlite-core'
import tableNames from '../tableNames.json'
export const haexCrdtLogs = sqliteTable(
tableNames.haex.crdt.logs.name,
{
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
haexTimestamp: text(tableNames.haex.crdt.logs.columns.haexTimestamp),
tableName: text(tableNames.haex.crdt.logs.columns.tableName),
rowPks: text(tableNames.haex.crdt.logs.columns.rowPks, { mode: 'json' }),
opType: text(tableNames.haex.crdt.logs.columns.opType, {
enum: ['INSERT', 'UPDATE', 'DELETE'],
}),
columnName: text(tableNames.haex.crdt.logs.columns.columnName),
newValue: text(tableNames.haex.crdt.logs.columns.newValue, {
mode: 'json',
}),
oldValue: text(tableNames.haex.crdt.logs.columns.oldValue, {
mode: 'json',
}),
},
(table) => [
index('idx_haex_timestamp').on(table.haexTimestamp),
index('idx_table_row').on(table.tableName, table.rowPks),
],
)
export type InsertHaexCrdtLogs = typeof haexCrdtLogs.$inferInsert
export type SelectHaexCrdtLogs = typeof haexCrdtLogs.$inferSelect
export const haexCrdtSnapshots = sqliteTable(
tableNames.haex.crdt.snapshots.name,
{
snapshotId: text(tableNames.haex.crdt.snapshots.columns.snapshotId)
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
created: text(),
epochHlc: text(tableNames.haex.crdt.snapshots.columns.epochHlc),
locationUrl: text(tableNames.haex.crdt.snapshots.columns.locationUrl),
fileSizeBytes: integer(
tableNames.haex.crdt.snapshots.columns.fileSizeBytes,
),
},
)
export const haexCrdtConfigs = sqliteTable(tableNames.haex.crdt.configs.name, {
key: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
value: text(),
})
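For context, one row in haexCrdtLogs recorded a single column-level change. A minimal sketch of such an entry using the types above; the './crdt' import path and all concrete values are illustrative assumptions, not taken from the repository:

import { haexCrdtLogs, type InsertHaexCrdtLogs } from './crdt'

// One column-level UPDATE: which table and row changed, which column,
// and the JSON-encoded before/after values. id is omitted because the
// column has a $defaultFn that generates a UUID.
const entry: InsertHaexCrdtLogs = {
  haexTimestamp: '1746281577722-0001-node-a', // hypothetical HLC string
  tableName: 'haex_extensions',
  rowPks: { id: 'b3c1a9e0-1111-4222-8333-444455556666' }, // made-up row key
  opType: 'UPDATE',
  columnName: 'enabled',
  oldValue: false,
  newValue: true,
}
// Applied with something like: await db.insert(haexCrdtLogs).values(entry)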

View File

@@ -1,116 +0,0 @@
import { sql } from 'drizzle-orm'
import {
integer,
sqliteTable,
text,
unique,
type AnySQLiteColumn,
} from 'drizzle-orm/sqlite-core'
import tableNames from '../tableNames.json'
export const haexSettings = sqliteTable(tableNames.haex.settings.name, {
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
key: text(),
type: text(),
value: text(),
haexTombstone: integer(tableNames.haex.settings.columns.haexTombstone, {
mode: 'boolean',
}),
haexTimestamp: text(tableNames.haex.settings.columns.haexTimestamp),
})
export type InsertHaexSettings = typeof haexSettings.$inferInsert
export type SelectHaexSettings = typeof haexSettings.$inferSelect
export const haexExtensions = sqliteTable(tableNames.haex.extensions.name, {
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
author: text(),
description: text(),
entry: text(),
homepage: text(),
enabled: integer({ mode: 'boolean' }),
icon: text(),
name: text(),
public_key: text(),
signature: text(),
url: text(),
version: text(),
haexTombstone: integer(tableNames.haex.extensions.columns.haexTombstone, {
mode: 'boolean',
}),
haexTimestamp: text(tableNames.haex.extensions.columns.haexTimestamp),
})
export type InsertHaexExtensions = typeof haexExtensions.$inferInsert
export type SelectHaexExtensions = typeof haexExtensions.$inferSelect
export const haexExtensionPermissions = sqliteTable(
tableNames.haex.extension_permissions.name,
{
id: text()
.primaryKey()
.$defaultFn(() => crypto.randomUUID()),
extensionId: text(
tableNames.haex.extension_permissions.columns.extensionId,
).references((): AnySQLiteColumn => haexExtensions.id),
resourceType: text('resource_type', {
enum: ['fs', 'http', 'db', 'shell'],
}),
action: text({ enum: ['read', 'write'] }),
target: text(),
constraints: text({ mode: 'json' }),
status: text({ enum: ['ask', 'granted', 'denied'] })
.notNull()
.default('denied'),
createdAt: text('created_at').default(sql`(CURRENT_TIMESTAMP)`),
updateAt: integer('updated_at', { mode: 'timestamp' }).$onUpdate(
() => new Date(),
),
haexTombstone: integer(
tableNames.haex.extension_permissions.columns.haexTombstone,
{ mode: 'boolean' },
),
haexTimestamp: text(
tableNames.haex.extension_permissions.columns.haexTimestamp,
),
},
(table) => [
unique().on(
table.extensionId,
table.resourceType,
table.action,
table.target,
),
],
)
export type InserthaexExtensionPermissions =
typeof haexExtensionPermissions.$inferInsert
export type SelecthaexExtensionPermissions =
typeof haexExtensionPermissions.$inferSelect
export const haexNotifications = sqliteTable(
tableNames.haex.notifications.name,
{
id: text().primaryKey(),
alt: text(),
date: text(),
icon: text(),
image: text(),
read: integer({ mode: 'boolean' }),
source: text(),
text: text(),
title: text(),
type: text({
enum: ['error', 'success', 'warning', 'info', 'log'],
}).notNull(),
haexTombstone: integer(
tableNames.haex.notifications.columns.haexTombstone,
{ mode: 'boolean' },
),
haexTimestamp: text(tableNames.haex.notifications.columns.haexTimestamp),
},
)
export type InsertHaexNotifications = typeof haexNotifications.$inferInsert
export type SelectHaexNotifications = typeof haexNotifications.$inferSelect

View File

@@ -1,2 +0,0 @@
export * from './crdt'
export * from './haex'

View File

@@ -1,112 +1,53 @@
-import { sql } from 'drizzle-orm'
-import {
-  integer,
-  primaryKey,
-  sqliteTable,
-  text,
-  type AnySQLiteColumn,
-} from 'drizzle-orm/sqlite-core'
-import tableNames from '../tableNames.json'
-export const haexPasswordsItemDetails = sqliteTable(
-  tableNames.haex.passwords.item_details,
-  {
-    id: text().primaryKey(),
-    title: text(),
-    username: text(),
-    password: text(),
-    note: text(),
-    icon: text(),
-    tags: text(),
-    url: text(),
-    createdAt: text('created_at').default(sql`(CURRENT_TIMESTAMP)`),
-    updateAt: integer('updated_at', { mode: 'timestamp' }).$onUpdate(
-      () => new Date(),
-    ),
-    haex_tombstone: integer({ mode: 'boolean' }),
-  },
-)
-export type InsertHaexPasswordsItemDetails =
-  typeof haexPasswordsItemDetails.$inferInsert
-export type SelectHaexPasswordsItemDetails =
-  typeof haexPasswordsItemDetails.$inferSelect
-export const haexPasswordsItemKeyValues = sqliteTable(
-  tableNames.haex.passwords.item_key_values,
-  {
-    id: text().primaryKey(),
-    itemId: text('item_id').references(
-      (): AnySQLiteColumn => haexPasswordsItemDetails.id,
-    ),
-    key: text(),
-    value: text(),
-    updateAt: integer('updated_at', { mode: 'timestamp' }).$onUpdate(
-      () => new Date(),
-    ),
-    haex_tombstone: integer({ mode: 'boolean' }),
-  },
-)
-export type InserthaexPasswordsItemKeyValues =
-  typeof haexPasswordsItemKeyValues.$inferInsert
-export type SelectHaexPasswordsItemKeyValues =
-  typeof haexPasswordsItemKeyValues.$inferSelect
-export const haexPasswordsItemHistory = sqliteTable(
-  tableNames.haex.passwords.item_histories,
-  {
-    id: text().primaryKey(),
-    itemId: text('item_id').references(
-      (): AnySQLiteColumn => haexPasswordsItemDetails.id,
-    ),
-    changedProperty:
-      text('changed_property').$type<keyof typeof haexPasswordsItemDetails>(),
-    oldValue: text('old_value'),
-    newValue: text('new_value'),
-    createdAt: text('created_at').default(sql`(CURRENT_TIMESTAMP)`),
-    haex_tombstone: integer({ mode: 'boolean' }),
-  },
-)
-export type InserthaexPasswordsItemHistory =
-  typeof haexPasswordsItemHistory.$inferInsert
-export type SelectHaexPasswordsItemHistory =
-  typeof haexPasswordsItemHistory.$inferSelect
-export const haexPasswordsGroups = sqliteTable(
-  tableNames.haex.passwords.groups,
-  {
-    id: text().primaryKey(),
-    name: text(),
-    description: text(),
-    icon: text(),
-    order: integer(),
-    color: text(),
-    parentId: text('parent_id').references(
-      (): AnySQLiteColumn => haexPasswordsGroups.id,
-    ),
-    createdAt: text('created_at').default(sql`(CURRENT_TIMESTAMP)`),
-    updateAt: integer('updated_at', { mode: 'timestamp' }).$onUpdate(
-      () => new Date(),
-    ),
-    haex_tombstone: integer({ mode: 'boolean' }),
-  },
-)
-export type InsertHaexPasswordsGroups = typeof haexPasswordsGroups.$inferInsert
-export type SelectHaexPasswordsGroups = typeof haexPasswordsGroups.$inferSelect
-export const haexPasswordsGroupItems = sqliteTable(
-  tableNames.haex.passwords.group_items,
-  {
-    groupId: text('group_id').references(
-      (): AnySQLiteColumn => haexPasswordsGroups.id,
-    ),
-    itemId: text('item_id').references(
-      (): AnySQLiteColumn => haexPasswordsItemDetails.id,
-    ),
-    haex_tombstone: integer({ mode: 'boolean' }),
-  },
-  (table) => [primaryKey({ columns: [table.itemId, table.groupId] })],
-)
-export type InsertHaexPasswordsGroupItems =
-  typeof haexPasswordsGroupItems.$inferInsert
-export type SelectHaexPasswordsGroupItems =
-  typeof haexPasswordsGroupItems.$inferSelect
+import {
+  integer,
+  numeric,
+  sqliteTable,
+  text,
+  type AnySQLiteColumn,
+  unique,
+} from "drizzle-orm/sqlite-core";
+
+export const haexSettings = sqliteTable("haex_settings", {
+  id: text().primaryKey(),
+  key: text(),
+  value_text: text(),
+  value_json: text({ mode: "json" }),
+  value_number: numeric(),
+});
+
+export const haexExtensions = sqliteTable("haex_extensions", {
+  id: text().primaryKey(),
+  author: text(),
+  enabled: integer({ mode: "boolean" }),
+  icon: text(),
+  name: text(),
+  url: text(),
+  version: text(),
+});
+
+export const testTable = sqliteTable("testTable", {
+  id: text().primaryKey(),
+  author: text(),
+  test: text(),
+});
+
+export const haexExtensionsPermissions = sqliteTable(
+  "haex_extensions_permissions",
+  {
+    id: text().primaryKey(),
+    extensionId: text("extension_id").references((): AnySQLiteColumn => haexExtensions.id),
+    resource: text({ enum: ["fs", "http", "database"] }),
+    operation: text({ enum: ["read", "write", "create"] }),
+    path: text(),
+  },
+  (table) => [unique().on(table.extensionId, table.resource, table.operation, table.path)]
+);
+
+export type InsertHaexSettings = typeof haexSettings.$inferInsert;
+export type SelectHaexSettings = typeof haexSettings.$inferSelect;
+
+export type InsertHaexExtensions = typeof haexExtensions.$inferInsert;
+export type SelectHaexExtensions = typeof haexExtensions.$inferSelect;
+
+export type InsertHaexExtensionsPermissions = typeof haexExtensionsPermissions.$inferInsert;
+export type SelectHaexExtensionsPermissions = typeof haexExtensionsPermissions.$inferSelect;
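A usage sketch for the new permissions table, assuming a better-sqlite3-backed Drizzle instance and a './schema' import path; the database file name, extension id, and filesystem path are made-up illustrative values:

import Database from 'better-sqlite3'
import { eq } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/better-sqlite3'
import { haexExtensionsPermissions } from './schema'

const db = drizzle(new Database('haex.db')) // hypothetical database file

const extensionId = 'b3c1a9e0-1111-4222-8333-444455556666' // made-up id

// Grant one permission. The unique index on
// (extension_id, resource, operation, path) rejects duplicates, so a
// repeated grant is written as an upsert-style no-op.
db.insert(haexExtensionsPermissions)
  .values({
    id: crypto.randomUUID(), // id has no default in this schema, so supply one
    extensionId,
    resource: 'fs',
    operation: 'read',
    path: '/home/user/documents',
  })
  .onConflictDoNothing()
  .run()

// List everything this extension may touch.
const grants = db
  .select()
  .from(haexExtensionsPermissions)
  .where(eq(haexExtensionsPermissions.extensionId, extensionId))
  .all()

Note that the foreign key to haex_extensions is declared with onDelete "no action", so deleting an extension row leaves its grants behind unless they are cleaned up explicitly.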

View File

@@ -1,106 +0,0 @@
{
"haex": {
"settings": {
"name": "haex_settings",
"columns": {
"id": "id",
"key": "key",
"type": "type",
"value": "value",
"haexTombstone": "haex_tombstone",
"haexTimestamp": "haex_timestamp"
}
},
"extensions": {
"name": "haex_extensions",
"columns": {
"id": "id",
"author": "author",
"description": "description",
"entry": "entry",
"homepage": "homepage",
"enabled": "enabled",
"icon": "icon",
"name": "name",
"public_key": "public_key",
"signature": "signature",
"url": "url",
"version": "version",
"haexTombstone": "haex_tombstone",
"haexTimestamp": "haex_timestamp"
}
},
"extension_permissions": {
"name": "haex_extension_permissions",
"columns": {
"id": "id",
"extensionId": "extension_id",
"resourceType": "resource_type",
"action": "action",
"target": "target",
"constraints": "constraints",
"status": "status",
"createdAt": "created_at",
"updateAt": "updated_at",
"haexTombstone": "haex_tombstone",
"haexTimestamp": "haex_timestamp"
}
},
"notifications": {
"name": "haex_notifications",
"columns": {
"id": "id",
"alt": "alt",
"date": "date",
"icon": "icon",
"image": "image",
"read": "read",
"source": "source",
"text": "text",
"title": "title",
"type": "type",
"haexTombstone": "haex_tombstone",
"haexTimestamp": "haex_timestamp"
}
},
"passwords": {
"groups": "haex_passwords_groups",
"group_items": "haex_passwords_group_items",
"item_details": "haex_passwords_item_details",
"item_key_values": "haex_passwords_item_key_values",
"item_histories": "haex_passwords_item_history"
},
"crdt": {
"logs": {
"name": "haex_crdt_logs",
"columns": {
"id": "id",
"haexTimestamp": "haex_timestamp",
"tableName": "table_name",
"rowPks": "row_pks",
"opType": "op_type",
"columnName": "column_name",
"newValue": "new_value",
"oldValue": "old_value"
}
},
"snapshots": {
"name": "haex_crdt_snapshots",
"columns": {
"snapshotId": "snapshot_id",
"created": "created",
"epochHlc": "epoch_hlc",
"locationUrl": "location_url",
"fileSizeBytes": "file_size_bytes"
}
},
"configs": {
"name": "haex_crdt_configs",
"columns": {
"key": "key",
"value": "value"
}
}
}
}
}

Binary file not shown.

View File

@@ -1,12 +0,0 @@
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = false
insert_final_newline = false

View File

@@ -1,19 +0,0 @@
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
build
/captures
.externalNativeBuild
.cxx
local.properties
key.properties
/.tauri
/tauri.settings.gradle

View File

@@ -1,6 +0,0 @@
/src/main/java/space/haex/hub/generated
/src/main/jniLibs/**/*.so
/src/main/assets/tauri.conf.json
/tauri.build.gradle.kts
/proguard-tauri.pro
/tauri.properties

View File

@@ -1,70 +0,0 @@
import java.util.Properties
plugins {
id("com.android.application")
id("org.jetbrains.kotlin.android")
id("rust")
}
val tauriProperties = Properties().apply {
val propFile = file("tauri.properties")
if (propFile.exists()) {
propFile.inputStream().use { load(it) }
}
}
android {
compileSdk = 36
namespace = "space.haex.hub"
defaultConfig {
manifestPlaceholders["usesCleartextTraffic"] = "false"
applicationId = "space.haex.hub"
minSdk = 24
targetSdk = 36
versionCode = tauriProperties.getProperty("tauri.android.versionCode", "1").toInt()
versionName = tauriProperties.getProperty("tauri.android.versionName", "1.0")
}
buildTypes {
getByName("debug") {
manifestPlaceholders["usesCleartextTraffic"] = "true"
isDebuggable = true
isJniDebuggable = true
isMinifyEnabled = false
packaging { jniLibs.keepDebugSymbols.add("*/arm64-v8a/*.so")
jniLibs.keepDebugSymbols.add("*/armeabi-v7a/*.so")
jniLibs.keepDebugSymbols.add("*/x86/*.so")
jniLibs.keepDebugSymbols.add("*/x86_64/*.so")
}
}
getByName("release") {
isMinifyEnabled = true
proguardFiles(
*fileTree(".") { include("**/*.pro") }
.plus(getDefaultProguardFile("proguard-android-optimize.txt"))
.toList().toTypedArray()
)
}
}
kotlinOptions {
jvmTarget = "1.8"
}
buildFeatures {
buildConfig = true
}
}
rust {
rootDirRel = "../../../"
}
dependencies {
implementation("androidx.webkit:webkit:1.14.0")
implementation("androidx.appcompat:appcompat:1.7.1")
implementation("androidx.activity:activity-ktx:1.10.1")
implementation("com.google.android.material:material:1.12.0")
testImplementation("junit:junit:4.13.2")
androidTestImplementation("androidx.test.ext:junit:1.1.4")
androidTestImplementation("androidx.test.espresso:espresso-core:3.5.0")
}
apply(from = "tauri.build.gradle.kts")

View File

@@ -1,21 +0,0 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@@ -1,37 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.INTERNET" />
<!-- AndroidTV support -->
<uses-feature android:name="android.software.leanback" android:required="false" />
<application
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:theme="@style/Theme.haex_hub"
android:usesCleartextTraffic="${usesCleartextTraffic}">
<activity
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|locale|smallestScreenSize|screenLayout|uiMode"
android:launchMode="singleTask"
android:label="@string/main_activity_title"
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
<!-- AndroidTV support -->
<category android:name="android.intent.category.LEANBACK_LAUNCHER" />
</intent-filter>
</activity>
<provider
android:name="androidx.core.content.FileProvider"
android:authorities="${applicationId}.fileprovider"
android:exported="false"
android:grantUriPermissions="true">
<meta-data
android:name="android.support.FILE_PROVIDER_PATHS"
android:resource="@xml/file_paths" />
</provider>
</application>
</manifest>

View File

@@ -1,11 +0,0 @@
package space.haex.hub
import android.os.Bundle
import androidx.activity.enableEdgeToEdge
class MainActivity : TauriActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
enableEdgeToEdge()
super.onCreate(savedInstanceState)
}
}

View File

@@ -1,30 +0,0 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

View File

@@ -1,170 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

View File

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

Binary files not shown. (15 removed image assets, 3.3–40 KiB each)

View File

@@ -1,6 +0,0 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.haex_hub" parent="Theme.MaterialComponents.DayNight.NoActionBar">
<!-- Customize your theme here. -->
</style>
</resources>

View File

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>

View File

@@ -1,4 +0,0 @@
<resources>
<string name="app_name">haex-hub</string>
<string name="main_activity_title">haex-hub</string>
</resources>

View File

@@ -1,6 +0,0 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.haex_hub" parent="Theme.MaterialComponents.DayNight.NoActionBar">
<!-- Customize your theme here. -->
</style>
</resources>

View File

@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<paths xmlns:android="http://schemas.android.com/apk/res/android">
<external-path name="my_images" path="." />
<cache-path name="my_cache_images" path="." />
</paths>

View File

@@ -1,22 +0,0 @@
buildscript {
repositories {
google()
mavenCentral()
}
dependencies {
classpath("com.android.tools.build:gradle:8.11.0")
classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:1.9.25")
}
}
allprojects {
repositories {
google()
mavenCentral()
}
}
tasks.register("clean").configure {
delete("build")
}

View File

@@ -1,23 +0,0 @@
plugins {
`kotlin-dsl`
}
gradlePlugin {
plugins {
create("pluginsForCoolKids") {
id = "rust"
implementationClass = "RustPlugin"
}
}
}
repositories {
google()
mavenCentral()
}
dependencies {
compileOnly(gradleApi())
implementation("com.android.tools.build:gradle:8.11.0")
}

View File

@@ -1,52 +0,0 @@
import java.io.File
import org.apache.tools.ant.taskdefs.condition.Os
import org.gradle.api.DefaultTask
import org.gradle.api.GradleException
import org.gradle.api.logging.LogLevel
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskAction
open class BuildTask : DefaultTask() {
@Input
var rootDirRel: String? = null
@Input
var target: String? = null
@Input
var release: Boolean? = null
@TaskAction
fun assemble() {
val executable = """pnpm""";
try {
runTauriCli(executable)
} catch (e: Exception) {
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
runTauriCli("$executable.cmd")
} else {
throw e;
}
}
}
fun runTauriCli(executable: String) {
val rootDirRel = rootDirRel ?: throw GradleException("rootDirRel cannot be null")
val target = target ?: throw GradleException("target cannot be null")
val release = release ?: throw GradleException("release cannot be null")
val args = listOf("tauri", "android", "android-studio-script");
project.exec {
workingDir(File(project.projectDir, rootDirRel))
executable(executable)
args(args)
if (project.logger.isEnabled(LogLevel.DEBUG)) {
args("-vv")
} else if (project.logger.isEnabled(LogLevel.INFO)) {
args("-v")
}
if (release) {
args("--release")
}
args(listOf("--target", target))
}.assertNormalExitValue()
}
}

View File

@@ -1,85 +0,0 @@
import com.android.build.api.dsl.ApplicationExtension
import org.gradle.api.DefaultTask
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.kotlin.dsl.configure
import org.gradle.kotlin.dsl.get
const val TASK_GROUP = "rust"
open class Config {
lateinit var rootDirRel: String
}
open class RustPlugin : Plugin<Project> {
private lateinit var config: Config
override fun apply(project: Project) = with(project) {
config = extensions.create("rust", Config::class.java)
val defaultAbiList = listOf("arm64-v8a", "armeabi-v7a", "x86", "x86_64");
val abiList = (findProperty("abiList") as? String)?.split(',') ?: defaultAbiList
val defaultArchList = listOf("arm64", "arm", "x86", "x86_64");
val archList = (findProperty("archList") as? String)?.split(',') ?: defaultArchList
val targetsList = (findProperty("targetList") as? String)?.split(',') ?: listOf("aarch64", "armv7", "i686", "x86_64")
extensions.configure<ApplicationExtension> {
@Suppress("UnstableApiUsage")
flavorDimensions.add("abi")
productFlavors {
create("universal") {
dimension = "abi"
ndk {
abiFilters += abiList
}
}
defaultArchList.forEachIndexed { index, arch ->
create(arch) {
dimension = "abi"
ndk {
abiFilters.add(defaultAbiList[index])
}
}
}
}
}
afterEvaluate {
for (profile in listOf("debug", "release")) {
val profileCapitalized = profile.replaceFirstChar { it.uppercase() }
val buildTask = tasks.maybeCreate(
"rustBuildUniversal$profileCapitalized",
DefaultTask::class.java
).apply {
group = TASK_GROUP
description = "Build dynamic library in $profile mode for all targets"
}
tasks["mergeUniversal${profileCapitalized}JniLibFolders"].dependsOn(buildTask)
for (targetPair in targetsList.withIndex()) {
val targetName = targetPair.value
val targetArch = archList[targetPair.index]
val targetArchCapitalized = targetArch.replaceFirstChar { it.uppercase() }
val targetBuildTask = project.tasks.maybeCreate(
"rustBuild$targetArchCapitalized$profileCapitalized",
BuildTask::class.java
).apply {
group = TASK_GROUP
description = "Build dynamic library in $profile mode for $targetArch"
rootDirRel = config.rootDirRel
target = targetName
release = profile == "release"
}
buildTask.dependsOn(targetBuildTask)
tasks["merge$targetArchCapitalized${profileCapitalized}JniLibFolders"].dependsOn(
targetBuildTask
)
}
}
}
}
}

View File

@ -1,24 +0,0 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
android.nonFinalResIds=false

View File

@ -1,6 +0,0 @@
#Tue May 10 19:22:52 CST 2022
distributionBase=GRADLE_USER_HOME
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
distributionPath=wrapper/dists
zipStorePath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME

View File

@ -1,185 +0,0 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"

View File

@ -1,89 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -1,3 +0,0 @@
include ':app'
apply from: 'tauri.settings.gradle'

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@ -1 +0,0 @@
{"default":{"identifier":"default","description":"Capability for the main window","local":true,"windows":["main"],"permissions":["core:default","core:webview:allow-create-webview-window","core:webview:allow-create-webview","core:webview:allow-webview-show","core:webview:default","core:window:allow-create","core:window:allow-get-all-windows","core:window:allow-show","core:window:default","dialog:default","fs:allow-appconfig-read-recursive","fs:allow-appconfig-write-recursive","fs:allow-appdata-read-recursive","fs:allow-appdata-write-recursive","fs:allow-read-file","fs:allow-read-dir","fs:allow-resource-read-recursive","fs:allow-resource-write-recursive","fs:allow-download-read-recursive","fs:allow-download-write-recursive","fs:default",{"identifier":"fs:scope","allow":[{"path":"**"}]},"http:allow-fetch-send","http:allow-fetch","http:default","notification:allow-create-channel","notification:allow-list-channels","notification:allow-notify","notification:default","opener:allow-open-url","opener:default","os:allow-hostname","os:default","store:default"]}}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,3 +0,0 @@
// build/mod.rs
pub mod rust_types;
pub mod table_names;
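Note: a minimal sketch of a build.rs entry point that could drive these generator modules (the `#[path]` wiring and the `tauri_build::build()` call are assumptions based on the standard Tauri layout, not part of this diff):
// src-tauri/build.rs (hypothetical sketch)
#[path = "src/build/mod.rs"]
mod build;
fn main() {
// Generate the table-name constants into OUT_DIR and verify the TS-generated types
build::table_names::generate_table_names();
build::rust_types::generate_rust_types();
// Standard Tauri build step
tauri_build::build();
}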

View File

@ -1,24 +0,0 @@
// src-tauri/src/build/rust_types.rs
use std::fs;
use std::path::Path;
pub fn generate_rust_types() {
// Check whether the file generated by the TypeScript script exists
let generated_path = Path::new("src/database/generated.rs");
if !generated_path.exists() {
eprintln!("⚠️ Warning: src/database/generated.rs not found!");
eprintln!(" Run 'pnpm generate:rust-types' first.");
// Create an empty file as a fallback
fs::write(
generated_path,
"// Run 'pnpm generate:rust-types' to generate this file\n",
)
.ok();
}
println!("cargo:rerun-if-changed=src/database/generated.rs");
println!("cargo:rerun-if-changed=src/database/schemas/crdt.ts");
println!("cargo:rerun-if-changed=src/database/schemas/haex.ts");
}

View File

@ -1,213 +0,0 @@
// src-tauri/src/build/table_names.rs
use serde::Deserialize;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufReader, Write};
use std::path::Path;
#[derive(Debug, Deserialize)]
struct Schema {
haex: Haex,
}
#[derive(Debug, Deserialize)]
#[allow(non_snake_case)]
struct Haex {
settings: TableDefinition,
extensions: TableDefinition,
extension_permissions: TableDefinition,
notifications: TableDefinition,
crdt: Crdt,
}
#[derive(Debug, Deserialize)]
struct Crdt {
logs: TableDefinition,
snapshots: TableDefinition,
configs: TableDefinition,
}
#[derive(Debug, Deserialize)]
struct TableDefinition {
name: String,
columns: HashMap<String, String>,
}
pub fn generate_table_names() {
let out_dir = env::var("OUT_DIR").expect("OUT_DIR is not set.");
println!("Generating table name constants into {}", out_dir);
let schema_path = Path::new("database/tableNames.json");
let dest_path = Path::new(&out_dir).join("tableNames.rs");
let file = File::open(&schema_path).expect("Could not open tableNames.json");
let reader = BufReader::new(file);
let schema: Schema =
serde_json::from_reader(reader).expect("Could not parse tableNames.json");
let haex = schema.haex;
let code = format!(
r#"
// ==================================================================
// NOTE: This file was generated automatically by build.rs.
// Manual changes will be overwritten on the next compilation!
// ==================================================================
// --- Table: haex_settings ---
pub const TABLE_SETTINGS: &str = "{t_settings}";
pub const COL_SETTINGS_ID: &str = "{c_settings_id}";
pub const COL_SETTINGS_KEY: &str = "{c_settings_key}";
pub const COL_SETTINGS_TYPE: &str = "{c_settings_type}";
pub const COL_SETTINGS_VALUE: &str = "{c_settings_value}";
pub const COL_SETTINGS_HAEX_TOMBSTONE: &str = "{c_settings_tombstone}";
pub const COL_SETTINGS_HAEX_TIMESTAMP: &str = "{c_settings_timestamp}";
// --- Table: haex_extensions ---
pub const TABLE_EXTENSIONS: &str = "{t_extensions}";
pub const COL_EXTENSIONS_ID: &str = "{c_ext_id}";
pub const COL_EXTENSIONS_AUTHOR: &str = "{c_ext_author}";
pub const COL_EXTENSIONS_DESCRIPTION: &str = "{c_ext_description}";
pub const COL_EXTENSIONS_ENTRY: &str = "{c_ext_entry}";
pub const COL_EXTENSIONS_HOMEPAGE: &str = "{c_ext_homepage}";
pub const COL_EXTENSIONS_ENABLED: &str = "{c_ext_enabled}";
pub const COL_EXTENSIONS_ICON: &str = "{c_ext_icon}";
pub const COL_EXTENSIONS_NAME: &str = "{c_ext_name}";
pub const COL_EXTENSIONS_PUBLIC_KEY: &str = "{c_ext_public_key}";
pub const COL_EXTENSIONS_SIGNATURE: &str = "{c_ext_signature}";
pub const COL_EXTENSIONS_URL: &str = "{c_ext_url}";
pub const COL_EXTENSIONS_VERSION: &str = "{c_ext_version}";
pub const COL_EXTENSIONS_HAEX_TOMBSTONE: &str = "{c_ext_tombstone}";
pub const COL_EXTENSIONS_HAEX_TIMESTAMP: &str = "{c_ext_timestamp}";
// --- Table: haex_extension_permissions ---
pub const TABLE_EXTENSION_PERMISSIONS: &str = "{t_ext_perms}";
pub const COL_EXT_PERMS_ID: &str = "{c_extp_id}";
pub const COL_EXT_PERMS_EXTENSION_ID: &str = "{c_extp_extensionId}";
pub const COL_EXT_PERMS_RESOURCE_TYPE: &str = "{c_extp_resourceType}";
pub const COL_EXT_PERMS_ACTION: &str = "{c_extp_action}";
pub const COL_EXT_PERMS_TARGET: &str = "{c_extp_target}";
pub const COL_EXT_PERMS_CONSTRAINTS: &str = "{c_extp_constraints}";
pub const COL_EXT_PERMS_STATUS: &str = "{c_extp_status}";
pub const COL_EXT_PERMS_CREATED_AT: &str = "{c_extp_createdAt}";
pub const COL_EXT_PERMS_UPDATE_AT: &str = "{c_extp_updateAt}";
pub const COL_EXT_PERMS_HAEX_TOMBSTONE: &str = "{c_extp_tombstone}";
pub const COL_EXT_PERMS_HAEX_TIMESTAMP: &str = "{c_extp_timestamp}";
// --- Table: haex_notifications ---
pub const TABLE_NOTIFICATIONS: &str = "{t_notifications}";
pub const COL_NOTIFICATIONS_ID: &str = "{c_notif_id}";
pub const COL_NOTIFICATIONS_ALT: &str = "{c_notif_alt}";
pub const COL_NOTIFICATIONS_DATE: &str = "{c_notif_date}";
pub const COL_NOTIFICATIONS_ICON: &str = "{c_notif_icon}";
pub const COL_NOTIFICATIONS_IMAGE: &str = "{c_notif_image}";
pub const COL_NOTIFICATIONS_READ: &str = "{c_notif_read}";
pub const COL_NOTIFICATIONS_SOURCE: &str = "{c_notif_source}";
pub const COL_NOTIFICATIONS_TEXT: &str = "{c_notif_text}";
pub const COL_NOTIFICATIONS_TITLE: &str = "{c_notif_title}";
pub const COL_NOTIFICATIONS_TYPE: &str = "{c_notif_type}";
pub const COL_NOTIFICATIONS_HAEX_TOMBSTONE: &str = "{c_notif_tombstone}";
// --- Table: haex_crdt_logs ---
pub const TABLE_CRDT_LOGS: &str = "{t_crdt_logs}";
pub const COL_CRDT_LOGS_ID: &str = "{c_crdt_logs_id}";
pub const COL_CRDT_LOGS_HAEX_TIMESTAMP: &str = "{c_crdt_logs_timestamp}";
pub const COL_CRDT_LOGS_TABLE_NAME: &str = "{c_crdt_logs_tableName}";
pub const COL_CRDT_LOGS_ROW_PKS: &str = "{c_crdt_logs_rowPks}";
pub const COL_CRDT_LOGS_OP_TYPE: &str = "{c_crdt_logs_opType}";
pub const COL_CRDT_LOGS_COLUMN_NAME: &str = "{c_crdt_logs_columnName}";
pub const COL_CRDT_LOGS_NEW_VALUE: &str = "{c_crdt_logs_newValue}";
pub const COL_CRDT_LOGS_OLD_VALUE: &str = "{c_crdt_logs_oldValue}";
// --- Table: haex_crdt_snapshots ---
pub const TABLE_CRDT_SNAPSHOTS: &str = "{t_crdt_snapshots}";
pub const COL_CRDT_SNAPSHOTS_ID: &str = "{c_crdt_snap_id}";
pub const COL_CRDT_SNAPSHOTS_CREATED: &str = "{c_crdt_snap_created}";
pub const COL_CRDT_SNAPSHOTS_EPOCH_HLC: &str = "{c_crdt_snap_epoch}";
pub const COL_CRDT_SNAPSHOTS_LOCATION_URL: &str = "{c_crdt_snap_location}";
pub const COL_CRDT_SNAPSHOTS_FILE_SIZE: &str = "{c_crdt_snap_size}";
// --- Table: haex_crdt_configs ---
pub const TABLE_CRDT_CONFIGS: &str = "{t_crdt_configs}";
pub const COL_CRDT_CONFIGS_KEY: &str = "{c_crdt_configs_key}";
pub const COL_CRDT_CONFIGS_VALUE: &str = "{c_crdt_configs_value}";
"#,
// Settings
t_settings = haex.settings.name,
c_settings_id = haex.settings.columns["id"],
c_settings_key = haex.settings.columns["key"],
c_settings_type = haex.settings.columns["type"],
c_settings_value = haex.settings.columns["value"],
c_settings_tombstone = haex.settings.columns["haexTombstone"],
c_settings_timestamp = haex.settings.columns["haexTimestamp"],
// Extensions
t_extensions = haex.extensions.name,
c_ext_id = haex.extensions.columns["id"],
c_ext_author = haex.extensions.columns["author"],
c_ext_description = haex.extensions.columns["description"],
c_ext_entry = haex.extensions.columns["entry"],
c_ext_homepage = haex.extensions.columns["homepage"],
c_ext_enabled = haex.extensions.columns["enabled"],
c_ext_icon = haex.extensions.columns["icon"],
c_ext_name = haex.extensions.columns["name"],
c_ext_public_key = haex.extensions.columns["public_key"],
c_ext_signature = haex.extensions.columns["signature"],
c_ext_url = haex.extensions.columns["url"],
c_ext_version = haex.extensions.columns["version"],
c_ext_tombstone = haex.extensions.columns["haexTombstone"],
c_ext_timestamp = haex.extensions.columns["haexTimestamp"],
// Extension Permissions
t_ext_perms = haex.extension_permissions.name,
c_extp_id = haex.extension_permissions.columns["id"],
c_extp_extensionId = haex.extension_permissions.columns["extensionId"],
c_extp_resourceType = haex.extension_permissions.columns["resourceType"],
c_extp_action = haex.extension_permissions.columns["action"],
c_extp_target = haex.extension_permissions.columns["target"],
c_extp_constraints = haex.extension_permissions.columns["constraints"],
c_extp_status = haex.extension_permissions.columns["status"],
c_extp_createdAt = haex.extension_permissions.columns["createdAt"],
c_extp_updateAt = haex.extension_permissions.columns["updateAt"],
c_extp_tombstone = haex.extension_permissions.columns["haexTombstone"],
c_extp_timestamp = haex.extension_permissions.columns["haexTimestamp"],
// Notifications
t_notifications = haex.notifications.name,
c_notif_id = haex.notifications.columns["id"],
c_notif_alt = haex.notifications.columns["alt"],
c_notif_date = haex.notifications.columns["date"],
c_notif_icon = haex.notifications.columns["icon"],
c_notif_image = haex.notifications.columns["image"],
c_notif_read = haex.notifications.columns["read"],
c_notif_source = haex.notifications.columns["source"],
c_notif_text = haex.notifications.columns["text"],
c_notif_title = haex.notifications.columns["title"],
c_notif_type = haex.notifications.columns["type"],
c_notif_tombstone = haex.notifications.columns["haexTombstone"],
// CRDT Logs
t_crdt_logs = haex.crdt.logs.name,
c_crdt_logs_id = haex.crdt.logs.columns["id"],
c_crdt_logs_timestamp = haex.crdt.logs.columns["haexTimestamp"],
c_crdt_logs_tableName = haex.crdt.logs.columns["tableName"],
c_crdt_logs_rowPks = haex.crdt.logs.columns["rowPks"],
c_crdt_logs_opType = haex.crdt.logs.columns["opType"],
c_crdt_logs_columnName = haex.crdt.logs.columns["columnName"],
c_crdt_logs_newValue = haex.crdt.logs.columns["newValue"],
c_crdt_logs_oldValue = haex.crdt.logs.columns["oldValue"],
// CRDT Snapshots
t_crdt_snapshots = haex.crdt.snapshots.name,
c_crdt_snap_id = haex.crdt.snapshots.columns["snapshotId"],
c_crdt_snap_created = haex.crdt.snapshots.columns["created"],
c_crdt_snap_epoch = haex.crdt.snapshots.columns["epochHlc"],
c_crdt_snap_location = haex.crdt.snapshots.columns["locationUrl"],
c_crdt_snap_size = haex.crdt.snapshots.columns["fileSizeBytes"],
// CRDT Configs
t_crdt_configs = haex.crdt.configs.name,
c_crdt_configs_key = haex.crdt.configs.columns["key"],
c_crdt_configs_value = haex.crdt.configs.columns["value"]
);
// --- Write the file ---
let mut f = File::create(&dest_path).expect("Could not create destination file");
f.write_all(code.as_bytes())
.expect("Could not write to destination file");
println!("cargo:rerun-if-changed=database/tableNames.json");
}
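Note: a minimal sketch of how the constants generated above could be included into the crate (hypothetical include site; it would match the `crate::table_names::...` imports used elsewhere in this diff):
// src-tauri/src/table_names.rs (hypothetical)
// Pulls in the constants that build.rs wrote into OUT_DIR
include!(concat!(env!("OUT_DIR"), "/tableNames.rs"));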

View File

@ -0,0 +1,285 @@
//mod middleware;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use tauri::{webview, AppHandle, LogicalPosition, LogicalSize, Manager, WebviewUrl, Window};
//use uuid::Uuid;
#[derive(Debug, Clone)]
pub struct Tab {
pub id: String,
pub webview_label: String,
pub title: String,
pub url: String,
pub is_loading: bool,
pub is_visible: bool,
}
pub struct BrowserManager {
tabs: Arc<Mutex<HashMap<String, Tab>>>,
active_tab_id: Arc<Mutex<Option<String>>>,
//middleware: Arc<RoutingMiddleware>,
}
impl BrowserManager {
pub fn new() -> Self {
Self {
tabs: Arc::new(Mutex::new(HashMap::new())),
active_tab_id: Arc::new(Mutex::new(None)),
//middleware: Arc::new(RoutingMiddleware::new()),
}
}
/* pub async fn create_window(app: tauri::AppHandle) -> Result<tauri::WebviewWindow, _> {
let webview_window = tauri::WebviewWindowBuilder::new(
&app,
"label",
tauri::WebviewUrl::App("index.html".into()),
)
.build()
.unwrap();
Ok(webview_window);
} */
pub fn create_tab(&self, app: AppHandle, url: &str) {
// Generate a unique ID for the tab
/* let tab_id = Uuid::new_v4().to_string();
let webview_label = format!("webview-{}", tab_id); */
// Check the URL via the middleware
//let processed_url = self.middleware.process_url(url);
// Get the main window
let main_window = app.get_webview_window("main").unwrap();
// Compute the position and size for the webview
// Here we assume a header area of 100 pixels
/* let window_size = main_window.inner_size()?;
let header_height = 100.0;
let webview_position = LogicalPosition::new(0.0, header_height);
let webview_size = LogicalSize::new(window_size.width, window_size.height - header_height);
*/
/* let webview = tauri::WebviewWindowBuilder::new(
&app,
"label",
//WebviewUrl::External(processed_url.parse().unwrap()),
WebviewUrl::External(url),
)
.build()
.unwrap() */
/* .on_navigation(move |url| {
// Apply middleware to navigation
self.middleware.process_navigation(url.as_str())
})
.on_web_resource_request(move |request, response| {
// Apply middleware to HTTP requests
self.middleware.process_request(request, response)
}); */
// Create the tab object
/* let tab = Tab {
id: tab_id.clone(),
webview_label: webview_label.clone(),
title: "Neuer Tab".to_string(),
url: processed_url.to_string(),
is_loading: true,
is_visible: false,
}; */
// Store the tab
/* {
let mut tabs = self.tabs.lock().unwrap();
tabs.insert(tab_id.clone(), tab.clone());
} */
// Set as the active tab
//self.activate_tab(app, &tab_id)?;
// Inject the webview bridge
/* let script = include_str!("../assets/webview-bridge.js");
webview.evaluate_script(script)?; */
// Register an event handler for title changes
let tab_manager = self.clone();
//let tab_id_clone = tab_id.clone();
/* webview.listen("tauri://title-changed", move |event| {
if let Some(title) = event.payload().and_then(|p| p.as_str()) {
tab_manager.update_tab_title(&tab_id_clone, title);
}
}); */
// Register an event handler for the loading state
let tab_manager = self.clone();
//let tab_id_clone = tab_id.clone();
/* webview.listen("tauri://load-changed", move |event| {
if let Some(status) = event.payload().and_then(|p| p.as_str()) {
let is_loading = status == "loading";
tab_manager.update_tab_loading_status(&tab_id_clone, is_loading);
}
}); */
//Ok()
}
pub fn close_tab(&self, app: &AppHandle, tab_id: &str) -> Result<(), tauri::Error> {
// Get the main window
let main_window = app.get_webview_window("main").unwrap();
// Remove the tab from bookkeeping
let webview_label = {
let mut tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.remove(tab_id) {
tab.webview_label
} else {
return Ok(());
}
};
// Remove the webview
//main_window.remove_child(&webview_label)?;
// Update the active tab, if necessary
{
let mut active_tab_id = self.active_tab_id.lock().unwrap();
if active_tab_id.as_ref().map_or(false, |id| id == tab_id) {
// Pick another tab as the active one
let tabs = self.tabs.lock().unwrap();
*active_tab_id = tabs.keys().next().cloned();
// Activate the new tab, if any
if let Some(new_active_id) = active_tab_id.clone() {
drop(active_tab_id); // release the mutex before the recursive call
self.activate_tab(app, &new_active_id)?;
}
}
}
Ok(())
}
pub fn activate_tab(&self, app: &AppHandle, tab_id: &str) -> Result<(), tauri::Error> {
// Get the main window
let main_window = app.get_webview_window("main").unwrap();
// Mark the tab as active
{
let mut active_tab_id = self.active_tab_id.lock().unwrap();
*active_tab_id = Some(tab_id.to_string());
}
// Hide all other tabs and show the active one
let mut tabs = self.tabs.lock().unwrap();
for (id, tab) in tabs.iter_mut() {
if id == tab_id {
// Show the active tab
/* main_window
.get_webview_window(&tab.webview_label)?
.set_visible(true)?; */
tab.is_visible = true;
} else {
// Hide all other tabs
/* main_window
.get_webview_window(&tab.webview_label)?
.set_visible(false)?; */
tab.is_visible = false;
}
}
Ok(())
}
pub fn navigate_to_url(
&self,
app: &AppHandle,
tab_id: &str,
url: &str,
) -> Result<(), tauri::Error> {
// Check the URL via the middleware
//let processed_url = self.middleware.process_url(url);
// Update the URL stored in the tab
{
let mut tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.get_mut(tab_id) {
tab.url = url.to_string(); //processed_url.to_string();
}
}
// Navigate to the URL in the webview
let tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.get(tab_id) {
let main_window = app.get_webview_window("main").unwrap();
/* let webview = main_window.get_webview_window(&tab.webview_label)?;
webview.navigate(&processed_url)?; */
}
Ok(())
}
pub fn get_all_tabs(&self) -> Vec<Tab> {
let tabs = self.tabs.lock().unwrap();
tabs.values().cloned().collect()
}
pub fn get_active_tab_id(&self) -> Option<String> {
let active_tab_id = self.active_tab_id.lock().unwrap();
active_tab_id.clone()
}
pub fn update_tab_title(&self, tab_id: &str, title: &str) {
let mut tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.get_mut(tab_id) {
tab.title = title.to_string();
}
}
pub fn update_tab_loading_status(&self, tab_id: &str, is_loading: bool) {
let mut tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.get_mut(tab_id) {
tab.is_loading = is_loading;
}
}
// Additional browser navigation methods
pub fn go_back(&self, app: &AppHandle, tab_id: &str) -> Result<(), tauri::Error> {
let tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.get(tab_id) {
let main_window = app.get_webview_window("main").unwrap();
/* let webview = main_window.get_webview(&tab.webview_label)?;
webview.evaluate_script("window.history.back()")?; */
}
Ok(())
}
pub fn go_forward(&self, app: &AppHandle, tab_id: &str) -> Result<(), tauri::Error> {
let tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.get(tab_id) {
let main_window = app.get_webview_window("main").unwrap();
/* let webview = main_window.get_webview(&tab.webview_label)?;
webview.evaluate_script("window.history.forward()")?; */
}
Ok(())
}
pub fn inject_content_script(
&self,
app: &AppHandle,
tab_id: &str,
script: &str,
) -> Result<(), tauri::Error> {
let tabs = self.tabs.lock().unwrap();
if let Some(tab) = tabs.get(tab_id) {
let main_window = app.get_webview_window("main").unwrap();
/* let webview = main_window.get_webview(&tab.webview_label)?;
webview.evaluate_script(script)?; */
}
Ok(())
}
pub fn clone(&self) -> Self {
Self {
tabs: Arc::clone(&self.tabs),
active_tab_id: Arc::clone(&self.active_tab_id),
//middleware: Arc::clone(&self.middleware),
}
}
}
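Note: since most of the webview plumbing above is commented out, only the tab bookkeeping is currently exercisable; a minimal usage sketch under that assumption:
// Hypothetical usage of the bookkeeping API above
let manager = BrowserManager::new();
manager.update_tab_title("tab-1", "Example"); // no-op until create_tab stores tabs again
assert!(manager.get_all_tabs().is_empty());
assert!(manager.get_active_tab_id().is_none());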

View File

@ -0,0 +1,125 @@
use std::sync::{Arc, Mutex};
use tauri::http::{Request, Response, ResponseBuilder};
pub struct RoutingMiddleware {
extensions: Arc<Mutex<Vec<Box<dyn MiddlewareExtension + Send + Sync>>>>,
}
pub trait MiddlewareExtension: Send + Sync {
fn name(&self) -> &str;
fn process_url(&self, url: &str) -> String;
fn process_navigation(&self, url: &str) -> bool;
fn process_request(&self, request: &Request, response: &mut Response) -> bool;
}
impl RoutingMiddleware {
pub fn new() -> Self {
let mut middleware = Self {
extensions: Arc::new(Mutex::new(Vec::new())),
};
// Register default extensions
//middleware.register_extension(Box::new(AdBlockerExtension::new()));
middleware
}
pub fn register_extension(&mut self, extension: Box<dyn MiddlewareExtension + Send + Sync>) {
let mut extensions = self.extensions.lock().unwrap();
extensions.push(extension);
}
pub fn process_url(&self, url: &str) -> String {
let extensions = self.extensions.lock().unwrap();
let mut processed_url = url.to_string();
for extension in extensions.iter() {
processed_url = extension.process_url(&processed_url);
}
processed_url
}
pub fn process_navigation(&self, url: &str) -> bool {
let extensions = self.extensions.lock().unwrap();
for extension in extensions.iter() {
if !extension.process_navigation(url) {
return false;
}
}
true
}
pub fn process_request(&self, request: &Request, response: &mut Response) -> bool {
let extensions = self.extensions.lock().unwrap();
for extension in extensions.iter() {
if extension.process_request(request, response) {
return true;
}
}
false
}
}
// Example of an ad-blocker extension
struct AdBlockerExtension {
block_patterns: Vec<String>,
}
impl AdBlockerExtension {
fn new() -> Self {
Self {
block_patterns: vec![
"ads".to_string(),
"analytics".to_string(),
"tracker".to_string(),
"banner".to_string(),
],
}
}
fn is_blocked_url(&self, url: &str) -> bool {
for pattern in &self.block_patterns {
if url.contains(pattern) {
return true;
}
}
false
}
}
impl MiddlewareExtension for AdBlockerExtension {
fn name(&self) -> &str {
"AdBlocker"
}
fn process_url(&self, url: &str) -> String {
// For full navigations we normally do not block the entire page
url.to_string()
}
fn process_navigation(&self, url: &str) -> bool {
// Only block full navigations to ad sites
let is_ad_site = url.contains("doubleclick.net")
|| url.contains("googleadservices.com")
|| url.contains("ads.example.com");
!is_ad_site
}
fn process_request(&self, request: &Request, response: &mut Response) -> bool {
let url = request.uri().to_string();
if self.is_blocked_url(&url) {
println!("AdBlocker: Blockiere Anfrage: {}", url);
*response = ResponseBuilder::new()
.status(403)
.body("Zugriff verweigert durch AdBlocker".as_bytes().to_vec())
.unwrap();
return true;
}
false
}
}
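Note: a minimal sketch of wiring the example extension into the middleware from within the same module (assumes the caller performs the registration that is commented out in `RoutingMiddleware::new`):
// Hypothetical usage, same module as RoutingMiddleware
let mut middleware = RoutingMiddleware::new();
middleware.register_extension(Box::new(AdBlockerExtension::new()));
// Full navigations to known ad hosts are rejected, others pass
assert!(!middleware.process_navigation("https://doubleclick.net/ads"));
assert!(middleware.process_navigation("https://example.org"));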

View File

@ -0,0 +1,188 @@
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Manager, State};
mod manager;
#[derive(Serialize, Deserialize)]
pub struct TabInfo {
id: String,
title: String,
url: String,
is_loading: bool,
is_active: bool,
}
// Simple commands for tab management
#[tauri::command]
pub fn create_tab(app_handle: tauri::AppHandle, tab_id: String, url: String) -> Result<(), String> {
let main_window = app_handle
.get_webview_window("main")
.ok_or("Hauptfenster nicht gefunden")?;
let window_size = main_window.inner_size().map_err(|e| e.to_string())?;
// Create a new webview as a standalone window
let webview = tauri::WebviewWindowBuilder::new(
&app_handle,
tab_id.clone(),
tauri::WebviewUrl::External(url.parse::<tauri::Url>().map_err(|e| e.to_string())?),
//tauri::WebviewUrl::External("http://google.de"),
)
.title(format!("Tab: {}", tab_id))
.inner_size(window_size.width as f64, window_size.height as f64 - 50.0)
.position(0.0, 50.0)
.build()
.map_err(|e| e.to_string())?;
// Send the tab ID back to the main window
/* main_window
.emit("tab-created", tab_id)
.map_err(|e| e.to_string())?; */
Ok(())
}
#[tauri::command]
pub fn show_tab(app_handle: tauri::AppHandle, tab_id: String) -> Result<(), String> {
// Get all webview windows
let windows = app_handle.webview_windows();
// Show the selected tab and hide the others
for (id, window) in windows {
if id != "main" {
// never hide the main window
if id == tab_id {
window.show().map_err(|e| e.to_string())?;
window.set_focus().map_err(|e| e.to_string())?;
} else {
window.hide().map_err(|e| e.to_string())?;
}
}
}
Ok(())
}
#[tauri::command]
pub fn close_tab(app_handle: tauri::AppHandle, tab_id: String) -> Result<(), String> {
if let Some(window) = app_handle.get_webview_window(&tab_id) {
window.close().map_err(|e| e.to_string())?;
}
Ok(())
}
/* #[tauri::command]
pub fn create_tab(app: AppHandle, url: String) -> Result<TabInfo, String> {
let browser_manager = app.state::<manager::BrowserManager>();
match browser_manager.create_tab(&app, &url) {
Ok(tab) => {
let active_tab_id = browser_manager.get_active_tab_id();
let is_active = active_tab_id.as_ref().map_or(false, |id| id == &tab.id);
let main = app.get_webview_window("main");
//main.unwrap().
// Send an event to the frontend
/* app.emit_all(
"tab-created",
TabInfo {
id: tab.id.clone(),
title: tab.title.clone(),
url: tab.url.clone(),
is_loading: tab.is_loading,
is_active,
},
)
.unwrap(); */
Ok(TabInfo {
id: tab.id,
title: tab.title,
url: tab.url,
is_loading: tab.is_loading,
is_active: true,
})
}
Err(e) => Err(format!("Fehler beim Erstellen des Tabs: {}", e)),
}
} */
/* #[tauri::command]
pub fn close_tab(app: AppHandle, tab_id: String) -> Result<(), String> {
let browser_manager = app.state::<manager::BrowserManager>();
match browser_manager.close_tab(&app, &tab_id) {
Ok(_) => {
// Send an event to the frontend
//app.emit_all("tab-closed", tab_id).unwrap();
Ok(())
}
Err(e) => Err(format!("Fehler beim Schließen des Tabs: {}", e)),
}
} */
#[tauri::command]
pub fn navigate_to_url(app: AppHandle, tab_id: String, url: String) -> Result<(), String> {
let browser_manager = app.state::<manager::BrowserManager>();
match browser_manager.navigate_to_url(&app, &tab_id, &url) {
Ok(_) => Ok(()),
Err(e) => Err(format!("Fehler bei der Navigation: {}", e)),
}
}
#[tauri::command]
pub fn get_current_url(app: AppHandle, tab_id: String) -> Result<String, String> {
let browser_manager = app.state::<manager::BrowserManager>();
let tabs = browser_manager.get_all_tabs();
for tab in tabs {
if tab.id == tab_id {
return Ok(tab.url);
}
}
Err("Tab nicht gefunden".to_string())
}
#[tauri::command]
pub fn go_back(app: AppHandle, tab_id: String) -> Result<(), String> {
let browser_manager = app.state::<manager::BrowserManager>();
match browser_manager.go_back(&app, &tab_id) {
Ok(_) => Ok(()),
Err(e) => Err(format!("Fehler beim Zurückgehen: {}", e)),
}
}
#[tauri::command]
pub fn go_forward(app: AppHandle, tab_id: String) -> Result<(), String> {
let browser_manager = app.state::<manager::BrowserManager>();
match browser_manager.go_forward(&app, &tab_id) {
Ok(_) => Ok(()),
Err(e) => Err(format!("Fehler beim Vorwärtsgehen: {}", e)),
}
}
#[tauri::command]
pub fn block_resource_request(url: String, resource_type: String) -> bool {
// This function is called from the frontend to check whether a resource should be blocked.
// The actual logic is implemented in the JavaScript extension system.
// Additional Rust-side checks could be performed here.
println!("Prüfe Ressourcenanfrage: {} (Typ: {})", url, resource_type);
// Simple check for demonstration purposes
url.contains("ads") || url.contains("analytics") || url.contains("tracker")
}
#[tauri::command]
pub fn inject_content_script(app: AppHandle, tab_id: String, script: String) -> Result<(), String> {
let browser_manager = app.state::<manager::BrowserManager>();
match browser_manager.inject_content_script(&app, &tab_id, &script) {
Ok(_) => Ok(()),
Err(e) => Err(format!("Fehler beim Injizieren des Scripts: {}", e)),
}
}
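Note: a minimal sketch of registering these commands and the manager state with the Tauri builder (hypothetical main.rs wiring; assumes this command module and `manager` are visible from the binary crate):
// Hypothetical registration in src-tauri/src/main.rs
tauri::Builder::default()
.manage(manager::BrowserManager::new())
.invoke_handler(tauri::generate_handler![
create_tab,
show_tab,
close_tab,
navigate_to_url,
get_current_url,
go_back,
go_forward,
block_resource_request,
inject_content_script
])
.run(tauri::generate_context!())
.expect("error while running tauri application");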

View File

@ -1,226 +0,0 @@
// src-tauri/src/crdt/hlc.rs
use crate::table_names::TABLE_CRDT_CONFIGS;
use rusqlite::{params, Connection, Transaction};
use serde_json::json;
use std::{
fmt::Debug,
path::PathBuf,
str::FromStr,
sync::{Arc, Mutex},
time::Duration,
};
use tauri::AppHandle;
use tauri_plugin_store::StoreExt;
use thiserror::Error;
use uhlc::{HLCBuilder, Timestamp, HLC, ID};
use uuid::Uuid;
const HLC_NODE_ID_TYPE: &str = "hlc_node_id";
const HLC_TIMESTAMP_TYPE: &str = "hlc_timestamp";
#[derive(Error, Debug)]
pub enum HlcError {
#[error("Database error: {0}")]
Database(#[from] rusqlite::Error),
#[error("Failed to parse persisted HLC timestamp: {0}")]
ParseTimestamp(String),
#[error("Failed to parse persisted HLC state: {0}")]
Parse(String),
#[error("Failed to parse HLC Node ID: {0}")]
ParseNodeId(String),
#[error("HLC mutex was poisoned")]
MutexPoisoned,
#[error("Failed to create node ID: {0}")]
CreateNodeId(#[from] uhlc::SizeError),
#[error("No database connection available")]
NoConnection,
#[error("HLC service not initialized")]
NotInitialized,
#[error("Hex decode error: {0}")]
HexDecode(String),
#[error("UTF-8 conversion error: {0}")]
Utf8Error(String),
#[error("Failed to access device store: {0}")]
DeviceStore(String),
}
impl From<tauri_plugin_store::Error> for HlcError {
fn from(error: tauri_plugin_store::Error) -> Self {
HlcError::DeviceStore(error.to_string())
}
}
/// A thread-safe, persistent HLC service.
#[derive(Clone)]
pub struct HlcService {
hlc: Arc<Mutex<Option<HLC>>>,
}
impl HlcService {
/// Creates a new HLC service. The HLC will be initialized on first database access.
pub fn new() -> Self {
HlcService {
hlc: Arc::new(Mutex::new(None)),
}
}
/// Factory function: creates and initializes a new HLC service from an existing DB connection.
/// This is the preferred way to instantiate the service.
pub fn try_initialize(conn: &Connection, app_handle: &AppHandle) -> Result<Self, HlcError> {
// 1. Get or create a persistent node ID
let node_id_str = Self::get_or_create_device_id(app_handle)?;
// Parse the string into a Uuid object.
let uuid = Uuid::parse_str(&node_id_str).map_err(|e| {
HlcError::ParseNodeId(format!(
"Stored device ID is not a valid UUID: {}. Error: {}",
node_id_str, e
))
})?;
// Take the raw 16 bytes and build the uhlc::ID from them.
// The `*` dereferences the `&[u8; 16]` to `[u8; 16]`, which `try_from` expects.
let node_id = ID::try_from(*uuid.as_bytes()).map_err(|e| {
HlcError::ParseNodeId(format!("Invalid node ID format from device store: {:?}", e))
})?;
// 2. Create an HLC instance with a stable identity
let hlc = HLCBuilder::new()
.with_id(node_id)
.with_max_delta(Duration::from_secs(1))
.build();
// 3. Load and apply the last persisted timestamp
if let Some(last_timestamp) = Self::load_last_timestamp(conn)? {
hlc.update_with_timestamp(&last_timestamp).map_err(|e| {
HlcError::Parse(format!(
"Failed to update HLC with persisted timestamp: {:?}",
e
))
})?;
}
Ok(HlcService {
hlc: Arc::new(Mutex::new(Some(hlc))),
})
}
/// Gets the device ID from the Tauri store or creates a new one if none exists.
fn get_or_create_device_id(app_handle: &AppHandle) -> Result<String, HlcError> {
let store_path = PathBuf::from("instance.json");
let store = app_handle
.store(store_path)
.map_err(|e| HlcError::DeviceStore(e.to_string()))?;
// Case 1: the key "id" exists, its value is a string, and it is a valid UUID.
if let Some(value) = store.get("id") {
if let Some(s) = value.as_str() {
if Uuid::parse_str(s).is_ok() {
println!("Found and validated device ID: {}", s);
// Success: reuse the stored ID.
return Ok(s.to_string());
}
}
// The value exists but is not a valid UUID string; treat it as missing.
}
// Case 2: no (valid) ID yet, so create a new one and persist it.
let new_id = Uuid::new_v4().to_string();
store.set("id".to_string(), json!(new_id.clone()));
store.save()?;
Ok(new_id)
}
/// Generates a new timestamp and immediately persists the HLC's new state.
/// Must be called within an existing database transaction.
pub fn new_timestamp_and_persist<'tx>(
&self,
tx: &Transaction<'tx>,
) -> Result<Timestamp, HlcError> {
let mut hlc_guard = self.hlc.lock().map_err(|_| HlcError::MutexPoisoned)?;
let hlc = hlc_guard.as_mut().ok_or(HlcError::NotInitialized)?;
let new_timestamp = hlc.new_timestamp();
Self::persist_timestamp(tx, &new_timestamp)?;
Ok(new_timestamp)
}
/// Creates a new timestamp without persisting it (e.g. for read operations).
pub fn new_timestamp(&self) -> Result<Timestamp, HlcError> {
let mut hlc_guard = self.hlc.lock().map_err(|_| HlcError::MutexPoisoned)?;
let hlc = hlc_guard.as_mut().ok_or(HlcError::NotInitialized)?;
Ok(hlc.new_timestamp())
}
/// Updates the HLC with an external timestamp (used during synchronization).
pub fn update_with_timestamp(&self, timestamp: &Timestamp) -> Result<(), HlcError> {
let mut hlc_guard = self.hlc.lock().map_err(|_| HlcError::MutexPoisoned)?;
let hlc = hlc_guard.as_mut().ok_or(HlcError::NotInitialized)?;
hlc.update_with_timestamp(timestamp)
.map_err(|e| HlcError::Parse(format!("Failed to update HLC: {:?}", e)))
}
/// Loads the last persisted timestamp from the database.
fn load_last_timestamp(conn: &Connection) -> Result<Option<Timestamp>, HlcError> {
let query = format!("SELECT value FROM {} WHERE key = ?1", TABLE_CRDT_CONFIGS);
match conn.query_row(&query, params![HLC_TIMESTAMP_TYPE], |row| {
row.get::<_, String>(0)
}) {
Ok(state_str) => {
let timestamp = Timestamp::from_str(&state_str).map_err(|e| {
HlcError::ParseTimestamp(format!("Invalid timestamp format: {:?}", e))
})?;
Ok(Some(timestamp))
}
Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
Err(e) => Err(HlcError::Database(e)),
}
}
/// Persists a timestamp to the database within a transaction.
fn persist_timestamp(tx: &Transaction, timestamp: &Timestamp) -> Result<(), HlcError> {
let timestamp_str = timestamp.to_string();
tx.execute(
&format!(
"INSERT INTO {} (key, value) VALUES (?1, ?2)
ON CONFLICT(key) DO UPDATE SET value = excluded.value",
TABLE_CRDT_CONFIGS
),
params![HLC_TIMESTAMP_TYPE, timestamp_str],
)?;
Ok(())
}
}
impl Default for HlcService {
fn default() -> Self {
Self::new()
}
}
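Note: a minimal sketch of initializing and using the HLC service (assumes an open rusqlite Connection whose schema already contains the CRDT config table; the setup context and database path are hypothetical):
// Hypothetical initialization, e.g. inside a Tauri setup hook
let mut conn = Connection::open("haex.db")?; // hypothetical path
let hlc = HlcService::try_initialize(&conn, &app_handle)?;
let _read_ts = hlc.new_timestamp()?; // non-persisting, e.g. for reads
let tx = conn.transaction()?; // the persisting variant must run inside a transaction
let _write_ts = hlc.new_timestamp_and_persist(&tx)?;
tx.commit()?;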

View File

@ -1,3 +0,0 @@
pub mod hlc;
pub mod transformer;
pub mod trigger;

View File

@ -1,784 +0,0 @@
use crate::crdt::trigger::{HLC_TIMESTAMP_COLUMN, TOMBSTONE_COLUMN};
use crate::database::error::DatabaseError;
use crate::table_names::{TABLE_CRDT_CONFIGS, TABLE_CRDT_LOGS};
use sqlparser::ast::{
Assignment, AssignmentTarget, BinaryOperator, ColumnDef, DataType, Expr, Ident, Insert,
ObjectName, ObjectNamePart, SelectItem, SetExpr, Statement, TableFactor, TableObject, Value,
};
use std::borrow::Cow;
use std::collections::HashSet;
use uhlc::Timestamp;
/// Configuration for the CRDT columns
#[derive(Clone)]
struct CrdtColumns {
tombstone: &'static str,
hlc_timestamp: &'static str,
}
impl CrdtColumns {
const DEFAULT: Self = Self {
tombstone: TOMBSTONE_COLUMN,
hlc_timestamp: HLC_TIMESTAMP_COLUMN,
};
/// Creates a tombstone filter for a table
fn create_tombstone_filter(&self, table_alias: Option<&str>) -> Expr {
let column_expr = match table_alias {
Some(alias) => {
// Qualified reference: alias.tombstone
Expr::CompoundIdentifier(vec![Ident::new(alias), Ident::new(self.tombstone)])
}
None => {
// Plain reference: tombstone
Expr::Identifier(Ident::new(self.tombstone))
}
};
Expr::BinaryOp {
left: Box::new(column_expr),
op: BinaryOperator::NotEq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false).into())),
}
}
/// Creates a tombstone assignment for UPDATE/DELETE
fn create_tombstone_assignment(&self) -> Assignment {
Assignment {
target: AssignmentTarget::ColumnName(ObjectName(vec![ObjectNamePart::Identifier(
Ident::new(self.tombstone),
)])),
value: Expr::Value(Value::Number("1".to_string(), false).into()),
}
}
/// Creates an HLC assignment for UPDATE/DELETE
fn create_hlc_assignment(&self, timestamp: &Timestamp) -> Assignment {
Assignment {
target: AssignmentTarget::ColumnName(ObjectName(vec![ObjectNamePart::Identifier(
Ident::new(self.hlc_timestamp),
)])),
value: Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into()),
}
}
/// Adds the CRDT columns to a table definition
fn add_to_table_definition(&self, columns: &mut Vec<ColumnDef>) {
if !columns.iter().any(|c| c.name.value == self.tombstone) {
columns.push(ColumnDef {
name: Ident::new(self.tombstone),
data_type: DataType::Integer(None),
options: vec![],
});
}
if !columns.iter().any(|c| c.name.value == self.hlc_timestamp) {
columns.push(ColumnDef {
name: Ident::new(self.hlc_timestamp),
data_type: DataType::String(None),
options: vec![],
});
}
}
}
pub struct CrdtTransformer {
columns: CrdtColumns,
excluded_tables: HashSet<&'static str>,
}
impl CrdtTransformer {
pub fn new() -> Self {
let mut excluded_tables = HashSet::new();
excluded_tables.insert(TABLE_CRDT_CONFIGS);
excluded_tables.insert(TABLE_CRDT_LOGS);
Self {
columns: CrdtColumns::DEFAULT,
excluded_tables,
}
}
/// Checks whether a table should support CRDT synchronization
fn is_crdt_sync_table(&self, name: &ObjectName) -> bool {
let table_name = self.normalize_table_name(name);
!self.excluded_tables.contains(table_name.as_ref())
}
/// Normalizes table names (strips quotes)
fn normalize_table_name(&self, name: &ObjectName) -> Cow<str> {
let name_str = name.to_string().to_lowercase();
Cow::Owned(name_str.trim_matches('`').trim_matches('"').to_string())
}
pub fn transform_select_statement(&self, stmt: &mut Statement) -> Result<(), DatabaseError> {
match stmt {
Statement::Query(query) => self.transform_query_recursive(query),
// Catch all other cases and return an error
_ => Err(DatabaseError::UnsupportedStatement {
sql: stmt.to_string(),
reason: "This operation only accepts SELECT statements.".to_string(),
}),
}
}
pub fn transform_execute_statement(
&self,
stmt: &mut Statement,
hlc_timestamp: &Timestamp,
) -> Result<Option<String>, DatabaseError> {
match stmt {
Statement::CreateTable(create_table) => {
if self.is_crdt_sync_table(&create_table.name) {
self.columns
.add_to_table_definition(&mut create_table.columns);
Ok(Some(
self.normalize_table_name(&create_table.name).into_owned(),
))
} else {
Ok(None)
}
}
Statement::Insert(insert_stmt) => {
if let TableObject::TableName(name) = &insert_stmt.table {
if self.is_crdt_sync_table(name) {
self.transform_insert(insert_stmt, hlc_timestamp)?;
}
}
Ok(None)
}
Statement::Update {
table, assignments, ..
} => {
if let TableFactor::Table { name, .. } = &table.relation {
if self.is_crdt_sync_table(name) {
assignments.push(self.columns.create_hlc_assignment(hlc_timestamp));
}
}
Ok(None)
}
Statement::Delete(del_stmt) => {
if let Some(table_name) = self.extract_table_name_from_delete(del_stmt) {
if self.is_crdt_sync_table(&table_name) {
self.transform_delete_to_update(stmt, hlc_timestamp)?;
}
Ok(None)
} else {
Err(DatabaseError::UnsupportedStatement {
sql: del_stmt.to_string(),
reason: "DELETE from non-table source or multiple tables".to_string(),
})
}
}
Statement::AlterTable { name, .. } => {
if self.is_crdt_sync_table(name) {
Ok(Some(self.normalize_table_name(name).into_owned()))
} else {
Ok(None)
}
}
_ => Ok(None),
}
}
/// Transforms query statements (adds tombstone filters)
fn transform_query_recursive(
&self,
query: &mut sqlparser::ast::Query,
) -> Result<(), DatabaseError> {
self.add_tombstone_filters_recursive(&mut query.body)
}
/// Recursive handling of all SetExpr variants with full subquery support
fn add_tombstone_filters_recursive(&self, set_expr: &mut SetExpr) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
self.add_tombstone_filters_to_select(select)?;
// Also transform subqueries in projections
for projection in &mut select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.transform_expression_subqueries(expr)?;
}
_ => {} // Wildcard projections ignorieren
}
}
// Transform subqueries in WHERE
if let Some(where_clause) = &mut select.selection {
self.transform_expression_subqueries(where_clause)?;
}
// Transform subqueries in GROUP BY
match &mut select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - no expressions to transform
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.transform_expression_subqueries(group_expr)?;
}
}
}
// Transform subqueries in HAVING
if let Some(having) = &mut select.having {
self.transform_expression_subqueries(having)?;
}
}
SetExpr::SetOperation { left, right, .. } => {
self.add_tombstone_filters_recursive(left)?;
self.add_tombstone_filters_recursive(right)?;
}
SetExpr::Query(query) => {
self.add_tombstone_filters_recursive(&mut query.body)?;
}
SetExpr::Values(values) => {
// Also transform subqueries in VALUES lists
for row in &mut values.rows {
for expr in row {
self.transform_expression_subqueries(expr)?;
}
}
}
_ => {} // Andere Fälle
}
Ok(())
}
/// Transforms subqueries inside expressions
fn transform_expression_subqueries(&self, expr: &mut Expr) -> Result<(), DatabaseError> {
match expr {
// Plain subqueries
Expr::Subquery(query) => {
self.add_tombstone_filters_recursive(&mut query.body)?;
}
// EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.add_tombstone_filters_recursive(&mut subquery.body)?;
}
// IN Subqueries
Expr::InSubquery {
expr: left_expr,
subquery,
..
} => {
self.transform_expression_subqueries(left_expr)?;
self.add_tombstone_filters_recursive(&mut subquery.body)?;
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.transform_expression_subqueries(left)?;
self.transform_expression_subqueries(right)?;
}
// Binary operations
Expr::BinaryOp { left, right, .. } => {
self.transform_expression_subqueries(left)?;
self.transform_expression_subqueries(right)?;
}
// Unary operations
Expr::UnaryOp {
expr: inner_expr, ..
} => {
self.transform_expression_subqueries(inner_expr)?;
}
// Nested expressions
Expr::Nested(nested) => {
self.transform_expression_subqueries(nested)?;
}
// CASE expressions
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.transform_expression_subqueries(op)?;
}
for case_when in conditions {
self.transform_expression_subqueries(&mut case_when.condition)?;
self.transform_expression_subqueries(&mut case_when.result)?;
}
if let Some(else_res) = else_result {
self.transform_expression_subqueries(else_res)?;
}
}
// Function calls
Expr::Function(func) => match &mut func.args {
sqlparser::ast::FunctionArguments::List(sqlparser::ast::FunctionArgumentList {
args,
..
}) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.transform_expression_subqueries(expr)?;
}
}
}
_ => {}
},
// BETWEEN
Expr::Between {
expr: main_expr,
low,
high,
..
} => {
self.transform_expression_subqueries(main_expr)?;
self.transform_expression_subqueries(low)?;
self.transform_expression_subqueries(high)?;
}
// IN list
Expr::InList {
expr: main_expr,
list,
..
} => {
self.transform_expression_subqueries(main_expr)?;
for list_expr in list {
self.transform_expression_subqueries(list_expr)?;
}
}
// IS NULL/IS NOT NULL
Expr::IsNull(inner) | Expr::IsNotNull(inner) => {
self.transform_expression_subqueries(inner)?;
}
// Other expression types require no transformation
_ => {}
}
Ok(())
}
/// Adds tombstone filters to SELECT statements (only if not set explicitly in WHERE)
fn add_tombstone_filters_to_select(
&self,
select: &mut sqlparser::ast::Select,
) -> Result<(), DatabaseError> {
// Collect all CRDT tables along with their aliases
let mut crdt_tables = Vec::new();
for twj in &select.from {
if let TableFactor::Table { name, alias, .. } = &twj.relation {
if self.is_crdt_sync_table(name) {
let table_alias = alias.as_ref().map(|a| a.name.value.as_str());
crdt_tables.push((name.clone(), table_alias));
}
}
}
if crdt_tables.is_empty() {
return Ok(());
}
// Check which tombstone columns are already referenced in the WHERE clause
let explicitly_filtered_tables = if let Some(where_clause) = &select.selection {
self.find_explicitly_filtered_tombstone_tables(where_clause, &crdt_tables)
} else {
HashSet::new()
};
// Only create filters for tables that are not yet filtered explicitly
let mut tombstone_filters = Vec::new();
for (table_name, table_alias) in crdt_tables {
let table_name_string = table_name.to_string();
let table_key = table_alias.unwrap_or(&table_name_string);
if !explicitly_filtered_tables.contains(table_key) {
tombstone_filters.push(self.columns.create_tombstone_filter(table_alias));
}
}
// Add the automatic filters
if !tombstone_filters.is_empty() {
let combined_filter = tombstone_filters
.into_iter()
.reduce(|acc, expr| Expr::BinaryOp {
left: Box::new(acc),
op: BinaryOperator::And,
right: Box::new(expr),
})
.unwrap();
match &mut select.selection {
Some(existing) => {
*existing = Expr::BinaryOp {
left: Box::new(existing.clone()),
op: BinaryOperator::And,
right: Box::new(combined_filter),
};
}
None => {
select.selection = Some(combined_filter);
}
}
}
Ok(())
}
/// Finds all tables that already have explicit tombstone filters in the WHERE clause
fn find_explicitly_filtered_tombstone_tables(
&self,
where_expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
) -> HashSet<String> {
let mut filtered_tables = HashSet::new();
self.scan_expression_for_tombstone_references(
where_expr,
crdt_tables,
&mut filtered_tables,
);
filtered_tables
}
/// Recursively scans an expression tree for tombstone column references
fn scan_expression_for_tombstone_references(
&self,
expr: &Expr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) {
match expr {
// Plain column reference: tombstone = ?
Expr::Identifier(ident) => {
if ident.value == self.columns.tombstone {
// If no table is specified and there is only one CRDT table
if crdt_tables.len() == 1 {
let table_name_str = crdt_tables[0].0.to_string();
let table_key = crdt_tables[0].1.unwrap_or(&table_name_str);
filtered_tables.insert(table_key.to_string());
}
}
}
// Qualified column reference: table.tombstone = ? or alias.tombstone = ?
Expr::CompoundIdentifier(idents) => {
if idents.len() == 2 && idents[1].value == self.columns.tombstone {
let table_ref = &idents[0].value;
// Check whether it is one of our CRDT tables (by name or alias)
for (table_name, alias) in crdt_tables {
let table_name_str = table_name.to_string();
if table_ref == &table_name_str || alias.map_or(false, |a| a == table_ref) {
filtered_tables.insert(table_ref.clone());
break;
}
}
}
}
// Binary operations: AND, OR, etc.
Expr::BinaryOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
// Unary operations: NOT, etc.
Expr::UnaryOp { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// Nested expressions
Expr::Nested(nested) => {
self.scan_expression_for_tombstone_references(nested, crdt_tables, filtered_tables);
}
// IN clauses
Expr::InList { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// BETWEEN clauses
Expr::Between { expr, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// IS NULL/IS NOT NULL
Expr::IsNull(expr) | Expr::IsNotNull(expr) => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
}
// Function calls
Expr::Function(func) => {
match &func.args {
sqlparser::ast::FunctionArguments::List(
sqlparser::ast::FunctionArgumentList { args, .. },
) => {
for arg in args {
if let sqlparser::ast::FunctionArg::Unnamed(
sqlparser::ast::FunctionArgExpr::Expr(expr),
) = arg
{
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {} // Ignore other FunctionArguments variants
}
}
// CASE expressions
Expr::Case {
operand,
conditions,
else_result,
..
} => {
if let Some(op) = operand {
self.scan_expression_for_tombstone_references(op, crdt_tables, filtered_tables);
}
for case_when in conditions {
self.scan_expression_for_tombstone_references(
&case_when.condition,
crdt_tables,
filtered_tables,
);
self.scan_expression_for_tombstone_references(
&case_when.result,
crdt_tables,
filtered_tables,
);
}
if let Some(else_res) = else_result {
self.scan_expression_for_tombstone_references(
else_res,
crdt_tables,
filtered_tables,
);
}
}
// Subqueries with full support
Expr::Subquery(query) => {
self.transform_query_recursive_for_tombstone_analysis(
query,
crdt_tables,
filtered_tables,
)
.ok();
}
// EXISTS/NOT EXISTS Subqueries
Expr::Exists { subquery, .. } => {
self.transform_query_recursive_for_tombstone_analysis(
subquery,
crdt_tables,
filtered_tables,
)
.ok();
}
// IN/NOT IN Subqueries
Expr::InSubquery { expr, subquery, .. } => {
self.scan_expression_for_tombstone_references(expr, crdt_tables, filtered_tables);
self.transform_query_recursive_for_tombstone_analysis(
subquery,
crdt_tables,
filtered_tables,
)
.ok();
}
// ANY/ALL Subqueries
Expr::AnyOp { left, right, .. } | Expr::AllOp { left, right, .. } => {
self.scan_expression_for_tombstone_references(left, crdt_tables, filtered_tables);
self.scan_expression_for_tombstone_references(right, crdt_tables, filtered_tables);
}
// Other expression types are ignored for now
_ => {}
}
}
/// Analyzes a subquery and collects tombstone references
fn transform_query_recursive_for_tombstone_analysis(
&self,
query: &sqlparser::ast::Query,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
self.analyze_set_expr_for_tombstone_references(&query.body, crdt_tables, filtered_tables)
}
/// Recursively analyzes a SetExpr for tombstone references
fn analyze_set_expr_for_tombstone_references(
&self,
set_expr: &SetExpr,
crdt_tables: &[(ObjectName, Option<&str>)],
filtered_tables: &mut HashSet<String>,
) -> Result<(), DatabaseError> {
match set_expr {
SetExpr::Select(select) => {
// Analyze the WHERE clause
if let Some(where_clause) = &select.selection {
self.scan_expression_for_tombstone_references(
where_clause,
crdt_tables,
filtered_tables,
);
}
// Analyze all projections (they can contain subqueries as well)
for projection in &select.projection {
match projection {
SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } => {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
_ => {} // Ignore wildcard projections
}
}
// Analyze GROUP BY
match &select.group_by {
sqlparser::ast::GroupByExpr::All(_) => {
// GROUP BY ALL - no expressions to analyze
}
sqlparser::ast::GroupByExpr::Expressions(exprs, _) => {
for group_expr in exprs {
self.scan_expression_for_tombstone_references(
group_expr,
crdt_tables,
filtered_tables,
);
}
}
}
// Analyze HAVING
if let Some(having) = &select.having {
self.scan_expression_for_tombstone_references(
having,
crdt_tables,
filtered_tables,
);
}
}
SetExpr::SetOperation { left, right, .. } => {
self.analyze_set_expr_for_tombstone_references(left, crdt_tables, filtered_tables)?;
self.analyze_set_expr_for_tombstone_references(
right,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Query(query) => {
self.analyze_set_expr_for_tombstone_references(
&query.body,
crdt_tables,
filtered_tables,
)?;
}
SetExpr::Values(values) => {
// Analyze VALUES lists
for row in &values.rows {
for expr in row {
self.scan_expression_for_tombstone_references(
expr,
crdt_tables,
filtered_tables,
);
}
}
}
_ => {} // Other variants
}
Ok(())
}
/// Transforms INSERT statements (appends the HLC timestamp)
fn transform_insert(
&self,
insert_stmt: &mut Insert,
timestamp: &Timestamp,
) -> Result<(), DatabaseError> {
insert_stmt
.columns
.push(Ident::new(self.columns.hlc_timestamp));
match insert_stmt.source.as_mut() {
Some(query) => match &mut *query.body {
SetExpr::Values(values) => {
for row in &mut values.rows {
row.push(Expr::Value(
Value::SingleQuotedString(timestamp.to_string()).into(),
));
}
}
SetExpr::Select(select) => {
let hlc_expr =
Expr::Value(Value::SingleQuotedString(timestamp.to_string()).into());
select.projection.push(SelectItem::UnnamedExpr(hlc_expr));
}
_ => {
return Err(DatabaseError::UnsupportedStatement {
sql: insert_stmt.to_string(),
reason: "INSERT with unsupported source type".to_string(),
});
}
},
None => {
return Err(DatabaseError::UnsupportedStatement {
reason: "INSERT statement has no source".to_string(),
sql: insert_stmt.to_string(),
});
}
}
Ok(())
}
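// Illustrative sketch (assumption: the HLC column resolves to the default
// `haex_timestamp`): transform_insert appends the column plus a timestamp
// literal to every VALUES row, e.g.
//
//   input:  INSERT INTO notes (id, title) VALUES (?, ?)
//   output: INSERT INTO notes (id, title, haex_timestamp) VALUES (?, ?, '<hlc>')
//
// For INSERT ... SELECT, the timestamp is appended to the projection instead.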
/// Transforms a DELETE into an UPDATE (soft delete)
fn transform_delete_to_update(
&self,
stmt: &mut Statement,
timestamp: &Timestamp,
) -> Result<(), DatabaseError> {
if let Statement::Delete(del_stmt) = stmt {
let table_to_update = match &del_stmt.from {
sqlparser::ast::FromTable::WithFromKeyword(from)
| sqlparser::ast::FromTable::WithoutKeyword(from) => {
if from.len() == 1 {
from[0].clone()
} else {
return Err(DatabaseError::UnsupportedStatement {
reason: "DELETE with multiple tables not supported".to_string(),
sql: stmt.to_string(),
});
}
}
};
let assignments = vec![
self.columns.create_tombstone_assignment(),
self.columns.create_hlc_assignment(timestamp),
];
*stmt = Statement::Update {
table: table_to_update,
assignments,
from: None,
selection: del_stmt.selection.clone(),
returning: None,
or: None,
limit: None,
};
}
Ok(())
}
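// Illustrative sketch: a hard DELETE becomes a soft delete, e.g.
//
//   input:  DELETE FROM notes WHERE id = ?
//   output: UPDATE notes SET haex_tombstone = 1, haex_timestamp = '<hlc>' WHERE id = ?
//
// (column names assume the defaults; the actual assignments come from
// `create_tombstone_assignment` and `create_hlc_assignment`)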
/// Extracts the table name from a DELETE statement
fn extract_table_name_from_delete(
&self,
del_stmt: &sqlparser::ast::Delete,
) -> Option<ObjectName> {
let tables = match &del_stmt.from {
sqlparser::ast::FromTable::WithFromKeyword(from)
| sqlparser::ast::FromTable::WithoutKeyword(from) => from,
};
if tables.len() == 1 {
if let TableFactor::Table { name, .. } = &tables[0].relation {
Some(name.clone())
} else {
None
}
} else {
None
}
}
}


@@ -1,354 +0,0 @@
// src-tauri/src/crdt/trigger.rs
use crate::table_names::TABLE_CRDT_LOGS;
use rusqlite::{Connection, Result as RusqliteResult, Row, Transaction};
use serde::Serialize;
use std::error::Error;
use std::fmt::{self, Display, Formatter, Write};
use ts_rs::TS;
// The "z_" prefix is intended to ensure these triggers are executed last
const INSERT_TRIGGER_TPL: &str = "z_crdt_{TABLE_NAME}_insert";
const UPDATE_TRIGGER_TPL: &str = "z_crdt_{TABLE_NAME}_update";
//const SYNC_ACTIVE_KEY: &str = "sync_active";
pub const TOMBSTONE_COLUMN: &str = "haex_tombstone";
pub const HLC_TIMESTAMP_COLUMN: &str = "haex_timestamp";
#[derive(Debug)]
pub enum CrdtSetupError {
/// Wraps an error coming from the rusqlite library.
DatabaseError(rusqlite::Error),
/// The table has no tombstone column, which is a CRDT prerequisite.
TombstoneColumnMissing {
table_name: String,
column_name: String,
},
HlcColumnMissing {
table_name: String,
column_name: String,
},
/// The table has no primary key, which is a CRDT prerequisite.
PrimaryKeyMissing { table_name: String },
}
// Implementation so that our error type can be formatted nicely.
impl Display for CrdtSetupError {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
CrdtSetupError::DatabaseError(e) => write!(f, "Database error: {}", e),
CrdtSetupError::TombstoneColumnMissing {
table_name,
column_name,
} => write!(
f,
"Table '{}' is missing the required tombstone column '{}'",
table_name, column_name
),
CrdtSetupError::HlcColumnMissing {
table_name,
column_name,
} => write!(
f,
"Table '{}' is missing the required hlc column '{}'",
table_name, column_name
),
CrdtSetupError::PrimaryKeyMissing { table_name } => {
write!(f, "Table '{}' has no primary key", table_name)
}
}
}
}
// Implementation so that our type is recognized as a "real" Error.
impl Error for CrdtSetupError {}
// Important conversion: allows us to use the `?` operator in functions
// that return `rusqlite::Error`. The error is automatically wrapped in our
// `CrdtSetupError::DatabaseError`.
impl From<rusqlite::Error> for CrdtSetupError {
fn from(err: rusqlite::Error) -> Self {
CrdtSetupError::DatabaseError(err)
}
}
#[derive(Debug, Serialize, TS)]
#[ts(export)]
pub enum TriggerSetupResult {
Success,
TableNotFound,
}
#[derive(Debug)]
struct ColumnInfo {
name: String,
is_pk: bool,
}
impl ColumnInfo {
fn from_row(row: &Row) -> RusqliteResult<Self> {
Ok(ColumnInfo {
name: row.get("name")?,
is_pk: row.get::<_, i64>("pk")? > 0,
})
}
}
fn is_safe_identifier(name: &str) -> bool {
!name.is_empty() && name.chars().all(|c| c.is_alphanumeric() || c == '_')
}
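// e.g. is_safe_identifier("haex_notes") == true,
//      is_safe_identifier("notes; DROP TABLE x") == false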
/// Sets up the CRDT triggers for a single table.
pub fn setup_triggers_for_table(
tx: &Transaction,
table_name: &str,
recreate: bool,
) -> Result<TriggerSetupResult, CrdtSetupError> {
let columns = get_table_schema(tx, table_name)?;
if columns.is_empty() {
return Ok(TriggerSetupResult::TableNotFound);
}
if !columns.iter().any(|c| c.name == TOMBSTONE_COLUMN) {
return Err(CrdtSetupError::TombstoneColumnMissing {
table_name: table_name.to_string(),
column_name: TOMBSTONE_COLUMN.to_string(),
});
}
if !columns.iter().any(|c| c.name == HLC_TIMESTAMP_COLUMN) {
return Err(CrdtSetupError::HlcColumnMissing {
table_name: table_name.to_string(),
column_name: HLC_TIMESTAMP_COLUMN.to_string(),
});
}
let pks: Vec<String> = columns
.iter()
.filter(|c| c.is_pk)
.map(|c| c.name.clone())
.collect();
if pks.is_empty() {
return Err(CrdtSetupError::PrimaryKeyMissing {
table_name: table_name.to_string(),
});
}
let cols_to_track: Vec<String> = columns
.iter()
.filter(|c| !c.is_pk) //&& c.name != TOMBSTONE_COLUMN && c.name != HLC_TIMESTAMP_COLUMN
.map(|c| c.name.clone())
.collect();
let insert_trigger_sql = generate_insert_trigger_sql(table_name, &pks, &cols_to_track);
let update_trigger_sql = generate_update_trigger_sql(table_name, &pks, &cols_to_track);
if recreate {
drop_triggers_for_table(tx, table_name)?;
}
tx.execute_batch(&insert_trigger_sql)?;
tx.execute_batch(&update_trigger_sql)?;
Ok(TriggerSetupResult::Success)
}
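// Minimal usage sketch (illustrative; the `notes` table is an assumption, but
// it satisfies the documented requirements: a primary key plus the
// haex_tombstone / haex_timestamp columns):
//
//     let mut conn = Connection::open_in_memory()?;
//     conn.execute_batch(
//         "CREATE TABLE notes (
//              id TEXT PRIMARY KEY,
//              title TEXT,
//              haex_tombstone INTEGER NOT NULL DEFAULT 0,
//              haex_timestamp TEXT
//          );",
//     )?;
//     let tx = conn.transaction()?;
//     setup_triggers_for_table(&tx, "notes", false)?;
//     tx.commit()?;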
/// Fetches the schema for a given table.
fn get_table_schema(conn: &Connection, table_name: &str) -> RusqliteResult<Vec<ColumnInfo>> {
if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}",
table_name
))
.into());
}
let sql = format!("PRAGMA table_info(\"{}\");", table_name);
let mut stmt = conn.prepare(&sql)?;
let rows = stmt.query_map([], ColumnInfo::from_row)?;
rows.collect()
}
pub fn drop_triggers_for_table(
tx: &Transaction, // operates directly on a transaction
table_name: &str,
) -> Result<(), CrdtSetupError> {
if !is_safe_identifier(table_name) {
return Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid or unsafe table name provided: {}",
table_name
))
.into());
}
let drop_insert_trigger_sql =
drop_trigger_sql(INSERT_TRIGGER_TPL.replace("{TABLE_NAME}", table_name));
let drop_update_trigger_sql =
drop_trigger_sql(UPDATE_TRIGGER_TPL.replace("{TABLE_NAME}", table_name));
let sql_batch = format!("{}\n{}", drop_insert_trigger_sql, drop_update_trigger_sql);
tx.execute_batch(&sql_batch)?;
Ok(())
}
/* pub fn recreate_triggers_for_table(
conn: &mut Connection,
table_name: &str,
) -> Result<TriggerSetupResult, CrdtSetupError> {
// Start a single transaction for both operations
let tx = conn.transaction()?;
// 1. Call the drop function
drop_triggers_for_table(&tx, table_name)?;
// 2. Recreate the triggers (simplified logic without the drop)
// We re-run the `setup_triggers_for_table` logic manually here
// in order to keep using the same transaction.
let columns = get_table_schema(&tx, table_name)?;
if columns.is_empty() {
tx.commit()?; // Important: finish the transaction
return Ok(TriggerSetupResult::TableNotFound);
}
// ... (repeat the validation logic) ...
if !columns.iter().any(|c| c.name == TOMBSTONE_COLUMN) {
/* ... */
return Err(CrdtSetupError::TombstoneColumnMissing {
table_name: table_name.to_string(),
column_name: TOMBSTONE_COLUMN.to_string(),
});
}
let pks: Vec<String> = columns
.iter()
.filter(|c| c.is_pk)
.map(|c| c.name.clone())
.collect();
if pks.is_empty() {
/* ... */
return Err(CrdtSetupError::PrimaryKeyMissing {
table_name: table_name.to_string(),
});
}
let cols_to_track: Vec<String> = columns
.iter()
.filter(|c| !c.is_pk && c.name != TOMBSTONE_COLUMN && c.name != HLC_TIMESTAMP_COLUMN)
.map(|c| c.name.clone())
.collect();
let insert_trigger_sql = generate_insert_trigger_sql(table_name, &pks, &cols_to_track);
let update_trigger_sql = generate_update_trigger_sql(table_name, &pks, &cols_to_track);
let sql_batch = format!("{}\n{}", insert_trigger_sql, update_trigger_sql);
tx.execute_batch(&sql_batch)?;
// Finish the transaction
tx.commit()?;
Ok(TriggerSetupResult::Success)
}
*/
/// Generates the SQL for the INSERT trigger.
fn generate_insert_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks
.iter()
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk))
.collect::<Vec<_>>()
.join(", ");
let column_inserts = if cols.is_empty() {
// Only PKs -> plain insert into the log
format!(
"INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks)
VALUES (NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}));",
log_table = TABLE_CRDT_LOGS,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload
)
} else {
cols.iter().fold(String::new(), |mut acc, col| {
writeln!(
&mut acc,
"INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
VALUES (NEW.\"{hlc_col}\", 'INSERT', '{table}', json_object({pk_payload}), '{column}', json_object('value', NEW.\"{column}\"));",
log_table = TABLE_CRDT_LOGS,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
).unwrap();
acc
})
};
let trigger_name = INSERT_TRIGGER_TPL.replace("{TABLE_NAME}", table_name);
format!(
"CREATE TRIGGER IF NOT EXISTS \"{trigger_name}\"
AFTER INSERT ON \"{table_name}\"
FOR EACH ROW
BEGIN
{column_inserts}
END;"
)
}
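// For a table notes(id TEXT PRIMARY KEY, title TEXT, ...) this yields roughly
// the following SQL (sketch; assumes TABLE_CRDT_LOGS resolves to
// "haex_crdt_logs"; one such INSERT is emitted per tracked column, only the
// one for `title` is shown):
//
//   CREATE TRIGGER IF NOT EXISTS "z_crdt_notes_insert"
//   AFTER INSERT ON "notes" FOR EACH ROW
//   BEGIN
//     INSERT INTO haex_crdt_logs (haex_timestamp, op_type, table_name, row_pks, column_name, new_value)
//     VALUES (NEW."haex_timestamp", 'INSERT', 'notes', json_object('id', NEW."id"), 'title', json_object('value', NEW."title"));
//   END;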
/// Generates the SQL for dropping a trigger.
fn drop_trigger_sql(trigger_name: String) -> String {
format!("DROP TRIGGER IF EXISTS \"{}\";", trigger_name)
}
/// Generates the SQL for the UPDATE trigger.
fn generate_update_trigger_sql(table_name: &str, pks: &[String], cols: &[String]) -> String {
let pk_json_payload = pks
.iter()
.map(|pk| format!("'{}', NEW.\"{}\"", pk, pk))
.collect::<Vec<_>>()
.join(", ");
let mut body = String::new();
// Log column changes
if !cols.is_empty() {
for col in cols {
writeln!(
&mut body,
"INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks, column_name, new_value, old_value)
SELECT NEW.\"{hlc_col}\", 'UPDATE', '{table}', json_object({pk_payload}), '{column}',
json_object('value', NEW.\"{column}\"), json_object('value', OLD.\"{column}\")
WHERE NEW.\"{column}\" IS NOT OLD.\"{column}\";",
log_table = TABLE_CRDT_LOGS,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
column = col
).unwrap();
}
}
// Log the soft delete
writeln!(
&mut body,
"INSERT INTO {log_table} (haex_timestamp, op_type, table_name, row_pks)
SELECT NEW.\"{hlc_col}\", 'DELETE', '{table}', json_object({pk_payload})
WHERE NEW.\"{tombstone_col}\" = 1 AND OLD.\"{tombstone_col}\" = 0;",
log_table = TABLE_CRDT_LOGS,
hlc_col = HLC_TIMESTAMP_COLUMN,
table = table_name,
pk_payload = pk_json_payload,
tombstone_col = TOMBSTONE_COLUMN
)
.unwrap();
let trigger_name = UPDATE_TRIGGER_TPL.replace("{TABLE_NAME}", table_name);
format!(
"CREATE TRIGGER IF NOT EXISTS \"{trigger_name}\"
AFTER UPDATE ON \"{table_name}\"
FOR EACH ROW
BEGIN
{body}
END;"
)
}


@@ -1,181 +1,123 @@
-// src-tauri/src/database/core.rs
+// database/core.rs
-use crate::database::error::DatabaseError;
use crate::database::DbConnection;
use base64::{engine::general_purpose::STANDARD, Engine as _};
-use rusqlite::types::Value as SqlValue;
use rusqlite::{
types::{Value as RusqliteValue, ValueRef},
Connection, OpenFlags, ToSql,
};
use serde_json::Value as JsonValue;
-use sqlparser::ast::{Query, Select, SetExpr, Statement, TableFactor, TableObject};
-use sqlparser::dialect::SQLiteDialect;
-use sqlparser::parser::Parser;
-use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+use tauri::State;
-/// Opens and initializes a database with encryption
-///
-pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connection, DatabaseError> {
-let flags = if create {
-OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_CREATE
-} else {
-OpenFlags::SQLITE_OPEN_READ_WRITE
-};
-let conn =
-Connection::open_with_flags(path, flags).map_err(|e| DatabaseError::ConnectionFailed {
-path: path.to_string(),
-reason: e.to_string(),
-})?;
-conn.pragma_update(None, "key", key)
-.map_err(|e| DatabaseError::PragmaError {
-pragma: "key".to_string(),
-reason: e.to_string(),
-})?;
-let journal_mode: String = conn
-.query_row("PRAGMA journal_mode=WAL;", [], |row| row.get(0))
-.map_err(|e| DatabaseError::PragmaError {
-pragma: "journal_mode=WAL".to_string(),
-reason: e.to_string(),
-})?;
-if journal_mode.eq_ignore_ascii_case("wal") {
-println!("WAL mode successfully enabled.");
-} else {
-eprintln!(
-"Failed to enable WAL mode, journal_mode is '{}'.",
-journal_mode
-);
-}
-Ok(conn)
-}
-/// Utility for SQL parsing - parses a single SQL statement
-pub fn parse_single_statement(sql: &str) -> Result<Statement, DatabaseError> {
-let dialect = SQLiteDialect {};
-let statements = Parser::parse_sql(&dialect, sql).map_err(|e| DatabaseError::ParseError {
-reason: e.to_string(),
-sql: sql.to_string(),
-})?;
-statements
-.into_iter()
-.next()
-.ok_or(DatabaseError::ParseError {
-reason: "No SQL statement found".to_string(),
-sql: sql.to_string(),
-})
-}
-/// Utility for SQL parsing - parses multiple SQL statements
-pub fn parse_sql_statements(sql: &str) -> Result<Vec<Statement>, DatabaseError> {
-let dialect = SQLiteDialect {};
-Parser::parse_sql(&dialect, sql).map_err(|e| DatabaseError::ParseError {
-reason: e.to_string(),
-sql: sql.to_string(),
-})
-}
-pub struct ValueConverter;
-impl ValueConverter {
-pub fn json_to_rusqlite_value(json_val: &JsonValue) -> Result<SqlValue, DatabaseError> {
+// --- Helper: converts a JSON value into something rusqlite understands ---
+// This function is a bit tricky because of ownership and lifetimes.
+// A simpler variant is often to create rusqlite::types::Value directly.
+// Here is an example that creates owned values (may need adjustment depending on the rusqlite version/usage)
+fn json_to_rusqlite_value(json_val: &JsonValue) -> Result<RusqliteValue, String> {
match json_val {
-JsonValue::Null => Ok(SqlValue::Null),
+JsonValue::Null => Ok(RusqliteValue::Null),
-JsonValue::Bool(b) => {
-// SQLite has no bool type; use integer 0/1
-Ok(SqlValue::Integer(if *b { 1 } else { 0 }))
-}
+JsonValue::Bool(b) => Ok(RusqliteValue::Integer(*b as i64)), // SQLite has no BOOLEAN
JsonValue::Number(n) => {
if let Some(i) = n.as_i64() {
-Ok(SqlValue::Integer(i))
+Ok(RusqliteValue::Integer(i))
} else if let Some(f) = n.as_f64() {
-Ok(SqlValue::Real(f))
+Ok(RusqliteValue::Real(f))
} else {
-// Fallback: as text
-Ok(SqlValue::Text(n.to_string()))
+Err("Invalid numeric value".to_string())
}
}
-JsonValue::String(s) => Ok(SqlValue::Text(s.clone())),
+JsonValue::String(s) => Ok(RusqliteValue::Text(s.clone())),
JsonValue::Array(_) | JsonValue::Object(_) => {
-// Store arrays/objects as JSON text
-serde_json::to_string(json_val)
-.map(SqlValue::Text)
-.map_err(|e| DatabaseError::SerializationError {
-reason: format!("Failed to serialize JSON param: {}", e),
-})
+// SQLite cannot store arrays/objects directly (except as TEXT/BLOB)
+// Convert them to JSON strings if that is what is wanted
+Ok(RusqliteValue::Text(
+serde_json::to_string(json_val).map_err(|e| e.to_string())?,
+))
+// Or return an error if arrays/objects are not allowed
+// Err("Arrays or objects are not supported directly as parameters".to_string())
}
}
-}
-pub fn convert_params(params: &[JsonValue]) -> Result<Vec<SqlValue>, DatabaseError> {
-params.iter().map(Self::json_to_rusqlite_value).collect()
-}
}
-pub fn execute(
+// --- Tauri command for INSERT/UPDATE/DELETE ---
+#[tauri::command]
+pub async fn execute(
sql: String,
params: Vec<JsonValue>,
-connection: &DbConnection,
+state: &State<'_, DbConnection>,
-) -> Result<usize, DatabaseError> {
+) -> Result<usize, String> {
-// Convert parameters
+// Returns the number of affected rows
let params_converted: Vec<RusqliteValue> = params
.iter()
-.map(ValueConverter::json_to_rusqlite_value)
+.map(json_to_rusqlite_value)
.collect::<Result<Vec<_>, _>>()?;
let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect();
-with_connection(connection, |conn| {
-let affected_rows = conn.execute(&sql, &params_sql[..]).map_err(|e| {
-// "Lazy parsing": extract the table name only when an error occurs,
-// to avoid the overhead on successful operations.
-let table_name = extract_primary_table_name_from_sql(&sql).unwrap_or(None);
-DatabaseError::ExecutionError {
-sql: sql.clone(),
-reason: e.to_string(),
-table: table_name,
-}
-})?;
+let db_lock = state
+.0
+.lock()
+.map_err(|e| format!("Mutex lock error: {}", e))?;
+let conn = db_lock.as_ref().ok_or("No database connection")?;
+let affected_rows = conn
+.execute(&sql, &params_sql[..])
+.map_err(|e| format!("SQL execute error: {}", e))?;
Ok(affected_rows)
-})
}
-pub fn select(
+/// Executes SQL write operations (INSERT, UPDATE, DELETE, CREATE) without a permission check
+/* pub async fn execute(
+sql: &str,
+params: &[String],
+state: &State<'_, DbConnection>,
+) -> Result<String, String> {
+let db = state.0.lock().map_err(|e| format!("Mutex error: {}", e))?;
+let conn = db.as_ref().ok_or("No database connection available")?;
+let rows_affected = conn
+.execute(sql, rusqlite::params_from_iter(params.iter()))
+.map_err(|e| format!("SQL execution error: {}", e))?;
+let last_id = conn.last_insert_rowid();
+Ok(serde_json::to_string(&json!({
+"rows_affected": rows_affected,
+"last_insert_id": last_id
+}))
+.map_err(|e| format!("JSON serialization error: {}", e))?)
+} */
+#[tauri::command]
+pub async fn select(
sql: String,
-params: Vec<JsonValue>,
+params: Vec<JsonValue>, // Receive parameters as JSON values
-connection: &DbConnection,
+state: &State<'_, DbConnection>,
-) -> Result<Vec<HashMap<String, JsonValue>>, DatabaseError> {
+) -> Result<Vec<Vec<JsonValue>>, String> {
-// Validate the SQL statement
-let statement = parse_single_statement(&sql)?;
-// Make sure it is a query
-if !matches!(statement, Statement::Query(_)) {
-return Err(DatabaseError::StatementError {
-reason: "Only SELECT statements are allowed in select function".to_string(),
-});
-}
-// Convert parameters
+// Result as Vec<RowObject>
+// Convert JSON params to rusqlite values for the query
+// We collect them as owned values, because `params_from_iter` needs references,
+// which is difficult with local conversions.
let params_converted: Vec<RusqliteValue> = params
.iter()
-.map(ValueConverter::json_to_rusqlite_value)
+.map(json_to_rusqlite_value)
-.collect::<Result<Vec<_>, _>>()?;
+.collect::<Result<Vec<_>, _>>()?; // Collect results, propagates errors
+// Convert to a slice of ToSql references (requires that the values stay alive)
let params_sql: Vec<&dyn ToSql> = params_converted.iter().map(|v| v as &dyn ToSql).collect();
-with_connection(connection, |conn| {
+// Access the connection (blocking, okay for SQLite in many cases)
+let db_lock = state
+.0
+.lock()
+.map_err(|e| format!("Mutex lock error: {}", e))?;
+let conn = db_lock.as_ref().ok_or("No database connection")?;
let mut stmt = conn
.prepare(&sql)
-.map_err(|e| DatabaseError::PrepareError {
-reason: e.to_string(),
-})?;
+.map_err(|e| format!("SQL prepare error: {}", e))?;
let column_names: Vec<String> = stmt
.column_names()
.into_iter()
@@ -185,352 +127,195 @@ pub fn select(
let mut rows = stmt
.query(&params_sql[..])
-.map_err(|e| DatabaseError::QueryError {
-reason: e.to_string(),
-})?;
+.map_err(|e| format!("SQL query error: {}", e))?;
-let mut result_vec: Vec<HashMap<String, JsonValue>> = Vec::new();
+let mut result_vec: Vec<Vec<JsonValue>> = Vec::new();
println!();
+println!();
+println!();
-while let Some(row) = rows.next().map_err(|e| DatabaseError::RowProcessingError {
-reason: format!("Row iteration error: {}", e),
-})? {
-let mut row_map: HashMap<String, JsonValue> = HashMap::with_capacity(num_columns);
+while let Some(row) = rows.next().map_err(|e| format!("Row next error: {}", e))? {
+//let mut row_map = HashMap::new();
+let mut row_data: Vec<JsonValue> = Vec::with_capacity(num_columns);
for i in 0..num_columns {
let col_name = &column_names[i];
-/* println!(
+println!(
"--- Processing Column --- Index: {}, Name: '{}'",
i, col_name
-); */
+);
let value_ref = row
.get_ref(i)
-.map_err(|e| DatabaseError::RowProcessingError {
-reason: format!("Failed to get column {} ('{}'): {}", i, col_name, e),
-})?;
+.map_err(|e| format!("get_ref error for column {}: {}", i, e))?;
-let json_val = convert_value_ref_to_json(value_ref)?;
+// Convert the rusqlite ValueRef back to a serde_json value
-//println!("Column: {} = {}", column_names[i], json_val);
-row_map.insert(col_name.clone(), json_val);
-}
-result_vec.push(row_map);
-}
-Ok(result_vec)
-})
-}
-/// Converts a rusqlite ValueRef to JSON
-fn convert_value_ref_to_json(value_ref: ValueRef) -> Result<JsonValue, DatabaseError> {
let json_val = match value_ref {
ValueRef::Null => JsonValue::Null,
ValueRef::Integer(i) => JsonValue::Number(i.into()),
ValueRef::Real(f) => JsonValue::Number(
-serde_json::Number::from_f64(f).unwrap_or_else(|| serde_json::Number::from(0)),
+serde_json::Number::from_f64(f).unwrap_or(serde_json::Number::from(0)),
-),
+), // Fallback for NaN/Infinity
ValueRef::Text(t) => {
let s = String::from_utf8_lossy(t).to_string();
+// Try to parse as JSON in case it originally was an array/object
+//serde_json::from_str(&s).unwrap_or(JsonValue::String(s))
JsonValue::String(s)
}
ValueRef::Blob(b) => {
-// Return BLOBs as a Base64 string
+// Return BLOBs e.g. as a Base64 string
JsonValue::String(STANDARD.encode(b))
}
};
-Ok(json_val)
-}
-// Extracts all table names from a SQL statement via AST parsing
-pub fn extract_table_names_from_sql(sql: &str) -> Result<Vec<String>, DatabaseError> {
-let statement = parse_single_statement(sql)?;
-Ok(extract_table_names_from_statement(&statement))
+println!(
+"new row: name: {} with value: {}",
+column_names[i].clone(),
+json_val,
+);
+row_data.push(json_val);
+//row_map.insert(column_names[i].clone(), json_val);
+}
+//result_vec.push(row_map);
+result_vec.push(row_data);
+}
+Ok(result_vec)
}
-/// Extracts the first/primary table name from a SQL statement
-pub fn extract_primary_table_name_from_sql(sql: &str) -> Result<Option<String>, DatabaseError> {
-let table_names = extract_table_names_from_sql(sql)?;
-Ok(table_names.into_iter().next())
+/// Executes SQL read operations (SELECT) without a permission check
+/* pub async fn select(
+sql: &str,
+params: &[String],
+state: &State<'_, DbConnection>,
+) -> Result<Vec<Vec<Option<String>>>, String> {
+let db = state.0.lock().map_err(|e| format!("Mutex error: {}", e))?;
+let conn = db.as_ref().ok_or("No database connection available")?;
+let mut stmt = conn
+.prepare(sql)
+.map_err(|e| format!("SQL prepare error: {}", e))?;
+let columns = stmt.column_count();
+let mut rows = stmt
+.query(rusqlite::params_from_iter(params.iter()))
+.map_err(|e| format!("SQL query error: {}", e))?;
+let mut result = Vec::new();
+while let Some(row) = rows
+.next()
+.map_err(|e| format!("Row fetch error: {}", e))?
+{
+let mut row_data = Vec::new();
+for i in 0..columns {
+let value = row
+.get(i)
+.map_err(|e| format!("Data type error in column {}: {}", i, e))?;
+row_data.push(value);
+}
+/* println!(
+"Select Row Data: {}",
+&row_data.clone().join("").to_string()
+); */
+result.push(row_data);
+}
+Ok(result)
+} */
+/// Opens and initializes a database with encryption
+pub fn open_and_init_db(path: &str, key: &str, create: bool) -> Result<Connection, String> {
+let flags = if create {
+OpenFlags::SQLITE_OPEN_READ_WRITE | OpenFlags::SQLITE_OPEN_CREATE
+} else {
+OpenFlags::SQLITE_OPEN_READ_WRITE
+};
+let conn = Connection::open_with_flags(path, flags).map_err(|e| e.to_string())?;
+conn.pragma_update(None, "key", key)
+.map_err(|e| e.to_string())?;
+conn.execute_batch("SELECT count(*) from haex_extensions")
+.map_err(|e| e.to_string())?;
+Ok(conn)
}
-/// Extracts all table names from an AST statement
-pub fn extract_table_names_from_statement(statement: &Statement) -> Vec<String> {
+/// Copies a file from one path to another
+pub fn copy_file<S: AsRef<Path>, T: AsRef<Path>>(
+source_path: S,
+target_path: T,
+) -> Result<(), String> {
+let source = source_path.as_ref();
+let target = target_path.as_ref();
+// Check if source file exists
+if !source.exists() {
+return Err(format!("Source file '{}' does not exist", source.display()));
+}
+// Check if source is a file (not a directory)
+if !source.is_file() {
+return Err(format!("Source '{}' is not a file", source.display()));
+}
+// Copy the file and preserve metadata (permissions, timestamps)
+fs::copy(source, target)
+.map(|_| ())
+.map_err(|e| format!("Failed to copy file: {}", e))?;
+Ok(())
+}
+// Helper functions for SQL parsing
+pub fn extract_tables_from_query(query: &sqlparser::ast::Query) -> Vec<String> {
let mut tables = Vec::new();
+extract_tables_from_set_expr(&query.body, &mut tables);
-match statement {
-Statement::Query(query) => {
-extract_tables_from_query_recursive(query, &mut tables);
-}
-Statement::Insert(insert) => {
-if let TableObject::TableName(name) = &insert.table {
-tables.push(name.to_string());
-}
-}
-Statement::Update { table, .. } => {
-extract_tables_from_table_factor(&table.relation, &mut tables);
-}
-Statement::Delete(delete) => {
-use sqlparser::ast::FromTable;
-match &delete.from {
-FromTable::WithFromKeyword(table_refs) | FromTable::WithoutKeyword(table_refs) => {
-for table_ref in table_refs {
-extract_tables_from_table_factor(&table_ref.relation, &mut tables);
-}
-}
-}
-// Fallback for DELETE syntax without FROM
-for table_name in &delete.tables {
-tables.push(table_name.to_string());
-}
-}
-Statement::CreateTable(create) => {
-tables.push(create.name.to_string());
-}
-Statement::AlterTable { name, .. } => {
-tables.push(name.to_string());
-}
-Statement::Drop { names, .. } => {
-for name in names {
-tables.push(name.to_string());
-}
-}
-Statement::CreateIndex(create_index) => {
-tables.push(create_index.table_name.to_string());
-}
-Statement::Truncate { table_names, .. } => {
-for table_name in table_names {
-tables.push(table_name.to_string());
-}
-}
-// More statement types can be added here
-_ => {
-// For unknown statement types we return an empty list
-}
-}
tables
}
-/// Recursively extracts table names from query structures
-fn extract_tables_from_query_recursive(query: &Query, tables: &mut Vec<String>) {
-extract_tables_from_set_expr_recursive(&query.body, tables);
-}
-/// Extracts table names from SELECT statements
-fn extract_tables_from_select(select: &Select, tables: &mut Vec<String>) {
-// FROM clause
-for table_ref in &select.from {
-extract_tables_from_table_factor(&table_ref.relation, tables);
-// JOINs
-for join in &table_ref.joins {
-extract_tables_from_table_factor(&join.relation, tables);
+fn extract_tables_from_set_expr(set_expr: &sqlparser::ast::SetExpr, tables: &mut Vec<String>) {
+match set_expr {
+sqlparser::ast::SetExpr::Select(select) => {
+for from in &select.from {
+extract_tables_from_table_with_joins(from, tables);
}
}
+sqlparser::ast::SetExpr::Query(query) => {
+extract_tables_from_set_expr(&query.body, tables);
+}
+sqlparser::ast::SetExpr::SetOperation { left, right, .. } => {
+extract_tables_from_set_expr(left, tables);
+extract_tables_from_set_expr(right, tables);
+}
+_ => (), // Ignore other cases such as Values or Insert
+}
}
-/// Extracts table names from TableFactor structures
-fn extract_tables_from_table_factor(table_factor: &TableFactor, tables: &mut Vec<String>) {
-match table_factor {
-TableFactor::Table { name, .. } => {
-tables.push(name.to_string());
-}
-TableFactor::Derived { subquery, .. } => {
-extract_tables_from_query_recursive(subquery, tables);
-}
-TableFactor::TableFunction { .. } => {
-// Table functions normally have no direct table names
-}
-TableFactor::NestedJoin {
-table_with_joins, ..
-} => {
+fn extract_tables_from_table_with_joins(
+table_with_joins: &sqlparser::ast::TableWithJoins,
+tables: &mut Vec<String>,
+) {
extract_tables_from_table_factor(&table_with_joins.relation, tables);
for join in &table_with_joins.joins {
extract_tables_from_table_factor(&join.relation, tables);
}
-}
-_ => {
-// TableFunction, UNNEST, JsonTable, etc. normally have no direct table names
-// or are not relevant for SQLite
-}
-}
}
-/// Recursively extracts table names from SetExpr structures.
-/// This function contains the actual recursive logic.
-fn extract_tables_from_set_expr_recursive(set_expr: &SetExpr, tables: &mut Vec<String>) {
-match set_expr {
-SetExpr::Select(select) => {
-extract_tables_from_select(select, tables);
+fn extract_tables_from_table_factor(
+table_factor: &sqlparser::ast::TableFactor,
+tables: &mut Vec<String>,
+) {
+match table_factor {
+sqlparser::ast::TableFactor::Table { name, .. } => {
+tables.push(name.to_string());
}
-SetExpr::Query(sub_query) => {
-extract_tables_from_set_expr_recursive(&sub_query.body, tables);
+sqlparser::ast::TableFactor::Derived { subquery, .. } => {
+extract_tables_from_set_expr(&subquery.body, tables);
}
-SetExpr::SetOperation { left, right, .. } => {
-extract_tables_from_set_expr_recursive(left, tables);
-extract_tables_from_set_expr_recursive(right, tables);
+sqlparser::ast::TableFactor::NestedJoin {
+table_with_joins, ..
+} => {
+extract_tables_from_table_with_joins(table_with_joins, tables);
}
+_ => (), // Ignore other cases such as TableFunction
-SetExpr::Values(_)
-| SetExpr::Table(_)
-| SetExpr::Insert(_)
-| SetExpr::Update(_)
-| SetExpr::Merge(_)
-| SetExpr::Delete(_) => {}
}
}
-pub fn with_connection<T, F>(connection: &DbConnection, f: F) -> Result<T, DatabaseError>
-where
-F: FnOnce(&mut Connection) -> Result<T, DatabaseError>,
-{
-let mut db_lock = connection
-.0
-.lock()
-.map_err(|e| DatabaseError::MutexPoisoned {
-reason: e.to_string(),
-})?;
-let conn = db_lock.as_mut().ok_or(DatabaseError::ConnectionError {
-reason: "Connection to vault failed".to_string(),
-})?;
-f(conn)
-}
-#[cfg(test)]
-mod tests {
-use super::*;
-#[test]
-fn test_extract_simple_select() {
-let sql = "SELECT * FROM users";
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["users"]);
-}
-#[test]
-fn test_extract_select_with_join() {
-let sql = "SELECT u.name, p.title FROM users u JOIN posts p ON u.id = p.user_id";
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["users", "posts"]);
-}
-#[test]
-fn test_extract_insert() {
-let sql = "INSERT INTO users (name, email) VALUES (?, ?)";
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["users"]);
-}
-#[test]
-fn test_extract_update() {
-let sql = "UPDATE users SET name = ? WHERE id = ?";
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["users"]);
-}
-#[test]
-fn test_extract_delete() {
-let sql = "DELETE FROM users WHERE id = ?";
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["users"]);
-}
-#[test]
-fn test_extract_create_table() {
-let sql = "CREATE TABLE new_table (id INTEGER, name TEXT)";
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["new_table"]);
-}
-#[test]
-fn test_extract_subquery() {
-let sql = "SELECT * FROM (SELECT id FROM users) AS sub";
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["users"]);
-}
-#[test]
-fn test_extract_primary_table() {
-let sql = "SELECT u.name FROM users u JOIN posts p ON u.id = p.user_id";
-let primary_table = extract_primary_table_name_from_sql(sql).unwrap();
-assert_eq!(primary_table, Some("users".to_string()));
-}
-#[test]
-fn test_extract_complex_query() {
-let sql = r#"
-SELECT u.name, COUNT(p.id) as post_count
-FROM users u
-LEFT JOIN posts p ON u.id = p.user_id
-WHERE u.created_at > (SELECT MIN(created_at) FROM sessions)
-GROUP BY u.id
-"#;
-let tables = extract_table_names_from_sql(sql).unwrap();
-assert_eq!(tables, vec!["users", "posts", "sessions"]);
-}
-#[test]
-fn test_invalid_sql() {
-let sql = "INVALID SQL";
-let result = extract_table_names_from_sql(sql);
-assert!(result.is_err());
-}
-#[test]
-fn test_parse_single_statement() {
-let sql = "SELECT * FROM users WHERE id = ?";
-let result = parse_single_statement(sql);
-assert!(result.is_ok());
-assert!(matches!(result.unwrap(), Statement::Query(_)));
-}
-#[test]
-fn test_parse_invalid_sql() {
-let sql = "INVALID SQL STATEMENT";
-let result = parse_single_statement(sql);
-assert!(matches!(result, Err(DatabaseError::ParseError { .. })));
-}
-#[test]
-fn test_convert_value_ref_to_json() {
-use rusqlite::types::ValueRef;
-assert_eq!(
-convert_value_ref_to_json(ValueRef::Null).unwrap(),
-JsonValue::Null
-);
-assert_eq!(
-convert_value_ref_to_json(ValueRef::Integer(42)).unwrap(),
-JsonValue::Number(42.into())
-);
-assert_eq!(
-convert_value_ref_to_json(ValueRef::Text(b"hello")).unwrap(),
-JsonValue::String("hello".to_string())
-);
-}
-// Test for the new AST-based functions
-#[test]
-fn test_extract_table_names_comprehensive() {
-// Test various SQL statement types
-assert_eq!(
-extract_primary_table_name_from_sql("SELECT * FROM users WHERE id = 1").unwrap(),
-Some("users".to_string())
-);
-assert_eq!(
-extract_primary_table_name_from_sql("INSERT INTO products (name) VALUES ('test')")
-.unwrap(),
-Some("products".to_string())
-);
-assert_eq!(
-extract_primary_table_name_from_sql("UPDATE orders SET status = 'completed'").unwrap(),
-Some("orders".to_string())
-);
-assert_eq!(
-extract_primary_table_name_from_sql("DELETE FROM customers").unwrap(),
-Some("customers".to_string())
-);
-}
-}


@@ -1,203 +0,0 @@
// src-tauri/src/database/error.rs
use crate::crdt::trigger::CrdtSetupError;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use ts_rs::TS;
#[derive(Error, Debug, Serialize, Deserialize, TS)]
#[ts(export)]
#[serde(tag = "type", content = "details")]
pub enum DatabaseError {
/// The SQL code could not be parsed.
#[error("Failed to parse SQL: {reason} - SQL: {sql}")]
ParseError { reason: String, sql: String },
/// Parameter error (wrong count, invalid type, etc.)
#[error("Parameter count mismatch: SQL has {expected} placeholders but {provided} provided. SQL Statement: {sql}")]
ParameterMismatchError {
expected: usize,
provided: usize,
sql: String,
},
#[error("No table provided in SQL Statement: {sql}")]
NoTableError { sql: String },
#[error("Statement Error: {reason}")]
StatementError { reason: String },
#[error("Failed to prepare statement: {reason}")]
PrepareError { reason: String },
#[error("Database error: {reason}")]
DatabaseError { reason: String },
/// An error occurred in the database during execution.
#[error("Execution error on table {table:?}: {reason} - SQL: {sql}")]
ExecutionError {
sql: String,
reason: String,
table: Option<String>,
},
/// An error occurred while managing the transaction.
#[error("Transaction error: {reason}")]
TransactionError { reason: String },
/// A SQL statement that is not supported by the proxy.
#[error("Unsupported statement. '{reason}'. - SQL: {sql}")]
UnsupportedStatement { reason: String, sql: String },
/// Error in the HLC service
#[error("HLC error: {reason}")]
HlcError { reason: String },
/// Error while locking the database connection
#[error("Lock error: {reason}")]
LockError { reason: String },
/// Error in the database connection
#[error("Connection error: {reason}")]
ConnectionError { reason: String },
/// Error during JSON serialization
#[error("Serialization error: {reason}")]
SerializationError { reason: String },
/// Permission-related error for extensions
#[error("Permission error for extension '{extension_id}': {reason} (operation: {operation:?}, resource: {resource:?})")]
PermissionError {
extension_id: String,
operation: Option<String>,
resource: Option<String>,
reason: String,
},
#[error("Query error: {reason}")]
QueryError { reason: String },
#[error("Row processing error: {reason}")]
RowProcessingError { reason: String },
#[error("Mutex Poisoned error: {reason}")]
MutexPoisoned { reason: String },
#[error("Datenbankverbindung fehlgeschlagen für Pfad '{path}': {reason}")]
ConnectionFailed { path: String, reason: String },
#[error("PRAGMA-Befehl '{pragma}' konnte nicht gesetzt werden: {reason}")]
PragmaError { pragma: String, reason: String },
#[error("Fehler beim Auflösen des Dateipfads: {reason}")]
PathResolutionError { reason: String },
#[error("Datei-I/O-Fehler für Pfad '{path}': {reason}")]
IoError { path: String, reason: String },
#[error("CRDT setup failed: {0}")]
CrdtSetup(String),
}
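// With #[serde(tag = "type", content = "details")], each variant serializes
// with its name under "type" and its fields under "details"; e.g. a ParseError
// reaches the frontend roughly as (illustrative values):
//
//   { "type": "ParseError", "details": { "reason": "...", "sql": "SELEC ..." } }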
impl From<rusqlite::Error> for DatabaseError {
fn from(err: rusqlite::Error) -> Self {
DatabaseError::DatabaseError {
reason: err.to_string(),
}
}
}
impl From<String> for DatabaseError {
fn from(reason: String) -> Self {
DatabaseError::DatabaseError { reason }
}
}
impl From<CrdtSetupError> for DatabaseError {
fn from(err: CrdtSetupError) -> Self {
// We convert the error into a String to keep things simple.
DatabaseError::CrdtSetup(err.to_string())
}
}
impl DatabaseError {
/// Extract extension ID if this error is related to an extension
pub fn extension_id(&self) -> Option<&str> {
match self {
DatabaseError::PermissionError { extension_id, .. } => Some(extension_id.as_str()),
_ => None,
}
}
/// Check if this is a permission-related error
pub fn is_permission_error(&self) -> bool {
matches!(self, DatabaseError::PermissionError { .. })
}
/// Get operation if available
pub fn operation(&self) -> Option<&str> {
match self {
DatabaseError::PermissionError {
operation: Some(op),
..
} => Some(op.as_str()),
_ => None,
}
}
/// Get resource if available
pub fn resource(&self) -> Option<&str> {
match self {
DatabaseError::PermissionError {
resource: Some(res),
..
} => Some(res.as_str()),
_ => None,
}
}
}
/* impl From<crate::extension::database::ExtensionDatabaseError> for DatabaseError {
fn from(err: crate::extension::database::ExtensionDatabaseError) -> Self {
match err {
crate::extension::database::ExtensionDatabaseError::Permission { source } => {
// Convert PermissionError into DatabaseError
match source {
crate::extension::database::permissions::PermissionError::AccessDenied {
extension_id,
operation,
resource,
reason,
} => DatabaseError::PermissionError {
extension_id,
operation: Some(operation),
resource: Some(resource),
reason,
},
crate::extension::database::permissions::PermissionError::Database {
source,
} => source,
other => DatabaseError::PermissionError {
extension_id: "unknown".to_string(),
operation: None,
resource: None,
reason: other.to_string(),
},
}
}
crate::extension::database::ExtensionDatabaseError::Database { source } => source,
crate::extension::database::ExtensionDatabaseError::ParameterValidation { reason } => {
DatabaseError::ParamError {
reason: reason.clone(),
expected: 0, // Cannot be extracted from the reason
provided: 0, // Cannot be extracted from the reason
}
}
crate::extension::database::ExtensionDatabaseError::StatementExecution { reason } => {
DatabaseError::ExecutionError {
sql: "unknown".to_string(),
reason,
table: None,
}
}
}
}
} */


@@ -1,210 +0,0 @@
// Auto-generated from Drizzle schema
// DO NOT EDIT MANUALLY
// Run 'pnpm generate:rust-types' to regenerate
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexSettings {
pub id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub key: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "type")]
pub r#type: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub value: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub haex_tombstone: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub haex_timestamp: Option<String>,
}
impl HaexSettings {
pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
key: row.get(1)?,
r#type: row.get(2)?,
value: row.get(3)?,
haex_tombstone: row.get(4)?,
haex_timestamp: row.get(5)?,
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexExtensions {
pub id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub author: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub entry: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub homepage: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub enabled: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub icon: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub public_key: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub signature: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub url: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub version: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub haex_tombstone: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub haex_timestamp: Option<String>,
}
impl HaexExtensions {
pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
author: row.get(1)?,
description: row.get(2)?,
entry: row.get(3)?,
homepage: row.get(4)?,
enabled: row.get(5)?,
icon: row.get(6)?,
name: row.get(7)?,
public_key: row.get(8)?,
signature: row.get(9)?,
url: row.get(10)?,
version: row.get(11)?,
haex_tombstone: row.get(12)?,
haex_timestamp: row.get(13)?,
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexExtensionPermissions {
pub id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub extension_id: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub resource_type: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub action: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub target: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub constraints: Option<String>,
pub status: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub created_at: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub updated_at: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub haex_tombstone: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub haex_timestamp: Option<String>,
}
impl HaexExtensionPermissions {
pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
extension_id: row.get(1)?,
resource_type: row.get(2)?,
action: row.get(3)?,
target: row.get(4)?,
constraints: row.get(5)?,
status: row.get(6)?,
created_at: row.get(7)?,
updated_at: row.get(8)?,
haex_tombstone: row.get(9)?,
haex_timestamp: row.get(10)?,
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexCrdtLogs {
pub id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub haex_timestamp: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub table_name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub row_pks: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub op_type: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub column_name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub new_value: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub old_value: Option<String>,
}
impl HaexCrdtLogs {
pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
haex_timestamp: row.get(1)?,
table_name: row.get(2)?,
row_pks: row.get(3)?,
op_type: row.get(4)?,
column_name: row.get(5)?,
new_value: row.get(6)?,
old_value: row.get(7)?,
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexCrdtSnapshots {
pub snapshot_id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub created: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub epoch_hlc: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub location_url: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub file_size_bytes: Option<i64>,
}
impl HaexCrdtSnapshots {
pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
Ok(Self {
snapshot_id: row.get(0)?,
created: row.get(1)?,
epoch_hlc: row.get(2)?,
location_url: row.get(3)?,
file_size_bytes: row.get(4)?,
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HaexCrdtConfigs {
pub key: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub value: Option<String>,
}
impl HaexCrdtConfigs {
pub fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Self> {
Ok(Self {
key: row.get(0)?,
value: row.get(1)?,
})
}
}


@@ -1,306 +1,140 @@
// src-tauri/src/database/mod.rs // database/mod.rs
pub mod core; pub mod core;
pub mod error;
pub mod generated;
use crate::crdt::hlc::HlcService;
use crate::database::error::DatabaseError;
use crate::table_names::TABLE_CRDT_CONFIGS;
use crate::AppState;
use rusqlite::Connection; use rusqlite::Connection;
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::sync::Mutex; use std::sync::Mutex;
use std::time::UNIX_EPOCH;
use std::{fs, sync::Arc};
use tauri::{path::BaseDirectory, AppHandle, Manager, State}; use tauri::{path::BaseDirectory, AppHandle, Manager, State};
use tauri_plugin_fs::FsExt; pub struct DbConnection(pub Mutex<Option<Connection>>);
use ts_rs::TS;
pub struct DbConnection(pub Arc<Mutex<Option<Connection>>>);
const VAULT_EXTENSION: &str = ".db";
const VAULT_DIRECTORY: &str = "vaults";
// Öffentliche Funktionen für direkten Datenbankzugriff
#[tauri::command] #[tauri::command]
pub fn sql_select( pub async fn sql_select(
sql: String, sql: String,
params: Vec<JsonValue>, params: Vec<JsonValue>,
state: State<'_, AppState>, state: State<'_, DbConnection>,
) -> Result<Vec<HashMap<String, JsonValue>>, DatabaseError> { ) -> Result<Vec<Vec<JsonValue>>, String> {
core::select(sql, params, &state.db) core::select(sql, params, &state).await
} }
#[tauri::command] #[tauri::command]
pub fn sql_execute( pub async fn sql_execute(
sql: String, sql: String,
params: Vec<JsonValue>, params: Vec<JsonValue>,
state: State<'_, AppState>, state: State<'_, DbConnection>,
) -> Result<usize, DatabaseError> { ) -> Result<usize, String> {
core::execute(sql, params, &state.db) core::execute(sql, params, &state).await
}
/// Resolves a database name to the full vault path
fn get_vault_path(app_handle: &AppHandle, vault_name: &str) -> Result<String, DatabaseError> {
// Sicherstellen, dass der Name eine .db Endung hat
let vault_file_name = if vault_name.ends_with(VAULT_EXTENSION) {
vault_name.to_string()
} else {
format!("{}{VAULT_EXTENSION}", vault_name)
};
let vault_directory = get_vaults_directory(app_handle)?;
let vault_path = app_handle
.path()
.resolve(
format!("{vault_directory}/{}", vault_file_name),
BaseDirectory::AppLocalData,
)
.map_err(|e| DatabaseError::PathResolutionError {
reason: format!(
"Failed to resolve vault path for '{}': {}",
vault_file_name, e
),
})?;
// Sicherstellen, dass das vaults-Verzeichnis existiert
if let Some(parent) = vault_path.parent() {
fs::create_dir_all(parent).map_err(|e| DatabaseError::IoError {
path: parent.display().to_string(),
reason: format!("Failed to create vaults directory: {}", e),
})?;
}
Ok(vault_path.to_string_lossy().to_string())
}
/// Returns the vaults directory path
#[tauri::command]
pub fn get_vaults_directory(app_handle: &AppHandle) -> Result<String, DatabaseError> {
let vaults_dir = app_handle
.path()
.resolve(VAULT_DIRECTORY, BaseDirectory::AppLocalData)
.map_err(|e| DatabaseError::PathResolutionError {
reason: e.to_string(),
})?;
Ok(vaults_dir.to_string_lossy().to_string())
}
//#[serde(tag = "type", content = "details")]
#[derive(Debug, Serialize, Deserialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct VaultInfo {
name: String,
last_access: u64,
path: String,
}
/// Lists all vault databases in the vaults directory
#[tauri::command]
pub fn list_vaults(app_handle: AppHandle) -> Result<Vec<VaultInfo>, DatabaseError> {
    let vaults_dir_str = get_vaults_directory(app_handle.clone())?;
    let vaults_dir = Path::new(&vaults_dir_str);
    println!("Searching for vaults in {}", vaults_dir.display());
    let mut vaults: Vec<VaultInfo> = vec![];
    if !vaults_dir.exists() {
        println!("Vaults directory does not exist, returning an empty list.");
        return Ok(vec![]);
    }
    for entry in fs::read_dir(vaults_dir).map_err(|e| DatabaseError::IoError {
        path: "vaults directory".to_string(),
        reason: e.to_string(),
    })? {
        let entry = entry.map_err(|e| DatabaseError::IoError {
            path: "vaults directory entry".to_string(),
            reason: e.to_string(),
        })?;
        println!("Checking entry {}", entry.path().to_string_lossy());
        let path = entry.path();
        if path.is_file() {
            if let Some(filename) = path.file_name().and_then(|n| n.to_str()) {
                if filename.ends_with(VAULT_EXTENSION) {
                    println!("Found vault {}", filename);
                    let metadata = fs::metadata(&path).map_err(|e| DatabaseError::IoError {
                        path: path.to_string_lossy().to_string(),
                        reason: format!("Could not read metadata: {}", e),
                    })?;
                    let last_access_timestamp = metadata
                        .accessed()
                        .map_err(|e| DatabaseError::IoError {
                            path: path.to_string_lossy().to_string(),
                            reason: format!("Could not read access time: {}", e),
                        })?
                        .duration_since(UNIX_EPOCH)
                        .unwrap_or_default() // Fallback for the rare case of a timestamp before 1970
                        .as_secs();
                    // Strip the .db extension for the returned name
                    let vault_name = filename.trim_end_matches(VAULT_EXTENSION).to_string();
                    vaults.push(VaultInfo {
                        name: vault_name,
                        last_access: last_access_timestamp,
                        path: path.to_string_lossy().to_string(),
                    });
                }
            }
        }
    }
    Ok(vaults)
}
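/// Hypothetical convenience wrapper (not in this diff): callers that want the
/// most recently used vault first can sort on `last_access` descending.
pub fn list_vaults_sorted(app_handle: AppHandle) -> Result<Vec<VaultInfo>, DatabaseError> {
    let mut vaults = list_vaults(app_handle)?;
    vaults.sort_by(|a, b| b.last_access.cmp(&a.last_access));
    Ok(vaults)
}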
/// Checks if a vault with the given name exists
#[tauri::command]
pub fn vault_exists(app_handle: AppHandle, db_name: String) -> Result<bool, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &db_name)?;
Ok(Path::new(&vault_path).exists())
}
/// Deletes a vault database file
#[tauri::command]
pub fn delete_vault(app_handle: AppHandle, db_name: String) -> Result<String, DatabaseError> {
let vault_path = get_vault_path(&app_handle, &db_name)?;
if !Path::new(&vault_path).exists() {
return Err(DatabaseError::IoError {
path: vault_path,
reason: "Vault does not exist".to_string(),
});
}
fs::remove_file(&vault_path).map_err(|e| DatabaseError::IoError {
path: vault_path.clone(),
reason: format!("Failed to delete vault: {}", e),
})?;
Ok(format!("Vault '{}' successfully deleted", db_name))
}
/// Creates an encrypted database
#[tauri::command]
pub fn create_encrypted_database(
    app_handle: AppHandle,
    vault_name: String,
    key: String,
    state: State<'_, AppState>,
) -> Result<String, DatabaseError> {
    println!("Creating encrypted vault with name: {}", vault_name);
    let vault_path = get_vault_path(&app_handle, &vault_name)?;
    println!("Resolved vault path: {}", vault_path);

    // Check whether a vault with this name already exists
    if Path::new(&vault_path).exists() {
        return Err(DatabaseError::IoError {
            path: vault_path,
            reason: format!("A vault with the name '{}' already exists", vault_name),
        });
    }

    let template_path = app_handle
        .path()
        .resolve("database/vault.db", BaseDirectory::Resource)
        .map_err(|e| DatabaseError::PathResolutionError {
            reason: format!("Failed to resolve template database: {}", e),
        })?;

    let template_content = app_handle
        .fs()
        .read(&template_path)
        .map_err(|e| DatabaseError::IoError {
            path: template_path.display().to_string(),
            reason: format!("Failed to read template database from resources: {}", e),
        })?;

    let temp_path = app_handle
        .path()
        .resolve("temp_vault.db", BaseDirectory::AppLocalData)
        .map_err(|e| DatabaseError::PathResolutionError {
            reason: format!("Failed to resolve temp database: {}", e),
        })?;
    let temp_path_clone = temp_path.to_owned();

    fs::write(temp_path, template_content).map_err(|e| DatabaseError::IoError {
        path: vault_path.to_string(),
        reason: format!("Failed to write temporary template database: {}", e),
    })?;

    println!("Opening temp database directly: {}", temp_path_clone.display());
    let conn = Connection::open(&temp_path_clone).map_err(|e| DatabaseError::ConnectionFailed {
        path: temp_path_clone.display().to_string(),
        reason: format!("Failed to open the unencrypted source database: {}", e),
    })?;

    println!("Attaching new, encrypted database at '{}'", &vault_path);
    // ATTACH DATABASE '<file>' AS <alias> KEY '<password>';
    conn.execute(
        "ATTACH DATABASE ?1 AS encrypted KEY ?2;",
        [&vault_path, &key],
    )
    .map_err(|e| DatabaseError::ExecutionError {
        sql: "ATTACH DATABASE ...".to_string(),
        reason: e.to_string(),
        table: None,
    })?;

    println!("Exporting data from 'main' to 'encrypted' ...");
    // sqlcipher_export('<alias>') copies schema and data from 'main' to the alias DB
    if let Err(e) = conn.query_row("SELECT sqlcipher_export('encrypted');", [], |_| Ok(())) {
        // Try to clean up; ignore errors while doing so
        let _ = conn.execute("DETACH DATABASE encrypted;", []);
        // Also delete the possibly partially created file
        let _ = fs::remove_file(&vault_path);
        let _ = fs::remove_file(&temp_path_clone);
        return Err(DatabaseError::QueryError {
            reason: format!("Error during sqlcipher_export: {}", e),
        });
    }

    println!("Detaching the encrypted database from the handle...");
    conn.execute("DETACH DATABASE encrypted;", [])
        .map_err(|e| DatabaseError::ExecutionError {
            sql: "DETACH DATABASE ...".to_string(),
            reason: e.to_string(),
            table: None,
        })?;

    println!("Database successfully encrypted to '{}'.", &vault_path);

    // SQLCipher verification: try a SQLCipher-specific operation
    println!("Checking SQLCipher availability with 'PRAGMA cipher_version;'...");
    match conn.query_row("PRAGMA cipher_version;", [], |row| {
        let version: String = row.get(0)?;
        Ok(version)
    }) {
        Ok(version) => {
            println!("SQLCipher is active! Version: {}", version);
        }
        Err(e) => {
            eprintln!("ERROR: SQLCipher does NOT appear to be active!");
            eprintln!("The command 'PRAGMA cipher_version;' failed: {}", e);
            eprintln!("The database was probably NOT encrypted.");
        }
    }

    conn.close()
        .map_err(|(_, e)| DatabaseError::ConnectionFailed {
            path: template_path.display().to_string(),
            reason: format!("Failed to close the source database: {}", e),
        })?;

    let _ = fs::remove_file(&temp_path_clone);

    initialize_session(&app_handle, &vault_path, &key, &state)?;

    Ok(vault_path)
}
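// Stripped of the Tauri plumbing, the encrypt-by-export flow above is the
// standard SQLCipher recipe: ATTACH an empty encrypted database, run
// sqlcipher_export, then DETACH. A minimal sketch, assuming rusqlite built
// with SQLCipher support:
fn encrypt_copy(plain: &str, encrypted: &str, key: &str) -> rusqlite::Result<()> {
    let conn = rusqlite::Connection::open(plain)?;
    // Attach a new, initially empty encrypted database under the alias 'encrypted'.
    conn.execute("ATTACH DATABASE ?1 AS encrypted KEY ?2;", [encrypted, key])?;
    // Copy schema and data from 'main' into the attached database.
    conn.query_row("SELECT sqlcipher_export('encrypted');", [], |_| Ok(()))?;
    conn.execute("DETACH DATABASE encrypted;", [])?;
    Ok(())
}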
/// Opens an encrypted database
#[tauri::command]
pub fn open_encrypted_database(
    app_handle: AppHandle,
    vault_path: String,
    key: String,
    state: State<'_, AppState>,
) -> Result<String, DatabaseError> {
    println!("Opening encrypted database vault_path: {}", vault_path);

    // Derive the vault path from the name
    //let vault_path = get_vault_path(&app_handle, &vault_name)?;
    println!("Resolved vault path: {}", vault_path);

    if !Path::new(&vault_path).exists() {
        return Err(DatabaseError::IoError {
            path: vault_path.to_string(),
            reason: format!("Vault '{}' does not exist", vault_path),
        });
    }

    initialize_session(&app_handle, &vault_path, &key, &state)?;

    Ok(format!("Vault '{}' opened successfully", vault_path))
}
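// `core::open_and_init_db` is not shown in this diff. With SQLCipher, opening
// typically means supplying the key via PRAGMA and probing with a query; a
// sketch under that assumption, not the project's actual implementation:
fn open_encrypted_sketch(path: &str, key: &str) -> rusqlite::Result<rusqlite::Connection> {
    let conn = rusqlite::Connection::open(path)?;
    // Provide the key; with a wrong key the probe below fails
    // with "file is not a database".
    conn.pragma_update(None, "key", key)?;
    // Probe query to verify the key actually decrypts the file.
    conn.query_row("SELECT count(*) FROM sqlite_master;", [], |row| {
        row.get::<_, i64>(0)
    })?;
    Ok(conn)
}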
/// Opens the DB, initializes the HLC service, and stores both in the AppState.
fn initialize_session(
app_handle: &AppHandle,
path: &str,
key: &str,
state: &State<'_, AppState>,
) -> Result<(), DatabaseError> {
// 1. Establish the raw database connection
let conn = core::open_and_init_db(path, key, false)?;
// 2. Initialize the HLC service
let hlc_service = HlcService::try_initialize(&conn, app_handle).map_err(|e| {
// We convert the HlcError into a DatabaseError
DatabaseError::ExecutionError {
sql: "HLC Initialization".to_string(),
reason: e.to_string(),
table: Some(TABLE_CRDT_CONFIGS.to_string()),
}
})?;
// 3. Store everything in the global AppState
let mut db_guard = state.db.0.lock().map_err(|e| DatabaseError::LockError {
reason: e.to_string(),
})?;
*db_guard = Some(conn);
let mut hlc_guard = state.hlc.lock().map_err(|e| DatabaseError::LockError {
reason: e.to_string(),
})?;
*hlc_guard = hlc_service;
Ok(())
} }


@ -0,0 +1,181 @@
use mime;
use std::fs;
use std::path::PathBuf;
use std::str::FromStr;
use tauri::{
http::{Request, Response, Uri},
AppHandle, Error as TauriError, Manager, Runtime,
};
pub fn copy_directory(source: String, destination: String) -> Result<(), String> {
    println!(
        "Copying directory from '{}' to '{}'",
        source, destination
    );
    let source_path = PathBuf::from(&source);
    let destination_path = PathBuf::from(&destination);
    if !source_path.exists() || !source_path.is_dir() {
        return Err(format!(
            "Source directory '{}' not found or not a directory.",
            source
        ));
    }
    // Options for fs_extra::dir::copy
    let mut options = fs_extra::dir::CopyOptions::new();
    options.overwrite = true; // Overwrite destination files if they already exist
    options.copy_inside = true; // Copy the *contents* of the source folder into the destination folder
    // options.content_only = true; // Alternative: copy contents only; the destination folder must exist
    options.buffer_size = 64000; // Default buffer size, can be tuned
    // Run the copy operation
    match fs_extra::dir::copy(&source_path, &destination_path, &options) {
        Ok(bytes_copied) => {
            println!("Directory copied successfully ({} bytes)", bytes_copied);
            Ok(()) // Signal success
        }
        Err(e) => {
            eprintln!("Error while copying the directory: {}", e);
            Err(format!("Copy error: {}", e)) // Return the error as a String
        }
    }
}
pub fn resolve_secure_extension_asset_path<R: Runtime>(
    app_handle: &AppHandle<R>,
    extension_id: &str,
    requested_asset_path: &str,
) -> Result<PathBuf, String> {
    // 1. Validate the extension ID
    if extension_id.is_empty()
        || !extension_id
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-')
    {
        return Err(format!("Invalid extension ID: {}", extension_id));
    }
    // 2. Determine the base directory for all extensions (resource directory)
    let base_extensions_dir = app_handle
        .path()
        .resource_dir() // Correct for resources
        // For App Local Data instead, use: .app_local_data_dir()
        .map_err(|e: TauriError| format!("Base directory not found: {}", e))?
        .join("extensions");
    // 3. Directory of the specific extension
    let specific_extension_dir = base_extensions_dir.join(extension_id);
    // 4. Sanitize the requested asset path
    let clean_relative_path = requested_asset_path
        .replace('\\', "/")
        .trim_start_matches('/')
        .split('/')
        .filter(|&part| !part.is_empty() && part != "." && part != "..")
        .collect::<PathBuf>();
    if clean_relative_path.as_os_str().is_empty() && requested_asset_path != "/" {
        return Err("Empty or invalid asset path".to_string());
    }
    // 5. Assemble the final path
    let final_path = specific_extension_dir.join(clean_relative_path);
    // 6. SECURITY CHECK (as before)
    match final_path.canonicalize() {
        Ok(canonical_path) => {
            let canonical_base = specific_extension_dir.canonicalize().map_err(|e| {
                format!(
                    "Cannot canonicalize base path '{}': {}",
                    specific_extension_dir.display(),
                    e
                )
            })?;
            if canonical_path.starts_with(&canonical_base) {
                Ok(canonical_path)
            } else {
                eprintln!( /* ... security warning ... */ );
                Err("Invalid or disallowed asset path (canonical)".to_string())
            }
        }
        Err(_) => {
            // canonicalize failed (e.g. the path does not exist)
            if final_path.starts_with(&specific_extension_dir) {
                Ok(final_path) // Return the non-canonicalized path
            } else {
                eprintln!( /* ... security warning ... */ );
                Err("Invalid or disallowed asset path (not canonicalized)".to_string())
            }
        }
    }
}
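// The sanitizing step above (4.) is what neutralizes traversal attempts before
// the canonicalize check even runs. A standalone sketch of just that step,
// with hypothetical inputs:
#[cfg(test)]
mod asset_path_tests {
    use std::path::PathBuf;

    fn clean_relative(requested: &str) -> PathBuf {
        requested
            .replace('\\', "/")
            .trim_start_matches('/')
            .split('/')
            .filter(|&part| !part.is_empty() && part != "." && part != "..")
            .collect()
    }

    #[test]
    fn strips_traversal_components() {
        assert_eq!(clean_relative("/../..//etc/passwd"), PathBuf::from("etc/passwd"));
        assert_eq!(clean_relative("assets/./app.js"), PathBuf::from("assets/app.js"));
    }
}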
pub fn handle_extension_protocol<R: Runtime>(
    app_handle: &AppHandle<R>,
    request: &Request<Vec<u8>>,
) -> Result<Response<Vec<u8>>, Box<dyn std::error::Error>> {
    let uri_ref = request.uri(); // uri_ref is a &Uri
    println!("Protocol handler for: {}", uri_ref);
    let uri_string = uri_ref.to_string(); // Convert to String
    let parsed_uri = Uri::from_str(&uri_string)?; // Parse from &str
    let extension_id = parsed_uri
        .host()
        .ok_or("No host (extension ID) found in URI")?
        .to_string();
    let requested_asset_path = parsed_uri.path();
    let asset_path_to_load = if requested_asset_path == "/" || requested_asset_path.is_empty() {
        "index.html"
    } else {
        requested_asset_path
    };
    // Resolve the secure filesystem path (now uses the AppHandle)
    let absolute_secure_path =
        resolve_secure_extension_asset_path(app_handle, &extension_id, asset_path_to_load)?;
    // Read the file and build the response (code as before)
    match fs::read(&absolute_secure_path) {
        Ok(content) => {
            let mime_type = mime_guess::from_path(&absolute_secure_path)
                .first_or(mime::APPLICATION_OCTET_STREAM)
                .to_string();
            println!(
                "Serving {} ({}) for extension '{}'",
                absolute_secure_path.display(),
                mime_type,
                extension_id
            );
            // *** FIX: use Response::builder() ***
            Response::builder()
                .status(200)
                .header("Content-Type", mime_type) // Set headers via .header()
                .body(content) // body() returns Result<Response<Vec<u8>>, Error>
                .map_err(|e| e.into()) // Convert http::Error into Box<dyn Error>
        }
        Err(e) => {
            eprintln!(
                "Error reading file {}: {}",
                absolute_secure_path.display(),
                e
            );
            let status_code = if e.kind() == std::io::ErrorKind::NotFound {
                404
            } else if e.kind() == std::io::ErrorKind::PermissionDenied {
                403
            } else {
                500
            };
            // *** FIX: use Response::builder() for errors as well ***
            Response::builder()
                .status(status_code)
                .body(Vec::new()) // Empty body for errors
                .map_err(|e| e.into()) // Convert http::Error into Box<dyn Error>
        }
    }
}
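// A sketch of wiring this handler into the app builder. The scheme name
// "haex-extension" is an assumption, and the exact closure signature and
// response body type vary between Tauri versions, so treat this as an outline
// rather than drop-in code.
fn builder_sketch() {
    tauri::Builder::<tauri::Wry>::default()
        .register_uri_scheme_protocol("haex-extension", |ctx, request| {
            handle_extension_protocol(ctx.app_handle(), &request).unwrap_or_else(|e| {
                eprintln!("Extension protocol error: {}", e);
                tauri::http::Response::builder()
                    .status(500)
                    .body(Vec::new())
                    .expect("static error response")
            })
        });
}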


@ -1,311 +0,0 @@
// src-tauri/src/extension/core/manager.rs
use crate::extension::core::manifest::{EditablePermissions, ExtensionManifest, ExtensionPreview};
use crate::extension::core::types::{copy_directory, Extension, ExtensionSource};
use crate::extension::crypto::ExtensionCrypto;
use crate::extension::error::ExtensionError;
use crate::extension::permissions::manager::PermissionManager;
use crate::extension::permissions::types::{ExtensionPermission, PermissionStatus};
use crate::AppState;
use std::collections::HashMap;
use std::fs::File;
use std::path::PathBuf;
use std::sync::Mutex;
use std::time::{Duration, SystemTime};
use tauri::{AppHandle, Manager, State};
use zip::ZipArchive;
#[derive(Debug, Clone)]
pub struct CachedPermission {
pub permissions: Vec<ExtensionPermission>,
pub cached_at: SystemTime,
pub ttl: Duration,
}
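impl CachedPermission {
    /// Hypothetical helper (not in the original file): a cache entry is stale
    /// once it has outlived its TTL; clock errors count as expired.
    pub fn is_expired(&self) -> bool {
        self.cached_at
            .elapsed()
            .map(|age| age > self.ttl)
            .unwrap_or(true)
    }
}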
#[derive(Default)]
pub struct ExtensionManager {
pub production_extensions: Mutex<HashMap<String, Extension>>,
pub dev_extensions: Mutex<HashMap<String, Extension>>,
pub permission_cache: Mutex<HashMap<String, CachedPermission>>,
}
impl ExtensionManager {
pub fn new() -> Self {
Self::default()
}
pub fn get_base_extension_dir(
&self,
app_handle: &AppHandle,
) -> Result<PathBuf, ExtensionError> {
let path = app_handle
.path()
.app_local_data_dir()
.map_err(|e| ExtensionError::Filesystem {
source: std::io::Error::new(std::io::ErrorKind::NotFound, e.to_string()),
})?
.join("extensions");
Ok(path)
}
pub fn get_extension_dir(
&self,
app_handle: &AppHandle,
extension_id: &str,
extension_version: &str,
) -> Result<PathBuf, ExtensionError> {
let specific_extension_dir = self
.get_base_extension_dir(app_handle)?
.join(extension_id)
.join(extension_version);
Ok(specific_extension_dir)
}
pub fn add_production_extension(&self, extension: Extension) -> Result<(), ExtensionError> {
if extension.id.is_empty() {
return Err(ExtensionError::ValidationError {
reason: "Extension ID cannot be empty".to_string(),
});
}
match &extension.source {
ExtensionSource::Production { .. } => {
let mut extensions = self.production_extensions.lock().unwrap();
extensions.insert(extension.id.clone(), extension);
Ok(())
}
_ => Err(ExtensionError::ValidationError {
reason: "Expected Production source".to_string(),
}),
}
}
pub fn add_dev_extension(&self, extension: Extension) -> Result<(), ExtensionError> {
if extension.id.is_empty() {
return Err(ExtensionError::ValidationError {
reason: "Extension ID cannot be empty".to_string(),
});
}
match &extension.source {
ExtensionSource::Development { .. } => {
let mut extensions = self.dev_extensions.lock().unwrap();
extensions.insert(extension.id.clone(), extension);
Ok(())
}
_ => Err(ExtensionError::ValidationError {
reason: "Expected Development source".to_string(),
}),
}
}
pub fn get_extension(&self, extension_id: &str) -> Option<Extension> {
let dev_extensions = self.dev_extensions.lock().unwrap();
if let Some(extension) = dev_extensions.get(extension_id) {
return Some(extension.clone());
}
let prod_extensions = self.production_extensions.lock().unwrap();
prod_extensions.get(extension_id).cloned()
}
pub fn remove_extension(&self, extension_id: &str) -> Result<(), ExtensionError> {
{
let mut dev_extensions = self.dev_extensions.lock().unwrap();
if dev_extensions.remove(extension_id).is_some() {
return Ok(());
}
}
{
let mut prod_extensions = self.production_extensions.lock().unwrap();
if prod_extensions.remove(extension_id).is_some() {
return Ok(());
}
}
Err(ExtensionError::NotFound {
id: extension_id.to_string(),
})
}
pub async fn remove_extension_internal(
&self,
app_handle: &AppHandle,
extension_id: String,
extension_version: String,
state: &State<'_, AppState>,
) -> Result<(), ExtensionError> {
PermissionManager::delete_permissions(state, &extension_id).await?;
self.remove_extension(&extension_id)?;
let extension_dir =
self.get_extension_dir(app_handle, &extension_id, &extension_version)?;
if extension_dir.exists() {
std::fs::remove_dir_all(&extension_dir)
.map_err(|e| ExtensionError::Filesystem { source: e })?;
}
Ok(())
}
pub async fn preview_extension_internal(
&self,
source_path: String,
) -> Result<ExtensionPreview, ExtensionError> {
let source = PathBuf::from(&source_path);
let temp = std::env::temp_dir().join(format!("haexhub_preview_{}", uuid::Uuid::new_v4()));
std::fs::create_dir_all(&temp).map_err(|e| ExtensionError::Filesystem { source: e })?;
let file = File::open(&source).map_err(|e| ExtensionError::Filesystem { source: e })?;
let mut archive =
ZipArchive::new(file).map_err(|e| ExtensionError::InstallationFailed {
reason: format!("Invalid ZIP: {}", e),
})?;
archive
.extract(&temp)
.map_err(|e| ExtensionError::InstallationFailed {
reason: format!("Cannot extract ZIP: {}", e),
})?;
let manifest_path = temp.join("manifest.json");
let manifest_content =
std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError {
reason: format!("Cannot read manifest: {}", e),
})?;
let manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?;
let content_hash = ExtensionCrypto::hash_directory(&temp)
.map_err(|e| ExtensionError::SignatureVerificationFailed { reason: e })?;
let is_valid_signature = ExtensionCrypto::verify_signature(
&manifest.public_key,
&content_hash,
&manifest.signature,
)
.is_ok();
let key_hash = manifest.calculate_key_hash()?;
let editable_permissions = manifest.to_editable_permissions();
std::fs::remove_dir_all(&temp).ok();
Ok(ExtensionPreview {
manifest,
is_valid_signature,
key_hash,
editable_permissions,
})
}
pub async fn install_extension_with_permissions_internal(
&self,
app_handle: AppHandle,
source_path: String,
custom_permissions: EditablePermissions,
state: &State<'_, AppState>,
) -> Result<String, ExtensionError> {
let source = PathBuf::from(&source_path);
let temp = std::env::temp_dir().join(format!("haexhub_ext_{}", uuid::Uuid::new_v4()));
std::fs::create_dir_all(&temp).map_err(|e| ExtensionError::Filesystem { source: e })?;
let file = File::open(&source).map_err(|e| ExtensionError::Filesystem { source: e })?;
let mut archive =
ZipArchive::new(file).map_err(|e| ExtensionError::InstallationFailed {
reason: format!("Invalid ZIP: {}", e),
})?;
archive
.extract(&temp)
.map_err(|e| ExtensionError::InstallationFailed {
reason: format!("Cannot extract ZIP: {}", e),
})?;
let manifest_path = temp.join("manifest.json");
let manifest_content =
std::fs::read_to_string(&manifest_path).map_err(|e| ExtensionError::ManifestError {
reason: format!("Cannot read manifest: {}", e),
})?;
let manifest: ExtensionManifest = serde_json::from_str(&manifest_content)?;
let content_hash = ExtensionCrypto::hash_directory(&temp)
.map_err(|e| ExtensionError::SignatureVerificationFailed { reason: e })?;
ExtensionCrypto::verify_signature(&manifest.public_key, &content_hash, &manifest.signature)
.map_err(|e| ExtensionError::SignatureVerificationFailed { reason: e })?;
let key_hash = manifest.calculate_key_hash()?;
let full_extension_id = format!("{}-{}", key_hash, manifest.id);
let extensions_dir = app_handle
.path()
.app_data_dir()
.map_err(|e| ExtensionError::Filesystem {
source: std::io::Error::new(std::io::ErrorKind::NotFound, e.to_string()),
})?
.join("extensions")
.join(&full_extension_id)
.join(&manifest.version);
std::fs::create_dir_all(&extensions_dir)
.map_err(|e| ExtensionError::Filesystem { source: e })?;
copy_directory(
temp.to_string_lossy().to_string(),
extensions_dir.to_string_lossy().to_string(),
)?;
std::fs::remove_dir_all(&temp).ok();
let permissions = custom_permissions.to_internal_permissions(&full_extension_id);
let granted_permissions: Vec<_> = permissions
.into_iter()
.filter(|p| p.status == PermissionStatus::Granted)
.collect();
PermissionManager::save_permissions(state, &full_extension_id, &granted_permissions)
.await?;
let extension = Extension {
id: full_extension_id.clone(),
name: manifest.name.clone(),
source: ExtensionSource::Production {
path: extensions_dir.clone(),
version: manifest.version.clone(),
},
manifest: manifest.clone(),
enabled: true,
last_accessed: SystemTime::now(),
};
self.add_production_extension(extension)?;
Ok(full_extension_id)
}
}
// Backward compatibility
#[derive(Default)]
pub struct ExtensionState {
pub extensions: Mutex<HashMap<String, ExtensionManifest>>,
}
impl ExtensionState {
pub fn add_extension(&self, path: String, manifest: ExtensionManifest) {
let mut extensions = self.extensions.lock().unwrap();
extensions.insert(path, manifest);
}
pub fn get_extension(&self, addon_id: &str) -> Option<ExtensionManifest> {
let extensions = self.extensions.lock().unwrap();
extensions.values().find(|p| p.name == addon_id).cloned()
}
}


@ -1,250 +0,0 @@
// src-tauri/src/extension/core/manifest.rs
use crate::extension::crypto::ExtensionCrypto;
use crate::extension::error::ExtensionError;
use crate::extension::permissions::types::{
Action, DbConstraints, ExtensionPermission, FsConstraints, HttpConstraints,
PermissionConstraints, PermissionStatus, ResourceType, ShellConstraints,
};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ExtensionManifest {
pub id: String,
pub name: String,
pub version: String,
pub author: Option<String>,
pub entry: String,
pub icon: Option<String>,
pub public_key: String,
pub signature: String,
pub permissions: ExtensionManifestPermissions,
pub homepage: Option<String>,
pub description: Option<String>,
}
impl ExtensionManifest {
pub fn calculate_key_hash(&self) -> Result<String, ExtensionError> {
ExtensionCrypto::calculate_key_hash(&self.public_key)
.map_err(|e| ExtensionError::InvalidPublicKey { reason: e })
}
pub fn full_extension_id(&self) -> Result<String, ExtensionError> {
let key_hash = self.calculate_key_hash()?;
Ok(format!("{}-{}", key_hash, self.id))
}
pub fn to_editable_permissions(&self) -> EditablePermissions {
let mut permissions = Vec::new();
if let Some(db) = &self.permissions.database {
for resource in &db.read {
permissions.push(EditablePermission {
resource_type: "db".to_string(),
action: "read".to_string(),
target: resource.clone(),
constraints: None,
status: "granted".to_string(),
});
}
for resource in &db.write {
permissions.push(EditablePermission {
resource_type: "db".to_string(),
action: "write".to_string(),
target: resource.clone(),
constraints: None,
status: "granted".to_string(),
});
}
}
if let Some(fs) = &self.permissions.filesystem {
for path in &fs.read {
permissions.push(EditablePermission {
resource_type: "fs".to_string(),
action: "read".to_string(),
target: path.clone(),
constraints: None,
status: "granted".to_string(),
});
}
for path in &fs.write {
permissions.push(EditablePermission {
resource_type: "fs".to_string(),
action: "write".to_string(),
target: path.clone(),
constraints: None,
status: "granted".to_string(),
});
}
}
if let Some(http_list) = &self.permissions.http {
for domain in http_list {
permissions.push(EditablePermission {
resource_type: "http".to_string(),
action: "read".to_string(),
target: domain.clone(),
constraints: None,
status: "granted".to_string(),
});
}
}
if let Some(shell_list) = &self.permissions.shell {
for command in shell_list {
permissions.push(EditablePermission {
resource_type: "shell".to_string(),
action: "read".to_string(),
target: command.clone(),
constraints: None,
status: "granted".to_string(),
});
}
}
EditablePermissions { permissions }
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct ExtensionManifestPermissions {
#[serde(default)]
pub database: Option<DatabaseManifestPermissions>,
#[serde(default)]
pub filesystem: Option<FilesystemManifestPermissions>,
#[serde(default)]
pub http: Option<Vec<String>>,
#[serde(default)]
pub shell: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct DatabaseManifestPermissions {
#[serde(default)]
pub read: Vec<String>,
#[serde(default)]
pub write: Vec<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct FilesystemManifestPermissions {
#[serde(default)]
pub read: Vec<String>,
#[serde(default)]
pub write: Vec<String>,
}
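// Thanks to #[serde(default)], a manifest may declare only the permission
// sections it needs. A minimal sketch with an invented manifest fragment:
#[cfg(test)]
mod manifest_permissions_tests {
    use super::ExtensionManifestPermissions;

    #[test]
    fn parses_partial_permissions() {
        let raw = r#"{
            "database": { "read": ["notes"], "write": ["notes"] },
            "http": ["https://api.example.com"]
        }"#;
        let perms: ExtensionManifestPermissions =
            serde_json::from_str(raw).expect("valid permissions JSON");
        assert!(perms.filesystem.is_none()); // omitted sections default to None
        assert_eq!(perms.database.unwrap().read, vec!["notes"]);
    }
}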
// Editable permissions for the UI
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct EditablePermissions {
pub permissions: Vec<EditablePermission>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct EditablePermission {
pub resource_type: String,
pub action: String,
pub target: String,
pub constraints: Option<serde_json::Value>,
pub status: String,
}
impl EditablePermissions {
pub fn to_internal_permissions(&self, extension_id: &str) -> Vec<ExtensionPermission> {
self.permissions
.iter()
.map(|p| ExtensionPermission {
id: uuid::Uuid::new_v4().to_string(),
extension_id: extension_id.to_string(),
resource_type: match p.resource_type.as_str() {
"fs" => ResourceType::Fs,
"http" => ResourceType::Http,
"db" => ResourceType::Db,
"shell" => ResourceType::Shell,
_ => ResourceType::Fs,
},
action: match p.action.as_str() {
"read" => Action::Read,
"write" => Action::Write,
_ => Action::Read,
},
target: p.target.clone(),
constraints: p
.constraints
.as_ref()
.and_then(|c| Self::parse_constraints(&p.resource_type, c)),
status: match p.status.as_str() {
"granted" => PermissionStatus::Granted,
"denied" => PermissionStatus::Denied,
"ask" => PermissionStatus::Ask,
_ => PermissionStatus::Denied,
},
haex_timestamp: None,
haex_tombstone: None,
})
.collect()
}
fn parse_constraints(
resource_type: &str,
json_value: &serde_json::Value,
) -> Option<PermissionConstraints> {
match resource_type {
"db" => serde_json::from_value::<DbConstraints>(json_value.clone())
.ok()
.map(PermissionConstraints::Database),
"fs" => serde_json::from_value::<FsConstraints>(json_value.clone())
.ok()
.map(PermissionConstraints::Filesystem),
"http" => serde_json::from_value::<HttpConstraints>(json_value.clone())
.ok()
.map(PermissionConstraints::Http),
"shell" => serde_json::from_value::<ShellConstraints>(json_value.clone())
.ok()
.map(PermissionConstraints::Shell),
_ => None,
}
}
}
#[derive(Serialize, Deserialize)]
pub struct ExtensionPreview {
pub manifest: ExtensionManifest,
pub is_valid_signature: bool,
pub key_hash: String,
pub editable_permissions: EditablePermissions,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ExtensionInfoResponse {
pub key_hash: String,
pub name: String,
pub full_id: String,
pub version: String,
pub display_name: Option<String>,
pub namespace: Option<String>,
pub allowed_origin: String,
}
impl ExtensionInfoResponse {
pub fn from_extension(
extension: &crate::extension::core::types::Extension,
) -> Result<Self, ExtensionError> {
use crate::extension::core::types::get_tauri_origin;
let allowed_origin = get_tauri_origin();
let key_hash = extension.manifest.calculate_key_hash()?;
let full_id = extension.manifest.full_extension_id()?;
Ok(Self {
key_hash,
name: extension.manifest.name.clone(),
full_id,
version: extension.manifest.version.clone(),
display_name: Some(extension.manifest.name.clone()),
namespace: extension.manifest.author.clone(),
allowed_origin,
})
}
}


@ -1,10 +0,0 @@
// src-tauri/src/extension/core/mod.rs
pub mod manager;
pub mod manifest;
pub mod protocol;
pub mod types;
pub use manager::*;
pub use manifest::*;
pub use protocol::*;

Some files were not shown because too many files have changed in this diff.