diff --git a/scripts/setup-llama-server-vulkan.ps1 b/scripts/setup-llama-server-vulkan.ps1
new file mode 100644
index 0000000..49e9f1f
--- /dev/null
+++ b/scripts/setup-llama-server-vulkan.ps1
@@ -0,0 +1,25 @@
+# setup-llama-server-vulkan.ps1
+$Url = "https://github.com/ggerganov/llama.cpp/releases/download/b7956/llama-b7956-bin-win-vulkan-x64.zip"
+$ZipFile = "llama_vulkan.zip"
+$ExtractDir = "llama_vulkan_extracted"
+
+Write-Host "Downloading llama-server b7956 (Vulkan)..." -ForegroundColor Cyan
+Invoke-WebRequest -Uri $Url -OutFile $ZipFile
+
+if (Test-Path $ExtractDir) { Remove-Item -Path $ExtractDir -Recurse -Force }
+Write-Host "Extracting..." -ForegroundColor Cyan
+Expand-Archive -Path $ZipFile -DestinationPath $ExtractDir -Force
+
+# Target directories
+$BinDir = "src-tauri/bin"
+$ResDir = "src-tauri/resources"
+
+if (!(Test-Path $BinDir)) { New-Item -ItemType Directory -Path $BinDir -Force }
+if (!(Test-Path $ResDir)) { New-Item -ItemType Directory -Path $ResDir -Force }
+
+Write-Host "Copying files to $BinDir and $ResDir..." -ForegroundColor Cyan
+Copy-Item "$ExtractDir/llama-server.exe" "$BinDir/llama-server-x86_64-pc-windows-msvc.exe" -Force
+Copy-Item "$ExtractDir/*.dll" "$ResDir/" -Force
+Copy-Item "$ExtractDir/*.dll" "$BinDir/" -Force
+
+Write-Host "Done." -ForegroundColor Green
diff --git a/scripts/setup-model.ps1 b/scripts/setup-model.ps1
new file mode 100644
index 0000000..8a7481e
--- /dev/null
+++ b/scripts/setup-model.ps1
@@ -0,0 +1,28 @@
+# setup-model.ps1
+# Downloads the EmbeddingGemma-300m GGUF model from Hugging Face
+
+$ModelUrl = "https://huggingface.co/abhijithmallya/embeddinggemma-300m-Q4_0-GGUF/resolve/main/embeddinggemma-300m-q4_0.gguf"
+$TargetDir = Join-Path $PSScriptRoot "../models"
+$TargetFile = Join-Path $TargetDir "embeddinggemma-300m-q4_0.gguf"
+
+if (!(Test-Path $TargetDir)) {
+    New-Item -ItemType Directory -Path $TargetDir -Force
+}
+
+if (Test-Path $TargetFile) {
+    Write-Host "Model already exists at $TargetFile" -ForegroundColor Yellow
+    exit 0
+}
+
+Write-Host "Downloading EmbeddingGemma-300m model from Hugging Face..." -ForegroundColor Cyan
+Write-Host "URL: $ModelUrl"
+Write-Host "This may take a few minutes (approx. 200MB)..."
+
+try {
+    Invoke-WebRequest -Uri $ModelUrl -OutFile $TargetFile -ErrorAction Stop
+    Write-Host "Successfully downloaded to $TargetFile" -ForegroundColor Green
+}
+catch {
+    Write-Error "Failed to download model: $_"
+    exit 1
+}
diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json
index c135d7f..8436679 100644
--- a/src-tauri/capabilities/default.json
+++ b/src-tauri/capabilities/default.json
@@ -6,6 +6,17 @@
     "main"
   ],
   "permissions": [
-    "core:default"
+    "core:default",
+    "shell:allow-spawn",
+    {
+      "identifier": "shell:allow-execute",
+      "allow": [
+        {
+          "args": true,
+          "sidecar": true,
+          "name": "bin/llama-server"
+        }
+      ]
+    }
   ]
-}
+}
\ No newline at end of file
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 531d2dc..327e9fa 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -37,12 +37,74 @@
         dotenv().ok();
         let app_handle = app.handle().clone();
+
+        // Initialize the shell plugin
+        app.handle().plugin(tauri_plugin_shell::init())?;
+
+        // Spawn the llama-server sidecar
+        let _model_path = env::var("LLAMA_CPP_MODEL_PATH").ok();
+        // Launch the sidecar (std::process::Command is used to reliably avoid DLL issues)
+        let mut sidecar_path = std::env::current_dir().unwrap_or_default();
+        println!("Current working directory: {:?}", sidecar_path);
+
+        // If running inside the src-tauri folder, go up one level
+        if sidecar_path.ends_with("src-tauri") {
+            sidecar_path.pop();
+        }
+
+        let project_root = sidecar_path.clone();
+        sidecar_path.push("src-tauri");
+        sidecar_path.push("bin");
+        let sidecar_exe = sidecar_path.join("llama-server-x86_64-pc-windows-msvc.exe");
+
+        println!("Calculated sidecar path: {:?}", sidecar_exe);
+
+        // Resolve the model path to an absolute path from the project root as well
+        let model_rel_path = env::var("LLAMA_CPP_MODEL_PATH").unwrap_or_default();
+        let model_abs_path = project_root.join(&model_rel_path);
+        let args = vec![
+            "--model".to_string(),
+            model_abs_path.to_string_lossy().to_string(),
+            "--port".to_string(),
+            "8080".to_string(),
+            "--embedding".to_string(),
+            "--host".to_string(),
+            "127.0.0.1".to_string(),
+        ];
+
+        let mut cmd = std::process::Command::new(&sidecar_exe);
+        cmd.args(&args);
+        cmd.current_dir(&sidecar_path); // Make the directory containing the DLLs the working directory
+
+        // Prepend it to PATH as well
+        let mut current_path = env::var("PATH").unwrap_or_default();
+        current_path = format!("{};{}", sidecar_path.display(), current_path);
+        cmd.env("PATH", current_path);
+
+        match cmd.spawn() {
+            Ok(child) => {
+                println!("llama-server started with PID: {}", child.id());
+                // Keep a handle so the process can be reaped; killing it on app exit
+                // is left out of this simplified implementation
+                let pid = child.id();
+                std::thread::spawn(move || {
+                    // Wait for the child process to exit
+                    let _ = child.wait_with_output();
+                    println!("llama-server (PID {}) exited", pid);
+                });
+            }
+            Err(e) => {
+                eprintln!("Failed to spawn llama-server: {}", e);
+            }
+        }
+
         tauri::async_runtime::block_on(async move {
             let db_path = "vector.db";
-
+
             // Dynamically resolve the path to the extension (vec0.dll)
-            let exe_dir = env::current_exe().map(|p| p.parent().unwrap().to_path_buf()).unwrap_or_else(|_| env::current_dir().unwrap());
-
+            let exe_dir = env::current_exe()
+                .map(|p| p.parent().unwrap().to_path_buf())
+                .unwrap_or_else(|_| env::current_dir().unwrap());
+
             let candidates = [
                 exe_dir.join("vec0.dll"), // Same directory as the executable
                 exe_dir.join("../node_modules/sqlite-vec-windows-x64/vec0.dll"), // Tauri dev (target/debug/..)
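Note on the spawn handler above: the spawned thread only reaps the llama-server child, and the comment concedes that killing it on app exit is out of scope for the simplified implementation. Below is a minimal sketch of one way to close that gap, assuming the same `std::process::Command` spawn and a Tauri v2 builder; the `SidecarProcess` wrapper and the `run` wiring are illustrative names, not part of this patch.

```rust
use std::process::Child;
use std::sync::Mutex;

use tauri::Manager;

// Hypothetical managed-state wrapper for the spawned llama-server handle.
struct SidecarProcess(Mutex<Option<Child>>);

fn run() {
    tauri::Builder::default()
        .setup(|_app| {
            // ... spawn llama-server exactly as in the diff, then stash the handle:
            // _app.manage(SidecarProcess(Mutex::new(Some(child))));
            Ok(())
        })
        .build(tauri::generate_context!())
        .expect("error while building tauri application")
        .run(|app_handle, event| {
            // Kill the sidecar on exit instead of leaving it orphaned.
            if let tauri::RunEvent::Exit = event {
                if let Some(state) = app_handle.try_state::<SidecarProcess>() {
                    if let Some(mut child) = state.0.lock().unwrap().take() {
                        let _ = child.kill();
                        let _ = child.wait(); // release the OS process handle
                    }
                }
            }
        });
}
```

Calling `wait` after `kill` lets the OS clean up the process record before the app itself exits.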
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index fad6511..e06563a 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -15,9 +15,11 @@
         "width": 800,
         "height": 600,
         "resizable": true,
-        "fullscreen": false
+        "fullscreen": false,
+        "visible": true
       }
     ],
+    "withGlobalTauri": true,
     "security": {
       "csp": null
     }
@@ -25,6 +27,12 @@
   "bundle": {
     "active": true,
     "targets": "all",
+    "externalBin": [
+      "bin/llama-server"
+    ],
+    "resources": [
+      "resources/*"
+    ],
     "icon": [
       "icons/32x32.png",
       "icons/128x128.png",
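For context on the `externalBin` entry: Tauri resolves `bin/llama-server` by appending the host target triple, which is why setup-llama-server-vulkan.ps1 renames the executable to `llama-server-x86_64-pc-windows-msvc.exe`. The lib.rs change spawns it with `std::process::Command` to dodge DLL resolution problems, but the `shell:allow-execute` capability added earlier would also permit the plugin route. A sketch of that alternative, with an illustrative model path and the same server flags as the diff:

```rust
use tauri_plugin_shell::process::CommandEvent;
use tauri_plugin_shell::ShellExt;

fn spawn_llama_server(app: &tauri::AppHandle) {
    // "llama-server" resolves to bin/llama-server-<target-triple>[.exe] at runtime.
    let sidecar = app
        .shell()
        .sidecar("llama-server")
        .expect("sidecar not declared in externalBin")
        .args([
            "--model", "models/embeddinggemma-300m-q4_0.gguf", // illustrative path
            "--port", "8080",
            "--embedding",
            "--host", "127.0.0.1",
        ]);

    let (mut rx, _child) = sidecar.spawn().expect("failed to spawn llama-server");

    // Forward the server's stdout to the console for debugging.
    tauri::async_runtime::spawn(async move {
        while let Some(event) = rx.recv().await {
            if let CommandEvent::Stdout(line) = event {
                println!("llama-server: {}", String::from_utf8_lossy(&line));
            }
        }
    });
}
```

The trade-off the diff's comments already flag still applies: the plugin spawns the sidecar without the PATH and working-directory adjustments made in lib.rs, so the Vulkan DLLs must be reachable next to the binary, which is what the `resources/*` bundle entry and the copy into `src-tauri/bin` provide.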