diff --git a/.github/workflows/automaticBuilding.yml b/.github/workflows/automaticBuilding.yml
index 1d2008fc..152f953a 100644
--- a/.github/workflows/automaticBuilding.yml
+++ b/.github/workflows/automaticBuilding.yml
@@ -49,7 +49,8 @@ jobs:
run: bash ./utilScripts/libmongocrypt_prebuild.sh
- name: Build for Windows
- run: npm run build:win
+ # Prevent electron-builder from publishing here; publish occurs in the final job
+ run: npm run build:win -- --publish=never
- name: Get tag name
id: get-tag
@@ -145,7 +146,8 @@ jobs:
run: bash ./utilScripts/libmongocrypt_prebuild.sh
- name: Build for Linux
- run: npm run build:linux
+ # Prevent electron-builder from publishing here; publish occurs in the final job
+ run: npm run build:linux -- --publish=never
- name: Get tag name
id: get-tag
@@ -404,7 +406,8 @@ jobs:
# Publish to GitHub
publish:
- needs: [build-windows, build-linux-ubuntu, build-mac]
+ # Removed Mac build requirement for now since it needs a special signature, readd ", build-mac" later
+ needs: [build-windows, build-linux-ubuntu]
runs-on: ubuntu-latest
steps:
- name: Checkout code
@@ -414,52 +417,64 @@ jobs:
uses: actions/download-artifact@v4
with:
name: windows-built
+ path: .
- name: Download Windows blockmap
uses: actions/download-artifact@v4
with:
name: windows-built-blockmap
+ path: .
- name: Download Windows latest.yml file
uses: actions/download-artifact@v4
with:
name: windows-built-latest
+ path: .
- name: Download Linux Ubuntu
uses: actions/download-artifact@v4
with:
name: linux-built-ubuntu
-
- - name: Download Mac PKG
- uses: actions/download-artifact@v4
- with:
- name: mac-built-pkg
-
- - name: Download Mac DMG
- uses: actions/download-artifact@v4
- with:
- name: mac-built-dmg
-
- - name: Download Mac ZIP
- uses: actions/download-artifact@v4
- with:
- name: mac-built-zip
-
- - name: Download latest-mac.yml
- uses: actions/download-artifact@v4
- with:
- name: latest-mac
-
- - name: Download Mac DMG blockmap
- uses: actions/download-artifact@v4
- with:
- name: mac-built-dmg-blockmap
-
- - name: Download Mac ZIP blockmap
- uses: actions/download-artifact@v4
- with:
- name: mac-built-zip-blockmap
-
+ path: .
+
+ # Temporarily commented while mac requirements are ignored for key/cert reasons
+ # - name: Download Mac PKG
+ # uses: actions/download-artifact@v4
+ # with:
+ # name: mac-built-pkg
+
+ # - name: Download Mac DMG
+ # uses: actions/download-artifact@v4
+ # with:
+ # name: mac-built-dmg
+
+ # - name: Download Mac ZIP
+ # uses: actions/download-artifact@v4
+ # with:
+ # name: mac-built-zip
+
+ # - name: Download latest-mac.yml
+ # uses: actions/download-artifact@v4
+ # with:
+ # name: latest-mac
+
+ # - name: Download Mac DMG blockmap
+ # uses: actions/download-artifact@v4
+ # with:
+ # name: mac-built-dmg-blockmap
+
+ # - name: Download Mac ZIP blockmap
+ # uses: actions/download-artifact@v4
+ # with:
+ # name: mac-built-zip-blockmap
+
+ # Readd these lines to "with: files:" when Mac release is reenabled
+ # ./MEDomicsLab-${{ env.VERSION }}-mac-installer.pkg
+ # ./MEDomicsLab-${{ env.VERSION }}-mac.dmg
+ # ./MEDomicsLab-${{ env.VERSION }}-mac.zip
+ # ./latest-mac.yml
+ # ./MEDomicsLab-${{ env.VERSION }}-mac.dmg.blockmap
+ # ./MEDomicsLab-${{ env.VERSION }}-mac.zip.blockmap
- name: Get tag name
id: get-tag
run: |
@@ -473,6 +488,10 @@ jobs:
./MEDomics-${{ env.VERSION }}-win.exe
./MEDomics-${{ env.VERSION }}-win.exe.blockmap
./latest.yml
+ ./MEDomicsLab-${{ env.VERSION }}-ubuntu.deb
+
+
+
./MEDomics-${{ env.VERSION }}-ubuntu.deb
./MEDomics-${{ env.VERSION }}-mac.pkg
./MEDomics-${{ env.VERSION }}-mac.dmg
diff --git a/.github/workflows/automaticBuildingLinux.yml b/.github/workflows/automaticBuildingLinux.yml
index a7852cd2..337a966f 100644
--- a/.github/workflows/automaticBuildingLinux.yml
+++ b/.github/workflows/automaticBuildingLinux.yml
@@ -8,6 +8,8 @@ permissions:
jobs:
build-linux-ubuntu:
runs-on: ubuntu-22.04
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout code
@@ -47,7 +49,8 @@ jobs:
run: bash ./utilScripts/libmongocrypt_prebuild.sh
- name: Build for Linux
- run: npm run build:linux
+ # Prevent electron-builder from publishing here; publish occurs in the next job
+ run: npm run build:linux -- --publish=never
- name: Get tag name
id: get-tag
@@ -76,6 +79,7 @@ jobs:
uses: actions/download-artifact@v4
with:
name: linux-built-ubuntu
+ path: .
- name: Install zip
run: sudo apt-get install zip
diff --git a/.github/workflows/automaticBuildingWin.yml b/.github/workflows/automaticBuildingWin.yml
index 09d592f4..c96a6aae 100644
--- a/.github/workflows/automaticBuildingWin.yml
+++ b/.github/workflows/automaticBuildingWin.yml
@@ -97,16 +97,19 @@ jobs:
uses: actions/download-artifact@v4
with:
name: windows-built
+ path: .
- name: Download Windows blockmap
uses: actions/download-artifact@v4
with:
name: windows-built-blockmap
+ path: .
- name: Download Windows latest.yml file
uses: actions/download-artifact@v4
with:
name: windows-built-latest
+ path: .
- name: Install zip
run: sudo apt-get install zip
diff --git a/.github/workflows/clientRelease.yml b/.github/workflows/clientRelease.yml
new file mode 100644
index 00000000..922072cb
--- /dev/null
+++ b/.github/workflows/clientRelease.yml
@@ -0,0 +1,250 @@
+name: Client build and publish
+on:
+ push:
+ tags:
+ - "client-v*"
+permissions:
+ contents: write
+
+jobs:
+ build-windows:
+ runs-on: windows-latest
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 18.13
+
+ - name: Set version from tag
+ shell: bash
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#client-}
+ CLEAN=${CLEAN#v}
+ node -p "let p=require('./package.json'); p.version='${CLEAN}'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2));"
+
+ - name: Install dependencies
+ run: npm install
+
+ # Skipping libmongocrypt prebuild for client-only
+
+ - name: Build Client for Windows
+ # Prevent electron-builder from publishing; publish happens in the final job
+ run: npx nextron build --win --config electron-builder.client.yml --publish=never
+
+ - name: Get version
+ id: get-ver
+ shell: bash
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#client-}
+ CLEAN=${CLEAN#v}
+ echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ - name: Upload Windows artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: client-windows
+ path: |
+ ./build/dist/MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-win.exe
+ ./build/dist/MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-win.exe.blockmap
+ ./build/dist/latest.yml
+
+ build-linux:
+ runs-on: ubuntu-22.04
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 18.13
+
+ - name: Set version from tag
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#client-}
+ CLEAN=${CLEAN#v}
+ node -p "let p=require('./package.json'); p.version='${CLEAN}'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2));"
+
+ - name: Install dependencies
+ run: npm install
+
+ # Skipping libmongocrypt prebuild for client-only
+
+ - name: Build Client for Linux
+ # Prevent electron-builder from publishing; publish happens in the final job
+ run: npx nextron build --linux --config electron-builder.client.yml --publish=never
+
+ - name: Get version
+ id: get-ver
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#client-}
+ CLEAN=${CLEAN#v}
+ echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ - name: Upload Linux artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: client-linux
+ path: |
+ ./build/dist/MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-linux.deb
+
+ # build-mac:
+ # runs-on: macos-latest
+ # env:
+ # GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ # steps:
+ # - name: Checkout code
+ # uses: actions/checkout@v4
+ # with:
+ # submodules: recursive
+ # fetch-depth: 0
+
+ # - name: Keys and certificates
+ # env:
+ # APPLE_CERT_DATA: ${{ secrets.CSC_INSTALLER_LINK }}
+ # APPLE_CERT_PASSWORD: ${{ secrets.CSC_INSTALLER_KEY_PASSWORD }}
+ # DEVELOPER_APP_CER: ${{ secrets.CSC_LINK }}
+ # DEVELOPER_APP_KEY: ${{ secrets.CSC_KEY_PASSWORD }}
+ # run: |
+ # echo -n "$APPLE_CERT_DATA" | base64 --decode -o certificate_installer.p12
+ # echo -n "$DEVELOPER_APP_CER" | base64 --decode -o certificate_application.p12
+ # KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
+ # security create-keychain -p "${{ secrets.CSC_KEY_PASSWORD }}" $KEYCHAIN_PATH
+ # security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
+ # security unlock-keychain -p "${{ secrets.CSC_KEY_PASSWORD }}" $KEYCHAIN_PATH
+ # security import certificate_installer.p12 -P "${{ secrets.CSC_KEY_PASSWORD }}" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
+ # security import certificate_application.p12 -P "${{ secrets.CSC_KEY_PASSWORD }}" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
+ # security list-keychain -d user -s $KEYCHAIN_PATH
+
+ # - name: Notary profile
+ # env:
+ # APPLE_NOTARY_USER: ${{ secrets.APPLE_ID }}
+ # APPLE_NOTARY_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
+ # APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
+ # run: |
+ # xcrun notarytool store-credentials "notarytool-password" --apple-id "$APPLE_NOTARY_USER" --team-id "$APPLE_TEAM_ID" --password "$APPLE_NOTARY_PASSWORD"
+
+ # - name: Set up Node.js
+ # uses: actions/setup-node@v4
+ # with:
+ # node-version: 18.13
+
+ # - name: Set version from tag
+ # run: |
+ # TAG=${GITHUB_REF/refs\/tags\//}
+ # CLEAN=${TAG#client-}
+ # CLEAN=${CLEAN#v}
+ # node -p "let p=require('./package.json'); p.version='${CLEAN}'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2));"
+
+ # - name: Install dependencies
+ # run: npm install
+
+ # # Skipping libmongocrypt prebuild for client-only
+
+ # - name: Build Client for Mac
+ # env:
+ # APPLE_ID: ${{ secrets.APPLE_ID }}
+ # APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
+ # APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
+ # CSC_LINK: ${{ secrets.CSC_LINK }}
+ # CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
+ # CSC_INSTALLER_LINK: ${{ secrets.CSC_INSTALLER_LINK }}
+ # CSC_INSTALLER_KEY_PASSWORD: ${{ secrets.CSC_INSTALLER_KEY_PASSWORD }}
+ # run: |
+ # # Prevent electron-builder from publishing; publish happens in the final job
+ # npx nextron build --mac --config electron-builder.client.yml --publish=never
+
+ # - name: Get version
+ # id: get-ver
+ # run: |
+ # TAG=${GITHUB_REF/refs\/tags\//}
+ # CLEAN=${TAG#client-}
+ # CLEAN=${CLEAN#v}
+ # echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ # - name: Notarize DMG (if present)
+ # continue-on-error: true
+ # env:
+ # VERSION: ${{ steps.get-ver.outputs.VERSION }}
+ # run: |
+ # if [ -f build/dist/MEDomicsLab-${VERSION}-mac.dmg ]; then
+ # xcrun notarytool submit build/dist/MEDomicsLab-${VERSION}-mac.dmg --keychain-profile "notarytool-password" --wait
+ # fi
+
+ # - name: Upload Mac artifacts
+ # uses: actions/upload-artifact@v4
+ # with:
+ # name: client-mac
+ # path: |
+ # ./build/dist/MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-mac.dmg
+ # ./build/dist/MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-mac.zip
+ # ./build/dist/latest-mac.yml
+
+ publish:
+ # Removed Mac build requirement for now since it needs a special signature, readd ", build-mac" later
+ needs: [build-windows, build-linux]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: client-windows
+ path: .
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: client-linux
+ path: .
+
+ # Commented until keys and certificate for Mac are ready
+ # - uses: actions/download-artifact@v4
+ # with:
+ # name: client-mac
+
+ # Re-add these lines at the end of "with: files: [...]" once Mac is included
+ # ./MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-mac.dmg
+ # ./MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-mac.zip
+ # ./latest-mac.yml
+ - name: Get version
+ id: get-ver
+ shell: bash
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#client-}
+ CLEAN=${CLEAN#v}
+ echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ - name: Create GitHub Release (draft)
+ uses: softprops/action-gh-release@v1
+ with:
+ files: |
+ MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-win.exe
+ MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-win.exe.blockmap
+ latest.yml
+ MEDomicsLab-${{ steps.get-ver.outputs.VERSION }}-linux.deb
+ name: MEDomicsLab Client-${{ steps.get-ver.outputs.VERSION }}
+ body: |
+ MEDomicsLab Client-${{ steps.get-ver.outputs.VERSION }}
+ Client-only Electron application (no Go/Python/Mongo bundled).
+ ${{ github.ref }}
+ draft: true
+ prerelease: false
+ token: ${{ secrets.GITHUB_TOKEN }}
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/serverRelease.yml b/.github/workflows/serverRelease.yml
new file mode 100644
index 00000000..50946c3d
--- /dev/null
+++ b/.github/workflows/serverRelease.yml
@@ -0,0 +1,195 @@
+name: Server bundle build and publish
+on:
+ push:
+ tags:
+ - "server-v*"
+permissions:
+ contents: write
+
+jobs:
+ build-windows:
+ runs-on: windows-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 18.13
+
+ - name: Set up Go
+ uses: actions/setup-go@v4
+ with:
+ go-version: 1.21
+
+ - name: Set version from tag
+ shell: bash
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#server-}
+ CLEAN=${CLEAN#v}
+ node -p "let p=require('./package.json'); p.version='${CLEAN}'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2));"
+
+ - name: Install dependencies
+ run: npm install
+
+ - name: Build server bundle (Windows)
+ run: node ./tools/pack_server.js --platform=win32
+
+ - name: Get version
+ id: get-ver
+ shell: bash
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#server-}
+ CLEAN=${CLEAN#v}
+ echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: server-windows
+ path: ./build/dist/MEDomicsLab-Server-${{ steps.get-ver.outputs.VERSION }}-win32.zip
+
+ build-linux:
+ runs-on: ubuntu-22.04
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 18.13
+
+ - name: Set up Go
+ uses: actions/setup-go@v4
+ with:
+ go-version: 1.21
+
+ - name: Set version from tag
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#server-}
+ CLEAN=${CLEAN#v}
+ node -p "let p=require('./package.json'); p.version='${CLEAN}'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2));"
+
+ - name: Install dependencies
+ run: npm install
+
+ - name: Build server bundle (Linux)
+ run: node ./tools/pack_server.js --platform=linux
+
+ - name: Get version
+ id: get-ver
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#server-}
+ CLEAN=${CLEAN#v}
+ echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: server-linux
+ path: ./build/dist/MEDomicsLab-Server-${{ steps.get-ver.outputs.VERSION }}-linux.zip
+
+ build-mac:
+ runs-on: macos-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 18.13
+
+ - name: Set up Go
+ uses: actions/setup-go@v4
+ with:
+ go-version: 1.21
+
+ - name: Set version from tag
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#server-}
+ CLEAN=${CLEAN#v}
+ node -p "let p=require('./package.json'); p.version='${CLEAN}'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2));"
+
+ - name: Install dependencies
+ run: npm install
+
+ - name: Build server bundle (macOS)
+ run: node ./tools/pack_server.js --platform=darwin
+
+ - name: Get version
+ id: get-ver
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#server-}
+ CLEAN=${CLEAN#v}
+ echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ - name: Upload artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: server-mac
+ path: ./build/dist/MEDomicsLab-Server-${{ steps.get-ver.outputs.VERSION }}-darwin.zip
+
+ publish:
+ needs: [build-windows, build-linux, build-mac]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: server-windows
+ path: .
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: server-linux
+ path: .
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: server-mac
+ path: .
+
+ - name: Get version
+ id: get-ver
+ shell: bash
+ run: |
+ TAG=${GITHUB_REF/refs\/tags\//}
+ CLEAN=${TAG#server-}
+ CLEAN=${CLEAN#v}
+ echo "VERSION=${CLEAN}" >> "$GITHUB_OUTPUT"
+
+ - name: Create GitHub Release (draft)
+ uses: softprops/action-gh-release@v1
+ with:
+ files: |
+ MEDomicsLab-Server-${{ steps.get-ver.outputs.VERSION }}-win32.zip
+ MEDomicsLab-Server-${{ steps.get-ver.outputs.VERSION }}-linux.zip
+ MEDomicsLab-Server-${{ steps.get-ver.outputs.VERSION }}-darwin.zip
+ name: MEDomicsLab Server-${{ steps.get-ver.outputs.VERSION }}
+ body: |
+ MEDomicsLab Server-${{ steps.get-ver.outputs.VERSION }}
+ Standalone Node/Express server bundle with Go + Python assets. Run the start script per OS.
+ ${{ github.ref }}
+ draft: true
+ prerelease: false
+ token: ${{ secrets.GITHUB_TOKEN }}
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 04eadbd3..8e262614 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,6 +14,7 @@ __pycache__/*
libmongocrypt/
.Python
.vscode/
+build/server/
develop-eggs/
dist/
downloads/
@@ -168,3 +169,4 @@ package-lock.json
pythonCode/local_dir/*
!*.gitkeep
+medomics-server-state.json
diff --git a/README.md b/README.md
index e309c9cd..813ef846 100644
--- a/README.md
+++ b/README.md
@@ -115,6 +115,108 @@ Explore our [contribution page](https://medomics-udes.gitbook.io/medomics-docs/c
+## Server Bundle (Headless)
+
+Use the standalone server bundle to run the backend (Express + Go server + Python code) without installing the Electron app. It will ensure Python and MongoDB are available and start services for you.
+
+- Download: grab the latest `MEDomicsLab-Server-<version>-<platform>.zip` from GitHub Releases.
+- Prerequisite: install Node.js 18+ on the host and ensure outbound internet access (to download Python/Mongo when needed).
+
+### Windows
+- Extract the zip, then in PowerShell run:
+
+```powershell
+cd <path-to-extracted-folder>
+./start.bat
+```
+
+- This will:
+ - Start the Express server and print the listening port
+ - Ensure the Go server, MongoDB, and Jupyter (if requested) are installed/running
+
+- Stop the server:
+
+```powershell
+./stop.bat
+```
+
+### Linux
+- Extract the zip, then in a shell:
+
+```bash
+cd <path-to-extracted-folder>
+chmod +x start.sh stop.sh
+./start.sh
+```
+
+- Stop the server:
+
+```bash
+./stop.sh
+```
+
+### macOS
+- Extract the zip, then in a shell:
+
+```bash
+cd <path-to-extracted-folder>
+chmod +x start.sh stop.sh
+./start.sh
+```
+
+- Stop the server:
+
+```bash
+./stop.sh
+```
+
+### Status and Health
+- Query status via CLI (from bundle folder):
+
+```bash
+node ./backend/cli/medomics-server.mjs status --json
+```
+
+- Or via HTTP from another terminal/app once you know the Express port (defaults to an available port in 5000–8000):
+
+```bash
+curl http://127.0.0.1:<port>/status
+```
+
+### Configuration hints
+- To force the Express port, set `MEDOMICS_EXPRESS_PORT` before starting:
+
+```bash
+MEDOMICS_EXPRESS_PORT=6000 ./start.sh # Linux/macOS
+```
+
+```powershell
+$env:MEDOMICS_EXPRESS_PORT=6000; ./start.bat # Windows PowerShell
+```
+
+If Python/Mongo are missing, the start script will download and install a local Python runtime under `~/.medomics/python` and set up MongoDB as needed.
+
+## Client vs Server Releases
+
+- Client (Electron app only)
+ - Tag pattern: `client-v*` (e.g., `client-v1.2.3`).
+ - Artifacts: Windows NSIS installer (`.exe` + `latest.yml` + `.blockmap`), Linux `.deb`, macOS `.dmg` and `.zip` (and `latest-mac.yml`).
+ - Contents: Electron UI only; no Go/Python/Mongo bundled.
+ - Use when you want the desktop UI and connect to a running backend.
+
+- Server (headless backend bundle)
+ - Tag pattern: `server-v*` (e.g., `server-v1.2.3`).
+ - Artifacts: `MEDomicsLab-Server-<version>-<platform>.zip` for `win32`, `linux`, `darwin`.
+ - Contents: `backend/` (Express + CLI), `go_executables/` (Go server), `pythonCode/`, `pythonEnv/`, plus start/stop scripts.
+ - Use when you want to deploy or run the backend without the Electron app.
+
+- Downloads
+ - Find both under GitHub Releases of this repository. Choose the release type (Client vs Server) matching your needs and your OS.
+
+- Versioning
+ - The tag version (after `client-v`/`server-v`) is written into `package.json` during CI, so artifacts reflect the tag version.
+ - Legacy `v*` tags (full multi-OS build) may still exist; prefer the split releases for clarity.
+
## Acknowledgement
This project relies on the following open-source packages, and we are grateful to their developers:
diff --git a/backend/cli/README.md b/backend/cli/README.md
new file mode 100644
index 00000000..22cab5cb
--- /dev/null
+++ b/backend/cli/README.md
@@ -0,0 +1,50 @@
+# MEDomics Server CLI (Scaffold)
+
+This folder contains a scaffolded command-line interface for running the MEDomics backend headlessly.
+
+## Commands
+
+| Command | Purpose |
+|---------|---------|
+| start | Launch the Express backend and persist a state file with port/PID. |
+| stop | Gracefully stop the backend using the saved PID; force kill after timeout. |
+| status | Query /status and print JSON summary. |
+| ensure | Idempotently start services (Go, Mongo, Jupyter). |
+| install | Drive Express endpoints to install Mongo and Python env/packages, then re-check requirements. |
+| upgrade | (Stub) Manifest-driven update process. |
+
+## State File
+
+Writes `medomics-server-state.json` in the current working directory containing:
+```json
+{
+ "running": true,
+ "pid": 12345,
+ "expressPort": 3737,
+ "started": "2025-11-06T12:34:56.789Z"
+}
+```
+
+## Next Steps
+1. Implement real install (call /install-bundled-python, /install-mongo endpoints or direct utils import).
+2. Add upgrade logic (download archive, verify SHA256 + signature, replace folder atomically).
+3. Provide stop + service management commands.
+4. Harden error handling & logging (structured logs, log rotation).
+
+## Development
+Run locally from repo root:
+```bash
+node ./backend/cli/medomics-server.mjs start
+node ./backend/cli/medomics-server.mjs stop
+node ./backend/cli/medomics-server.mjs status
+node ./backend/cli/medomics-server.mjs ensure --go --mongo --jupyter --workspace /path/to/workspace
+node ./backend/cli/medomics-server.mjs install --json
+```
+
+The `install` command will:
+1. Start Express if not already running (using the CLI's start logic).
+2. GET `/check-requirements`.
+3. POST `/install-mongo` if MongoDB is missing.
+4. POST `/install-bundled-python` if Python env is missing.
+5. POST `/install-required-python-packages` if packages are missing.
+6. GET `/check-requirements` again and print a JSON summary.
diff --git a/backend/cli/medomics-server.mjs b/backend/cli/medomics-server.mjs
new file mode 100644
index 00000000..9fdfaa97
--- /dev/null
+++ b/backend/cli/medomics-server.mjs
@@ -0,0 +1,505 @@
+#!/usr/bin/env node
+// MEDomics Server CLI scaffold
+// Provides headless management commands for the backend: start, status, ensure, install (stub), upgrade (stub)
+// This is an initial scaffold; many operations are stubs to be filled in later.
+
+import path from 'path'
+import fs from 'fs'
+import os from 'os'
+import { fork } from 'child_process'
+import http from 'http'
+// no __filename/__dirname needed in this scaffold
+// Removed __dirname and unused port range constants in scaffold to satisfy lint
+function resolveExpressServerPath() {
+ // Primary: cwd/backend/expressServer.mjs (original behavior)
+ const candidates = []
+ const cwdCandidate = path.resolve(process.cwd(), 'backend', 'expressServer.mjs')
+ candidates.push(cwdCandidate)
+ // Fallback: relative to this CLI file location (supports being invoked from other working dirs)
+ try {
+ const cliDirUrl = new URL('.', import.meta.url)
+ let cliDir = cliDirUrl.pathname
+ // On Windows the pathname may start with /C:/ ... strip leading slash if path like /C:/
+ if (process.platform === 'win32' && /^\/[a-zA-Z]:\//.test(cliDir)) cliDir = cliDir.slice(1)
+ const relCandidate = path.resolve(cliDir, '..', 'expressServer.mjs')
+ candidates.push(relCandidate)
+ } catch (e) { /* ignore URL resolution errors */ }
+ // Environment override: MEDOMICS_SERVER_ROOT (useful for tests)
+ if (process.env.MEDOMICS_SERVER_ROOT) {
+ candidates.push(path.resolve(process.env.MEDOMICS_SERVER_ROOT, 'backend', 'expressServer.mjs'))
+ candidates.push(path.resolve(process.env.MEDOMICS_SERVER_ROOT, 'expressServer.mjs'))
+ }
+ // Return first existing
+ for (const c of candidates) {
+ try {
+ if (fs.existsSync(c)) return c
+ } catch (e) { /* ignore fs errors */ }
+ }
+ return candidates[0] // fall back to primary even if missing (caller will error out)
+}
+function getStateFile(flags) {
+ const raw = flags['state-file'] ?? flags['stateFile']
+ const hasValidString = typeof raw === 'string' && raw.trim().length > 0
+ const f = hasValidString ? raw : null
+ const defaultState = path.resolve(os.homedir(), '.medomics', 'medomics-server', 'state.json')
+ let p = f ? path.resolve(f) : defaultState
+ // If --state-file points to a directory, place state.json inside it.
+ try {
+ if (fs.existsSync(p) && fs.statSync(p).isDirectory()) {
+ p = path.join(p, 'state.json')
+ }
+ } catch (e) { /* ignore and keep p as-is */ }
+ return p
+}
+
+function log(msg) {
+ if (!process.env.JSON) console.log(msg)
+}
+
+function writeStateAt(stateFile, state) {
+ const targetPath = path.resolve(String(stateFile || ''))
+ if (!targetPath) return false
+ const parentDir = path.dirname(targetPath)
+ const payload = JSON.stringify(state, null, 2)
+
+ const tryWrite = () => {
+ fs.mkdirSync(parentDir, { recursive: true })
+ fs.writeFileSync(targetPath, payload)
+ }
+
+ try {
+ tryWrite()
+ return true
+ } catch (e) {
+ // Rare race/ordering issue: parent removed between checks, retry once.
+ if (e && e.code === 'ENOENT') {
+ try {
+ tryWrite()
+ return true
+ } catch (retryErr) {
+ console.warn('[state-file] write failed after retry:', retryErr && retryErr.message ? retryErr.message : retryErr)
+ return false
+ }
+ }
+ console.warn('[state-file] write failed:', e && e.message ? e.message : e)
+ return false
+ }
+}
+
+function readStateAt(stateFile) {
+ if (!fs.existsSync(stateFile)) return null
+ try { return JSON.parse(fs.readFileSync(stateFile, 'utf-8')) } catch { return null }
+}
+
+function parseArgs(argv) {
+ const args = argv.slice(2)
+ const flags = {}
+ let command = null
+ const positionals = []
+ for (let i = 0; i < args.length; i++) {
+ const a = args[i]
+ if (!command && !a.startsWith('-')) { command = a; continue }
+ if (a.startsWith('--')) {
+ const body = a.slice(2)
+ const eq = body.indexOf('=')
+ if (eq !== -1) {
+ const k = body.slice(0, eq)
+ const v = body.slice(eq + 1)
+ flags[k] = v
+ } else {
+ // Support space-delimited values: --key value
+ const k = body
+ const next = args[i + 1]
+ if (next && !next.startsWith('-')) {
+ flags[k] = next
+ i++
+ } else {
+ flags[k] = true
+ }
+ }
+ } else {
+ positionals.push(a)
+ }
+ }
+ return { command, flags, positionals }
+}
+
+async function httpGet(port, pathName) {
+ return new Promise((resolve, reject) => {
+ const req = http.request({ hostname: '127.0.0.1', port, path: pathName, method: 'GET' }, res => {
+ let data=''
+ res.on('data', d=> data+=d)
+ res.on('end', ()=> {
+ try { resolve(JSON.parse(data)) } catch { resolve({ raw: data }) }
+ })
+ })
+ req.on('error', reject)
+ req.end()
+ })
+}
+
+async function httpPost(port, pathName, body) {
+ return new Promise((resolve, reject) => {
+ const payload = JSON.stringify(body||{})
+ const req = http.request({ hostname: '127.0.0.1', port, path: pathName, method: 'POST', headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(payload) } }, res => {
+ let data=''
+ res.on('data', d=> data+=d)
+ res.on('end', ()=> {
+ try { resolve(JSON.parse(data)) } catch { resolve({ raw: data }) }
+ })
+ })
+ req.on('error', reject)
+ req.write(payload)
+ req.end()
+ })
+}
+
+async function startCommand(flags) {
+ const stateFile = getStateFile(flags)
+ const existing = readStateAt(stateFile)
+ if (existing?.running) {
+ if (existing.pid && pidIsAlive(existing.pid)) {
+ console.error('Server already running (state file present). Use status or stop.')
+ process.exit(1)
+ } else {
+ // Stale state file; remove and proceed to start fresh
+ try { fs.unlinkSync(stateFile) } catch (e) { /* ignore unlink errors */ }
+ }
+ }
+ const expressServerPath = resolveExpressServerPath()
+ if (!fs.existsSync(expressServerPath)) {
+ console.error('expressServer.mjs not found at expected path: ' + expressServerPath)
+ process.exit(1)
+ }
+ log('Starting MEDomics Express server...')
+ // Create placeholder state so the file exists even if startup fails early
+ try {
+ writeStateAt(stateFile, { starting: true, pid: null, expressPort: null, created: new Date().toISOString() })
+ } catch (e) {
+ // Best-effort; continue even if we can't write state yet
+ }
+ // Prepare log capture (tail) and file logging
+ const stateDir = path.dirname(stateFile)
+ const logFilePath = path.resolve(stateDir, 'server-child.log')
+ const appendLog = (prefix, data) => {
+ try { fs.appendFileSync(logFilePath, `[${new Date().toISOString()}] ${prefix}: ${data}`) } catch (e) { /* ignore */ }
+ }
+ const maxTail = 65536
+ let stdoutTail = ''
+ let stderrTail = ''
+ const addTail = (cur, chunk) => {
+ cur += chunk
+ if (cur.length > maxTail) cur = cur.slice(cur.length - maxTail)
+ return cur
+ }
+
+ // Use silent fork to capture stdout/stderr, then mirror to console
+ const child = fork(expressServerPath, [], { silent: true, env: { ...process.env, NODE_ENV: flags.production ? 'production' : (process.env.NODE_ENV||'development') } })
+ if (child.stdout) {
+ child.stdout.on('data', (d) => {
+ const s = d.toString()
+ stdoutTail = addTail(stdoutTail, s)
+ try { process.stdout.write(s) } catch (e) { /* ignore write error */ }
+ appendLog('stdout', s)
+ })
+ }
+ if (child.stderr) {
+ child.stderr.on('data', (d) => {
+ const s = d.toString()
+ stderrTail = addTail(stderrTail, s)
+ try { process.stderr.write(s) } catch (e) { /* ignore write error */ }
+ appendLog('stderr', s)
+ })
+ }
+ const timeoutMs = parseInt(flags.timeout||'15000',10)
+ let settled = false
+ child.on('message', async (msg) => {
+ if (settled) return
+ if (msg && msg.type === 'EXPRESS_PORT') {
+ settled = true
+ const state = { running: true, pid: child.pid, expressPort: msg.expressPort, started: new Date().toISOString() }
+ writeStateAt(stateFile, state)
+ // Always emit a JSON line that background.js can parse
+ process.stdout.write(JSON.stringify({ success: true, state, expressPort: state.expressPort })+'\n')
+ if (!flags.json) {
+ log('Express started on port ' + msg.expressPort)
+ }
+ }
+ })
+ child.on('exit', code => {
+ if (!settled) {
+ console.error('Express server exited prematurely with code', code)
+ try {
+ const prev = readStateAt(stateFile) || {}
+ writeStateAt(stateFile, { ...prev, running: false, failed: true, code: code||1, ended: new Date().toISOString(), lastStdout: stdoutTail, lastStderr: stderrTail, expressPath: expressServerPath, cwd: process.cwd(), node: process.version })
+ } catch (e) { /* ignore state write error */ }
+ process.exit(code||1)
+ }
+ })
+ // Fallback timeout
+ setTimeout(() => {
+ if (!settled) {
+ console.error('Timed out waiting for Express port message')
+ try { child.kill() } catch (e) { /* ignore kill errors */ }
+ try {
+ const prev = readStateAt(stateFile) || {}
+ writeStateAt(stateFile, { ...prev, running: false, failed: true, timeout: true, waitedMs: timeoutMs, ended: new Date().toISOString(), lastStdout: stdoutTail, lastStderr: stderrTail, expressPath: expressServerPath, cwd: process.cwd(), node: process.version })
+ } catch (e) { /* ignore state write error */ }
+ process.exit(1)
+ }
+ }, timeoutMs)
+}
+
+async function statusCommand(flags) {
+ const stateFile = getStateFile(flags)
+ const state = readStateAt(stateFile)
+ if (!state?.expressPort) {
+ console.error('No running state found (start not invoked or state file missing).')
+ process.exit(1)
+ }
+ try {
+ const status = await httpGet(state.expressPort, '/status')
+ const out = { success: true, pid: state.pid, expressPort: state.expressPort, status }
+ process.stdout.write(JSON.stringify(out, null, flags.json?0:2)+'\n')
+ } catch (e) {
+ console.error('Failed to query status:', e.message)
+ process.exit(1)
+ }
+}
+
+async function ensureCommand(flags) {
+ const stateFile = getStateFile(flags)
+ const state = readStateAt(stateFile)
+ if (!state?.expressPort) {
+ console.error('Cannot ensure services: server not started.')
+ process.exit(1)
+ }
+ const port = state.expressPort
+ const result = {}
+ try {
+ if (flags.go) result.go = await httpPost(port, '/ensure-go', {})
+ if (flags.mongo) result.mongo = await httpPost(port, '/ensure-mongo', { workspacePath: flags.workspace })
+ if (flags.jupyter) result.jupyter = await httpPost(port, '/ensure-jupyter', { workspacePath: flags.workspace })
+ process.stdout.write(JSON.stringify({ success: true, ensured: result }, null, flags.json?0:2)+'\n')
+ } catch (e) {
+ console.error('Ensure failed:', e.message)
+ process.exit(1)
+ }
+}
+
+async function installCommand(flags) {
+ // Option 1 implementation: drive existing Express endpoints
+ // 1) Ensure Express is running (start if no state)
+ const stateFile = getStateFile(flags)
+ let state = readStateAt(stateFile)
+ if (!state?.expressPort) {
+ log('Express not started (no state found). Starting...')
+ await startCommand({ ...flags })
+ // re-read state after start
+ state = readStateAt(stateFile)
+ }
+ if (!state?.expressPort) {
+ console.error('Failed to obtain Express port after start.')
+ process.exit(1)
+ }
+ const port = state.expressPort
+
+ const summary = { actions: [], initial: null, final: null }
+
+ // 2) Check current requirements
+ try {
+ const check = await httpGet(port, '/check-requirements')
+ summary.initial = check
+ } catch (e) {
+ console.error('check-requirements failed:', e.message)
+ process.exit(1)
+ }
+
+ const init = summary.initial?.result || summary.initial
+
+ // Helpers to detect missing components tolerant to schema differences
+ const needsMongo = () => {
+ if (!init) return true
+ const candidates = [
+ init.mongo?.installed,
+ init.mongo?.ok,
+ init.mongoInstalled,
+ init.mongo_ok,
+ init.mongo
+ ]
+ for (const v of candidates) {
+ if (v === true) return false
+ if (v === false) return true
+ }
+ return true
+ }
+
+ const needsPythonEnv = () => {
+ if (!init) return true
+ const candidates = [
+ init.python?.installed,
+ init.python?.ok,
+ init.pythonEnv?.installed,
+ init.pythonInstalled,
+ init.python_ok,
+ init.python
+ ]
+ for (const v of candidates) {
+ if (v === true) return false
+ if (v === false) return true
+ }
+ return true
+ }
+
+ const needsPythonPackages = () => {
+ if (!init) return true
+ const candidates = [
+ init.python?.packagesOk,
+ init.pythonPackages?.ok,
+ init.pythonPackagesOk,
+ ]
+ for (const v of candidates) {
+ if (v === true) return false
+ if (v === false) return true
+ }
+ return true
+ }
+
+ // 3) Install MongoDB if needed
+ try {
+ if (needsMongo()) {
+ if (!flags.json) console.log('Installing MongoDB...')
+ const r = await httpPost(port, '/install-mongo', {})
+ summary.actions.push({ step: 'install-mongo', response: r })
+ }
+ } catch (e) {
+ console.error('install-mongo failed:', e.message)
+ process.exit(1)
+ }
+
+ // 4) Install Python env and packages if needed
+ try {
+ if (needsPythonEnv()) {
+ if (!flags.json) console.log('Installing bundled Python environment...')
+ const r = await httpPost(port, '/install-bundled-python', {})
+ summary.actions.push({ step: 'install-bundled-python', response: r })
+ }
+ } catch (e) {
+ console.error('install-bundled-python failed:', e.message)
+ process.exit(1)
+ }
+
+ try {
+ if (needsPythonPackages()) {
+ if (!flags.json) console.log('Installing required Python packages...')
+ const r = await httpPost(port, '/install-required-python-packages', {})
+ summary.actions.push({ step: 'install-required-python-packages', response: r })
+ }
+ } catch (e) {
+ console.error('install-required-python-packages failed:', e.message)
+ process.exit(1)
+ }
+
+ // 5) Re-check requirements and print summary
+ try {
+ const final = await httpGet(port, '/check-requirements')
+ summary.final = final
+ } catch (e) {
+ console.error('final check-requirements failed:', e.message)
+ process.exit(1)
+ }
+
+ process.stdout.write(JSON.stringify({ success: true, install: summary }, null, flags.json?0:2)+'\n')
+}
+
+async function upgradeCommand(flags) {
+ // Placeholder: Would fetch manifest, compare versions, download, verify, extract.
+ process.stdout.write(JSON.stringify({ success: true, message: 'Upgrade stub – implement manifest-driven update.' }, null, flags.json?0:2)+'\n')
+}
+
+async function main() {
+ const { command, flags } = parseArgs(process.argv)
+ if (!command || flags.help) {
+ console.log(`MEDomics Server CLI
+Usage: medomics-server [flags]
+
+Commands:
+ start Start Express backend
+ stop Stop the running backend
+ status Show JSON status snapshot
+ ensure [--go --mongo --jupyter --workspace PATH]
+ install Install dependencies (stub)
+ upgrade Upgrade server (stub)
+
+Flags:
+ --workspace=PATH Workspace root for ensure operations
+ --timeout=MS Startup timeout (default 15000)
+ --json Emit compact JSON outputs
+ --production Set NODE_ENV=production
+ --state-file=PATH Path to state file (default .medomics/medomics-server/state.json)
+ --help Display this help
+`)
+ process.exit(0)
+ }
+ switch (command) {
+ case 'start': return startCommand(flags)
+ case 'status': return statusCommand(flags)
+ case 'ensure': return ensureCommand(flags)
+ case 'install': return installCommand(flags)
+ case 'upgrade': return upgradeCommand(flags)
+ case 'stop': return stopCommand(flags)
+ default:
+ console.error('Unknown command:', command)
+ process.exit(1)
+ }
+}
+
+function pidIsAlive(pid) {
+ try { process.kill(pid, 0); return true } catch { return false }
+}
+
+async function stopCommand(flags) {
+ const stateFile = getStateFile(flags)
+ const state = readStateAt(stateFile)
+ if (!state?.pid) {
+ console.error('No running state (nothing to stop).')
+ process.exit(1)
+ }
+ const pid = state.pid
+ const alive = pidIsAlive(pid)
+ if (!alive) {
+ // Stale state file
+ fs.unlinkSync(stateFile)
+ if (flags.json) {
+ process.stdout.write(JSON.stringify({ success: true, message: 'State file removed (process already dead).' })+'\n')
+ } else {
+ console.log('Process already stopped; state file cleaned.')
+ }
+ return
+ }
+ if (!flags.json) console.log('Stopping MEDomics server process PID', pid)
+ try {
+ process.kill(pid, 'SIGTERM')
+ } catch (e) {
+ console.error('Failed to send SIGTERM:', e.message)
+ }
+ const deadline = Date.now() + 5000
+ const interval = setInterval(() => {
+ if (!pidIsAlive(pid)) {
+ clearInterval(interval)
+ try { fs.unlinkSync(stateFile) } catch (e) { /* ignore */ }
+ const result = { success: true, stopped: true }
+ process.stdout.write(JSON.stringify(result)+'\n')
+ } else if (Date.now() > deadline) {
+ clearInterval(interval)
+ // Force kill
+ try { process.kill(pid, 'SIGKILL') } catch (e) { /* ignore */ }
+ const forced = !pidIsAlive(pid)
+ try { fs.unlinkSync(stateFile) } catch (e) { /* ignore */ }
+ const result = { success: forced, forced }
+ process.stdout.write(JSON.stringify(result)+'\n')
+ }
+ }, 250)
+}
+
+// Entry point. FIX: surface any unhandled command failure instead of dying
+// with an unhandled promise rejection.
+main().catch((err) => { console.error(err); process.exit(1) })
diff --git a/backend/expressServer.mjs b/backend/expressServer.mjs
new file mode 100644
index 00000000..0521f527
--- /dev/null
+++ b/backend/expressServer.mjs
@@ -0,0 +1,1399 @@
+import * as serverPathUtils from "./utils/serverPathUtils.js"
+const { setAppPath } = serverPathUtils
+import express from "express"
+import bodyParser from "body-parser"
+import axios from "axios"
+import * as serverWorkspace from "./utils/serverWorkspace.js"
+const { createServerMedomicsDirectory, createServerWorkingDirectory, getServerWorkingDirectory } = serverWorkspace
+import * as mongoDBServer from "./utils/mongoDBServer.js"
+const { startMongoDB, stopMongoDB, getMongoDBPath, checkMongoIsRunning, getMongoDebugInfo } = mongoDBServer
+import cors from "cors"
+import dirTree from "directory-tree"
+import { exec, execSync } from "child_process"
+import * as pythonEnv from "./utils/pythonEnv.js"
+const { getBundledPythonEnvironment, installBundledPythonExecutable, installRequiredPythonPackages, checkPythonRequirements, ensurePythonRequirementsInstalled } = pythonEnv
+import * as jupyterServer from "./utils/jupyterServer.js"
+const { startJupyterServer, stopJupyterServer, checkJupyterIsRunning } = jupyterServer
+import MEDconfig from "./utils/medomics.server.dev.js"
+import * as serverInstallation from "./utils/serverInstallation.js"
+const { checkRequirements } = serverInstallation
+import { runServer, findAvailablePort } from "./utils/server.mjs"
+import fs from "fs"
+import path from "path"
+import os from "os"
+import crypto from "crypto"
+import mongodb from "mongodb"
+import Papa from "papaparse"
+const { MongoClient } = mongodb
+
+// Express application shared by all routes below; JSON bodies only.
+const expressApp = express()
+expressApp.use(bodyParser.json())
+expressApp.use(cors())
+
+// Permissive CORS headers on every response, in addition to cors() above —
+// presumably kept for clients that preflight manually; confirm before removing.
+expressApp.use(function(req, res, next) {
+  res.header("Access-Control-Allow-Origin", "*")
+  res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept")
+  next()
+})
+
+// Service state snapshot to report via /status and to keep idempotent ensures
+const serviceState = {
+ expressPort: null,
+ go: { running: false, port: null },
+ mongo: { running: false, port: null },
+ jupyter: { running: false, port: null },
+ exploratory: {
+ dtale: { sessions: {} },
+ sweetviz: { sessions: {} },
+ ydata: { sessions: {} }
+ }
+}
+
+// Keep a handle to the HTTP server to support graceful stop via endpoint
+let httpServer = null
+
+// --- State file helpers ---
+function getStateFilePath() {
+ const dir = path.join(os.homedir(), ".medomics", "medomics-server")
+ try { if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }) } catch(e) { console.warn("[state-file] mkdir error:", e && e.message ? e.message : e) }
+ return path.join(dir, "state.json")
+}
+
+function snapshotState(started) {
+ return {
+ started: !!started,
+ expressPort: serviceState.expressPort,
+ pid: process.pid,
+ updatedAt: new Date().toISOString()
+ }
+}
+
+function writeStateFile(started) {
+ try {
+ const p = getStateFilePath()
+ const payload = snapshotState(started)
+ fs.writeFileSync(p, JSON.stringify(payload, null, 2))
+ } catch (e) {
+ console.warn("[state-file] write error:", e && e.message ? e.message : e)
+ }
+}
+
+// On process termination, mark started=false best-effort
+function setupGracefulShutdownState() {
+ const markStopped = () => {
+ try { writeStateFile(false) } catch(e) { console.warn("[state-file] write error on shutdown:", e && e.message ? e.message : e) }
+ }
+ try {
+ process.on("SIGINT", () => { markStopped(); process.exit(0) })
+ process.on("SIGTERM", () => { markStopped(); process.exit(0) })
+ process.on("beforeExit", () => { markStopped() })
+ process.on("exit", () => { markStopped() })
+ } catch(e) { console.warn("[state-file] error setting up graceful shutdown handlers:", e && e.message ? e.message : e) }
+}
+
+// True only when NODE_ENV is explicitly "production"
+let isProd = process.env.NODE_ENV && process.env.NODE_ENV === "production"
+// Child-process handle for the GO backend (null until started)
+let goServerProcess = null
+// Shared MongoClient, created lazily by connectToDataDB()
+let mongoClient = null
+
+function createRequestUUID() {
+ if (typeof crypto.randomUUID === "function") {
+ return crypto.randomUUID()
+ }
+ if (typeof crypto.randomBytes === "function") {
+ const bytes = crypto.randomBytes(16)
+ bytes[6] = (bytes[6] & 0x0f) | 0x40
+ bytes[8] = (bytes[8] & 0x3f) | 0x80
+ const hex = bytes.toString("hex")
+ return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`
+ }
+ return `${Date.now()}-${Math.random().toString(16).slice(2, 10)}`
+}
+
+function getMongoUri() {
+ const mongoPort = serviceState?.mongo?.port || 54017
+ return `mongodb://127.0.0.1:${mongoPort}`
+}
+
+async function connectToDataDB() {
+ if (!mongoClient) {
+ mongoClient = new MongoClient(getMongoUri())
+ await mongoClient.connect()
+ }
+ return mongoClient.db("data")
+}
+
+function normalizeValue(value) {
+  // Map empty/placeholder cell values (null/undefined, NaN, blank strings,
+  // "nan"/"null"/"none" in any case) to null; everything else passes through
+  // untouched — including the original, untrimmed string.
+  if (value === null || value === undefined) return null
+  if (typeof value === "number" && Number.isNaN(value)) return null
+  if (typeof value !== "string") return value
+  const trimmed = value.trim()
+  if (trimmed === "") return null
+  switch (trimmed.toLowerCase()) {
+    case "nan":
+    case "null":
+    case "none":
+      return null
+    default:
+      return value
+  }
+}
+
+function stripIds(doc = {}) {
+ const { _id, id, ...rest } = doc
+ return rest
+}
+
+// Stream a large CSV into `collectionName` in 1000-row batches, pausing the
+// parser while each insertMany is in flight so memory stays bounded. The
+// first non-empty row fixes the allowed column set; `_id`/`id` keys and
+// placeholder cell values are normalized out. Resolves { insertedCount }.
+async function insertBigCSVIntoCollection(filePath, collectionName) {
+  const db = await connectToDataDB()
+  const collection = db.collection(collectionName)
+
+  let allowedColumns = null
+  const batchSize = 1000
+  let batch = []
+  let totalInserted = 0
+
+  return new Promise((resolve, reject) => {
+    Papa.parse(fs.createReadStream(filePath), {
+      header: true,
+      dynamicTyping: true,
+      skipEmptyLines: true,
+      transformHeader: (h) => (h || "").trim(),
+      transform: (value) => normalizeValue(value),
+      step: (results, parser) => {
+        const row = results.data
+
+        // Lock the column set to the first non-empty row's headers
+        if (!allowedColumns && Object.keys(row).length > 0) {
+          allowedColumns = Object.keys(row)
+        }
+
+        const cleanedRow = stripIds(
+          Object.fromEntries(
+            Object.entries(row)
+              .filter(([key]) => allowedColumns.includes(key))
+              .map(([key, value]) => [key, normalizeValue(value)])
+          )
+        )
+
+        batch.push(cleanedRow)
+
+        // Flush a full batch; parsing resumes once the insert settles
+        if (batch.length >= batchSize) {
+          parser.pause()
+          collection
+            .insertMany(batch)
+            .then(() => {
+              totalInserted += batch.length
+              batch = []
+              parser.resume()
+            })
+            .catch((error) => {
+              reject(error)
+              parser.abort()
+            })
+        }
+      },
+      complete: async () => {
+        // Flush the trailing partial batch, then report the running total
+        try {
+          if (batch.length > 0) {
+            await collection.insertMany(batch)
+            totalInserted += batch.length
+            resolve({ insertedCount: totalInserted })
+          } else {
+            resolve({ insertedCount: totalInserted })
+          }
+        } catch (error) {
+          reject(error)
+        }
+      },
+      error: (error) => reject(error)
+    })
+  })
+}
+
+async function insertCSVIntoCollection(filePath, collectionName) {
+ const db = await connectToDataDB()
+ const collection = db.collection(collectionName)
+ const fileSize = fs.statSync(filePath).size
+ const maxBSONSize = 16 * 1024 * 1024
+
+ if (fileSize > maxBSONSize) {
+ return await insertBigCSVIntoCollection(filePath, collectionName)
+ }
+
+ return new Promise((resolve, reject) => {
+ Papa.parse(fs.createReadStream(filePath), {
+ header: true,
+ dynamicTyping: true,
+ transform: (value) => normalizeValue(value),
+ complete: async (results) => {
+ try {
+ const rows = (results.data || []).map((row) =>
+ Object.fromEntries(Object.entries(row || {}).map(([key, value]) => [key, normalizeValue(value)]))
+ )
+
+ if (!rows.length) {
+ resolve({ insertedCount: 0 })
+ return
+ }
+
+ const result = await collection.insertMany(rows)
+ resolve({ insertedCount: result.insertedCount || 0 })
+ } catch (err) {
+ reject(err)
+ }
+ },
+ error: (error) => reject(error)
+ })
+ })
+}
+
+async function insertHTMLIntoCollection(filePath, collectionName) {
+ const db = await connectToDataDB()
+ const collection = db.collection(collectionName)
+ const htmlContent = fs.readFileSync(filePath, "utf8")
+ await collection.insertOne({ htmlContent })
+ return { insertedCount: 1 }
+}
+
+async function insertImageIntoCollection(filePath, collectionName) {
+ const db = await connectToDataDB()
+ const collection = db.collection(collectionName)
+ const data = fs.readFileSync(filePath)
+ await collection.insertOne({ path: filePath, data })
+ return { insertedCount: 1 }
+}
+
+async function insertPKLIntoCollection(filePath, collectionName) {
+ const db = await connectToDataDB()
+ const collection = db.collection(collectionName)
+ const fileSize = fs.statSync(filePath).size
+ const maxBSONSize = 16 * 1024 * 1024
+ if (fileSize > maxBSONSize) {
+ throw new Error(`PKL file ${filePath} size exceeds the maximum BSON document size of 16MB`)
+ }
+ const pklContent = fs.readFileSync(filePath)
+ await collection.insertOne({ pklContent })
+ return { insertedCount: 1 }
+}
+
+async function insertObjectIntoCollectionRemote(objectPath, medDataObject) {
+ if (!objectPath || !medDataObject || !medDataObject.id || !medDataObject.type) {
+ throw new Error("Invalid insert payload")
+ }
+
+ if (!fs.existsSync(objectPath)) {
+ throw new Error(`Input file does not exist: ${objectPath}`)
+ }
+
+ const type = String(medDataObject.type).toLowerCase()
+ const collectionName = medDataObject.id
+
+ switch (type) {
+ case "csv":
+ return await insertCSVIntoCollection(objectPath, collectionName)
+ case "html":
+ return await insertHTMLIntoCollection(objectPath, collectionName)
+ case "png":
+ case "jpg":
+ case "jpeg":
+ return await insertImageIntoCollection(objectPath, collectionName)
+ case "pkl":
+ return await insertPKLIntoCollection(objectPath, collectionName)
+ default:
+ throw new Error(`Unsupported object type: ${medDataObject.type}`)
+ }
+}
+
+// Export a Mongo collection to `filePath` in the requested format
+// (csv | html | json | png/jpg/jpeg | pkl). `_id` is excluded from the
+// projection. Throws when the collection is empty, when no document carries
+// the expected payload field for the type, or on an unsupported type.
+async function downloadCollectionToFileRemote(collectionId, filePath, type) {
+  const db = await connectToDataDB()
+  const collection = db.collection(collectionId)
+  const documents = await collection.find({}, { projection: { _id: 0 } }).toArray()
+
+  if (!documents.length) {
+    throw new Error(`No documents found in collection ${collectionId}`)
+  }
+
+  const normalizedType = String(type || "").toLowerCase()
+  if (normalizedType === "csv") {
+    const csv = Papa.unparse(documents)
+    fs.writeFileSync(filePath, csv)
+    return
+  }
+
+  if (normalizedType === "html") {
+    // Concatenate every stored htmlContent fragment, newline-separated
+    const htmlDocuments = documents.map((doc) => doc.htmlContent).filter((content) => content)
+    if (!htmlDocuments.length) throw new Error(`No valid HTML content found in collection ${collectionId}`)
+    fs.writeFileSync(filePath, htmlDocuments.join("\n"))
+    return
+  }
+
+  if (normalizedType === "json") {
+    fs.writeFileSync(filePath, JSON.stringify(documents, null, 2))
+    return
+  }
+
+  if (normalizedType === "png" || normalizedType === "jpg" || normalizedType === "jpeg") {
+    // First document with a `data` field wins; `.buffer` handles BSON Binary
+    const imageDocument = documents.find((doc) => doc.data)
+    if (!imageDocument) throw new Error(`No valid image content found in collection ${collectionId}`)
+    const imageBuffer = Buffer.from(imageDocument.data.buffer || imageDocument.data)
+    fs.writeFileSync(filePath, imageBuffer)
+    return
+  }
+
+  if (normalizedType === "pkl") {
+    // Accept several historical field names; base64 payloads are decoded
+    const firstDocument = documents[0] || {}
+    const source = firstDocument.model || firstDocument.pklContent || firstDocument.base64
+    if (!source) throw new Error(`No valid PKL content found in collection ${collectionId}`)
+    const pklBuffer = Buffer.isBuffer(source) ? source : Buffer.from(source.buffer || source, firstDocument.base64 ? "base64" : undefined)
+    fs.writeFileSync(filePath, pklBuffer)
+    return
+  }
+
+  throw new Error(`Unsupported file type: ${type}`)
+}
+
+// Start the Express HTTP server. Port selection order: MEDOMICS_EXPRESS_PORT
+// env override → legacy findAvailablePort (raced against an 8s timeout) →
+// plain net-binding scan. On listen, state.json is written (started=true) and
+// a forked parent (if any) is notified with an EXPRESS_PORT message.
+export async function startExpressServer() {
+  try {
+    console.log('[express:start] scanning ports', EXPRESS_PORT_START, '-', EXPRESS_PORT_END)
+    const envPort = process.env.MEDOMICS_EXPRESS_PORT && Number(process.env.MEDOMICS_EXPRESS_PORT)
+    let expressPort = null
+    if (envPort && envPort > 0 && envPort < 65536) {
+      console.log('[express:start] using MEDOMICS_EXPRESS_PORT override', envPort)
+      expressPort = envPort
+    } else {
+      // Primary legacy finder (may rely on netstat/lsof)
+      let primaryFailed = null
+      try {
+        expressPort = await Promise.race([
+          findAvailablePort(EXPRESS_PORT_START, EXPRESS_PORT_END),
+          new Promise((_, reject) => setTimeout(() => reject(new Error('legacy-port-scan-timeout')), 8000))
+        ])
+      } catch (e) {
+        primaryFailed = e
+        console.warn('[express:start] legacy port finder failed:', e && e.message ? e.message : e)
+      }
+      if (!expressPort) {
+        console.log('[express:start] falling back to simple net binding scan')
+        expressPort = await simpleFindAvailablePort(EXPRESS_PORT_START, EXPRESS_PORT_END)
+      }
+      if (!expressPort) {
+        // Surface the original failure when the fallback also found nothing
+        throw primaryFailed || new Error('no-port-found')
+      }
+    }
+    console.log('[express:start] selected port', expressPort)
+    httpServer = expressApp.listen(expressPort, () => {
+      console.log(`Express server listening on port ${expressPort}`)
+      // Write state.json with started=true and selected port
+      writeStateFile(true)
+      setupGracefulShutdownState()
+    })
+    httpServer.on('error', (err) => {
+      console.error('[express:start] server error event', err && err.stack ? err.stack : err)
+    })
+    httpServer.on('close', () => {
+      // Mark stopped on server close
+      writeStateFile(false)
+      serviceState.expressPort = null
+      httpServer = null
+    })
+    // Assigned before the async 'listening' callback fires, so the state
+    // snapshot written there already carries the port.
+    serviceState.expressPort = expressPort
+    if (process.send) {
+      process.send({ type: 'EXPRESS_PORT', expressPort })
+    }
+  } catch (err) {
+    console.error('[express:start] failed to start Express server:', err && err.stack ? err.stack : err)
+    throw err
+  }
+}
+
+// Simple fallback port finder using net module only
+import net from 'net'
+async function simpleFindAvailablePort(start, end) {
+ for (let p = start; p <= end; p++) {
+ const ok = await new Promise(resolve => {
+ const tester = net.createServer()
+ tester.once('error', () => { try { tester.close(()=>resolve(false)) } catch { resolve(false) } })
+ tester.once('listening', () => tester.close(() => resolve(true)))
+ tester.listen(p, '127.0.0.1')
+ })
+ if (ok) return p
+ }
+ return null
+}
+
+function normalizePathForPlatform(p) {
+ if (!p) return p
+ let normalized = p.replace(/\\/g, '/')
+ if (process.platform === 'win32') {
+ normalized = normalized.replace(/\//g, '\\')
+ if (normalized.match(/^\\[A-Za-z]:/)) {
+ normalized = normalized.slice(1)
+ }
+ }
+ return normalized
+}
+
+// Launch the GO backend via the shared runServer helper, first making sure
+// the bundled Python env exists and has the required packages (GO-launched
+// scripts import them at startup). Updates serviceState.go and keeps the
+// child-process handle; rethrows on any failure after clearing that state.
+async function startGoServer(preferredPort = null) {
+  // Kick the Go server using existing helper; capture process handle and update state
+  try {
+    // Ensure bundled python exists and has required packages (e.g. pandas)
+    // so GO-launched scripts don't fail at import time.
+    try {
+      const pythonExe = getBundledPythonEnvironment()
+      if (!pythonExe) {
+        throw new Error('Bundled Python environment not found')
+      }
+      const reqOk = checkPythonRequirements(pythonExe)
+      if (!reqOk) {
+        console.log('[python] requirements missing; installing into', pythonExe)
+        await ensurePythonRequirementsInstalled(null, pythonExe)
+      }
+    } catch (pyErr) {
+      console.error('[python] ensure requirements failed:', pyErr && pyErr.message ? pyErr.message : pyErr)
+      throw pyErr
+    }
+
+    // NOTE(review): runServer receives serviceState.go directly — presumably
+    // it mutates it too; confirm against utils/server.mjs.
+    const { process: proc, port } = await runServer(isProd, preferredPort, goServerProcess, serviceState.go, null)
+    goServerProcess = proc
+    serviceState.go.running = true
+    serviceState.go.port = port
+    return { running: true, port }
+  } catch (err) {
+    serviceState.go.running = false
+    serviceState.go.port = null
+    throw err
+  }
+}
+
+function cleanGoResponsePayload(payload = "") {
+ let response = payload || ""
+ if (typeof response !== "string") return response
+ response = response.split("NaN").join("null")
+
+ let candidate = response
+ for (let i = 0; i < 4; i++) {
+ if (typeof candidate !== "string") return candidate
+
+ try {
+ const parsed = JSON.parse(candidate)
+ if (typeof parsed === "string") {
+ candidate = parsed
+ continue
+ }
+ return parsed
+ } catch (_) {
+ const startIdx = candidate.indexOf("{")
+ const endIdx = candidate.lastIndexOf("}")
+ if (startIdx >= 0 && endIdx > startIdx) {
+ const trimmed = candidate.substring(startIdx, endIdx + 1)
+ if (trimmed !== candidate) {
+ candidate = trimmed
+ continue
+ }
+ }
+ break
+ }
+ }
+
+ return candidate
+}
+
+// POST `payload` (JSON-stringified into a { message } envelope) to the GO
+// server at `topic`. options.timeoutMs defaults to 120s; 0 or negative means
+// no timeout. Replies tagged "toParse" are run through cleanGoResponsePayload.
+// Throws when the GO server has no recorded port.
+async function callGoEndpoint(topic, payload = {}, options = {}) {
+  if (!serviceState.go.port) {
+    throw new Error("GO server is not running")
+  }
+  // Tolerate topics given with or without a leading slash
+  const url = `http://127.0.0.1:${serviceState.go.port}${topic.startsWith("/") ? "" : "/"}${topic}`
+  const timeoutMs = Number.isFinite(options?.timeoutMs) ? Number(options.timeoutMs) : 120000
+  const response = await axios.post(url, { message: JSON.stringify(payload) }, { headers: { "Content-Type": "application/json" }, timeout: timeoutMs <= 0 ? 0 : timeoutMs })
+  const data = response && response.data ? response.data : {}
+  if (data.type === "toParse") {
+    return cleanGoResponsePayload(data.response_message)
+  }
+  return data.response_message
+}
+
+function parseDtalePort(webServerUrl) {
+ if (!webServerUrl) return null
+ try {
+ const u = new URL(webServerUrl)
+ const p = Number(u.port)
+ return Number.isFinite(p) ? p : null
+ } catch (_) {
+ return null
+ }
+}
+
+// Normalize a D-Tale progress payload into { parsed, error, remotePort,
+// webServerUrl, name, snapshot }. Structured (object) payloads are read
+// directly; otherwise the raw string is scanned with regexes as a fallback
+// for replies that survived only as escaped JSON text.
+function extractDtaleReadyInfo(progressPayload) {
+  let parsed = progressPayload
+  if (typeof parsed === "string") {
+    parsed = cleanGoResponsePayload(parsed)
+  }
+
+  if (parsed && typeof parsed === "object") {
+    // Prefer the explicit port field; fall back to the port in the URL
+    const remotePort = Number(parsed.port) || parseDtalePort(parsed.web_server_url)
+    return {
+      parsed,
+      error: parsed.error || null,
+      remotePort: Number.isFinite(remotePort) && remotePort > 0 ? remotePort : null,
+      webServerUrl: parsed.web_server_url || null,
+      name: parsed.name || null,
+      snapshot: JSON.stringify(parsed)
+    }
+  }
+
+  // Fallback: regex-scan the raw text (after un-escaping \" sequences)
+  const raw = typeof progressPayload === "string" ? progressPayload : String(progressPayload || "")
+  const rawNormalized = raw.split('\\"').join('"')
+  const urlMatch = rawNormalized.match(/"web_server_url"\s*:\s*"([^"]+)"/)
+  const portMatch = rawNormalized.match(/"port"\s*:\s*(\d+)/)
+  const nameMatch = rawNormalized.match(/"name"\s*:\s*"([^"]+)"/)
+  const errorMatch = rawNormalized.match(/"error"\s*:\s*"([^"]+)"/)
+
+  const urlFromRaw = urlMatch ? urlMatch[1] : null
+  const portFromRaw = portMatch ? Number(portMatch[1]) : parseDtalePort(urlFromRaw)
+
+  return {
+    parsed: null,
+    error: errorMatch ? errorMatch[1] : null,
+    remotePort: Number.isFinite(portFromRaw) && portFromRaw > 0 ? portFromRaw : null,
+    webServerUrl: urlFromRaw,
+    name: nameMatch ? nameMatch[1] : null,
+    snapshot: rawNormalized
+  }
+}
+
+// Poll the GO progress endpoint (1s cadence) until D-Tale reports a web
+// server port, an error, or `timeoutMs` (default 5 min) elapses. Polling
+// errors are logged and retried; a reported error or the timeout throws.
+// Resolves { webServerUrl, remotePort, name }.
+async function waitForDtaleReady(progressTopic, timeoutMs = 300000) {
+  const start = Date.now()
+  let lastProgress = null
+  while (Date.now() - start < timeoutMs) {
+    let progress = null
+    try {
+      progress = await callGoEndpoint(progressTopic, {}, { timeoutMs: 10000 })
+    } catch (err) {
+      // Transient poll failure: warn, wait, retry
+      console.warn("D-Tale progress polling warning:", err && err.message ? err.message : err)
+      await new Promise((resolve) => setTimeout(resolve, 1000))
+      continue
+    }
+
+    const readyInfo = extractDtaleReadyInfo(progress)
+    if (readyInfo.snapshot) {
+      // Remember the latest snapshot for the timeout diagnostics below
+      lastProgress = readyInfo.snapshot
+    }
+
+    if (readyInfo.error) {
+      throw new Error(`D-Tale startup failed: ${readyInfo.error}`)
+    }
+
+    if (readyInfo.remotePort) {
+      const resolvedUrl = readyInfo.webServerUrl || `http://127.0.0.1:${readyInfo.remotePort}/`
+      return {
+        webServerUrl: resolvedUrl,
+        remotePort: readyInfo.remotePort,
+        name: readyInfo.name || "D-Tale"
+      }
+    }
+    await new Promise((resolve) => setTimeout(resolve, 1000))
+  }
+  const lastSnapshot = lastProgress || "no-progress-snapshot"
+  throw new Error(`Timed out waiting for D-Tale web server to become ready (last progress: ${lastSnapshot})`)
+}
+
+// POST /run-go-server — (re)start the GO backend: kill any previous child,
+// verify the bundled Python env exists, then start. Responds with the GO
+// port on success, 500 with the error message on failure.
+expressApp.post("/run-go-server", async (req, res) => {
+  try {
+    console.log("Received request to run Go server")
+    if (goServerProcess) {
+      goServerProcess.kill()
+      console.log("Previous Go server process killed")
+    }
+
+    // Fail fast when the bundled Python env is absent (startGoServer would
+    // also check, but this keeps the error message explicit)
+    let bundledPythonPath = getBundledPythonEnvironment()
+    if (!bundledPythonPath) {
+      throw new Error("Bundled Python environment not found")
+    }
+
+    await startGoServer()
+
+  } catch (err) {
+    console.error("Error running Go server: ", err)
+    res.status(500).json({ success: false, error: err.message })
+    return
+  }
+  res.json({ success: true, running: true, port: serviceState.go.port })
+})
+
+// POST /exploratory/dtale/start — launch a D-Tale session for `dataset`.
+// Clears any previous GO route state, fires the (long-running) start request
+// without awaiting it, then polls until the web server is ready. Records the
+// session under serviceState and returns its port/URL/name.
+expressApp.post("/exploratory/dtale/start", async (req, res) => {
+  try {
+    const body = req.body || {}
+    const requestId = body.requestId || createRequestUUID()
+    const pageId = body.pageId || "D-Tale"
+    const dataset = body.dataset
+    if (!dataset || !dataset.id || !dataset.name) {
+      return res.status(400).json({ success: false, error: "dataset with id and name is required" })
+    }
+
+    // Lazily (re)start the GO server if it is not up
+    if (!serviceState.go.running || !serviceState.go.port) {
+      await startGoServer()
+    }
+
+    const routeId = `${requestId}/${pageId}-${dataset.name}`
+    await callGoEndpoint(`/removeId/${routeId}`, { dataset })
+    // Fire-and-forget: readiness is detected via the progress endpoint below
+    void callGoEndpoint(`/exploratory/start_dtale/${routeId}`, { dataset }, { timeoutMs: 0 }).catch((err) => {
+      console.warn("D-Tale start request warning:", err && err.message ? err.message : err)
+    })
+    const dtaleInfo = await waitForDtaleReady(`/exploratory/progress/${routeId}`)
+    serviceState.exploratory.dtale.sessions[requestId] = {
+      requestId,
+      pageId,
+      dataset,
+      remotePort: dtaleInfo.remotePort,
+      webServerUrl: dtaleInfo.webServerUrl,
+      name: dtaleInfo.name,
+      updatedAt: Date.now()
+    }
+
+    return res.json({
+      success: true,
+      requestId,
+      remotePort: dtaleInfo.remotePort,
+      webServerUrl: dtaleInfo.webServerUrl,
+      name: dtaleInfo.name
+    })
+  } catch (err) {
+    console.error("Error starting D-Tale service:", err)
+    return res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+expressApp.post("/exploratory/dtale/progress", async (req, res) => {
+ try {
+ const routeId = req?.body?.routeId
+ if (!routeId || typeof routeId !== "string") {
+ return res.status(400).json({ success: false, error: "routeId is required" })
+ }
+
+ if (!serviceState.go.running || !serviceState.go.port) {
+ await startGoServer()
+ }
+
+ const progress = await callGoEndpoint(`/exploratory/progress/${routeId}`, {}, { timeoutMs: 10000 })
+ const parsedProgress = typeof progress === "string" ? cleanGoResponsePayload(progress) : progress
+ return res.json({ success: true, progress: parsedProgress })
+ } catch (err) {
+ console.error("Error getting D-Tale progress:", err)
+ return res.status(500).json({ success: false, error: err.message })
+ }
+})
+
+expressApp.post("/exploratory/dtale/stop", async (req, res) => {
+ try {
+ const body = req.body || {}
+ const requestId = body.requestId
+ const session = requestId ? serviceState.exploratory.dtale.sessions[requestId] : null
+ const remotePort = Number(body.remotePort || (session && session.remotePort))
+
+ if (remotePort && Number.isFinite(remotePort)) {
+ try {
+ await axios.get(`http://127.0.0.1:${remotePort}/shutdown`, { timeout: 5000 })
+ } catch (e) {
+ console.warn("D-Tale shutdown warning:", e && e.message ? e.message : e)
+ }
+ }
+
+ if (requestId && serviceState.exploratory.dtale.sessions[requestId]) {
+ delete serviceState.exploratory.dtale.sessions[requestId]
+ }
+
+ return res.json({ success: true })
+ } catch (err) {
+ console.error("Error stopping D-Tale service:", err)
+ return res.status(500).json({ success: false, error: err.message })
+ }
+})
+
+// POST /exploratory/sweetviz/start — ask the GO server to generate a
+// SweetViz report (optionally comparing two datasets / targeting a column).
+// The generated HTML is keyed by htmlFileID and later served via
+// /exploratory/report/:id. Records the session under serviceState.
+expressApp.post("/exploratory/sweetviz/start", async (req, res) => {
+  try {
+    const body = req.body || {}
+    const pageId = body.pageId || "SweetViz"
+    const mainDataset = body.mainDataset
+    const compDataset = body.compDataset || ""
+    const target = body.target
+    if (!mainDataset || !mainDataset.id || !mainDataset.name) {
+      return res.status(400).json({ success: false, error: "mainDataset with id and name is required" })
+    }
+
+    // Lazily (re)start the GO server if it is not up
+    if (!serviceState.go.running || !serviceState.go.port) {
+      await startGoServer()
+    }
+
+    const htmlFileID = body.htmlFileID || createRequestUUID()
+    await callGoEndpoint(`/exploratory/start_sweetviz/${pageId}`, {
+      mainDataset,
+      compDataset,
+      htmlFileID,
+      target
+    })
+
+    serviceState.exploratory.sweetviz.sessions[htmlFileID] = {
+      htmlFileID,
+      pageId,
+      mainDataset,
+      compDataset,
+      updatedAt: Date.now()
+    }
+
+    return res.json({
+      success: true,
+      htmlFileID,
+      reportPath: `/exploratory/report/${htmlFileID}`,
+      expressPort: serviceState.expressPort
+    })
+  } catch (err) {
+    console.error("Error starting SweetViz report generation:", err)
+    return res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+expressApp.post("/exploratory/ydata/start", async (req, res) => {
+ try {
+ const body = req.body || {}
+ const pageId = body.pageId || "ydata-profiling"
+ const mainDataset = body.mainDataset
+ const compDataset = body.compDataset || ""
+ if (!mainDataset || !mainDataset.id || !mainDataset.name) {
+ return res.status(400).json({ success: false, error: "mainDataset with id and name is required" })
+ }
+
+ if (!serviceState.go.running || !serviceState.go.port) {
+ await startGoServer()
+ }
+
+ const htmlFileID = body.htmlFileID || createRequestUUID()
+ await callGoEndpoint(`/exploratory/start_ydata_profiling/${pageId}`, {
+ mainDataset,
+ compDataset,
+ htmlFileID
+ })
+
+ serviceState.exploratory.ydata.sessions[htmlFileID] = {
+ htmlFileID,
+ pageId,
+ mainDataset,
+ compDataset,
+ updatedAt: Date.now()
+ }
+
+ return res.json({
+ success: true,
+ htmlFileID,
+ reportPath: `/exploratory/report/${htmlFileID}`,
+ expressPort: serviceState.expressPort
+ })
+ } catch (err) {
+ console.error("Error starting YData report generation:", err)
+ return res.status(500).json({ success: false, error: err.message })
+ }
+})
+
+async function serveExploratoryHtmlReport(req, res) {
+ try {
+ const reportId = req.params.reportId
+ if (!reportId) {
+ return res.status(400).send("Missing reportId")
+ }
+
+ const db = await connectToDataDB()
+ const collection = db.collection(reportId)
+ const doc = await collection.findOne({}, { projection: { _id: 0, htmlContent: 1 } })
+ if (!doc || !doc.htmlContent) {
+ return res.status(404).send("Exploratory report not found")
+ }
+
+ res.setHeader("Content-Type", "text/html; charset=utf-8")
+ return res.status(200).send(doc.htmlContent)
+ } catch (err) {
+ console.error("Error serving exploratory report:", err)
+ return res.status(500).send("Failed to load exploratory report")
+ }
+}
+
+expressApp.get("/exploratory/report/:reportId", serveExploratoryHtmlReport)
+
+// Stop Express server gracefully, then best-effort stop the GO, MongoDB, and
+// Jupyter services this backend manages.
+// NOTE(review): if httpServer is already null the handler returns early and the
+// GO/Mongo/Jupyter shutdown below never runs — confirm that is intended.
+// NOTE(review): the HTTP response is sent inside the close() callback while the
+// rest of the shutdown continues asynchronously after it.
+expressApp.post("/stop-express", async (req, res) => {
+  try {
+    if (!httpServer) {
+      return res.status(200).json({ success: true, message: 'Express not running' })
+    }
+    httpServer.close(() => {
+      // Persist "not running" state and clear runtime references once all
+      // connections have drained.
+      try { writeStateFile(false) } catch (e) { /* ignore */ }
+      serviceState.expressPort = null
+      httpServer = null
+      res.json({ success: true, stopped: true })
+    })
+  } catch (err) {
+    console.error("Error stopping Express server:", err)
+    res.status(500).json({ success: false, error: err.message })
+  }
+  // Stop GO server if running
+  try {
+    if (goServerProcess) {
+      console.log('[express:stop] stopping GO server...')
+      try { goServerProcess.kill('SIGTERM') } catch (_) { /* ignore */ }
+      // Best-effort wait, then force kill if needed
+      await new Promise(r => setTimeout(r, 500))
+      try { goServerProcess.kill('SIGKILL') } catch (_) { /* ignore */ }
+      goServerProcess = null
+      serviceState.go.running = false
+      serviceState.go.port = null
+    }
+  } catch (e) {
+    console.warn('[express:stop] GO stop warning:', e && e.message ? e.message : e)
+  }
+
+  // Stop MongoDB if running
+  try {
+    if (serviceState.mongo.running) {
+      console.log('[express:stop] stopping MongoDB...')
+      try { await stopMongoDB() } catch (e) { console.warn('[express:stop] stopMongoDB warning:', e && e.message ? e.message : e) }
+      serviceState.mongo.running = false
+      serviceState.mongo.port = null
+    }
+  } catch (e) { console.warn('[express:stop] Mongo stop warning:', e && e.message ? e.message : e) }
+
+  // Stop Jupyter if running
+  try {
+    if (serviceState.jupyter.running) {
+      console.log('[express:stop] stopping Jupyter...')
+      try { await stopJupyterServer() } catch (e) { console.warn('[express:stop] stopJupyter warning:', e && e.message ? e.message : e) }
+      serviceState.jupyter.running = false
+      serviceState.jupyter.port = null
+    }
+  } catch (e) { console.warn('[express:stop] Jupyter stop warning:', e && e.message ? e.message : e) }
+})
+
+
+// Set the server's working (workspace) directory on behalf of a remote client.
+// Delegates to setWorkspaceDirectoryServer and marks the result as remote-initiated.
+expressApp.post("/set-working-directory", async (req, res) =>{
+  let workspacePath = normalizePathForPlatform(req.body.workspacePath)
+  console.log("Received request to set workspace directory from remote: ", workspacePath)
+  try {
+    const result = await setWorkspaceDirectoryServer(workspacePath)
+    if (result && result.hasBeenSet) {
+      console.log('Workspace (from remote) set to: ' + workspacePath)
+      result.isRemote = true
+      res.json({ success: true, workspace: result })
+    } else {
+      console.log('Workspace specified by remote could not be set')
+      res.status(500).json({ success: false, error: 'Could not set workspace' })
+    }
+  } catch (err) {
+    console.log('Error setting workspace directory from remote : ', err)
+    res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+// Status: single source of truth snapshot for all services this backend manages
+expressApp.get("/status", async (req, res) => {
+  try {
+    console.log("Received request to get service status")
+    // Refresh GO runtime state by probing the recorded port.
+    try {
+      if (serviceState.go.port) {
+        const goUp = await checkGoIsListening(serviceState.go.port, 300)
+        serviceState.go.running = !!goUp
+        // Forget the port if the probe failed so later /ensure-go restarts cleanly.
+        if (!goUp) serviceState.go.port = null
+      }
+    } catch (_) {
+      // ignore detection failure
+    }
+    // Optionally refresh Jupyter runtime status on demand
+    try {
+      const jStatus = await checkJupyterIsRunning()
+      serviceState.jupyter.running = !!(jStatus && jStatus.running)
+      // Port not tracked dynamically here; defaults are managed in module
+    } catch (e) {
+      // ignore status refresh failures
+    }
+    // Refresh Mongo runtime state based on listening port
+    try {
+      const mongoUp = await checkMongoIsRunning(MEDconfig.mongoPort)
+      serviceState.mongo.running = !!mongoUp
+      if (mongoUp && !serviceState.mongo.port) serviceState.mongo.port = MEDconfig.mongoPort
+    } catch (e) {
+      // ignore detection failure
+    }
+    res.json({
+      success: true,
+      expressPort: serviceState.expressPort,
+      serverIdentity: {
+        hostName: os.hostname(),
+        pid: process.pid,
+        platform: process.platform
+      },
+      go: { running: serviceState.go.running, port: serviceState.go.port },
+      mongo: { running: serviceState.mongo.running, port: serviceState.mongo.port },
+      jupyter: { running: serviceState.jupyter.running, port: serviceState.jupyter.port }
+    })
+  } catch (err) {
+    res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+// Ensure GO: idempotent start; returns current/active port
+expressApp.post("/ensure-go", async (req, res) => {
+  try {
+    // Already running: report the current port without restarting.
+    if (serviceState.go.running) {
+      return res.json({ success: true, running: true, port: serviceState.go.port })
+    }
+    const preferredPort = req?.body?.preferredPort || null
+    await startGoServer(preferredPort)
+    return res.json({ success: true, running: true, port: serviceState.go.port })
+  } catch (err) {
+    console.error("ensure-go error:", err)
+    res.status(500).json({ success: false, running: false, error: err.message })
+  }
+})
+
+// Ensure MongoDB: idempotently start mongod using the workspace's .medomics/mongod.conf
+// Body optional: { workspacePath?: string }
+expressApp.post("/ensure-mongo", async (req, res) => {
+  try {
+    // If already running, return current state
+    const mongoUp = await checkMongoIsRunning(MEDconfig.mongoPort)
+    if (serviceState.mongo.running || mongoUp) {
+      serviceState.mongo.running = true
+      if (!serviceState.mongo.port) serviceState.mongo.port = MEDconfig.mongoPort
+      return res.json({ success: true, running: true, port: serviceState.mongo.port || MEDconfig.mongoPort })
+    }
+    // Determine workspace path: prefer body.workspacePath, else current sessionData
+    let workspacePath = req?.body?.workspacePath || getServerWorkingDirectory()
+    workspacePath = normalizePathForPlatform(workspacePath)
+    // Ensure .medomics config and data directories exist
+    createServerMedomicsDirectory(workspacePath)
+
+    // If a mongod process is already spawned (e.g., by /set-working-directory) but hasn't opened the port yet,
+    // wait for it instead of spawning a second instance (which can fail due to log file/port locks).
+    try {
+      const dbg = getMongoDebugInfo()
+      if (dbg && (dbg.running || dbg.pid)) {
+        const upExisting = await waitForMongoUp(MEDconfig.mongoPort, 12000)
+        serviceState.mongo.running = !!upExisting
+        serviceState.mongo.port = MEDconfig.mongoPort
+        if (!upExisting) {
+          // Process exists but never opened the port: surface debug info to caller.
+          return res.status(500).json({
+            success: false,
+            running: false,
+            error: "MongoDB process exists but did not start listening within timeout",
+            port: MEDconfig.mongoPort,
+            mongoDebug: getMongoDebugInfo()
+          })
+        }
+        return res.json({ success: true, running: true, port: MEDconfig.mongoPort })
+      }
+    } catch (_) {
+      // best-effort; continue with fresh start below
+    }
+
+
+    // Start MongoDB and record default port from config
+    startMongoDB(workspacePath)
+    // Wait briefly for port to open so the caller gets a reliable signal
+    const up = await waitForMongoUp(MEDconfig.mongoPort, 12000)
+    serviceState.mongo.running = !!up
+    serviceState.mongo.port = MEDconfig.mongoPort
+    if (!up) {
+      return res.status(500).json({
+        success: false,
+        running: false,
+        error: "MongoDB did not start listening within timeout",
+        port: MEDconfig.mongoPort,
+        mongoDebug: getMongoDebugInfo()
+      })
+    }
+    return res.json({ success: true, running: true, port: serviceState.mongo.port })
+  } catch (err) {
+    console.error("ensure-mongo error:", err)
+    return res.status(500).json({ success: false, running: false, error: err.message, mongoDebug: getMongoDebugInfo() })
+  }
+})
+
+// Debug: retrieve last MongoDB spawn/exit/stdout/stderr info
+expressApp.get("/mongo-debug", (req, res) => {
+  try {
+    return res.json({ success: true, mongoDebug: getMongoDebugInfo() })
+  } catch (err) {
+    return res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+// Ensure Jupyter: idempotent start, returns running and port
+// Body optional: { workspacePath?: string, preferredPort?: number }
+expressApp.post("/ensure-jupyter", async (req, res) => {
+  try {
+    const preferredPort = req?.body?.preferredPort || 8900
+    let workspacePath = req?.body?.workspacePath || getServerWorkingDirectory()
+    workspacePath = normalizePathForPlatform(workspacePath)
+
+    // Check current runtime state
+    try {
+      const jStatus = await checkJupyterIsRunning()
+      serviceState.jupyter.running = !!(jStatus && jStatus.running)
+    } catch (_) {
+      // ignore transient status errors
+    }
+
+    if (serviceState.jupyter.running) {
+      // If running but we have no port stored, assume preferredPort or default
+      if (!serviceState.jupyter.port) serviceState.jupyter.port = preferredPort
+      return res.json({ success: true, running: true, port: serviceState.jupyter.port })
+    }
+
+    // Not running: start it
+    const result = await startJupyterServer(workspacePath, preferredPort)
+    if (!result || result.running !== true) {
+      const errMsg = (result && result.error) ? result.error : "Failed to start Jupyter"
+      serviceState.jupyter.running = false
+      serviceState.jupyter.port = null
+      return res.status(500).json({ success: false, running: false, error: errMsg })
+    }
+
+    // Record the port we asked for; startJupyterServer does not report one back.
+    serviceState.jupyter.running = true
+    serviceState.jupyter.port = preferredPort
+    return res.json({ success: true, running: true, port: serviceState.jupyter.port })
+  } catch (err) {
+    console.error("ensure-jupyter error:", err)
+    return res.status(500).json({ success: false, running: false, error: err.message })
+  }
+})
+
+expressApp.get("/get-working-dir-tree", (req, res) => {
+ try {
+ let requestPath = normalizePathForPlatform(req.query.requestedPath)
+ console.log("Received request to get working directory tree for path: ", requestPath)
+ const workingDirectory = dirTree(requestPath)
+ if (!workingDirectory) {
+ console.log("No working directory found for the requested path:" + requestPath)
+ res.status(500).json({ success: false, error: "Working directory not found" })
+ }
+ res.json({ success: true, workingDirectory: workingDirectory })
+ } catch (err) {
+ console.error("Error getting working directory: ", err)
+ res.status(500).json({ success: false, error: err.message })
+ }
+})
+
+// Insert a MedDataObject into a remote collection.
+// Body: { objectPath: string, medDataObject: object } — both required.
+expressApp.post("/insert-object-into-collection", async (req, res) => {
+  try {
+    if (!req.body) {
+      console.error("No object provided in request body")
+      return res.status(400).json({ success: false, error: "No object provided" })
+    } else if (!req.body.objectPath || !req.body.medDataObject) {
+      console.error("Invalid request body: objectPath and medDataObject are required")
+      return res.status(400).json({ success: false, error: "Invalid request body" })
+    }
+    console.log("Received request to insert object into collection: ", req.body)
+    const result = await insertObjectIntoCollectionRemote(req.body.objectPath, req.body.medDataObject)
+    res.status(200).json({ success: true, insertedCount: result.insertedCount || 0 })
+  } catch (err) {
+    console.error("Error inserting object into remote collection: ", err)
+    res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+// Export a Mongo collection to a file on the server's filesystem.
+// Body: { collectionId: string, filePath: string, type: string } — all required.
+expressApp.post("/download-collection-to-file", async (req, res) => {
+  try {
+    if (!req.body) {
+      console.error("No object provided in request body")
+      return res.status(400).json({ success: false, error: "No object provided" })
+    } else if (!req.body.collectionId || !req.body.filePath || !req.body.type) {
+      console.error("Invalid request body: downloadCollectionToFile requires collectionId, filePath, and type")
+      return res.status(400).json({ success: false, error: "Invalid request body" })
+    }
+    console.log("Received request to download collection to file: ", req.body)
+    await downloadCollectionToFileRemote(req.body.collectionId, req.body.filePath, req.body.type)
+    res.status(200).json({ success: true })
+  } catch (err) {
+    console.error("Error downloading object to file: ", err)
+    res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+expressApp.get("/get-bundled-python-environment", (req, res) => {
+ try {
+ console.log("Received request to get bundled python environment")
+ const pythEnv = getBundledPythonEnvironment()
+ if (!pythEnv) {
+ res.status(500).json({ success: false, error: "Bundled python environment not found" })
+ }
+ res.status(200).json({ success: true, pythonEnv: pythEnv })
+ } catch (err) {
+ console.error("Error getting bundled python environment: ", err)
+ res.status(500).json({ success: false, error: err.message })
+ }
+})
+
+expressApp.get("/get-installed-python-packages", (req, res) => {
+ try {
+ console.log("Received request to get installed python packages")
+ const pythonPackages = getBundledPythonEnvironment()
+ if (!pythonPackages) {
+ res.status(500).json({ success: false, error: "No installed python packages found" })
+ }
+ res.status(200).json({ success: true, packages: pythonPackages })
+ } catch (err) {
+ console.error("Error getting installed python packages: ", err)
+ res.status(500).json({ success: false, error: err.message })
+ }
+})
+
+// Start MongoDB for a given workspace (remote call).
+// Body: { workspacePath: string } — required. Fire-and-forget: does not wait
+// for mongod to open its port (use /ensure-mongo for a reliable signal).
+expressApp.post("/start-mongo", async (req, res) => {
+  try {
+    if (!req.body) {
+      console.error("No object provided in request body")
+      return res.status(400).json({ success: false, error: "No object provided" })
+    } else if (!req.body.workspacePath) {
+      console.error("Invalid request body: startMongo requires a workspacePath")
+      return res.status(400).json({ success: false, error: "Invalid request body (no path provided)" })
+    }
+    let workspacePath = normalizePathForPlatform(req.body.workspacePath)
+    console.log("Received request to start mongoDB with path : ", workspacePath)
+    startMongoDB(workspacePath)
+    res.status(200).json({ success: true, message: "Started MongoDB on remote server" })
+  } catch (err) {
+    console.error("Error starting MongoDB (request from remote client): ", err)
+    res.status(500).json({ success: false, error: err.message })
+  }
+})
+
+// Report whether the Jupyter server is currently running.
+expressApp.get("/check-jupyter-status", async (req, res) => {
+  try {
+    console.log("Received request to check Jupyter status")
+    const result = await checkJupyterIsRunning()
+    res.status(200).json({ running: result.running, error: result.error || null })
+  } catch (err) {
+    console.error("Error checking Jupyter server status: ", err)
+    res.status(500).json({ running: false, error: err.message })
+  }
+})
+
+// Start the Jupyter server for a given workspace (remote call).
+// Body: { workspacePath: string } — required; port falls back to the module default.
+expressApp.post("/start-jupyter-server", async (req, res) => {
+  try {
+    if (!req.body) {
+      console.error("No object provided in request body")
+      return res.status(400).json({ running: false, error: "No object provided" })
+    } else if (!req.body.workspacePath) {
+      console.error("Invalid request body: startJupyterServer requires a workspacePath")
+      return res.status(400).json({ running: false, error: "Invalid request body (no path provided)" })
+    }
+    let workspacePath = normalizePathForPlatform(req.body.workspacePath)
+    console.log("Received request to start Jupyter Server with path : ", workspacePath)
+    const result = await startJupyterServer(workspacePath)
+    console.log("Jupyter server started: ", result)
+    res.status(200).json({ running: result.running, error: result.error || null })
+  } catch (err) {
+    console.error("Error starting Jupyter (request from remote client): ", err)
+    res.status(500).json({ running: false, error: err.message })
+  }
+})
+
+// Stop the Jupyter server (remote call).
+// NOTE(review): stopJupyterServer() is not awaited here; if it is async the
+// serialized response body will be a pending Promise ({}). Confirm and await if so.
+expressApp.post("/stop-jupyter-server", async (req, res) => {
+  try {
+    console.log("Received request to stop Jupyter Server")
+    const result = stopJupyterServer()
+    res.status(200).json(result)
+  } catch (err) {
+    console.error("Error stopping Jupyter (request from remote client): ", err)
+    res.status(500).json({ running: false, error: err.message })
+  }
+})
+
+ // Stop MongoDB (remote call)
+ expressApp.post("/stop-mongo", async (req, res) => {
+   try {
+     console.log("Received request to stop MongoDB")
+     await stopMongoDB()
+     res.status(200).json({ success: true })
+   } catch (err) {
+     console.error("Error stopping MongoDB:", err)
+     res.status(500).json({ success: false, error: err.message })
+   }
+ })
+
+ // Get path to mongod executable
+ expressApp.get("/get-mongo-path", (req, res) => {
+   try {
+     const path = getMongoDBPath()
+     if (!path) return res.status(404).json({ success: false, error: "mongod not found" })
+     res.status(200).json({ success: true, path })
+   } catch (err) {
+     console.error("Error getting mongo path:", err)
+     res.status(500).json({ success: false, error: err.message })
+   }
+ })
+
+ // Install MongoDB via helper
+ expressApp.post("/install-mongo", async (req, res) => {
+   try {
+     console.log("Received request to install MongoDB")
+     const result = await serverInstallation.installMongoDB()
+     res.status(200).json({ success: !!result })
+   } catch (err) {
+     console.error("Error installing MongoDB:", err)
+     const payload = { success: false, error: err.message }
+     // Surface installer exit code (e.g., Windows Installer 1601) to the renderer
+     if (typeof err.code !== "undefined") {
+       payload.errorCode = err.code
+       payload.installerExitCode = err.code
+       if (err.code === 1601) {
+         // 1601: Windows Installer service could not be accessed
+         payload.windowsInstallerError = true
+       }
+     }
+     res.status(500).json(payload)
+   }
+ })
+
+ // Install bundled python executable
+ expressApp.post("/install-bundled-python", async (req, res) => {
+   try {
+     console.log("Received request to install bundled python")
+     // Provide a basic notify callback that logs to console in headless mode
+     const notify = (payload) => console.log("install-bundled-python:", payload)
+     const result = await installBundledPythonExecutable(notify)
+     res.status(200).json({ success: !!result })
+   } catch (err) {
+     console.error("Error installing bundled python:", err)
+     res.status(500).json({ success: false, error: err.message })
+   }
+ })
+
+ // Install required python packages for a given python path
+ expressApp.post("/install-required-python-packages", async (req, res) => {
+   try {
+     const pythonPath = req.body && req.body.pythonPath
+     console.log("Requested install-required-python-packages for:", pythonPath)
+     const notify = (payload) => console.log("install-required-python-packages:", payload)
+     await installRequiredPythonPackages(notify, pythonPath)
+     res.status(200).json({ success: true })
+   } catch (err) {
+     console.error("Error installing required python packages:", err)
+     res.status(500).json({ success: false, error: err.message })
+   }
+ })
+
+ // Check system requirements (MongoDB, Python)
+ expressApp.get("/check-requirements", async (req, res) => {
+   try {
+     const result = await checkRequirements()
+     res.status(200).json({ success: true, result })
+   } catch (err) {
+     console.error("Error checking requirements:", err)
+     res.status(500).json({ success: false, error: err.message })
+   }
+ })
+
+ // Check whether the python requirements are met for a given pythonPath
+ expressApp.get("/check-python-requirements", (req, res) => {
+   try {
+     const pythonPath = req.query.pythonPath || null
+     const ok = checkPythonRequirements(pythonPath)
+     res.status(200).json({ success: true, requirementsMet: !!ok })
+   } catch (err) {
+     console.error("Error checking python requirements:", err)
+     res.status(500).json({ success: false, error: err.message })
+   }
+ })
+
+// Switch the server's workspace directory: persist the path, (re)create the
+// workspace/.medomics directories, restart MongoDB against the new workspace,
+// and return the new directory tree.
+// NOTE(review): on failure the catch only logs, so the function resolves to
+// undefined — callers (e.g. /set-working-directory) must treat that as failure.
+// NOTE(review): hasBeenSet is unconditionally true; the flag never reflects an error.
+export async function setWorkspaceDirectoryServer(workspacePath) {
+  if (!workspacePath) {
+    throw new Error("No workspace path provided")
+  }
+  setAppPath("sessionData", workspacePath)
+  console.log("Setting workspace directory to: " + workspacePath)
+  createServerWorkingDirectory()
+  createServerMedomicsDirectory(workspacePath)
+  let hasBeenSet = true
+  try {
+    // Stop any mongod bound to the previous workspace before restarting.
+    await stopMongoDB()
+    if (process.platform === "win32") {
+      // killProcessOnPort(serverPort)
+    } else if (process.platform === "darwin") {
+      // macOS: best-effort kill of stray mongod processes; always resolves.
+      await new Promise((resolve) => {
+        exec("pkill -f mongod", () => {
+          resolve()
+        })
+      })
+    } else {
+      // Linux: synchronous kill; failure (no mongod running) is non-fatal.
+      try {
+        execSync("killall mongod")
+      } catch (error) {
+        console.warn("Failed to kill mongod: ", error)
+      }
+    }
+    startMongoDB(workspacePath)
+    return {
+      workingDirectory: dirTree(workspacePath),
+      hasBeenSet: hasBeenSet,
+      newPort: serviceState.mongo.port
+    }
+  } catch (error) {
+    console.error("Failed to change workspace: ", error)
+  }
+}
+
+async function waitForMongoUp(port, timeoutMs = 12000) {
+ const start = Date.now()
+ while (Date.now() - start < timeoutMs) {
+ try {
+ if (await checkMongoIsRunning(port)) return true
+ } catch (_) {
+ // ignore
+ }
+ await new Promise(r => setTimeout(r, 250))
+ }
+ return false
+}
+
+// Probe whether anything is accepting TCP connections on 127.0.0.1:port.
+// Resolves true on a successful connect, false on timeout/error/invalid port;
+// never rejects. `settled` guards against the socket emitting multiple events.
+async function checkGoIsListening(port, timeoutMs = 300) {
+  return await new Promise(resolve => {
+    try {
+      if (!port || typeof port !== 'number') return resolve(false)
+      const socket = new net.Socket()
+      let settled = false
+      const finish = (ok) => {
+        if (settled) return
+        settled = true
+        // Always tear down the socket so the process can exit cleanly.
+        try { socket.destroy() } catch (_) { /* ignore */ }
+        resolve(ok)
+      }
+      socket.setTimeout(timeoutMs)
+      socket.once('connect', () => finish(true))
+      socket.once('timeout', () => finish(false))
+      socket.once('error', () => finish(false))
+      socket.connect(port, '127.0.0.1')
+    } catch (_) {
+      resolve(false)
+    }
+  })
+}
+
+// Bootstrap: when this file is executed directly (node expressServer.mjs),
+// run requirement checks, start Express, then best-effort start the Go server.
+// A Go failure is non-fatal; any other startup error exits with code 1.
+if (process.argv[1] && process.argv[1].endsWith('expressServer.mjs')) {
+  (async () => {
+    console.log('[bootstrap] entrypoint detected')
+    try {
+      console.log('[bootstrap] running requirements check')
+      const reqResult = await checkRequirements()
+      console.log('[bootstrap] requirements result', reqResult)
+      console.log('[bootstrap] starting express')
+      await startExpressServer()
+      console.log('[bootstrap] express started on', serviceState.expressPort)
+      try {
+        await startGoServer()
+        console.log('[bootstrap] go server started on', serviceState.go.port)
+      } catch (goErr) {
+        console.error('[bootstrap] failed to start Go server:', goErr && goErr.stack ? goErr.stack : goErr)
+        // Continue running Express even if Go server fails to start
+        serviceState.go.running = false
+        serviceState.go.port = null
+      }
+    } catch (e) {
+      console.error('[bootstrap] fatal startup error', e && e.stack ? e.stack : e)
+      process.exit(1)
+    }
+  })()
+}
diff --git a/backend/manifest.template.json b/backend/manifest.template.json
new file mode 100644
index 00000000..b1382d68
--- /dev/null
+++ b/backend/manifest.template.json
@@ -0,0 +1,38 @@
+{
+ "name": "MEDomics Server",
+ "version": "0.0.0-template",
+ "releaseDate": "2025-11-06T00:00:00Z",
+ "minAppVersion": "1.9.0",
+ "notes": "Template manifest – replace fields during release automation. Hosted via GitHub Releases: update URLs, version, sha256, and signatures during publish.",
+ "assets": [
+ {
+ "os": "windows",
+ "arch": "x64",
+ "url": "https://github.com/OWNER/REPO/releases/download/v0.0.0/medomics-server-v0.0.0-win-x64.zip",
+ "sha256": "REPLACE_SHA256",
+ "sig": "REPLACE_SIGNATURE",
+ "format": "zip"
+ },
+ {
+ "os": "linux",
+ "arch": "x64",
+ "url": "https://github.com/OWNER/REPO/releases/download/v0.0.0/medomics-server-v0.0.0-linux-x64.tar.gz",
+ "sha256": "REPLACE_SHA256",
+ "sig": "REPLACE_SIGNATURE",
+ "format": "tar.gz"
+ },
+ {
+ "os": "darwin",
+ "arch": "arm64",
+ "url": "https://github.com/OWNER/REPO/releases/download/v0.0.0/medomics-server-v0.0.0-macos-arm64.tar.gz",
+ "sha256": "REPLACE_SHA256",
+ "sig": "REPLACE_SIGNATURE",
+ "format": "tar.gz"
+ }
+ ],
+ "signatureFormat": "minisign",
+ "upgradePolicy": {
+ "allowDowngrade": false,
+ "requireSignature": true
+ }
+}
diff --git a/backend/package.json b/backend/package.json
new file mode 100644
index 00000000..ecd4cd39
--- /dev/null
+++ b/backend/package.json
@@ -0,0 +1,14 @@
+{
+ "name": "medomics-server-backend",
+ "private": true,
+ "type": "module",
+ "dependencies": {
+ "axios": "^1.3.3",
+ "body-parser": "^1.20.2",
+ "cors": "^2.8.5",
+ "directory-tree": "^3.5.1",
+ "express": "^4.19.2",
+ "mongodb": "^4.17.2",
+ "papaparse": "^5.4.1"
+ }
+}
diff --git a/backend/service/linux/medomics-server.service b/backend/service/linux/medomics-server.service
new file mode 100644
index 00000000..fb305c64
--- /dev/null
+++ b/backend/service/linux/medomics-server.service
@@ -0,0 +1,20 @@
+[Unit]
+Description=MEDomics Server (Express + Go + Mongo/Jupyter manager)
+After=network.target
+StartLimitIntervalSec=400
+StartLimitBurst=3
+
+[Service]
+Type=simple
+WorkingDirectory=/opt/medomics-server
+ExecStart=/opt/medomics-server/bin/medomics-server start
+Restart=on-failure
+RestartSec=5
+User=medomics
+Group=medomics
+Environment=NODE_ENV=production
+StandardOutput=append:/var/log/medomics-server.log
+StandardError=append:/var/log/medomics-server.err.log
+
+[Install]
+WantedBy=multi-user.target
diff --git a/backend/service/macos/com.medomicslab.medomics-server.plist b/backend/service/macos/com.medomicslab.medomics-server.plist
new file mode 100644
index 00000000..5fb528e4
--- /dev/null
+++ b/backend/service/macos/com.medomicslab.medomics-server.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>Label</key>
+  <string>com.medomicslab.medomics-server</string>
+  <key>ProgramArguments</key>
+  <array>
+    <string>/usr/local/medomics-server/bin/medomics-server</string>
+    <string>start</string>
+  </array>
+  <key>WorkingDirectory</key>
+  <string>/usr/local/medomics-server</string>
+  <key>RunAtLoad</key>
+  <true/>
+  <key>EnvironmentVariables</key>
+  <dict>
+    <key>NODE_ENV</key>
+    <string>production</string>
+  </dict>
+  <key>StandardOutPath</key>
+  <string>/usr/local/medomics-server/logs/medomics-server.log</string>
+  <key>StandardErrorPath</key>
+  <string>/usr/local/medomics-server/logs/medomics-server.err.log</string>
+</dict>
+</plist>
diff --git a/backend/service/windows/README.md b/backend/service/windows/README.md
new file mode 100644
index 00000000..eb074f38
--- /dev/null
+++ b/backend/service/windows/README.md
@@ -0,0 +1,14 @@
+Windows Service notes
+
+Two options are provided:
+1) Built-in Service Control Manager (sc.exe) with a direct binPath to medomics-server.exe start.
+2) NSSM (Non-Sucking Service Manager) for more robust service wrapping and logging.
+
+Usage with sc.exe (example):
+- Run PowerShell as Administrator
+- sc.exe create MEDomicsServer binPath= '"C:\\Program Files\\MEDomicsServer\\bin\\medomics-server.exe" start --workspace "C:\\MEDomicsWorkspace"' start= auto DisplayName= "MEDomics Server"
+- sc.exe start MEDomicsServer
+
+Using the provided script:
+- .\\install-service.ps1 -InstallPath "C:\\Program Files\\MEDomicsServer" -Workspace "C:\\MEDomicsWorkspace"
+- Add -UseNssm to rely on NSSM if installed.
diff --git a/backend/service/windows/install-service.ps1 b/backend/service/windows/install-service.ps1
new file mode 100644
index 00000000..7b9da43f
--- /dev/null
+++ b/backend/service/windows/install-service.ps1
@@ -0,0 +1,37 @@
+Param(
+  [string]$InstallPath = "C:\\Program Files\\MEDomicsServer",
+  [string]$ServiceName = "MEDomicsServer",
+  [string]$Workspace = "C:\\MEDomicsWorkspace",
+  [switch]$UseNssm
+)
+
+# Installs the MEDomics Server as a Windows service, either via sc.exe (default)
+# or via NSSM when -UseNssm is passed.
+Write-Host "Installing MEDomics Server service '$ServiceName' at $InstallPath" -ForegroundColor Cyan
+
+if (-not (Test-Path $InstallPath)) {
+  Write-Host "Install path does not exist: $InstallPath" -ForegroundColor Yellow
+  exit 1
+}
+
+$exe = Join-Path $InstallPath 'bin/medomics-server.exe'
+if (-not (Test-Path $exe)) {
+  Write-Host "Executable not found: $exe" -ForegroundColor Red
+  exit 1
+}
+
+if ($UseNssm) {
+  if (-not (Get-Command nssm -ErrorAction SilentlyContinue)) {
+    Write-Host "nssm not found in PATH" -ForegroundColor Red; exit 1
+  }
+  nssm install $ServiceName $exe start --workspace $Workspace
+  nssm set $ServiceName Start SERVICE_AUTO_START
+  Write-Host "Service installed via NSSM. Use 'nssm edit $ServiceName' to adjust settings." -ForegroundColor Green
+} else {
+  # The exe path and workspace must be individually quoted inside the binPath
+  # command line, otherwise paths containing spaces (e.g. "C:\Program Files\...")
+  # are split by the Service Control Manager and the service fails to start.
+  $cmd = "`"$exe`" start --workspace `"$Workspace`""
+  sc.exe create $ServiceName binPath= "$cmd" start= auto DisplayName= "MEDomics Server"
+  if ($LASTEXITCODE -ne 0) { Write-Host "sc.exe create failed" -ForegroundColor Red; exit 1 }
+  Write-Host "Service created. Starting..." -ForegroundColor Cyan
+  sc.exe start $ServiceName | Out-Null
+  Write-Host "Service started." -ForegroundColor Green
+}
+
+Write-Host "Done." -ForegroundColor Green
diff --git a/backend/utils/jupyterServer.js b/backend/utils/jupyterServer.js
new file mode 100644
index 00000000..54bc898c
--- /dev/null
+++ b/backend/utils/jupyterServer.js
@@ -0,0 +1,204 @@
+import fs from "fs"
+import { getBundledPythonEnvironment } from "./pythonEnv.js"
+import util from "util"
+import { spawn, exec as execCb } from "child_process"
+const exec = util.promisify(execCb)
+
+let jupyterStatus = { running: false, error: null }
+let jupyterPort = 8900
+
+async function getPythonPath() {
+ let pythonPath = getBundledPythonEnvironment()
+ // Check if pythonPath is set
+ if (pythonPath === "") {
+ console.error("Python path is not set. Jupyter server cannot be started.")
+ return null
+ }
+ return pythonPath
+}
+
+
+async function startJupyterServer(workspacePath, port = 8900) {
+ if (!workspacePath) {
+ return { running: false, error: "No workspace path found. Jupyter server cannot be started." }
+ }
+ const pythonPath = await getPythonPath()
+
+ if (!pythonPath) {
+ return { running: false, error: "Python path is not set. Jupyter server cannot be started." }
+ }
+ const configSet = await setJupyterConfig(pythonPath)
+ if (!configSet.success) {
+ return { running: false, error: configSet.error }
+ }
+ console.log("Checking if Jupyter server is already running before spawning: ", jupyterStatus.running)
+ if (!jupyterStatus.running) {
+ const jupyter = spawn(pythonPath, [
+ '-m', 'jupyter', 'notebook',
+ `--NotebookApp.token=''`,
+ `--NotebookApp.password=''`,
+ '--no-browser',
+ `--port=${port}`,
+ `${workspacePath}/DATA`
+ ])
+ jupyter.stderr.on('data', (data) => {
+      console.log(`[Jupyter STDERR]: ${data}`)
+ if (data.toString().includes(port.toString())) {
+ console.log("Jupyter server is ready and running.")
+ }
+ })
+ jupyter.on('close', (code) => {
+ console.log(`[Jupyter] exited with code ${code}`)
+ })
+ jupyterPort = port
+ return { running: true, error: null }
+ }
+}
+
+async function getJupyterPid (port) {
+ if (!port) {
+ throw new Error("Port is required to get Jupyter PID")
+ }
+ const { exec } = require('child_process')
+ const { promisify } = require('util')
+ const execAsync = promisify(exec)
+
+ const platform = process.platform
+ const command = platform === 'win32'
+ ? `netstat -ano | findstr :${port}`
+ : `lsof -ti :${port} | head -n 1`
+
+ try {
+ const { stdout, stderr } = await execAsync(command)
+ if (stderr) throw new Error(stderr)
+
+ return platform === 'win32'
+ ? stdout.trim().split(/\s+/).pop()
+ : stdout.trim()
+ } catch (error) {
+ throw new Error(`PID lookup failed: ${error.message}`)
+ }
+ }
+
+async function setJupyterConfig(pythonPathArg) {
+ if (!pythonPathArg) {
+ return { success: false, error: "Python path is not set. Cannot configure Jupyter." }
+ }
+ // Check if jupyter is installed
+ try {
+ await exec(`${pythonPathArg} -m jupyter --version`).then((result) => {
+ const trimmedVersion = result.stdout.split("\n")
+ const includesJupyter = trimmedVersion.some((line) => line.startsWith("jupyter"))
+ if (!includesJupyter) {
+ throw new Error("Jupyter is not installed")
+ }
+ })
+ } catch (error) {
+ return { success: false, error: "Jupyter is not installed. Please install Jupyter to use this feature."}
+ }
+ // Check if jupyter_notebook_config.py exists and update it
+ try {
+ const result = await exec(`${pythonPathArg} -m jupyter --paths`)
+ if (result.stderr) {
+ console.error("Error getting Jupyter paths:", result.stderr)
+ return { success: false, error: "Failed to get Jupyter paths." }
+ }
+ const configPath = result.stdout.split("\n").find(line => line.includes(".jupyter"))
+
+ if (configPath) {
+ const configFilePath = configPath.trim() + "/jupyter_notebook_config.py"
+
+ // Check if the file exists
+ if (!fs.existsSync(configFilePath)) {
+ try {
+ // Await the config generation
+ const output = await exec(`${pythonPathArg} -m jupyter notebook --generate-config`)
+ if (output.stderr) {
+ console.error("Error generating Jupyter config:", output.stderr)
+ return { success: false, error: "Error generating Jupyter config. Please check the console for more details." }
+ }
+ } catch (error) {
+ console.error("Error generating config:", error)
+ return {success: false, error: "Failed to generate Jupyter config" }
+ }
+ }
+
+      // Get the last line of the config file
+ const lastLine = fs.readFileSync(configFilePath, "utf8").split("\n").slice(-1)[0]
+
+ if (!lastLine.includes("c.NotebookApp.tornado_settings") ||
+ !lastLine.includes("c.ServerApp.allow_unauthenticated_access")) {
+ // Add config settings
+ fs.appendFileSync(configFilePath, `\nc.ServerApp.allow_unauthenticated_access = True`)
+ fs.appendFileSync(configFilePath, `\nc.NotebookApp.tornado_settings={'headers': {'Content-Security-Policy': "frame-ancestors 'self' http://localhost:8888;"}}`)
+ }
+ return { success: true, error: null }
+ }
+ } catch (error) {
+ console.error("Error in Jupyter config setup:", error)
+    return { success: false, error: "Failed to configure Jupyter." }
+ }
+}
+
+async function stopJupyterServer() {
+ const pythonPath = await getPythonPath()
+
+ if (!pythonPath) {
+ console.error("Python path is not set. Cannot stop Jupyter server.")
+ return { running: false, error: "Python path is not set. Cannot stop Jupyter server." }
+ }
+
+ try {
+ // Get the PID first
+ const pid = await getJupyterPid(jupyterPort)
+
+ if (!pid) {
+ console.log("No running Jupyter server found")
+ return { running: false, error: "No running Jupyter server found" }
+ }
+
+ // Platform-specific kill command
+ const killCommand = process.platform === 'win32'
+ ? `taskkill /PID ${pid} /F`
+ : `kill ${pid}`
+
+ await exec(killCommand)
+ console.log(`Successfully stopped Jupyter server (PID: ${pid})`)
+ return { running: false, error: null }
+ } catch (error) {
+ console.error("Error stopping Jupyter server:", error)
+ // Fallback to original method if PID method fails
+ try {
+ await exec(`${pythonPath} -m jupyter notebook stop ${jupyterPort}`)
+ return { running: false, error: null }
+ } catch (fallbackError) {
+ console.error("Fallback stop method also failed:", fallbackError)
+ return { running: true, error: "Failed to stop server" }
+ }
+ }
+}
+
+async function checkJupyterIsRunning() {
+ console.log("Checking if Jupyter server is running on port", jupyterPort)
+ try {
+ const pythonPath = await getPythonPath()
+ console.log("Python path for checking Jupyter status:", pythonPath)
+ if (!pythonPath) {
+ console.log("Python path is not set. Cannot check Jupyter server status.")
+ return { running: false, error: "Python path is not set. Cannot check Jupyter server status." }
+ }
+ const result = await exec(`${pythonPath} -m jupyter notebook list`)
+ console.log("Jupyter notebook list result:", result)
+ if (result.stderr) {
+ console.log("Error checking Jupyter server status:", result.stderr)
+ return { running: false, error: "Jupyter server is not running. You can start it from the settings page." }
+ }
+ const isRunning = result.stdout.includes(jupyterPort.toString())
+ console.log("Is Jupyter server running:", isRunning)
+ return { running: isRunning, error: isRunning ? null : "Jupyter server is not running. You can start it from the settings page." }
+ } catch (error) {
+ return { running: false, error: error }
+ }
+}
+
+export { startJupyterServer, stopJupyterServer, checkJupyterIsRunning }
\ No newline at end of file
diff --git a/backend/utils/medomics.server.dev.js b/backend/utils/medomics.server.dev.js
new file mode 100644
index 00000000..ea7c44bc
--- /dev/null
+++ b/backend/utils/medomics.server.dev.js
@@ -0,0 +1,14 @@
+export const PORT_FINDING_METHOD = {
+ FIX: 0,
+ AVAILABLE: 1
+}
+
+const config = {
+ runServerAutomatically: true,
+ useReactDevTools: false,
+ defaultPort: 54288,
+ mongoPort: 54017,
+ portFindingMethod: PORT_FINDING_METHOD.FIX
+}
+
+export default config
\ No newline at end of file
diff --git a/backend/utils/mongoDBServer.js b/backend/utils/mongoDBServer.js
new file mode 100644
index 00000000..424f7667
--- /dev/null
+++ b/backend/utils/mongoDBServer.js
@@ -0,0 +1,264 @@
+import fs from "fs"
+import path from "path"
+import { exec, spawn, execSync } from "child_process"
+let mongoProcess = null
+
+let lastMongo = {
+ startedAt: null,
+ mongodPath: null,
+ args: null,
+ workspacePath: null,
+ configPath: null,
+ pid: null,
+ stopRequestedAt: null,
+ lastExit: null, // { code, signal, at }
+ lastError: null, // { message, stack, at }
+ stdoutTail: [],
+ stderrTail: []
+}
+
+const MAX_TAIL_LINES = 200
+
+function pushTail(arr, line) {
+ if (!line) return
+ arr.push(line)
+ if (arr.length > MAX_TAIL_LINES) arr.splice(0, arr.length - MAX_TAIL_LINES)
+}
+
+function bufferToLines(data) {
+ try {
+ return String(data).split(/\r?\n/).filter(Boolean)
+ } catch {
+ return []
+ }
+}
+
+
+function startMongoDB(workspacePath) {
+ const mongoConfigPath = path.join(workspacePath, ".medomics", "mongod.conf")
+ if (fs.existsSync(mongoConfigPath)) {
+ console.log("Starting MongoDB with config: " + mongoConfigPath)
+ let mongod = getMongoDBPath()
+ if (!mongod) {
+ const err = new Error("mongod executable not found")
+ lastMongo.lastError = { message: err.message, stack: err.stack, at: new Date().toISOString() }
+ console.error("Failed to start MongoDB:", err.message)
+ return
+ }
+
+ lastMongo.startedAt = new Date().toISOString()
+ lastMongo.mongodPath = mongod
+ lastMongo.args = ["--config", mongoConfigPath]
+ lastMongo.workspacePath = workspacePath
+ lastMongo.configPath = mongoConfigPath
+ lastMongo.pid = null
+ lastMongo.stopRequestedAt = null
+ lastMongo.lastExit = null
+ lastMongo.lastError = null
+ lastMongo.stdoutTail = []
+ lastMongo.stderrTail = []
+
+ if (process.platform !== "darwin") {
+ mongoProcess = spawn(mongod, ["--config", mongoConfigPath], { windowsHide: true })
+ } else {
+ if (fs.existsSync(getMongoDBPath())) {
+ mongoProcess = spawn(getMongoDBPath(), ["--config", mongoConfigPath], { windowsHide: true })
+ } else {
+ mongoProcess = spawn("/opt/homebrew/Cellar/mongodb-community/7.0.12/bin/mongod", ["--config", mongoConfigPath], { shell: true })
+ }
+ }
+
+ lastMongo.pid = mongoProcess?.pid || null
+
+ mongoProcess.stdout.on("data", (data) => {
+ for (const line of bufferToLines(data)) {
+ pushTail(lastMongo.stdoutTail, line)
+ }
+ console.log(`MongoDB stdout: ${data}`)
+ })
+
+ mongoProcess.stderr.on("data", (data) => {
+ for (const line of bufferToLines(data)) {
+ pushTail(lastMongo.stderrTail, line)
+ }
+ console.error(`MongoDB stderr: ${data}`)
+ })
+
+ mongoProcess.on("exit", (code, signal) => {
+ lastMongo.lastExit = { code, signal, at: new Date().toISOString() }
+ })
+
+ mongoProcess.on("close", (code, signal) => {
+ const stopNote = lastMongo.stopRequestedAt ? ` (stop requested at ${lastMongo.stopRequestedAt})` : ""
+ console.log(`MongoDB process exited with code ${code} signal ${signal || "null"}${stopNote}`)
+ })
+
+ mongoProcess.on("error", (err) => {
+ lastMongo.lastError = { message: err?.message || String(err), stack: err?.stack || null, at: new Date().toISOString() }
+ console.error("Failed to start MongoDB: ", err)
+ })
+ } else {
+ const errorMsg = `MongoDB config file does not exist: ${mongoConfigPath}`
+ lastMongo.lastError = { message: errorMsg, stack: null, at: new Date().toISOString() }
+ console.error(errorMsg)
+ }
+}
+
+
+async function stopMongoDB() {
+ return new Promise((resolve) => {
+ if (!mongoProcess) return resolve()
+
+ lastMongo.stopRequestedAt = new Date().toISOString()
+
+ const proc = mongoProcess
+ let settled = false
+ const finish = () => {
+ if (settled) return
+ settled = true
+ mongoProcess = null
+ resolve()
+ }
+
+ proc.once("close", () => finish())
+ proc.once("error", () => finish())
+
+ try {
+ proc.kill()
+ } catch (error) {
+ console.log("Error while stopping MongoDB ", error)
+ finish()
+ }
+
+ // Safety: don't hang forever if close never fires
+ setTimeout(() => finish(), 5000).unref?.()
+ })
+}
+
+function getMongoDebugInfo() {
+ return {
+ running: !!(mongoProcess && mongoProcess.exitCode === null),
+ pid: mongoProcess?.pid || lastMongo.pid || null,
+ startedAt: lastMongo.startedAt,
+ stopRequestedAt: lastMongo.stopRequestedAt,
+ mongodPath: lastMongo.mongodPath,
+ args: lastMongo.args,
+ workspacePath: lastMongo.workspacePath,
+ configPath: lastMongo.configPath,
+ lastExit: lastMongo.lastExit,
+ lastError: lastMongo.lastError,
+ stdoutTail: lastMongo.stdoutTail,
+ stderrTail: lastMongo.stderrTail
+ }
+}
+
+function getMongoDBPath() {
+ if (process.platform === "win32") {
+ // Check if mongod is in the process.env.PATH
+ const paths = process.env.PATH.split(path.delimiter)
+ for (let i = 0; i < paths.length; i++) {
+ const binPath = path.join(paths[i], "mongod.exe")
+ if (fs.existsSync(binPath)) {
+ console.log("mongod found in PATH")
+ return binPath
+ }
+ }
+    // Check if mongod is in the default installation path on Windows - C:\Program Files\MongoDB\Server\<version>\bin\mongod.exe
+ const programFilesPath = process.env["ProgramFiles"]
+ if (programFilesPath) {
+ const mongoPath = path.join(programFilesPath, "MongoDB", "Server")
+ // Check if the MongoDB directory exists
+ if (!fs.existsSync(mongoPath)) {
+ console.error("MongoDB directory not found")
+ return null
+ }
+ const dirs = fs.readdirSync(mongoPath)
+ for (let i = 0; i < dirs.length; i++) {
+ const binPath = path.join(mongoPath, dirs[i], "bin", "mongod.exe")
+ if (fs.existsSync(binPath)) {
+ return binPath
+ }
+ }
+ }
+ console.error("mongod not found")
+ return null
+ } else if (process.platform === "darwin") {
+ // Check if it is installed in the .medomics directory
+ const binPath = path.join(process.env.HOME, ".medomics", "mongodb", "bin", "mongod")
+ if (fs.existsSync(binPath)) {
+ console.log("mongod found in .medomics directory")
+ return binPath
+ }
+ if (process.env.NODE_ENV !== "production") {
+ // Check if mongod is in the process.env.PATH
+ const paths = process.env.PATH.split(path.delimiter)
+ for (let i = 0; i < paths.length; i++) {
+ const binPath = path.join(paths[i], "mongod")
+ if (fs.existsSync(binPath)) {
+ console.log("mongod found in PATH")
+ return binPath
+ }
+ }
+ // Check if mongod is in the default installation path on macOS - /usr/local/bin/mongod
+ const binPath = "/usr/local/bin/mongod"
+ if (fs.existsSync(binPath)) {
+ return binPath
+ }
+ }
+ console.error("mongod not found")
+ return null
+ } else if (process.platform === "linux") {
+ // Check if mongod is in the process.env.PATH
+ const paths = process.env.PATH.split(path.delimiter)
+ for (let i = 0; i < paths.length; i++) {
+ const binPath = path.join(paths[i], "mongod")
+ if (fs.existsSync(binPath)) {
+ return binPath
+ }
+ }
+    console.error("mongod not found in PATH: " + paths)
+ // Check if mongod is in the default installation path on Linux - /usr/bin/mongod
+ if (fs.existsSync("/usr/bin/mongod")) {
+ return "/usr/bin/mongod"
+ }
+ console.error("mongod not found in /usr/bin/mongod")
+
+ if (fs.existsSync("/home/" + process.env.USER + "/.medomics/mongodb/bin/mongod")) {
+ return "/home/" + process.env.USER + "/.medomics/mongodb/bin/mongod"
+ }
+ return null
+ } else {
+ return "mongod"
+ }
+}
+
+export { startMongoDB, stopMongoDB, getMongoDBPath, getMongoDebugInfo }
+
+// Cross-platform check to see if a given TCP port is in use (LISTENING)
+async function checkMongoIsRunning(port) {
+ if (!port) return false
+ const platform = process.platform
+ const cmd = platform === "win32"
+ ? `netstat -ano | findstr :${port}`
+ : `lsof -i:${port} -sTCP:LISTEN -n -P || true`
+
+ try {
+ const { stdout } = await new Promise((resolve) => {
+ exec(cmd, (err, stdout, stderr) => {
+ // Treat any exec error as "not running" but resolve to simplify control flow
+ resolve({ stdout: stdout || "", stderr: stderr || "" })
+ })
+ })
+ if (!stdout) return false
+ if (platform === "win32") {
+ // netstat output contains LISTENING lines for open ports
+ return /LISTENING/i.test(stdout)
+ }
+ // On Unix, any lsof output indicates a process is listening on this port
+ return stdout.trim().length > 0
+ } catch (_) {
+ return false
+ }
+}
+
+export { checkMongoIsRunning }
\ No newline at end of file
diff --git a/main/utils/pythonEnv.js b/backend/utils/pythonEnv.js
similarity index 56%
rename from main/utils/pythonEnv.js
rename to backend/utils/pythonEnv.js
index f624a00e..a6e62457 100644
--- a/main/utils/pythonEnv.js
+++ b/backend/utils/pythonEnv.js
@@ -1,11 +1,144 @@
-import { app, dialog } from "electron"
-const fs = require("fs")
-var path = require("path")
-const { join } = require("path")
-const { readdir, stat, rm } = require("fs/promises")
-const util = require("util")
-const { execSync } = require("child_process")
-const exec = util.promisify(require("child_process").exec)
+import { getAppPath } from "./serverPathUtils.js"
+import path from "path"
+import util from "util"
+import fs from "fs"
+import { execSync, exec as execCb } from "child_process"
+const exec = util.promisify(execCb)
+import { readdir, stat } from "fs/promises"
+
+const _requirementsInstallPromises = new Map()
+let _bundledPythonSizeCheckStarted = false
+
+function sleep(ms) {
+ return new Promise(resolve => setTimeout(resolve, ms))
+}
+
+function getRequirementsInstallLockPath(pythonExecutablePath) {
+ try {
+ // Put the lock next to the interpreter so it is shared across processes.
+ return path.join(path.dirname(pythonExecutablePath), '.requirements-install.lock')
+ } catch {
+ return path.join(process.cwd(), '.requirements-install.lock')
+ }
+}
+
+async function acquireInstallLock(lockPath, timeoutMs = 30 * 60 * 1000) {
+ const started = Date.now()
+ let lockAcquired = false
+ while (!lockAcquired) {
+ try {
+ fs.writeFileSync(lockPath, JSON.stringify({ pid: process.pid, started: new Date().toISOString() }), { flag: 'wx' })
+ lockAcquired = true
+ return
+ } catch (e) {
+ if (e && e.code !== 'EEXIST') throw e
+ try {
+ const st = fs.statSync(lockPath)
+ // If lock is stale (> 60 min), delete it.
+ if (Date.now() - st.mtimeMs > 60 * 60 * 1000) {
+ try { fs.unlinkSync(lockPath) } catch {}
+ continue
+ }
+ } catch {
+ // If stat fails, retry creating lock.
+ }
+ if (Date.now() - started > timeoutMs) {
+ throw new Error(`Timed out waiting for python requirements install lock: ${lockPath}`)
+ }
+ await sleep(2000)
+ }
+ }
+}
+
+async function releaseInstallLock(lockPath) {
+ try {
+ await fs.promises.unlink(lockPath)
+ } catch (e) {
+ if (e && e.code === 'ENOENT') return
+ }
+}
+
+async function rmRecursive(targetPath) {
+ // Node 14.5 (used by nexe targets) does not support `rm` in fs/promises.
+ // Prefer rm when available, otherwise fall back to rmdir(recursive).
+ const fsp = fs.promises
+ try {
+ if (fsp && typeof fsp.rm === "function") {
+ await fsp.rm(targetPath, { recursive: true, force: true })
+ return
+ }
+ } catch (e) {
+ // If rm exists but fails (permissions, etc.), fall through to rmdir.
+ }
+
+ try {
+ if (fsp && typeof fsp.rmdir === "function") {
+ await fsp.rmdir(targetPath, { recursive: true })
+ }
+ } catch (e) {
+ // Match rm({force:true}) semantics as closely as we can.
+ if (e && (e.code === "ENOENT")) return
+ throw e
+ }
+}
+
+function getServerBundleRoot() {
+ // In Electron builds, process.resourcesPath is a good anchor.
+ // In the standalone server (nexe), process.resourcesPath can be undefined.
+ // Fall back to the executable directory and its parent, then cwd.
+ const execDir = (() => {
+ try {
+ return process.execPath ? path.dirname(process.execPath) : null
+ } catch {
+ return null
+ }
+ })()
+
+ const candidates = [
+ (typeof process.resourcesPath === "string" && process.resourcesPath) ? process.resourcesPath : null,
+ execDir,
+ execDir ? path.dirname(execDir) : null,
+ process.cwd(),
+ ].filter(Boolean)
+
+ // Prefer a directory that looks like the server bundle root.
+ for (const candidate of candidates) {
+ try {
+ if (
+ fs.existsSync(path.join(candidate, "pythonEnv")) ||
+ fs.existsSync(path.join(candidate, "pythonCode")) ||
+ fs.existsSync(path.join(candidate, "go_executables")) ||
+ fs.existsSync(path.join(candidate, "backend"))
+ ) {
+ return candidate
+ }
+ } catch {
+ // ignore
+ }
+ }
+ return candidates[0] || process.cwd()
+}
+
+function getMergedRequirementsPath() {
+ const requirementsFileName = "merged_requirements.txt"
+ const bundleRoot = getServerBundleRoot()
+ const candidates = [
+ path.join(bundleRoot, "pythonEnv", requirementsFileName),
+ path.join(bundleRoot, "resources", "pythonEnv", requirementsFileName),
+ path.join(process.cwd(), "pythonEnv", requirementsFileName),
+ path.join(process.cwd(), "resources", "pythonEnv", requirementsFileName),
+ ]
+ for (const p of candidates) {
+ try {
+ if (fs.existsSync(p)) return p
+ } catch {
+ // ignore
+ }
+ }
+ // Fall back to the most likely default; caller can handle missing file.
+ return candidates[0]
+}
+
/**
* Recursively calculates the size of a directory in bytes.
@@ -16,13 +149,13 @@ async function getDirectorySize(dir) {
const files = await readdir(dir, { withFileTypes: true })
const paths = files.map(async file => {
- const path = join(dir, file.name)
+ const filePath = path.join(dir, file.name)
if (file.isDirectory()) {
// Recurse into subdirectories
- return await getDirectorySize(path)
+ return await getDirectorySize(filePath)
} else if (file.isFile()) {
// Get size of files
- const { size } = await stat(path)
+ const { size } = await stat(filePath)
return size
}
return 0
@@ -45,7 +178,7 @@ async function checkSizeAndDeleteIfZero(directoryPath) {
if (size === 0) {
console.log(`Directory is empty. Deleting...`)
// The { recursive: true } option allows deleting a directory and its contents (even if empty)
- await rm(directoryPath, { recursive: true, force: true })
+ await rmRecursive(directoryPath)
console.log(`Directory deleted: ${directoryPath}`)
} else {
console.log(`Directory is not empty (size: ${size} bytes). Not deleting.`)
@@ -55,12 +188,12 @@ async function checkSizeAndDeleteIfZero(directoryPath) {
}
}
-export function getPythonEnvironment(medCondaEnv = "med_conda_env") {
+function getPythonEnvironment(medCondaEnv = "med_conda_env") {
// Returns the python environment
let pythonEnvironment = process.env.MED_ENV
// Retrieve the path to the conda environment from the settings file
- let userDataPath = app.getPath("userData")
+ let userDataPath = getAppPath("userData")
let settingsFilePath = path.join(userDataPath, "settings.json")
let settingsFound = fs.existsSync(settingsFilePath)
let settings = {}
@@ -129,7 +262,7 @@ function getCondaPath(parentPath) {
condaPath = checkDirectories(parentPath, possibleCondaPaths)
}
if (condaPath === null && process.platform !== "darwin") {
- console.log("No conda environment found")
+ console.log("No conda environment found. Please install Anaconda or Miniconda and try again.")
}
}
return condaPath
@@ -142,6 +275,9 @@ function getCondaPath(parentPath) {
* @returns {String} The path to the directory that exists
*/
function checkDirectories(parentPath, directories) {
+ if (!parentPath) {
+ return null
+ }
let directoryPath = null
directories.forEach((directory) => {
if (directoryPath === null) {
@@ -187,11 +323,15 @@ function getThePythonExecutablePath(condaPath, envName) {
return pythonExecutablePath
}
-export function getBundledPythonEnvironment() {
+function getBundledPythonEnvironment() {
let pythonEnvironment = null
let bundledPythonPath = null
+ // Check if the python path can be found in the .medomics directory
+ let medomicsDirExists = fs.existsSync(path.join(getAppPath("home"), ".medomics", "python"))
+ console.log("medomicsDirExists: ", medomicsDirExists)
+
if (process.env.NODE_ENV === "production") {
// Get the user path followed by .medomics
let userPath = getHomePath()
@@ -212,16 +352,21 @@ export function getBundledPythonEnvironment() {
bundledPythonPath = path.join(userPath, ".medomics", "python")
} else {
// Check if the python path can be found in the .medomics directory
- let medomicsDirExists = fs.existsSync(path.join(app.getPath("home"), ".medomics", "python"))
+ let medomicsDirExists = fs.existsSync(path.join(getAppPath("home"), ".medomics", "python"))
if (medomicsDirExists) {
bundledPythonPath = path.join(getHomePath(), ".medomics", "python")
} else {
+ console.log("Using process.cwd() path because medomicsDirExists is false: ", process.cwd())
bundledPythonPath = path.join(process.cwd(), "python")
}
}
// Check if the python folder is empty, if yes, delete it
- checkSizeAndDeleteIfZero(bundledPythonPath)
+ if (!_bundledPythonSizeCheckStarted) {
+ _bundledPythonSizeCheckStarted = true
+ // Fire-and-forget (cannot await from a sync getter); avoid spamming logs by running once.
+ checkSizeAndDeleteIfZero(bundledPythonPath).catch(() => {})
+ }
pythonEnvironment = path.join(bundledPythonPath, "bin", "python")
if (process.platform == "win32") {
@@ -233,15 +378,55 @@ export function getBundledPythonEnvironment() {
return pythonEnvironment
}
-export async function installRequiredPythonPackages(mainWindow) {
- let requirementsFileName = "merged_requirements.txt"
- if (process.env.NODE_ENV === "production") {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.cwd(), "resources", "pythonEnv", requirementsFileName))
- } else {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.cwd(), "pythonEnv", requirementsFileName))
+async function ensurePythonRequirementsInstalled(notify, pythonExecutablePath, requirementsFilePath = null) {
+ const pythonPath = pythonExecutablePath || getBundledPythonEnvironment()
+ const reqPath = requirementsFilePath || getMergedRequirementsPath()
+ if (!pythonPath || !fs.existsSync(pythonPath)) {
+ throw new Error(`Python executable not found: ${pythonPath}`)
+ }
+ if (!reqPath || !fs.existsSync(reqPath)) {
+ throw new Error(`Requirements file not found: ${reqPath}`)
+ }
+
+ const key = `${pythonPath}::${reqPath}`
+ if (_requirementsInstallPromises.has(key)) {
+ return await _requirementsInstallPromises.get(key)
+ }
+
+ const lockPath = getRequirementsInstallLockPath(pythonPath)
+ const promise = (async () => {
+ await acquireInstallLock(lockPath)
+ try {
+ const ok = checkPythonRequirements(pythonPath, reqPath)
+ if (ok) return { ok: true, installed: false }
+ await installRequiredPythonPackages(notify, pythonPath)
+ const ok2 = checkPythonRequirements(pythonPath, reqPath)
+ if (!ok2) {
+ throw new Error('Python requirements are still missing after install')
+ }
+ return { ok: true, installed: true }
+ } finally {
+ await releaseInstallLock(lockPath)
+ }
+ })()
+
+ _requirementsInstallPromises.set(key, promise)
+ try {
+ return await promise
+ } finally {
+ _requirementsInstallPromises.delete(key)
}
}
+async function installRequiredPythonPackages(notify, pythonExecutablePath) {
+ const requirementsPath = getMergedRequirementsPath()
+ if (!fs.existsSync(requirementsPath)) {
+ throw new Error(`Requirements file not found: ${requirementsPath}`)
+ }
+ // Ensure the async install is awaited.
+ await installPythonPackage(notify, pythonExecutablePath, null, requirementsPath)
+}
+
function comparePythonInstalledPackages(pythonPackages, requirements) {
let missingPackages = []
for (let i = 0; i < requirements.length; i++) {
@@ -271,18 +456,22 @@ function comparePythonInstalledPackages(pythonPackages, requirements) {
return missingPackages
}
-export function checkPythonRequirements(pythonPath = null, requirementsFilePath = null) {
+function checkPythonRequirements(pythonPath = null, requirementsFilePath = null) {
let pythonRequirementsMet = false
if (pythonPath === null) {
// pythonPath = getPythonEnvironment()
pythonPath = getBundledPythonEnvironment()
}
if (requirementsFilePath === null) {
- if (process.env.NODE_ENV === "production") {
- requirementsFilePath = path.join(process.resourcesPath, "pythonEnv", "merged_requirements.txt")
- } else {
- requirementsFilePath = path.join(process.cwd(), "pythonEnv", "merged_requirements.txt")
- }
+ requirementsFilePath = getMergedRequirementsPath()
+ }
+ if (!pythonPath || !fs.existsSync(pythonPath)) {
+ console.warn("Python executable not found for requirements check:", pythonPath)
+ return false
+ }
+ if (!requirementsFilePath || !fs.existsSync(requirementsFilePath)) {
+ console.warn("Requirements file not found for requirements check:", requirementsFilePath)
+ return false
}
let pythonPackages = getInstalledPythonPackages(pythonPath)
let requirements = fs.readFileSync(requirementsFilePath, "utf8").split("\n")
@@ -297,7 +486,7 @@ export function checkPythonRequirements(pythonPath = null, requirementsFilePath
return pythonRequirementsMet
}
-export function getInstalledPythonPackages(pythonPath = null) {
+function getInstalledPythonPackages(pythonPath = null) {
let pythonPackages = []
if (pythonPath === null) {
pythonPath = getPythonEnvironment()
@@ -305,7 +494,8 @@ export function getInstalledPythonPackages(pythonPath = null) {
let pythonPackagesOutput = ""
try {
- pythonPackagesOutput = execSync(`${pythonPath} -m pip list --format=json`).toString()
+ // Quote path to support spaces (e.g., Program Files).
+ pythonPackagesOutput = execSync(`"${pythonPath}" -m pip list --format=json`).toString()
} catch (error) {
console.warn("Error retrieving python packages:", error)
}
@@ -317,33 +507,57 @@ export function getInstalledPythonPackages(pythonPath = null) {
return pythonPackages
}
-export async function installPythonPackage(mainWindow, pythonPath, packageName = null, requirementsFilePath = null) {
+async function installPythonPackage(notify, pythonPath, packageName = null, requirementsFilePath = null) {
console.log("Installing python package: ", packageName, requirementsFilePath, " with pythonPath: ", pythonPath)
- let execSyncResult = null
- let pipUpgradePromise = exec(`${pythonPath} -m pip install --upgrade pip`)
- execCallbacksForChildWithNotifications(pipUpgradePromise.child, "Python pip Upgrade", mainWindow)
- await pipUpgradePromise
- if (requirementsFilePath !== null) {
- let installPythonPackagePromise = exec(`${pythonPath} -m pip install -r ${requirementsFilePath}`)
- execCallbacksForChildWithNotifications(installPythonPackagePromise.child, "Python Package Installation from requirements", mainWindow)
- await installPythonPackagePromise
- } else {
- let installPythonPackagePromise = exec(`${pythonPath} -m pip install ${packageName}`)
- execCallbacksForChildWithNotifications(installPythonPackagePromise.child, "Python Package Installation", mainWindow)
- await installPythonPackagePromise
+ const quotedPython = `"${pythonPath}"`
+ const quotedReq = requirementsFilePath ? `"${requirementsFilePath}"` : null
+ try {
+ const pipUpgradePromise = exec(`${quotedPython} -m pip install --upgrade pip`)
+ execCallbacksForChildWithNotifications(pipUpgradePromise.child, "Python pip Upgrade", notify)
+ await pipUpgradePromise
+ } catch (e) {
+ // Promisified exec rejects, but its error can contain stdout/stderr.
+ console.error('[python] pip upgrade failed:', e && e.message ? e.message : e)
+ if (e && e.stdout) console.error('[python] pip upgrade stdout:', String(e.stdout))
+ if (e && e.stderr) console.error('[python] pip upgrade stderr:', String(e.stderr))
+ throw e
+ }
+
+ try {
+ if (requirementsFilePath !== null) {
+ const installPythonPackagePromise = exec(`${quotedPython} -m pip install -r ${quotedReq}`)
+ execCallbacksForChildWithNotifications(installPythonPackagePromise.child, "Python Package Installation from requirements", notify)
+ await installPythonPackagePromise
+ } else {
+ const installPythonPackagePromise = exec(`${quotedPython} -m pip install ${packageName}`)
+ execCallbacksForChildWithNotifications(installPythonPackagePromise.child, "Python Package Installation", notify)
+ await installPythonPackagePromise
+ }
+ } catch (e) {
+ console.error('[python] pip install failed:', e && e.message ? e.message : e)
+ if (e && e.stdout) console.error('[python] pip install stdout:', String(e.stdout))
+ if (e && e.stderr) console.error('[python] pip install stderr:', String(e.stderr))
+ throw e
}
}
-export function execCallbacksForChildWithNotifications(child, id, mainWindow) {
- mainWindow.webContents.send("notification", { id: id, message: `Starting...`, header: `${id} in progress` })
+function execCallbacksForChildWithNotifications(child, id, notify) {
+ if (!notify) notify = () => {}
+ // Always log to console (captured by express.log in packaged server), even
+ // when no UI notifier is provided.
+ console.log(`[python] ${id}: starting...`)
+ notify({ id: id, message: `Starting...`, header: `${id} in progress` })
child.stdout.on("data", (data) => {
- mainWindow.webContents.send("notification", { id: id, message: `stdout: ${data}`, header: `${id} in progress` })
+ console.log(`[python] ${id} stdout: ${String(data)}`)
+ notify({ id: id, message: `stdout: ${data}`, header: `${id} in progress` })
})
child.stderr.on("data", (data) => {
- mainWindow.webContents.send("notification", { id: id, message: `stderr: ${data}`, header: `${id} Error` })
+ console.log(`[python] ${id} stderr: ${String(data)}`)
+ notify({ id: id, message: `stderr: ${data}`, header: `${id} Error` })
})
child.on("close", (code) => {
- mainWindow.webContents.send("notification", { id: id, message: `${id} exited with code ${code}`, header: `${id} Finished` })
+ console.log(`[python] ${id}: exited with code ${code}`)
+ notify({ id: id, message: `${id} exited with code ${code}`, header: `${id} Finished` })
})
}
@@ -354,10 +568,11 @@ function getHomePath() {
} else {
homePath = process.env.HOME
}
+ console.log("homePath: ", homePath)
return homePath
}
-export async function installBundledPythonExecutable(mainWindow) {
+async function installBundledPythonExecutable(notify) {
let bundledPythonPath = null
let medomicsPath = null
@@ -384,8 +599,10 @@ export async function installBundledPythonExecutable(mainWindow) {
}
bundledPythonPath = pythonPath
} else {
+ console.log("Using process.cwd() path because not in production env: ", process.cwd())
+ bundledPythonPath = path.join(process.cwd(), "python")
// Check if the python path can be found in the .medomics directory
- let medomicsDirExists = fs.existsSync(path.join(app.getPath("home"), ".medomics", "python"))
+ let medomicsDirExists = fs.existsSync(path.join(getAppPath("home"), ".medomics", "python"))
if (medomicsDirExists) {
bundledPythonPath = path.join(getHomePath(), ".medomics", "python")
} else {
@@ -409,25 +626,25 @@ export async function installBundledPythonExecutable(mainWindow) {
let downloadPromise = exec(`wget ${url} -O ${outputFileName}`, { shell: "powershell.exe" })
- execCallbacksForChildWithNotifications(downloadPromise.child, "Python Downloading", mainWindow)
+ execCallbacksForChildWithNotifications(downloadPromise.child, "Python Downloading", notify)
const { stdout, stderr } = await downloadPromise
let extractCommand = `tar -xvf ${outputFileName} -C ${pythonParentFolderExtractString}`
let extractionPromise = exec(extractCommand, { shell: "powershell.exe" })
- execCallbacksForChildWithNotifications(extractionPromise.child, "Python Exec. Extracting", mainWindow)
+ execCallbacksForChildWithNotifications(extractionPromise.child, "Python Exec. Extracting", notify)
- const { stdout: extrac, stderr: extracErr } = await extractionPromise
+ await extractionPromise
// Install the required python packages
if (process.env.NODE_ENV === "production") {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.cwd(), "resources", "pythonEnv", "merged_requirements.txt"))
+ installPythonPackage(notify, pythonExecutablePath, null, path.join(process.cwd(), "resources", "pythonEnv", "merged_requirements.txt"))
} else {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.cwd(), "pythonEnv", "merged_requirements.txt"))
+ installPythonPackage(notify, pythonExecutablePath, null, path.join(process.cwd(), "pythonEnv", "merged_requirements.txt"))
}
let removeCommand = `rm ${outputFileName}`
let removePromise = exec(removeCommand, { shell: "powershell.exe" })
- execCallbacksForChildWithNotifications(removePromise.child, "Python Exec. Removing", mainWindow)
- const { stdout: remove, stderr: removeErr } = await removePromise
+ execCallbacksForChildWithNotifications(removePromise.child, "Python Exec. Removing", notify)
+ await removePromise
} else if (process.platform == "darwin") {
// Download the right python executable (arm64 or x86_64)
let isArm64 = process.arch === "arm64"
@@ -439,25 +656,25 @@ export async function installBundledPythonExecutable(mainWindow) {
let url = `https://github.com/indygreg/python-build-standalone/releases/download/20240224/${file}`
let extractCommand = `tar -xvf ${file} -C ${pythonParentFolderExtractString}`
let downloadPromise = exec(`/bin/bash -c "$(curl -fsSLO ${url})"`)
- execCallbacksForChildWithNotifications(downloadPromise.child, "Python Downloading", mainWindow)
- const { stdout, stderr } = await downloadPromise
+ execCallbacksForChildWithNotifications(downloadPromise.child, "Python Downloading", notify)
+ await downloadPromise
// Extract the python executable
let extractionPromise = exec(extractCommand)
- execCallbacksForChildWithNotifications(extractionPromise.child, "Python Exec. Extracting", mainWindow)
- const { stdout: extrac, stderr: extracErr } = await extractionPromise
+ execCallbacksForChildWithNotifications(extractionPromise.child, "Python Exec. Extracting", notify)
+ await extractionPromise
// Remove the downloaded file
let removeCommand = `rm ${file}`
let removePromise = exec(removeCommand)
- execCallbacksForChildWithNotifications(removePromise.child, "Python Exec. Removing", mainWindow)
- const { stdout: remove, stderr: removeErr } = await removePromise
+ execCallbacksForChildWithNotifications(removePromise.child, "Python Exec. Removing", notify)
+ await removePromise
// Install the required python packages
if (process.env.NODE_ENV === "production") {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.resourcesPath, "pythonEnv", "requirements_mac.txt"))
+ installPythonPackage(notify, pythonExecutablePath, null, path.join(process.resourcesPath, "pythonEnv", "requirements_mac.txt"))
} else {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.cwd(), "pythonEnv", "requirements_mac.txt"))
+ installPythonPackage(notify, pythonExecutablePath, null, path.join(process.cwd(), "pythonEnv", "requirements_mac.txt"))
}
} else if (process.platform == "linux") {
// Download the right python executable (arm64 or x86_64)
@@ -471,29 +688,44 @@ export async function installBundledPythonExecutable(mainWindow) {
// Download the python executable
let downloadPromise = exec(`wget ${url} -P ${pythonParentFolderExtractString}`)
- execCallbacksForChildWithNotifications(downloadPromise.child, "Python Downloading", mainWindow)
+ execCallbacksForChildWithNotifications(downloadPromise.child, "Python Downloading", notify)
const { stdout: download, stderr: downlaodErr } = await downloadPromise
// Extract the python executable
let extractCommand = `tar -xvf ${path.join(pythonParentFolderExtractString, file)} -C ${pythonParentFolderExtractString}`
let extractionPromise = exec(extractCommand)
- execCallbacksForChildWithNotifications(extractionPromise.child, "Python Exec. Extracting", mainWindow)
- const { stdout: extrac, stderr: extracErr } = await extractionPromise
+ execCallbacksForChildWithNotifications(extractionPromise.child, "Python Exec. Extracting", notify)
+ await extractionPromise
// Remove the downloaded file
let removeCommand = `rm ${path.join(pythonParentFolderExtractString, file)}`
let removePromise = exec(removeCommand)
- execCallbacksForChildWithNotifications(removePromise.child, "Python Exec. Removing", mainWindow)
- const { stdout: remove, stderr: removeErr } = await removePromise
+ execCallbacksForChildWithNotifications(removePromise.child, "Python Exec. Removing", notify)
+ await removePromise
console.log("pythonExecutablePath: ", pythonExecutablePath)
console.log("process.cwd(): ", process)
console.log("process.resourcesPath: ", process.resourcesPath)
// Install the required python packages
if (process.env.NODE_ENV === "production") {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.resourcesPath, "pythonEnv", "merged_requirements.txt"))
+ installPythonPackage(notify, pythonExecutablePath, null, path.join(process.resourcesPath, "pythonEnv", "merged_requirements.txt"))
} else {
- installPythonPackage(mainWindow, pythonExecutablePath, null, path.join(process.cwd(), "pythonEnv", "merged_requirements.txt"))
+ installPythonPackage(notify, pythonExecutablePath, null, path.join(process.cwd(), "pythonEnv", "merged_requirements.txt"))
}
}
}
}
+
+export {
+ getPythonEnvironment,
+ getBundledPythonEnvironment,
+ installRequiredPythonPackages,
+ ensurePythonRequirementsInstalled,
+ checkPythonRequirements,
+ getInstalledPythonPackages,
+ installPythonPackage,
+ execCallbacksForChildWithNotifications,
+ installBundledPythonExecutable,
+ getMergedRequirementsPath,
+ getServerBundleRoot
+}
+
diff --git a/main/utils/server.js b/backend/utils/server.mjs
similarity index 55%
rename from main/utils/server.js
rename to backend/utils/server.mjs
index a68fd959..591fd291 100644
--- a/main/utils/server.js
+++ b/backend/utils/server.mjs
@@ -1,8 +1,19 @@
-import MEDconfig, { PORT_FINDING_METHOD } from "../../medomics.dev"
-import { getPythonEnvironment, getBundledPythonEnvironment } from "./pythonEnv"
-const { exec, execFile } = require("child_process")
-const os = require("os")
-var path = require("path")
+// When running the backend standalone (node ./backend/expressServer.mjs)
+// the project may be a mixed ESM/CommonJS workspace and importing the
+// top-level `medomics.dev.js` can fail. Provide local defaults here so
+// the backend can run independently. If you need to sync values, update
+// them manually or implement a small shared JSON config.
+export const PORT_FINDING_METHOD = { FIX: 0, AVAILABLE: 1 }
+const MEDconfig = {
+ runServerAutomatically: true,
+ defaultPort: 54288,
+ portFindingMethod: PORT_FINDING_METHOD.FIX
+}
+import { getPythonEnvironment, getBundledPythonEnvironment } from "./pythonEnv.js"
+import { exec, execFile } from "child_process"
+import os from "os"
+import path from "path"
+import fs from "fs"
export function findAvailablePort(startPort, endPort = 8000) {
let killProcess = MEDconfig.portFindingMethod === PORT_FINDING_METHOD.FIX || !MEDconfig.runServerAutomatically
@@ -120,6 +131,12 @@ export async function runServer(isProd, serverPort, serverProcess, serverState,
let env = process.env
let bundledPythonPath = getBundledPythonEnvironment()
+ // The Go server expects MED_ENV to be the Python executable to run.
+ // Prefer bundled Python (if present), else configured pythonEnvironment, else provided condaPath.
+ // Fall back to plain `python` so PATH resolution can work.
+ const pythonForGo = (bundledPythonPath || pythonEnvironment || condaPath || "python")
+ env.MED_ENV = pythonForGo
+
if (bundledPythonPath !== null) {
bundledPythonPath = bundledPythonPath.replace("python.exe", "")
@@ -131,20 +148,25 @@ export async function runServer(isProd, serverPort, serverProcess, serverState,
console.log("env.PATH: " + env.PATH)
}
+ let chosenPort = null
+
if (!isProd) {
//**** DEVELOPMENT ****//
let args = [serverPort, "dev", process.cwd()]
// Get the temporary directory path
args.push(os.tmpdir())
-
- if (condaPath !== null) {
- args.push(condaPath)
- }
+ // Always pass the effective python executable path as last arg so Go can use it.
+ // This avoids stale conda paths overriding bundled Python.
+ args.push(pythonForGo)
await findAvailablePort(MEDconfig.defaultPort)
.then((port) => {
serverPort = port
- serverState.serverIsRunning = true
+ chosenPort = port
+ // ensure the spawned process receives the actual chosen port as first argument
+ if (Array.isArray(args) && args.length > 0) args[0] = serverPort
+ serverState.running = true
+ serverState.port = serverPort
serverProcess = execFile(`${process.platform == "win32" ? "main.exe" : "./main"}`, args, {
windowsHide: false,
cwd: path.join(process.cwd(), "go_server"),
@@ -164,7 +186,8 @@ export async function runServer(isProd, serverPort, serverProcess, serverState,
console.log(`disconnected`)
})
serverProcess.on("close", (code) => {
- serverState.serverIsRunning = false
+ serverState.running = false
+ serverState.port = null
console.log(`server child process close all stdio with code ${code}`)
})
}
@@ -174,35 +197,85 @@ export async function runServer(isProd, serverPort, serverProcess, serverState,
})
} else {
//**** PRODUCTION ****//
- let args = [serverPort, "prod", process.resourcesPath]
+ // In production we must pass a base directory where pythonCode/ exists.
+ // In standalone server bundles, this is the directory containing medomics-server.exe.
+ // `process.resourcesPath` is Electron-specific and may be undefined under nexe.
+ const exeDir = path.dirname(process.execPath)
+ const baseRootCandidates = [
+ (typeof process.resourcesPath === 'string' && process.resourcesPath) ? process.resourcesPath : null,
+ exeDir,
+ path.dirname(exeDir),
+ ].filter(Boolean)
+
+ const baseRoot = baseRootCandidates.find((candidate) => {
+ try {
+ // Prefer a directory that looks like the server bundle root.
+ return fs.existsSync(path.join(candidate, 'pythonCode')) || fs.existsSync(path.join(candidate, 'go_executables')) || fs.existsSync(path.join(candidate, 'backend'))
+ } catch {
+ return false
+ }
+ }) || exeDir
+ let args = [serverPort, "prod", baseRoot]
// Get the temporary directory path
args.push(os.tmpdir())
- if (condaPath !== null) {
- args.push(condaPath)
- }
+ // Always pass python executable path as last argument so Go can run python scripts.
+ // (If not present, it will be the string "python" and rely on PATH.)
+ args.push(pythonForGo)
await findAvailablePort(MEDconfig.defaultPort)
.then((port) => {
serverPort = port
- console.log("_dirname: ", __dirname)
+ chosenPort = port
console.log("process.resourcesPath: ", process.resourcesPath)
+ console.log("process.execPath: ", process.execPath)
+ console.log("[go] baseRoot:", baseRoot)
+ console.log("[go] MED_ENV (python):", env.MED_ENV)
+ // ensure the spawned process receives the actual chosen port as first argument
+ if (Array.isArray(args) && args.length > 0) args[0] = serverPort
+
+ // In production, the GO executable is located relative to the
+ // server bundle root (same folder that contains pythonCode/ and go_executables/).
if (process.platform == "win32") {
- serverProcess = execFile(path.join(process.resourcesPath, "go_executables\\server_go_win32.exe"), args, {
- windowsHide: false,
- env: env
- })
- serverState.serverIsRunning = true
+ const goPathWin = path.join(baseRoot, "go_executables", "server_go_win32.exe")
+ console.log("Resolved GO executable path (win32):", goPathWin)
+
+ if (!fs.existsSync(goPathWin)) {
+ console.error("GO executable not found at:", goPathWin)
+ } else {
+ serverProcess = execFile(goPathWin, args, {
+ windowsHide: false,
+ env: env
+ })
+ serverState.running = true
+ serverState.port = serverPort
+ }
} else if (process.platform == "linux") {
- serverProcess = execFile(path.join(process.resourcesPath, "go_executables/server_go"), args, {
- windowsHide: false
- })
- serverState.serverIsRunning = true
+ const goPathLinux = path.join(baseRoot, "go_executables", "server_go")
+ console.log("Resolved GO executable path (linux):", goPathLinux)
+
+ if (!fs.existsSync(goPathLinux)) {
+ console.error("GO executable not found at:", goPathLinux)
+ } else {
+ serverProcess = execFile(goPathLinux, args, {
+ windowsHide: false
+ })
+ serverState.running = true
+ serverState.port = serverPort
+ }
} else if (process.platform == "darwin") {
- serverProcess = execFile(path.join(process.resourcesPath, "go_executables/server_go"), args, {
- windowsHide: false
- })
- serverState.serverIsRunning = true
+ const goPathDarwin = path.join(baseRoot, "go_executables", "server_go")
+ console.log("Resolved GO executable path (darwin):", goPathDarwin)
+
+ if (!fs.existsSync(goPathDarwin)) {
+ console.error("GO executable not found at:", goPathDarwin)
+ } else {
+ serverProcess = execFile(goPathDarwin, args, {
+ windowsHide: false
+ })
+ serverState.running = true
+ serverState.port = serverPort
+ }
}
if (serverProcess) {
serverProcess.stdout.on("data", function (data) {
@@ -210,11 +283,24 @@ export async function runServer(isProd, serverPort, serverProcess, serverState,
})
serverProcess.stderr.on("data", (data) => {
console.log(`stderr: ${data}`)
- serverState.serverIsRunning = true
+ serverState.running = true
+ serverState.port = serverPort
+ })
+ serverProcess.on("error", (err) => {
+ // Covers spawn failures and async child_process errors.
+ // Ensure the exported serverState reflects the process not running.
+ try {
+ console.log(`[go] server process error: ${err && err.message ? err.message : String(err)}`)
+ } catch {
+ // ignore logging errors
+ }
+ serverState.running = false
+ serverState.port = null
})
serverProcess.on("close", (code) => {
- serverState.serverIsRunning = false
- console.log(`my server child process close all stdio with code ${code}`)
+ serverState.running = false
+ serverState.port = null
+ console.log(`[go] process close all stdio with code ${code}`)
})
}
})
@@ -222,5 +308,6 @@ export async function runServer(isProd, serverPort, serverProcess, serverState,
console.error(err)
})
}
- return serverProcess
+ // Return both the spawned process handle and the actual bound port
+ return { process: serverProcess, port: chosenPort }
}
diff --git a/backend/utils/serverInstallation.js b/backend/utils/serverInstallation.js
new file mode 100644
index 00000000..bce6b777
--- /dev/null
+++ b/backend/utils/serverInstallation.js
@@ -0,0 +1,179 @@
+import { getBundledPythonEnvironment, execCallbacksForChildWithNotifications } from "./pythonEnv.js"
+import { getMongoDBPath } from "./mongoDBServer.js"
+import { getAppPath } from "./serverPathUtils.js"
+import fs from "fs"
+import readline from "readline"
+
+async function checkIsBrewInstalled() {
+ let isBrewInstalled = false
+ try {
+ let { stdout, stderr } = await exec(`brew --version`)
+ isBrewInstalled = stdout !== "" && stderr === ""
+ } catch (error) {
+ isBrewInstalled = false
+ }
+ return isBrewInstalled
+}
+
+async function checkIsXcodeSelectInstalled() {
+ let isXcodeSelectInstalled = false
+ try {
+ let { stdout, stderr } = await exec(`xcode-select -p`)
+ isXcodeSelectInstalled = stdout !== "" && stderr === ""
+ } catch (error) {
+ isXcodeSelectInstalled = false
+ }
+ return isXcodeSelectInstalled
+}
+
+async function installBrew(){
+ let installBrewPromise = exec(`/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"`)
+ await installBrewPromise
+ return true
+}
+
+async function installXcodeSelect() {
+ let installXcodeSelectPromise = exec(`xcode-select --install`)
+ await installXcodeSelectPromise
+ return true
+}
+
+
+import path from "path"
+import util from "util"
+import { exec as childProcessExec } from "child_process"
+const exec = util.promisify(childProcessExec)
+
+async function checkRequirements() {
+ // Ensure .medomics directory exists
+ const homeDir = getAppPath("home")
+ const medomicsDir = path.join(homeDir, ".medomics")
+ if (!fs.existsSync(medomicsDir)) {
+ fs.mkdirSync(medomicsDir)
+ }
+ const mongoDBInstalled = getMongoDBPath()
+ const pythonInstalled = getBundledPythonEnvironment()
+
+ console.log("MongoDB installed:", mongoDBInstalled ? mongoDBInstalled : "Not found")
+ console.log("Python installed:", pythonInstalled ? pythonInstalled : "Not found")
+ return { pythonInstalled, mongoDBInstalled }
+}
+
+async function installMongoDB() {
+ if (process.platform === "win32") {
+ // Download MongoDB installer
+ const downloadUrl = "https://fastdl.mongodb.org/windows/mongodb-windows-x86_64-7.0.12-signed.msi"
+ const downloadPath = path.join(getAppPath("downloads"), "mongodb-windows-x86_64-7.0.12-signed.msi")
+ let downloadMongoDBPromise = exec(`curl -o ${downloadPath} ${downloadUrl}`)
+ execCallbacksForChildWithNotifications(downloadMongoDBPromise.child, "Downloading MongoDB installer")
+ await downloadMongoDBPromise
+ // Install MongoDB
+ // msiexec.exe /l*v mdbinstall.log /qb /i mongodb-windows-x86_64-7.0.12-signed.msi ADDLOCAL="ServerNoService" SHOULD_INSTALL_COMPASS="0"
+ let installMongoDBPromise = exec(`msiexec.exe /l*v mdbinstall.log /qb /i ${downloadPath} ADDLOCAL="ServerNoService" SHOULD_INSTALL_COMPASS="0"`)
+ execCallbacksForChildWithNotifications(installMongoDBPromise.child, "Installing MongoDB")
+ await installMongoDBPromise
+
+ let removeMongoDBInstallerPromise = exec(`rm ${downloadPath}`, { shell: "powershell" })
+ execCallbacksForChildWithNotifications(removeMongoDBInstallerPromise.child, "Removing MongoDB installer")
+ await removeMongoDBInstallerPromise
+
+ return getMongoDBPath() !== null
+ } else if (process.platform === "darwin") {
+ // Check if Homebrew is installed
+ let isBrewInstalled = await checkIsBrewInstalled()
+ if (!isBrewInstalled) {
+ await installBrew()
+ }
+ // Check if Xcode Command Line Tools are installed
+ let isXcodeSelectInstalled = await checkIsXcodeSelectInstalled()
+ if (!isXcodeSelectInstalled) {
+ await installXcodeSelect()
+ }
+
+ let installMongoDBPromise = exec(`brew tap mongodb/brew && brew install mongodb-community@7.0.12`)
+ execCallbacksForChildWithNotifications(installMongoDBPromise.child, "Installing MongoDB")
+
+
+
+ return getMongoDBPath() !== null
+ } else if (process.platform === "linux") {
+ const linuxURLDict = {
+ "Ubuntu 20.04 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu2004-7.0.15.tgz",
+ "Ubuntu 22.04 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu2204-7.0.15.tgz",
+ "Ubuntu 20.04 aarch64": "https://fastdl.mongodb.org/linux/mongodb-linux-aarch64-ubuntu2004-7.0.15.tgz",
+ "Ubuntu 22.04 aarch64": "https://fastdl.mongodb.org/linux/mongodb-linux-aarch64-ubuntu2204-7.0.15.tgz",
+ "Debian 10 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-debian10-7.0.15.tgz",
+ "Debian 11 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-debian11-7.0.15.tgz",
+ }
+ // Check if MongoDB is installed
+ if (getMongoDBPath() !== null) {
+ return true
+ } else {
+ // Check which Linux distribution is being used
+ let { stdout, stderr } = await exec(`cat /etc/os-release`)
+ let osRelease = stdout
+ let isUbuntu = osRelease.includes("Ubuntu")
+ if (!isUbuntu) {
+ console.log("Only Ubuntu is supported for now")
+ return false
+ } else {
+ // osRelease is a string with the contents of /etc/os-release
+ // Get the version of Ubuntu
+ let ubuntuVersion = osRelease.match(/VERSION_ID="(.*)"/)[1]
+ // Get the architecture of the system
+ let architecture = "x86_64"
+ if (process.arch === "arm64") {
+ architecture = "aarch64"
+ }
+ // Get the download URL
+ let downloadUrl = linuxURLDict[`Ubuntu ${ubuntuVersion} ${architecture}`]
+ // Download MongoDB installer
+ const downloadPath = path.join(getAppPath("downloads"), `mongodb-linux-${architecture}-ubuntu${ubuntuVersion}-7.0.15.tgz`)
+ let downloadMongoDBPromise = exec(`curl -o ${downloadPath} ${downloadUrl}`)
+ execCallbacksForChildWithNotifications(downloadMongoDBPromise.child, "Downloading MongoDB installer")
+ await downloadMongoDBPromise
+ // Install MongoDB in the .medomics directory in the user's home directory
+ ubuntuVersion = ubuntuVersion.replace(".", "")
+ let command = `tar -xvzf ${downloadPath} -C /home/${process.env.USER}/.medomics/ && mv /home/${process.env.USER}/.medomics/mongodb-linux-${architecture}-ubuntu${ubuntuVersion}-7.0.15 /home/${process.env.USER}/.medomics/mongodb`
+ let installMongoDBPromise = exec(command)
+
+ // let installMongoDBPromise = exec(`tar -xvzf ${downloadPath} && mv mongodb-linux-${architecture}-ubuntu${ubuntuVersion}-7.0.15 /home/${process.env.USER}/.medomics/mongodb`)
+ execCallbacksForChildWithNotifications(installMongoDBPromise.child, "Installing MongoDB")
+ await installMongoDBPromise
+
+
+
+
+ return getMongoDBPath() !== null
+ }
+ }
+ }
+}
+
+// Helper: CLI prompt for MongoDB install
+async function promptAndInstallMongoDB() {
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout })
+ const question = (q) => new Promise((res) => rl.question(q, res))
+ const answer = await question("MongoDB is not installed. Would you like to install it now? (Y/n): ")
+ rl.close()
+ if (answer.trim().toLowerCase() === "y" || answer.trim() === "") {
+ const success = await installMongoDB()
+ if (success) {
+ console.log("MongoDB installed successfully.")
+ } else {
+ console.log("MongoDB installation failed. Please install manually.")
+ }
+ } else {
+ console.log("MongoDB installation skipped. The application may not function correctly without it.")
+ }
+}
+
+export {
+ checkIsBrewInstalled,
+ checkIsXcodeSelectInstalled,
+ installBrew,
+ installXcodeSelect,
+ installMongoDB,
+ promptAndInstallMongoDB,
+ checkRequirements
+}
\ No newline at end of file
diff --git a/backend/utils/serverPathUtils.js b/backend/utils/serverPathUtils.js
new file mode 100644
index 00000000..354dd003
--- /dev/null
+++ b/backend/utils/serverPathUtils.js
@@ -0,0 +1,36 @@
+import os from "os"
+import path from "path"
+const pathOverrides = {}
+
+function setAppPath(alias, value) {
+ pathOverrides[alias] = value
+}
+
+function getAppPath(alias, isProd = true) {
+ if (pathOverrides[alias]) return pathOverrides[alias]
+
+ switch (alias) {
+ case "home":
+ return os.homedir()
+ case "downloads":
+ return path.join(os.homedir(), "Downloads")
+ case "sessionData":
+ return pathOverrides["sessionData"] || path.join(os.homedir(), ".medomics", "sessionData")
+ case "userData": {
+ const appName = "medomics"
+ let dataDir
+ if (process.platform === "win32") {
+ dataDir = path.join(process.env.APPDATA || path.join(os.homedir(), "AppData", "Roaming"), appName)
+ } else if (process.platform === "darwin") {
+ dataDir = path.join(os.homedir(), "Library", "Application Support", appName)
+ } else {
+ dataDir = path.join(process.env.XDG_CONFIG_HOME || path.join(os.homedir(), ".config"), appName)
+ }
+ return isProd ? dataDir : dataDir + " (development)"
+ }
+ default:
+ throw new Error("Unknown path alias: " + alias)
+ }
+}
+
+export { setAppPath, getAppPath }
diff --git a/backend/utils/serverWorkspace.js b/backend/utils/serverWorkspace.js
new file mode 100644
index 00000000..6dca9bf8
--- /dev/null
+++ b/backend/utils/serverWorkspace.js
@@ -0,0 +1,186 @@
+import MEDconfig, { PORT_FINDING_METHOD } from "./medomics.server.dev.js"
+import { getAppPath, setAppPath } from "./serverPathUtils.js"
+
+import fs from "fs"
+import path from "path"
+import dirTree from "directory-tree"
+
+function getServerWorkingDirectory() {
+ // Returns the working directory
+ return getAppPath("sessionData")
+}
+
+function loadServerWorkspaces() {
+ const userDataPath = getAppPath("userData")
+ const workspaceFilePath = path.join(userDataPath, "workspaces.json")
+ if (fs.existsSync(workspaceFilePath)) {
+ const workspaces = JSON.parse(fs.readFileSync(workspaceFilePath, "utf8"))
+ // Sort workspaces by date, most recent first
+ let sortedWorkspaces = workspaces.sort((a, b) => new Date(b.lastTimeItWasOpened) - new Date(a.lastTimeItWasOpened))
+ // Check if the workspaces still exist
+ let workspacesThatStillExist = []
+ sortedWorkspaces.forEach((workspace) => {
+ if (fs.existsSync(workspace.path)) {
+ workspacesThatStillExist.push(workspace)
+ } else {
+ console.log("Workspace does not exist anymore: ", workspace.path)
+ }
+ })
+ return workspacesThatStillExist
+ } else {
+ return []
+ }
+}
+
+/**
+ * Saves the recent workspaces
+ * @param {Array} workspaces An array of workspaces
+ */
+function saveServerWorkspaces(workspaces) {
+ const userDataPath = getAppPath("userData")
+ const workspaceFilePath = path.join(userDataPath, "workspaces.json")
+ fs.writeFileSync(workspaceFilePath, JSON.stringify(workspaces))
+}
+
+/**
+ * Updates the recent workspaces
+ * @param {String} workspacePath The path of the workspace to update
+ */
+function updateServerWorkspace(workspacePath) {
+ const workspaces = loadServerWorkspaces()
+ const workspaceIndex = workspaces.findIndex((workspace) => workspace.path === workspacePath)
+ if (workspaceIndex !== -1) {
+ // Workspace exists, update it
+ workspaces[workspaceIndex].status = "opened"
+ workspaces[workspaceIndex].lastTimeItWasOpened = new Date().toISOString()
+ } else {
+ // Workspace doesn't exist, add it
+ workspaces.push({
+ path: workspacePath,
+ status: "opened",
+ lastTimeItWasOpened: new Date().toISOString()
+ })
+ }
+ setAppPath("sessionData", workspacePath)
+ saveServerWorkspaces(workspaces)
+}
+
+/**
+ * Generate recent workspaces options
+ * @param {*} event The event
+ * @param {*} mainWindow The main window
+ * @param {*} hasBeenSet A boolean indicating if the workspace has been set
+ * @param {*} workspacesArray The array of workspaces, if null, the function will load the workspaces
+ * @returns {Array} An array of recent workspaces options
+ */
+function getRecentServerWorkspacesOptions(event, mainWindow, hasBeenSet, serverPort, workspacesArray = null) {
+ let workspaces
+ if (workspacesArray === null) {
+ workspaces = loadServerWorkspaces()
+ } else {
+ workspaces = workspacesArray
+ }
+ const recentWorkspaces = workspaces.filter((workspace) => workspace.status === "opened")
+ if (event !== null) {
+ event.reply("recentWorkspaces", recentWorkspaces)
+ }
+ const recentWorkspacesOptions = recentWorkspaces.map((workspace) => {
+ return {
+ label: workspace.path,
+ click() {
+ updateServerWorkspace(workspace.path)
+ let workspaceObject = {
+ workingDirectory: dirTree(workspace.path),
+ hasBeenSet: true,
+ newPort: serverPort
+ }
+ hasBeenSet = true
+ //mainWindow.webContents.send("openWorkspace", workspaceObject)
+ }
+ }
+ })
+ return recentWorkspacesOptions
+}
+
+// Function to create the working directory
+function createServerWorkingDirectory() {
+  // Creates the standard workspace folders (DATA, EXPERIMENTS) — mirrors the workspace menuTemplate in the repository
+ createFolder("DATA")
+ createFolder("EXPERIMENTS")
+}
+
+
+// Function to create a folder from a given path
+function createFolder(folderString) {
+ // Creates a folder in the working directory
+ const folderPath = path.join(getAppPath("sessionData"), folderString)
+ // Check if the folder already exists
+ if (!fs.existsSync(folderPath)) {
+ fs.mkdir(folderPath, { recursive: true }, (err) => {
+ if (err) {
+ console.error(err)
+ return
+ }
+ console.log("Folder created successfully!")
+ })
+ }
+}
+
+// Function to create the .medomics directory and necessary files
+const createServerMedomicsDirectory = (directoryPath) => {
+ const medomicsDir = path.join(directoryPath, ".medomics")
+ const mongoDataDir = path.join(medomicsDir, "MongoDBdata")
+ const mongoConfigPath = path.join(medomicsDir, "mongod.conf")
+
+ const toForwardSlashes = (p) => String(p).replace(/\\/g, "/")
+
+ if (!fs.existsSync(medomicsDir)) {
+    // Create the .medomics directory (parent of the MongoDB data dir and config)
+ fs.mkdirSync(medomicsDir, { recursive: true })
+ }
+
+ if (!fs.existsSync(mongoDataDir)) {
+ // Create MongoDB data dir
+ fs.mkdirSync(mongoDataDir, { recursive: true })
+ }
+
+ const desiredMongoConfig = [
+ "systemLog:",
+ " destination: file",
+ ` path: \"${toForwardSlashes(path.join(medomicsDir, "mongod.log"))}\"`,
+ " logAppend: true",
+ "storage:",
+ ` dbPath: \"${toForwardSlashes(mongoDataDir)}\"`,
+ "net:",
+ " bindIp: 127.0.0.1",
+ ` port: ${MEDconfig.mongoPort}`,
+ ""
+ ].join("\n")
+
+ let shouldWriteConfig = !fs.existsSync(mongoConfigPath)
+ if (!shouldWriteConfig) {
+ try {
+ const existing = fs.readFileSync(mongoConfigPath, "utf8")
+ // Migrate old configs that used bindIp localhost or unquoted Windows paths.
+ const hasLocalhostBind = /\bbindIp:\s*localhost\b/i.test(existing)
+ const hasUnquotedWinPath = /\bpath:\s*[A-Za-z]:\\/i.test(existing) || /\bdbPath:\s*[A-Za-z]:\\/i.test(existing)
+ const missingKey = !/\bdbPath:\b/i.test(existing) || !/\bport:\b/i.test(existing)
+ if (hasLocalhostBind || hasUnquotedWinPath || missingKey) shouldWriteConfig = true
+ } catch {
+ shouldWriteConfig = true
+ }
+ }
+
+ if (shouldWriteConfig) {
+ fs.writeFileSync(mongoConfigPath, desiredMongoConfig)
+ }
+}
+
+export {
+ getServerWorkingDirectory,
+ loadServerWorkspaces,
+ updateServerWorkspace,
+ getRecentServerWorkspacesOptions,
+ createServerWorkingDirectory,
+ createServerMedomicsDirectory
+}
diff --git a/electron-builder.client.yml b/electron-builder.client.yml
new file mode 100644
index 00000000..cecab1a6
--- /dev/null
+++ b/electron-builder.client.yml
@@ -0,0 +1,51 @@
+appId: com.medomicslab.medapp
+productName: MEDomicsLab
+artifactName: "${productName}-${version}-${os}.${ext}"
+nodeGypRebuild: false
+npmRebuild: false
+npmArgs: "--build-from-source=false"
+directories:
+  output: build/dist
+  buildResources: resources
+files:
+  # App sources only
+  - app/**/*
+  # Restrict resources to icons only (exclude server_go)
+  - resources/icons/**/*
+  # Keep package.json for metadata
+  - package.json
+  # Include production node_modules for runtime deps (pty, ssh2, etc.)
+  - node_modules/**/*
+  # Explicit excludes: backend and heavy backend-only assets
+  - '!backend/**/*'
+  - '!go_executables/**/*'
+  - '!pythonCode/**/*'
+  - '!pythonEnv/**/*'
+  - '!resources/server_go'
+  # Exclude backend-only node modules to reduce app size
+  - '!**/node_modules/mongodb-client-encryption/**'
+  - '!**/node_modules/mongodb/**'
+extends: null
+mac:
+  hardenedRuntime: true
+  gatekeeperAssess: false
+  entitlements: "build/entitlements.mac.plist"
+  entitlementsInherit: "build/entitlements.mac.plist"
+  category: "public.app-category.developer-tools"
+  target:
+    - dmg
+    - zip
+win:
+  target: nsis
+  icon: "app/assets/icon.ico"
+nsis: {}
+linux:
+  target: deb
+  icon: "resources/icons"
+  maintainer: "medomicslab.udes@gmail.com"
+# Fix: a bare `deb:` key parses as null; use an explicit empty mapping
+# (consistent with `nsis: {}` above) so electron-builder gets an object.
+deb: {}
+  # Client-only: no post-install system changes (no maintainer scripts)
+publish:
+  provider: github
+  # Use current GitHub Actions repository (no cross-repo publish)
+  releaseType: draft
diff --git a/electron-builder.yml b/electron-builder.yml
index 519fea98..2bb462a5 100644
--- a/electron-builder.yml
+++ b/electron-builder.yml
@@ -4,9 +4,9 @@ artifactName: "${productName}-${version}-${os}.${ext}" # Added from package.json
copyright: Copyright © 2023 MEDomicsLab Team
# Fix for building on Windows when node-gyp is throwing errors on mongodb-client-encryption module
####
-# nodeGypRebuild: false
+nodeGypRebuild: false
npmRebuild: false
-# npmArgs: "--build-from-source=false"
+npmArgs: "--build-from-source=false"
####
directories:
output: build/dist # Changed from dist to build/dist to match package.json
diff --git a/go_executables/server_go_win32.exe b/go_executables/server_go_win32.exe
index ae7949ee..385f369e 100644
Binary files a/go_executables/server_go_win32.exe and b/go_executables/server_go_win32.exe differ
diff --git a/go_server/blueprints/connection/connection.go b/go_server/blueprints/connection/connection.go
new file mode 100644
index 00000000..1f1ff9ee
--- /dev/null
+++ b/go_server/blueprints/connection/connection.go
@@ -0,0 +1,38 @@
+// Package connection exposes HTTP handlers for SSH-based remote-connection
+// features (key registration and connection testing), each delegating to a
+// Python script in pythonCode/modules/connection/.
+package connection
+
+import (
+	Utils "go_module/src"
+	"log"
+)
+
+// prePath is the URL prefix for all routes registered by this module.
+var prePath = "connection"
+
+// AddHandleFunc adds the specific module handle function to the server
+func AddHandleFunc() {
+	Utils.CreateHandleFunc(prePath+"/register_ssh_key", registerSSHKey)
+	Utils.CreateHandleFunc(prePath+"/connection_test_request", connectionTestRequest)
+}
+
+// registerSSHKey validates the submitted SSH key and saves it locally to the
+// server by delegating to the register_ssh_key.py Python script.
+// It returns the script's response, or an error if the script failed.
+func registerSSHKey(jsonConfig string, id string) (string, error) {
+	log.Println("Registering SSH Key: ", id)
+	response, err := Utils.StartPythonScripts(jsonConfig, "../pythonCode/modules/connection/register_ssh_key.py", id)
+	Utils.RemoveIdFromScripts(id)
+	if err != nil {
+		return "", err
+	}
+	return response, nil
+}
+
+// connectionTestRequest handles a connection-test request by delegating to the
+// connection_test_request.py Python script.
+// It returns the script's response, or an error if the script failed.
+func connectionTestRequest(jsonConfig string, id string) (string, error) {
+	log.Println("Connection test request: ", id)
+	response, err := Utils.StartPythonScripts(jsonConfig, "../pythonCode/modules/connection/connection_test_request.py", id)
+	Utils.RemoveIdFromScripts(id)
+	if err != nil {
+		return "", err
+	}
+	return response, nil
+}
diff --git a/go_server/main b/go_server/main
index 904e1b04..15fa5b0d 100755
Binary files a/go_server/main and b/go_server/main differ
diff --git a/go_server/main.exe b/go_server/main.exe
index 86db7f56..41a374be 100644
Binary files a/go_server/main.exe and b/go_server/main.exe differ
diff --git a/go_server/main.exe~ b/go_server/main.exe~
deleted file mode 100644
index d9880542..00000000
Binary files a/go_server/main.exe~ and /dev/null differ
diff --git a/go_server/main.go b/go_server/main.go
index ebd93f4d..e04c8d55 100644
--- a/go_server/main.go
+++ b/go_server/main.go
@@ -5,6 +5,7 @@ import (
"fmt"
MEDprofiles "go_module/blueprints/MEDprofiles_"
Application "go_module/blueprints/application"
+ Connection "go_module/blueprints/connection"
Evaluation "go_module/blueprints/evaluation"
Exploratory "go_module/blueprints/exploratory"
ExtractionImage "go_module/blueprints/extraction_image"
@@ -27,6 +28,7 @@ func main() {
// Here is where you add the handle functions to the server
Learning.AddHandleFunc()
+ Connection.AddHandleFunc()
Evaluation.AddHandleFunc()
Exploratory.AddHandleFunc()
ExtractionImage.AddHandleFunc()
diff --git a/go_server/src/utils.go b/go_server/src/utils.go
index 750df4bb..b43c4d77 100644
--- a/go_server/src/utils.go
+++ b/go_server/src/utils.go
@@ -10,6 +10,7 @@ import (
"os"
"os/exec"
"path/filepath"
+ "runtime"
"strings"
"sync"
)
@@ -153,12 +154,35 @@ func StartPythonScripts(jsonParam string, filename string, id string) (string, e
}
log.Println("filename: " + filename)
script, _ := filepath.Abs(filepath.Join(cwd, filename))
- condaEnv := os.Getenv("MED_ENV")
+ condaEnv := resolvePythonExecutable(os.Getenv("MED_ENV"))
+ log.Println("Resolved python executable: " + condaEnv)
Mu.Lock()
if runMode == "prod" {
- prodDir := os.Args[3]
- filename = strings.ReplaceAll(filename, "..", "")
- script, _ = filepath.Abs(filepath.Join(prodDir, filename))
+ prodDir := ""
+ if len(os.Args) > 3 {
+ prodDir = os.Args[3]
+ }
+ // Guard against JS passing undefined (e.g., `${process.resourcesPath}` under nexe).
+ if prodDir == "" || strings.EqualFold(prodDir, "undefined") {
+ if exePath, err := os.Executable(); err == nil {
+ exeDir := filepath.Dir(exePath)
+ // Go binary is typically in /go_executables/, so root is one level up.
+ prodDir = filepath.Dir(exeDir)
+ log.Println("prodDir was empty/undefined; inferred bundle root: " + prodDir)
+ } else {
+ prodDir = cwd
+ log.Println("prodDir was empty/undefined; falling back to cwd: " + prodDir)
+ }
+ }
+ // Convert ../pythonCode/... into a safe relative path pythonCode/... so Join can't escape prodDir.
+ rel := filepath.ToSlash(filename)
+ for strings.HasPrefix(rel, "../") {
+ rel = strings.TrimPrefix(rel, "../")
+ }
+ rel = strings.TrimPrefix(rel, "./")
+ rel = strings.TrimLeft(rel, "/")
+ rel = filepath.FromSlash(rel)
+ script, _ = filepath.Abs(filepath.Join(prodDir, rel))
log.Println("running script in prod: " + script)
}
log.Println("Conda env: " + condaEnv)
@@ -207,6 +231,49 @@ func StartPythonScripts(jsonParam string, filename string, id string) (string, e
return response, nil
}
+// resolvePythonExecutable picks the Python interpreter used to run backend
+// scripts. Search order: the preferred path (typically MED_ENV) if non-empty,
+// then the bundled per-user environment under ~/.medomics/python, then a bare
+// interpreter name resolved via PATH. Candidates containing a path separator
+// must exist as regular files; bare names are resolved with exec.LookPath.
+// If nothing is found it still returns a conventional command name so the
+// caller's spawn fails with a clear error rather than an empty command.
+func resolvePythonExecutable(preferred string) string {
+	candidates := []string{}
+	if preferred != "" {
+		candidates = append(candidates, preferred)
+	}
+
+	// Bundled per-user environment installed under ~/.medomics/python.
+	if home, err := os.UserHomeDir(); err == nil && home != "" {
+		if runtime.GOOS == "windows" {
+			candidates = append(candidates, filepath.Join(home, ".medomics", "python", "python.exe"))
+		} else {
+			candidates = append(candidates, filepath.Join(home, ".medomics", "python", "bin", "python"))
+		}
+	}
+
+	// PATH fallbacks; on Unix prefer python3 over python.
+	if runtime.GOOS == "windows" {
+		candidates = append(candidates, "python")
+	} else {
+		candidates = append(candidates, "python3", "python")
+	}
+
+	for _, candidate := range candidates {
+		if candidate == "" {
+			continue
+		}
+
+		// A candidate containing a separator is an explicit file path:
+		// accept it only if it exists and is not a directory.
+		if strings.Contains(candidate, string(filepath.Separator)) {
+			if st, err := os.Stat(candidate); err == nil && !st.IsDir() {
+				return candidate
+			}
+			continue
+		}
+
+		// Bare command name: resolve through PATH.
+		if p, err := exec.LookPath(candidate); err == nil && p != "" {
+			return p
+		}
+	}
+
+	// Last resort: return a conventional name even though it was not found.
+	if runtime.GOOS == "windows" {
+		return "python"
+	}
+	return "python3"
+}
+
// It is used to transfer stdout and stderr to the terminal
func copyOutput(r io.Reader, response *string) {
scanner := bufio.NewScanner(r)
diff --git a/main/background.js b/main/background.js
index d3579e32..af0f2ca2 100644
--- a/main/background.js
+++ b/main/background.js
@@ -1,3 +1,9 @@
+// Force Electron headless mode if --no-gui is present
+// NOTE(review): ES module `import` statements are hoisted and evaluated before
+// this statement runs, so `electron` is already loaded when these env vars are
+// set — confirm they still take effect at this point.
+if (process.argv.some(arg => arg.includes('--no-gui'))) {
+  process.env.ELECTRON_ENABLE_HEADLESS = '1'
+  // On some Linux systems, also clear DISPLAY
+  process.env.DISPLAY = ''
+}
import { app, ipcMain, Menu, dialog, BrowserWindow, protocol, shell, nativeTheme } from "electron"
import axios from "axios"
import os from "os"
@@ -5,24 +11,143 @@ import serve from "electron-serve"
import { createWindow, TerminalManager } from "./helpers"
import { installExtension, REACT_DEVELOPER_TOOLS } from "electron-extension-installer"
import MEDconfig from "../medomics.dev"
-import { runServer, findAvailablePort } from "./utils/server"
-import { setWorkingDirectory, getRecentWorkspacesOptions, loadWorkspaces, createMedomicsDirectory, updateWorkspace, createWorkingDirectory } from "./utils/workspace"
+const crypto = require("crypto")
+const decompress = require("decompress")
+const https = require("https")
+// Backend access is done over HTTP requests to the backend Express server.
+// This avoids importing backend modules into the Electron main process.
+// We expose small wrapper functions below that call the backend endpoints.
+// Helper to build backend URL (uses expressPort if available, otherwise falls back to serverPort)
+// Returns e.g. "http://localhost:5000/status" for path "/status".
+function backendUrl(path) {
+  const port = expressPort || serverPort || MEDconfig.defaultPort
+  return `http://localhost:${port}${path}`
+}
+
+// GET <backend>/<path> with optional query params.
+// Returns the parsed response body, or null on any error (best-effort; the
+// failure is only logged, never thrown).
+async function httpGet(path, params = {}) {
+  try {
+    const res = await axios.get(backendUrl(path), { params })
+    return res.data
+  } catch (err) {
+    console.warn(`Backend GET ${path} failed:`, err && err.message)
+    return null
+  }
+}
+
+// POST <backend>/<path> with a JSON body.
+// Returns the parsed response body, or null on any error (best-effort).
+async function httpPost(path, body = {}) {
+  try {
+    const res = await axios.post(backendUrl(path), body)
+    return res.data
+  } catch (err) {
+    console.warn(`Backend POST ${path} failed:`, err && err.message)
+    return null
+  }
+}
+
+// Wrapper functions that replace previous direct imports
+// Asks the backend Express server to start the Go server.
+async function runServerViaBackend() {
+  return await httpPost("/run-go-server", {})
+}
+
+// Find an available port locally (used for dev UI port selection). This is a small
+// local implementation so the main process doesn't import backend code for this.
+// Tries ports sequentially from `startPort` up to `endPort` (inclusive) and
+// resolves with the first port that can be bound; rejects when none is free.
+function findAvailablePort(startPort, endPort = 8000) {
+  const net = require("net")
+  return new Promise((resolve, reject) => {
+    let port = startPort
+    function tryPort() {
+      const server = net.createServer()
+      server.once("error", (err) => {
+        // NOTE(review): close() on a server that never started listening may
+        // itself raise ERR_SERVER_NOT_RUNNING — confirm on the target Node version.
+        server.close()
+        if (err.code === "EADDRINUSE") {
+          // Port busy: advance and retry until the range is exhausted.
+          port++
+          if (port > endPort) return reject(new Error("No available port"))
+          tryPort()
+        } else {
+          reject(err)
+        }
+      })
+      server.once("listening", () => {
+        // Bind succeeded: release the port and report it as available.
+        server.close(() => resolve(port))
+      })
+      server.listen(port)
+    }
+    tryPort()
+  })
+}
+
+// Returns the backend's bundled Python environment path, or null if the
+// backend is unreachable or reports none.
+async function getBundledPythonEnvironment() {
+  const data = await httpGet("/get-bundled-python-environment")
+  return data && data.pythonEnv ? data.pythonEnv : null
+}
+
+// Returns the list of packages installed for `pythonPath`, or null on failure.
+async function getInstalledPythonPackages(pythonPath) {
+  const data = await httpGet("/get-installed-python-packages", { pythonPath })
+  return data && data.packages ? data.packages : null
+}
+
+// Asks the backend to start MongoDB for the given workspace.
+async function startMongoDB(workspacePath) {
+  return await httpPost("/start-mongo", { workspacePath })
+}
+
+async function stopMongoDB() {
+  // Backend doesn't currently expose a stop-mongo endpoint; call a generic endpoint if available.
+  return await httpPost("/stop-mongo", {})
+}
+
+// Returns the MongoDB binary/installation path reported by the backend, or null.
+async function getMongoDBPath() {
+  const data = await httpGet("/get-mongo-path")
+  return data && data.path ? data.path : null
+}
+
+// Returns the backend's Jupyter status payload; substitutes a synthetic
+// "not running" object when the backend did not answer.
+async function checkJupyterIsRunning() {
+  const data = await httpGet("/check-jupyter-status")
+  return data || { running: false, error: "no-response" }
+}
+
+// Asks the backend to start a Jupyter server for the workspace on `port`.
+async function startJupyterServer(workspacePath, port) {
+  return await httpPost("/start-jupyter-server", { workspacePath, port })
+}
+
+async function stopJupyterServer() {
+  return await httpPost("/stop-jupyter-server", {})
+}
+
+// Asks the backend to install MongoDB.
+async function installMongoDB() {
+  return await httpPost("/install-mongo", {})
+}
+
+// Returns the backend's requirements-check payload (null on failure).
+async function checkRequirements() {
+  const data = await httpGet("/check-requirements")
+  return data
+}
+import {
+ setWorkingDirectory,
+ getRecentWorkspacesOptions,
+ loadWorkspaces,
+ createMedomicsDirectory,
+ createRemoteMedomicsDirectory,
+ updateWorkspace,
+ createWorkingDirectory,
+ createRemoteWorkingDirectory
+} from "./utils/workspace"
+// Backend python & installation utilities are accessed via HTTP wrappers defined above.
import {
- getBundledPythonEnvironment,
- getInstalledPythonPackages,
- installPythonPackage,
- installBundledPythonExecutable,
- checkPythonRequirements,
- installRequiredPythonPackages
-} from "./utils/pythonEnv"
-import { installMongoDB, checkRequirements } from "./utils/installation"
+ getTunnelState,
+ getActiveTunnel,
+ detectRemoteOS,
+ getRemoteWorkspacePath,
+ checkRemotePortOpen,
+ startExpressForward,
+ startPortTunnel
+} from './utils/remoteFunctions.js'
+// MongoDB and Jupyter functions are accessed via HTTP wrappers (startMongoDB, stopMongoDB, getMongoDBPath, startJupyterServer, stopJupyterServer, checkJupyterIsRunning)
+
const fs = require("fs")
const terminalManager = new TerminalManager()
var path = require("path")
-let mongoProcess = null
const dirTree = require("directory-tree")
-const { exec, spawn, execSync } = require("child_process")
+const { exec, spawn, execSync, fork } = require("child_process")
let serverProcess = null
const serverState = { serverIsRunning: false }
var serverPort = MEDconfig.defaultPort
@@ -30,6 +155,8 @@ var hasBeenSet = false
const isProd = process.env.NODE_ENV === "production"
let splashScreen // The splash screen is the window that is displayed while the application is loading
export var mainWindow // The main window is the window of the application
+// Robust headless mode detection
+const isHeadless = process.argv.some(arg => arg.includes('--no-gui'))
//**** AUTO UPDATER ****//
const { autoUpdater } = require("electron-updater")
@@ -78,6 +205,991 @@ console.log = function () {
}
}
+// **** BACKEND EXPRESS SERVER **** //
+let expressPort = null
+
+// ---- Local port blacklist to avoid accidental use in remote flows ----
+const portBlacklist = new Set()
+
+// Adds `port` to the blacklist; non-numeric values are silently ignored.
+function blacklistPort(port) {
+  const p = Number(port)
+  if (Number.isFinite(p)) portBlacklist.add(p)
+}
+
+// True when `port` is a finite number already present in the blacklist.
+function isPortBlacklisted(port) {
+  const p = Number(port)
+  return Number.isFinite(p) && portBlacklist.has(p)
+}
+
+// Records the local Express port and blacklists it so remote-port probing
+// never mistakes the local server for a remote one.
+function setExpressPort(p) {
+  expressPort = p
+  blacklistPort(p)
+}
+
+// IPC helpers to query/manage blacklist from renderer if needed
+ipcMain.handle('getPortBlacklist', async () => Array.from(portBlacklist))
+ipcMain.handle('blacklistPort', async (_event, port) => {
+  blacklistPort(port)
+  return { success: true, port: Number(port) }
+})
+
+// Exposes the current local Express port (null until discovered).
+ipcMain.handle("get-express-port", async () => {
+  return expressPort
+})
+
+// Resolves how to launch the backend server.
+// Returns either a string (path to a CLI executable) or, in development,
+// an array ["node", <script path>] to run the CLI via node.
+// Resolution order: user-configured path from settings.json → bundled CLI in
+// resources/backend → legacy platform-specific server binaries.
+// NOTE(review): in packaged builds on a platform other than win32/darwin/linux
+// this falls through and implicitly returns undefined — callers must validate.
+function getBackendServerExecutable() {
+  const platform = process.platform
+  // Prefer user-configured path (CLI) from settings if available and exists
+  try {
+    const userDataPath = app.getPath("userData")
+    const settingsFilePath = path.join(userDataPath, "settings.json")
+    if (fs.existsSync(settingsFilePath)) {
+      const settings = JSON.parse(fs.readFileSync(settingsFilePath, "utf8"))
+      if (settings && settings.localBackendPath && fs.existsSync(settings.localBackendPath)) {
+        return settings.localBackendPath // CLI executable path
+      }
+    }
+  } catch {}
+  if (app.isPackaged) {
+    // In packaged builds, fallback to a bundled CLI if present
+    const cliCandidates = [
+      path.join(process.resourcesPath, "backend", platform === "win32" ? "medomics-server.exe" : "medomics-server"),
+      path.join(process.resourcesPath, "backend", "bin", platform === "win32" ? "medomics-server.exe" : "medomics-server")
+    ]
+    for (const pth of cliCandidates) {
+      try { if (fs.existsSync(pth)) return pth } catch {}
+    }
+    // Legacy fallback: original server binaries (kept for backward compatibility)
+    if (platform === "win32") return path.join(process.resourcesPath, "backend", "server_win.exe")
+    if (platform === "darwin") return path.join(process.resourcesPath, "backend", "server_mac")
+    if (platform === "linux") return path.join(process.resourcesPath, "backend", "server_linux")
+  } else {
+    // In development, run the CLI via node
+    return ["node", path.join(__dirname, "../backend/cli/medomics-server.mjs")]
+  }
+}
+
+// ---- Helpers for backend installation ----
+// Persists `exePath` as settings.localBackendPath in the app's settings.json
+// (merging with existing settings). Resolves true on success, rejects on
+// read/write failure.
+function saveLocalBackendPath(exePath) {
+  return new Promise((resolve, reject) => {
+    try {
+      const userDataPath = app.getPath('userData')
+      const settingsFilePath = path.join(userDataPath, 'settings.json')
+      let settings = {}
+      if (fs.existsSync(settingsFilePath)) {
+        // Corrupt settings file: start from an empty object rather than fail.
+        try { settings = JSON.parse(fs.readFileSync(settingsFilePath, 'utf8')) || {} } catch {}
+      }
+      settings.localBackendPath = exePath
+      fs.writeFileSync(settingsFilePath, JSON.stringify(settings, null, 2))
+      resolve(true)
+    } catch (e) { reject(e) }
+  })
+}
+
+// Locates the installed backend executable inside a version directory.
+// Looks in <versionDir>/bin first for a file whose name contains "medomics"
+// (and ends with .exe on Windows); otherwise recursively scans the whole
+// version directory. Returns the full path, or null when nothing matches.
+function findInstalledExecutable(versionDir) {
+  try {
+    if (!fs.existsSync(versionDir)) return null
+    const binDir = path.join(versionDir, 'bin')
+    if (fs.existsSync(binDir)) {
+      const entries = fs.readdirSync(binDir)
+      const exeCandidates = entries.map(e => path.join(binDir, e)).filter(p => {
+        const lower = p.toLowerCase()
+        if (process.platform === 'win32') return lower.endsWith('.exe') && lower.includes('medomics')
+        return lower.includes('medomics') && fs.statSync(p).isFile()
+      })
+      // First match wins; bin/ is expected to hold a single CLI binary.
+      return exeCandidates[0] || null
+    }
+    // Fallback scan entire versionDir
+    const walk = (dir) => {
+      const items = fs.readdirSync(dir)
+      for (const item of items) {
+        const full = path.join(dir, item)
+        try {
+          const st = fs.statSync(full)
+          if (st.isDirectory()) {
+            const found = walk(full)
+            if (found) return found
+          } else if (st.isFile()) {
+            const lower = full.toLowerCase()
+            if (process.platform === 'win32') {
+              if (lower.endsWith('.exe') && lower.includes('medomics')) return full
+            } else if (lower.includes('medomics')) {
+              return full
+            }
+          }
+        } catch {}
+      }
+      return null
+    }
+    return walk(versionDir)
+  } catch { return null }
+}
+
+// Computes the SHA-256 of a file by streaming it (constant memory).
+// Resolves with the lowercase hex digest; rejects on any read error.
+function sha256File(filePath) {
+  return new Promise((resolve, reject) => {
+    try {
+      const hash = crypto.createHash('sha256')
+      const stream = fs.createReadStream(filePath)
+      stream.on('data', d => hash.update(d))
+      stream.on('end', () => resolve(hash.digest('hex')))
+      stream.on('error', reject)
+    } catch (e) { reject(e) }
+  })
+}
+
+function downloadWithProgress(url, destPath, onProgress) {
+ return new Promise(async (resolve, reject) => {
+ try {
+ const writer = fs.createWriteStream(destPath)
+ const response = await axios.get(url, { responseType: 'stream' })
+ const total = Number(response.headers['content-length']) || 0
+ let downloaded = 0
+ const start = Date.now()
+ response.data.on('data', chunk => {
+ downloaded += chunk.length
+ const percent = total ? (downloaded / total) * 100 : null
+ const elapsed = (Date.now() - start) / 1000
+ const speed = elapsed > 0 ? (downloaded / elapsed) : 0
+ onProgress && onProgress({ downloaded, total, percent, speed })
+ })
+ response.data.pipe(writer)
+ writer.on('finish', () => resolve(destPath))
+ writer.on('error', reject)
+ } catch (e) { reject(e) }
+ })
+}
+
+async function cleanupOldVersions(versionsDir, currentExePath, keep = 3) {
+ try {
+ if (!fs.existsSync(versionsDir)) return
+ const entries = fs.readdirSync(versionsDir).map(v => ({ name: v, path: path.join(versionsDir, v) }))
+ // Filter only directories
+ const dirs = entries.filter(e => { try { return fs.statSync(e.path).isDirectory() } catch { return false } })
+ // Sort by mtime descending (newest first)
+ dirs.sort((a,b) => {
+ const ma = fs.statSync(a.path).mtimeMs
+ const mb = fs.statSync(b.path).mtimeMs
+ return mb - ma
+ })
+ // Determine which to keep: newest keep entries + the one containing currentExePath
+ const keepSet = new Set()
+ for (let i=0; i currentExePath.startsWith(d.path))
+ if (currentVersionDir) keepSet.add(currentVersionDir.name)
+ const removeTargets = dirs.filter(d => !keepSet.has(d.name))
+ for (const rem of removeTargets) {
+ try {
+ fs.rmSync(rem.path, { recursive: true, force: true })
+ } catch {}
+ }
+ } catch (e) {
+ console.warn('cleanupOldVersions error:', e.message)
+ }
+}
+
+// Spawns the backend CLI ("start --json") and captures the Express port it
+// reports on stdout. Returns the detached child process, or null when the
+// executable/script is missing or spawning fails. If no port is reported
+// within 10s, falls back to probing localhost ports 5000-8000.
+function startBackendServer() {
+  let child
+  const execPath = getBackendServerExecutable()
+  // Dev mode is signalled by an array return: ["node", <script path>].
+  const isDev = Array.isArray(execPath)
+  let cmd, args
+
+  // Validate that the executable/script exists before attempting to spawn
+  try {
+    if (isDev) {
+      const scriptPath = execPath[1]
+      if (!scriptPath || !fs.existsSync(scriptPath)) {
+        console.warn('Backend dev script not found; skipping backend start:', scriptPath)
+        return null
+      }
+    } else {
+      if (!execPath || typeof execPath !== 'string' || !fs.existsSync(execPath)) {
+        console.warn('Backend executable not found; skipping backend start:', execPath)
+        return null
+      }
+    }
+  } catch (e) {
+    console.warn('Error while checking backend executable; skipping backend start:', e && e.message)
+    return null
+  }
+
+  // Prepare CLI state file under user home for consistent port discovery across Electron and CLI
+  const stateDir = path.join(require('os').homedir(), '.medomics', 'medomics-server')
+  try { if (!fs.existsSync(stateDir)) fs.mkdirSync(stateDir, { recursive: true }) } catch {}
+  const stateFilePath = path.join(stateDir, 'state.json')
+
+  if (isDev) {
+    // node backend/cli/medomics-server.mjs start --json
+    cmd = execPath[0]
+    args = [execPath[1], 'start', '--json', '--state-file', stateFilePath]
+  } else {
+    // <executable>/medomics-server start --json
+    cmd = execPath
+    args = ['start', '--json', '--state-file', stateFilePath]
+  }
+
+  try {
+    child = spawn(cmd, args, { stdio: ['ignore', 'pipe', 'pipe'] })
+  } catch (e) {
+    console.warn('Failed to spawn backend process:', e && e.message)
+    return null
+  }
+
+  child.on('error', (err) => {
+    try { console.warn('Backend process error:', err && err.message) } catch {}
+  })
+
+  // Parse JSON lines from stdout to capture expressPort
+  let buffer = ''
+  child.stdout.on('data', (chunk) => {
+    try {
+      buffer += chunk.toString()
+      let idx
+      // Consume complete lines; keep the trailing partial line in the buffer.
+      while ((idx = buffer.indexOf('\n')) !== -1) {
+        const line = buffer.slice(0, idx).trim()
+        buffer = buffer.slice(idx + 1)
+        if (!line) continue
+        try {
+          const obj = JSON.parse(line)
+          if (obj && obj.success && (obj.state?.expressPort || obj.expressPort)) {
+            const port = obj.state?.expressPort || obj.expressPort
+            console.log(`Local Express server started on port: ${port}`)
+            setExpressPort(port)
+          }
+        } catch (_) {
+          // Non-JSON line; ignore
+        }
+      }
+    } catch (err) {
+      console.warn('Error parsing backend stdout:', err)
+    }
+  })
+
+  child.stderr.on('data', (chunk) => {
+    try { console.warn('[backend]', chunk.toString().trim()) } catch {}
+  })
+
+  // Keep legacy IPC handling in case CLI forwards messages in the future
+  if (child.on) {
+    child.on("message", (message) => {
+      try {
+        if (message && message.type === "EXPRESS_PORT") {
+          const port = message.expressPort || message.port
+          console.log(`Local Express server started on port: ${port}`)
+          setExpressPort(port)
+        }
+      } catch (err) {
+        console.warn('Error handling message from backend process:', err)
+      }
+    })
+  }
+
+  // Fallback: if we didn't get the port within timeout, probe known range
+  const fallbackTimeoutMs = 10000
+  setTimeout(async () => {
+    if (!expressPort) {
+      try {
+        const found = await findExpressPortByProbing(5000, 8000, 48, 250)
+        if (found) {
+          setExpressPort(found)
+          console.log(`Discovered Express port via probe: ${found}`)
+        } else {
+          console.warn('Failed to discover Express port via probe within timeout')
+        }
+      } catch (e) {
+        console.warn('Error probing for Express port:', e.message)
+      }
+    }
+  }, fallbackTimeoutMs)
+
+  // Detach so the backend can outlive this handle without keeping Electron alive.
+  child.unref()
+  return child
+}
+
+// Probes localhost ports [start, end] in batches, GETting /status on each,
+// and returns the first port whose response looks like the backend's status
+// payload. Blacklisted ports are skipped. Returns null when nothing matches.
+async function findExpressPortByProbing(start = 5000, end = 8000, batchSize = 40, timeoutMs = 300) {
+  const clamp = (n, min, max) => Math.max(min, Math.min(max, n))
+  let p = start
+  while (p <= end) {
+    // Probe one batch of ports concurrently.
+    const to = clamp(p + batchSize - 1, p, end)
+    const ports = []
+    for (let i = p; i <= to; i++) {
+      // Skip blacklisted ports (i.e., local server port) to avoid false positives
+      if (!portBlacklist.has(i)) ports.push(i)
+    }
+    const results = await Promise.allSettled(ports.map(port => axios.get(`http://127.0.0.1:${port}/status`, { timeout: timeoutMs })))
+    for (let i = 0; i < results.length; i++) {
+      const r = results[i]
+      if (r.status === 'fulfilled') {
+        const data = r.value && r.value.data ? r.value.data : r.value
+        // Accept any payload carrying backend-status markers.
+        if (data && (data.success || data.expressPort || data.go || data.mongo || data.jupyter)) {
+          return ports[i]
+        }
+      }
+    }
+    p = to + 1
+  }
+  return null
+}
+
+// ---- Unified status and ensure (local via CLI, remote via tunnel) ----
+// Builds the {cmd, args} pair for invoking the backend CLI with `baseArgs`,
+// always appending --state-file pointing at ~/.medomics/medomics-server/state.json
+// so Electron and the standalone CLI share the same port-discovery state.
+function getCliCommandAndArgs(baseArgs = []) {
+  const execPath = getBackendServerExecutable()
+  const isDev = Array.isArray(execPath)
+  const stateDir = path.join(require('os').homedir(), '.medomics', 'medomics-server')
+  try { if (!fs.existsSync(stateDir)) fs.mkdirSync(stateDir, { recursive: true }) } catch {}
+  const stateFilePath = path.join(stateDir, 'state.json')
+  // Dev: run the CLI script via node; prod: run the executable directly.
+  if (isDev) return { cmd: execPath[0], args: [execPath[1], ...baseArgs, '--state-file', stateFilePath] }
+  return { cmd: execPath, args: [...baseArgs, '--state-file', stateFilePath] }
+}
+
+// Runs the backend CLI with `baseArgs`, collects its stdout, and resolves with
+// the last parseable JSON line. Never rejects: every failure mode resolves
+// with { success: false, error: <code>, ... } (cli-not-found, spawn-failed,
+// cli-timeout, cli-error, no-json-output). The child is killed on timeout.
+function runCliCommand(baseArgs = [], timeoutMs = 15000) {
+  return new Promise((resolve) => {
+    try {
+      const { cmd, args } = getCliCommandAndArgs(baseArgs)
+
+      // Guard: ensure the CLI executable or dev script exists
+      try {
+        const isDevCmd = cmd === 'node' && Array.isArray(args) && args.length > 0
+        if (isDevCmd) {
+          const scriptPath = args[0]
+          if (!scriptPath || !fs.existsSync(scriptPath)) {
+            return resolve({ success: false, error: 'cli-not-found', details: { mode: 'dev', scriptPath } })
+          }
+        } else {
+          if (!cmd || typeof cmd !== 'string' || !fs.existsSync(cmd)) {
+            return resolve({ success: false, error: 'cli-not-found', details: { mode: 'prod', execPath: cmd } })
+          }
+        }
+      } catch (chkErr) {
+        return resolve({ success: false, error: 'cli-check-failed', details: chkErr && chkErr.message })
+      }
+
+      let buffer = ''
+      let settled = false
+      // Resolve-once guard: timeout, 'error' and 'close' can all fire.
+      const safeResolve = (obj) => { if (!settled) { settled = true; resolve(obj) } }
+
+      let child
+      try {
+        child = spawn(cmd, args, { stdio: ['ignore', 'pipe', 'pipe'] })
+      } catch (spawnErr) {
+        return safeResolve({ success: false, error: 'spawn-failed', details: spawnErr && spawnErr.message })
+      }
+
+      let timer = setTimeout(() => {
+        try { child.kill() } catch {}
+        safeResolve({ success: false, error: 'cli-timeout' })
+      }, timeoutMs)
+
+      child.stdout.on('data', (chunk) => {
+        buffer += chunk.toString()
+      })
+      child.stderr.on('data', (chunk) => {
+        // keep for debugging; do not reject
+      })
+      child.on('error', (err) => {
+        clearTimeout(timer)
+        safeResolve({ success: false, error: 'cli-error', details: err && err.message })
+      })
+      child.on('close', () => {
+        clearTimeout(timer)
+        if (settled) return
+        // Try parse last JSON line
+        const lines = buffer.split(/\r?\n/).filter(Boolean)
+        for (let i = lines.length - 1; i >= 0; i--) {
+          try { return safeResolve(JSON.parse(lines[i])) } catch {}
+        }
+        safeResolve({ success: false, error: 'no-json-output' })
+      })
+    } catch (e) {
+      resolve({ success: false, error: e.message })
+    }
+  })
+}
+
+// IPC: reports backend status for target 'local' or 'remote'.
+// Local: query /status over HTTP, falling back to the CLI `status` command.
+// Remote: verify the existing SSH express forward (checking the reported host
+// identity), otherwise sweep remote ports 5000-8000 over the SSH connection
+// to discover the remote Express server, then re-establish forwards.
+// Always resolves with an object; errors become { success: false, error }.
+ipcMain.handle('backendStatus', async (_event, { target = 'local' } = {}) => {
+  try {
+    if (target === 'remote') {
+      const tunnel = getTunnelState()
+      const localExpressPort = tunnel && tunnel.localExpressPort
+      const conn = getActiveTunnel && getActiveTunnel()
+      if (!conn) return { success: false, error: 'no-active-ssh' }
+
+      // Hostname helpers used to confirm the forwarded /status really comes
+      // from the remote machine and not a stale local listener.
+      const normalizeHost = (value) => String(value || '').trim().toLowerCase().replace(/\.+$/, '')
+      const shortHost = (value) => normalizeHost(value).split('.')[0]
+      const isValidStatusPayload = (payload) => {
+        return !!(payload && typeof payload === 'object' && payload.success === true)
+      }
+
+      const remoteOS = await detectRemoteOS()
+      // Ask the remote machine for its hostname over the SSH connection.
+      const remoteHostName = await new Promise((resolve) => {
+        try {
+          const hostCmd = remoteOS === 'win32'
+            ? `powershell -NoProfile -Command "$env:COMPUTERNAME"`
+            : `bash -lc "hostname -f 2>/dev/null || hostname 2>/dev/null || uname -n"`
+          conn.exec(hostCmd, (err, stream) => {
+            if (err) return resolve(null)
+            let out = ''
+            stream.on('data', (d) => { out += d.toString() })
+            stream.stderr.on('data', () => {})
+            stream.on('close', () => resolve((out || '').trim() || null))
+          })
+        } catch {
+          resolve(null)
+        }
+      })
+
+      // First try: use existing local→remote forwarding to /status only when the express
+      // tunnel is already known to be active, and verify identity against remote host.
+      const tunnels = Array.isArray(tunnel?.tunnels) ? tunnel.tunnels : []
+      const expressForward = tunnels.find((t) => t?.name === 'express')
+      const hasActiveExpressForward = !!(
+        localExpressPort &&
+        expressForward &&
+        expressForward.status === 'forwarding' &&
+        Number(expressForward.localPort) === Number(localExpressPort)
+      )
+      if (hasActiveExpressForward) {
+        try {
+          const res = await axios.get(`http://127.0.0.1:${localExpressPort}/status`, { timeout: 3000 })
+          const data = res && res.data
+          const reportedHost = data && data.serverIdentity && data.serverIdentity.hostName
+          // Accept either full-hostname or short-hostname match.
+          const matchesRemoteHost = remoteHostName && reportedHost && (
+            normalizeHost(remoteHostName) === normalizeHost(reportedHost) ||
+            shortHost(remoteHostName) === shortHost(reportedHost)
+          )
+          if (isValidStatusPayload(data) && matchesRemoteHost) return data
+        } catch {}
+      }
+
+      // Fallback: sweep remote ports 5000-8000 to discover an Express server
+
+      // Single-shot list of listening ports on remote for performance
+      const listCmd = remoteOS === 'win32'
+        ? `netstat -an | findstr LISTEN`
+        : `bash -c "command -v ss >/dev/null 2>&1 && ss -ltn || netstat -an | grep LISTEN"`
+
+      const listening = await new Promise((resolve) => {
+        try {
+          conn.exec(listCmd, (err, stream) => {
+            if (err) return resolve("")
+            let out = ""
+            stream.on('data', (d) => { out += d.toString() })
+            stream.stderr.on('data', () => {})
+            stream.on('close', () => resolve(out))
+          })
+        } catch { resolve("") }
+      })
+
+      // Extract candidate ports (5000-8000) from the netstat/ss output.
+      const ports = []
+      const re = /:(\d{2,5})/g
+      let m
+      while ((m = re.exec(listening)) !== null) {
+        const p = Number(m[1])
+        if (p >= 5000 && p <= 8000 && !ports.includes(p)) ports.push(p)
+      }
+
+      // Sort ascending for determinism
+      ports.sort((a,b) => a - b)
+      if (!ports.length) return { success: false, error: 'no-open-ports-in-range', range: [5000, 8000] }
+
+      // Try probing candidates by creating a temporary local forward and requesting /status
+      const tryPortStatus = async (remotePort) => {
+        return new Promise((resolve) => {
+          try {
+            const net = require('net')
+            // Ephemeral local listener that pipes each connection through the
+            // SSH channel to 127.0.0.1:<remotePort> on the remote host.
+            const server = net.createServer((socket) => {
+              conn.forwardOut(
+                socket.localAddress || '127.0.0.1',
+                socket.localPort || 0,
+                '127.0.0.1',
+                parseInt(remotePort, 10),
+                (err, stream) => {
+                  if (err) { socket.destroy(); return }
+                  socket.pipe(stream).pipe(socket)
+                }
+              )
+            })
+            // Let the OS assign an ephemeral local port
+            server.listen(0, '127.0.0.1')
+            server.on('error', () => { try { server.close() } catch {}; resolve(null) })
+            server.on('listening', () => {
+              const addr = server.address()
+              const localPort = (addr && typeof addr === 'object') ? addr.port : null
+              if (!localPort) { try { server.close() } catch {}; return resolve(null) }
+              // Small delay to allow listener to bind fully
+              setTimeout(async () => {
+                try {
+                  const resp = await axios.get(`http://127.0.0.1:${localPort}/status`, { timeout: 1500 })
+                  try { server.close() } catch {}
+                  resolve(resp && resp.data ? { data: resp.data, localEp: localPort } : null)
+                } catch {
+                  try { server.close() } catch {}
+                  resolve(null)
+                }
+              }, 250)
+            })
+          } catch { resolve(null) }
+        })
+      }
+
+      for (const rp of ports) {
+        const found = await tryPortStatus(rp)
+        if (found && isValidStatusPayload(found.data)) {
+          // Persistently start Express forward using discovered remote port
+          try {
+            await startExpressForward({ remoteExpressPort: rp })
+          } catch {}
+          // If GO service is reported running with a port, start GO forward too
+          try {
+            const goPort = Number(found.data?.go?.port)
+            if (found.data?.go?.running && goPort) {
+              const st = getTunnelState()
+              const lp = Number(st && st.localGoPort)
+              await startPortTunnel({ name: 'go', localPort: lp, remotePort: goPort, ensureRemoteOpen: true })
+            }
+          } catch {}
+          // Augment response with discovered ports for visibility
+          return { ...found.data, discoveredRemotePort: rp }
+        }
+      }
+
+      return { success: false, error: 'status-not-found', openPorts: ports }
+    }
+    // local
+    if (expressPort) {
+      const data = await httpGet('/status')
+      if (data) return data
+    }
+    // Fallback to CLI status
+    const out = await runCliCommand(['status'])
+    return out
+  } catch (e) {
+    return { success: false, error: e.message }
+  }
+})
+
+// IPC: ensure requested backend services (Go / Mongo / Jupyter) are running,
+// either on the remote host (via the SSH-forwarded Express port) or locally via the CLI.
+// Returns { success, ensured } for remote, or the raw CLI result for local.
+ipcMain.handle('backendEnsure', async (_event, { target = 'local', go = false, mongo = false, jupyter = false, workspace } = {}) => {
+  try {
+    if (target === 'remote') {
+      const tunnel = getTunnelState()
+      // NOTE(review): despite the name, this is the LOCAL end of the forward
+      // (tunnel.localExpressPort) — requests below go to 127.0.0.1 and are tunneled.
+      const remotePort = tunnel && tunnel.localExpressPort
+      if (!remotePort) return { success: false, error: 'no-remote-port' }
+      const ensured = {}
+      // Each ensure-* call is awaited sequentially; a failure throws and is
+      // reported via the catch below (partial results in `ensured` are discarded).
+      if (go) ensured.go = (await axios.post(`http://127.0.0.1:${remotePort}/ensure-go`, {}, { timeout: 10000 })).data
+      if (mongo) ensured.mongo = (await axios.post(`http://127.0.0.1:${remotePort}/ensure-mongo`, { workspacePath: workspace }, { timeout: 20000 })).data
+      if (jupyter) ensured.jupyter = (await axios.post(`http://127.0.0.1:${remotePort}/ensure-jupyter`, { workspacePath: workspace }, { timeout: 20000 })).data
+      return { success: true, ensured }
+    }
+    // local via CLI
+    const args = ['ensure']
+    if (go) args.push('--go')
+    if (mongo) args.push('--mongo')
+    if (jupyter) args.push('--jupyter')
+    if (workspace) args.push('--workspace', workspace)
+    const out = await runCliCommand(args)
+    return out
+  } catch (e) {
+    return { success: false, error: e.message }
+  }
+})
+
+// Check if a remote port is open (listening) on the SSH-connected host.
+// Requires an active tunnel AND a live ssh2 connection; validates the port argument
+// before delegating to checkRemotePortOpen(conn, port).
+// Returns { success, port, open } or { success: false, error }.
+ipcMain.handle('remoteCheckPort', async (_event, { port }) => {
+  try {
+    console.log('[remoteCheckPort] request for port', port)
+    const tunnel = getTunnelState()
+    if (!tunnel || !tunnel.tunnelActive) return { success: false, error: 'no-tunnel' }
+    if (!port || isNaN(Number(port))) return { success: false, error: 'invalid-port' }
+    // Guarded call in case getActiveTunnel is not defined in some build configurations.
+    const conn = getActiveTunnel && getActiveTunnel()
+    if (!conn) return { success: false, error: 'no-active-ssh' }
+    const open = await checkRemotePortOpen(conn, Number(port))
+    console.log('[remoteCheckPort] result', { port: Number(port), open: !!open })
+    return { success: true, port: Number(port), open: !!open }
+  } catch (e) {
+    console.warn('[remoteCheckPort] error', e && e.message ? e.message : e)
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+})
+
+// Stop backend on app quit (best-effort)
+// NOTE(review): 'before-quit' does not wait for async handlers, so the 5s CLI stop
+// may be cut short if the app exits first — acceptable for a best-effort shutdown.
+app.on('before-quit', async () => {
+  try { await runCliCommand(['stop'], 5000) } catch {}
+})
+
+// ---- Local backend presence/install stubs ----
+// Resolve where the local backend executable lives, in priority order:
+// 1) dev mode (unpackaged): the node script in ../backend,
+// 2) a user override stored in settings.json (localBackendPath),
+// 3) the platform-specific binary bundled under process.resourcesPath.
+// Returns { installed, source, path? }.
+function checkLocalBackendPresence() {
+  // Development always considered present (runs node script)
+  if (!app.isPackaged) {
+    return { installed: true, source: 'dev-script', path: path.join(__dirname, "../backend/expressServer.mjs") }
+  }
+  // Check user settings override
+  try {
+    const userDataPath = app.getPath("userData")
+    const settingsFilePath = path.join(userDataPath, "settings.json")
+    if (fs.existsSync(settingsFilePath)) {
+      const settings = JSON.parse(fs.readFileSync(settingsFilePath, "utf8"))
+      // The override only wins if the configured path still exists on disk.
+      if (settings && settings.localBackendPath && fs.existsSync(settings.localBackendPath)) {
+        return { installed: true, source: 'user', path: settings.localBackendPath }
+      }
+    }
+  } catch {}
+  // Check packaged locations
+  const candidates = [
+    path.join(process.resourcesPath, 'backend', process.platform === 'win32' ? 'server_win.exe' : (process.platform === 'darwin' ? 'server_mac' : 'server_linux'))
+  ]
+  const found = candidates.find(p => {
+    try { return fs.existsSync(p) } catch { return false }
+  })
+  if (found) return { installed: true, source: 'packaged', path: found }
+  return { installed: false, source: 'missing' }
+}
+
+// IPC: thin wrapper exposing checkLocalBackendPresence() to the renderer.
+ipcMain.handle('checkLocalBackend', async () => {
+  return checkLocalBackendPresence()
+})
+
+// Unified presence check (local or remote) for whether the backend is installed/available on disk.
+// - local: uses filesystem-based checkLocalBackendPresence()
+// - remote: uses the SSH tunnel's forwarded Express port to call a remote endpoint
+// Preferred endpoint: /check-local-backend (should return { installed, path?, source? })
+// Fallback: /status (if reachable, we infer installed=true because the server is running)
+ipcMain.handle('backendPresence', async (_event, { target = 'local' } = {}) => {
+  try {
+    if (target === 'remote') {
+      const tunnel = getTunnelState()
+      // NOTE(review): as elsewhere, this is the local end of the SSH forward,
+      // not a port on the remote machine itself.
+      const remotePort = tunnel && tunnel.localExpressPort
+      if (!remotePort) {
+        return { success: false, target: 'remote', installed: false, error: 'no-remote-port' }
+      }
+      // Try explicit remote presence endpoint first
+      try {
+        const pres = await axios.get(`http://127.0.0.1:${remotePort}/check-local-backend`, { timeout: 5000 })
+        if (pres && pres.data) {
+          // Normalize shape
+          // Accept either { installed } or legacy { success } as the "installed" flag.
+          const d = pres.data
+          const installed = !!(d.installed || d.success)
+          return { success: true, target: 'remote', installed, details: d }
+        }
+      } catch {}
+      // Fallback: if /status works, the server is clearly installed and running
+      try {
+        const res = await axios.get(`http://127.0.0.1:${remotePort}/status`, { timeout: 5000 })
+        if (res && res.data) {
+          return { success: true, target: 'remote', installed: true, details: res.data }
+        }
+      } catch (e) {
+        return { success: false, target: 'remote', installed: false, error: e && e.message }
+      }
+      // /status responded but with an empty body — treat as unknown rather than installed.
+      return { success: false, target: 'remote', installed: false, error: 'unknown' }
+    }
+    // local
+    const local = checkLocalBackendPresence()
+    return { success: true, target: 'local', installed: !!local.installed, details: local }
+  } catch (e) {
+    return { success: false, target, installed: false, error: e && e.message }
+  }
+})
+
+// IPC: persist a user-chosen backend executable path into settings.json
+// (read back by checkLocalBackendPresence as the 'user' source).
+// Validates that the path exists before saving; merges into existing settings.
+ipcMain.handle('setLocalBackendPath', async (_event, exePath) => {
+  try {
+    if (!exePath) return { success: false, error: 'no-path' }
+    if (!fs.existsSync(exePath)) return { success: false, error: 'not-found' }
+    const userDataPath = app.getPath('userData')
+    const settingsFilePath = path.join(userDataPath, 'settings.json')
+    let settings = {}
+    // A corrupt settings file is silently replaced with a fresh object.
+    if (fs.existsSync(settingsFilePath)) {
+      try { settings = JSON.parse(fs.readFileSync(settingsFilePath, 'utf8')) || {} } catch {}
+    }
+    settings.localBackendPath = exePath
+    fs.writeFileSync(settingsFilePath, JSON.stringify(settings, null, 2))
+    return { success: true, path: exePath }
+  } catch (e) {
+    return { success: false, error: e.message }
+  }
+})
+
+// IPC: Get latest backend release info from GitHub
+// Enhancement: fetch all releases and return the latest "server-" tagged one.
+// NOTE(review): requests are unauthenticated — GitHub rate limits apply; consider
+// passing a token header if this is called frequently. fetchJson also ignores the
+// HTTP status code, so a 4xx/5xx JSON error body is parsed like a success — the
+// Array.isArray guard below is what actually filters those out.
+ipcMain.handle('getLatestBackendReleaseInfo', async (_event, payload) => {
+  const owner = (payload && payload.owner) || 'MEDomicsLab' // TO REPLACE WITH MEDOMICS BASE REPO
+  const repo = (payload && payload.repo) || 'MEDomics'
+  const serverOnly = payload && typeof payload.serverOnly === 'boolean' ? payload.serverOnly : true
+
+  // Helpers
+  // True when a tag/name identifies a server release (server-vX, server_vX, server-…).
+  const isServerTag = (s) => {
+    const tag = (s || '').toLowerCase()
+    // Prefer explicit server-vX pattern, but allow serverX as a fallback
+    return /^server[-_]?v\d+/i.test(tag) || tag.startsWith('server-') || tag.startsWith('server_')
+  }
+  // Extract [major, minor, patch] from the first vX(.Y)(.Z) found, or null.
+  const parseSemver = (s) => {
+    if (!s) return null
+    const m = (s.match(/v(\d+)(?:\.(\d+))?(?:\.(\d+))?/i))
+    if (!m) return null
+    return [parseInt(m[1]||'0',10), parseInt(m[2]||'0',10), parseInt(m[3]||'0',10)]
+  }
+  // Comparator for sort(): highest version first.
+  const cmpSemverDesc = (a,b) => {
+    for (let i=0;i<3;i++) { const d = (b[i]||0)-(a[i]||0); if (d!==0) return d }
+    return 0
+  }
+
+  const releasesUrl = `https://api.github.com/repos/${owner}/${repo}/releases?per_page=100`
+
+  // Never-rejecting GET: resolves { ok, json } or { ok: false, error }.
+  const fetchJson = (url) => new Promise((resolve) => {
+    try {
+      const req = https.request(url, {
+        method: 'GET',
+        headers: {
+          'User-Agent': 'MEDomics-App',
+          'Accept': 'application/vnd.github+json'
+        }
+      }, (res) => {
+        let data = ''
+        res.on('data', (chunk) => { data += chunk })
+        res.on('end', () => {
+          try { resolve({ ok: true, json: JSON.parse(data) }) }
+          catch (e) { resolve({ ok: false, error: `Parse error: ${e && e.message ? e.message : String(e)}` }) }
+        })
+      })
+      req.on('error', (err) => resolve({ ok: false, error: err && err.message ? err.message : String(err) }))
+      req.end()
+    } catch (e) { resolve({ ok: false, error: e && e.message ? e.message : String(e) }) }
+  })
+
+  // Try full releases listing to pick the latest server-tagged release
+  const listRes = await fetchJson(releasesUrl)
+  if (listRes.ok && Array.isArray(listRes.json)) {
+    let rels = listRes.json
+    if (serverOnly) {
+      rels = rels.filter(r => isServerTag(r.tag_name || r.name))
+    }
+    if (rels.length > 0) {
+      // Prefer semver compare if parsable; else fall back to published_at
+      const withSem = rels.map(r => ({ r, v: parseSemver((r.tag_name || r.name) || '') }))
+      const semAvail = withSem.some(x => x.v)
+      let chosen
+      if (semAvail) {
+        // Releases without a parsable version sort as 0.0.0 (i.e. last).
+        const sortable = withSem.map(x => ({ ...x, v: x.v || [0,0,0] }))
+        sortable.sort((a,b) => cmpSemverDesc(a.v,b.v))
+        chosen = sortable[0].r
+      } else {
+        rels.sort((a,b) => new Date(b.published_at||b.created_at||0) - new Date(a.published_at||a.created_at||0))
+        chosen = rels[0]
+      }
+      return { success: true, tag: chosen.tag_name || chosen.name, raw: chosen }
+    }
+  }
+
+  // Fallback: GitHub latest (may be a client release)
+  const latestUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`
+  const latestRes = await fetchJson(latestUrl)
+  if (latestRes.ok && latestRes.json) {
+    const json = latestRes.json
+    return { success: true, tag: json.tag_name || json.name, raw: json }
+  }
+  return { success: false, error: listRes.error || latestRes.error || 'Failed to fetch releases' }
+})
+
+ipcMain.handle('installLocalBackendFromURL', async (_event, { version, manifestUrl } = {}) => {
+  // Download and install the backend using either a manifest or latest GitHub release tagged for server.
+  // Manifest path (legacy): fetch manifest -> pick asset -> download -> verify sha256 -> extract -> set settings.localBackendPath -> cleanup.
+  // GitHub path (new): fetch releases -> pick latest with tag containing 'server' -> select OS/arch asset -> download -> extract -> save path -> cleanup.
+  // Progress events are streamed to the renderer on the 'localBackendInstallProgress' channel.
+  const progress = (payload) => {
+    try { _event?.sender?.send('localBackendInstallProgress', payload) } catch {}
+  }
+  try {
+    const platform = process.platform // 'win32' | 'linux' | 'darwin'
+    const arch = process.arch // 'x64' | 'arm64' | ...
+
+    // Prepare directories
+    // Layout under userData: medomics-server/{versions,downloads}
+    const userDataPath = app.getPath('userData')
+    const baseDir = path.join(userDataPath, 'medomics-server')
+    const versionsDir = path.join(baseDir, 'versions')
+    const downloadsDir = path.join(baseDir, 'downloads')
+    try { if (!fs.existsSync(baseDir)) fs.mkdirSync(baseDir, { recursive: true }) } catch {}
+    try { if (!fs.existsSync(versionsDir)) fs.mkdirSync(versionsDir, { recursive: true }) } catch {}
+    try { if (!fs.existsSync(downloadsDir)) fs.mkdirSync(downloadsDir, { recursive: true }) } catch {}
+
+    // Pick the release asset matching this OS/arch by substring matching on the
+    // asset name (then URL). NOTE(review): the final zip-preference pass can
+    // override an earlier tar.gz match even when the zip appears later in the list.
+    const selectOsArchAsset = (assets) => {
+      const nameHas = (s, keys) => keys.some(k => (s||'').toLowerCase().includes(k))
+      const osKeys = platform === 'win32' ? ['windows', 'win32', 'win'] : (platform === 'darwin' ? ['darwin', 'macos', 'mac'] : ['linux'])
+      const archKeys = arch === 'arm64' ? ['arm64', 'aarch64'] : ['x64', 'amd64']
+      const extKeys = ['.zip', '.tar.gz', '.tgz']
+      // First pass: by name
+      let candidate = assets.find(a => nameHas(a.name, osKeys) && nameHas(a.name, archKeys) && nameHas(a.name, extKeys))
+      if (!candidate) {
+        // Second pass: by browser_download_url
+        candidate = assets.find(a => nameHas(a.browser_download_url||'', osKeys) && nameHas(a.browser_download_url||'', archKeys))
+      }
+      // Prefer zip
+      const zips = assets.filter(a => (a.name||'').toLowerCase().endsWith('.zip') && nameHas(a.name, osKeys) && nameHas(a.name, archKeys))
+      if (zips.length) candidate = zips[0]
+      return candidate || null
+    }
+
+    if (manifestUrl) {
+      // Legacy manifest-based install
+      progress({ phase: 'fetch-manifest', manifestUrl })
+      const { data: manifest } = await axios.get(manifestUrl, { timeout: 20000 })
+      const manifestVersion = version || manifest?.version || 'unknown'
+      // Accept both raw platform names ('win32') and friendly names ('windows').
+      const osKeys = [platform, platform === 'win32' ? 'windows' : (platform === 'darwin' ? 'darwin' : 'linux')]
+      const candidates = (manifest?.assets || []).filter(a => {
+        const osMatch = osKeys.includes((a.os||'').toLowerCase())
+        if (!osMatch) return false
+        // Assets without an arch field are considered arch-neutral.
+        if (!a.arch) return true
+        return (a.arch||'').toLowerCase() === arch
+      })
+      if (!candidates.length) {
+        progress({ phase: 'error', error: 'no-asset-for-platform', details: { platform, arch } })
+        return { success: false, error: 'no-asset-for-platform', details: { platform, arch } }
+      }
+      const asset = candidates[0]
+      const url = asset.url
+      const expectedSha = (asset.sha256||'').trim().toLowerCase()
+      const format = (asset.format||'').toLowerCase() || (url.endsWith('.zip') ? 'zip' : (url.endsWith('.tar.gz') ? 'tar.gz' : ''))
+      if (!url) {
+        progress({ phase: 'error', error: 'asset-has-no-url' })
+        return { success: false, error: 'asset-has-no-url' }
+      }
+
+      // Reuse an already-extracted install of this version if present.
+      const versionDir = path.join(versionsDir, manifestVersion)
+      const existingExe = findInstalledExecutable(versionDir)
+      if (existingExe) {
+        await saveLocalBackendPath(existingExe)
+        progress({ phase: 'already-installed', version: manifestVersion, path: existingExe })
+        return { success: true, version: manifestVersion, path: existingExe, reused: true }
+      }
+
+      // Strip any query string from the URL when deriving the local file name.
+      const fileName = path.basename(url).split('?')[0]
+      const downloadPath = path.join(downloadsDir, fileName)
+      progress({ phase: 'download-start', url, downloadPath })
+      await downloadWithProgress(url, downloadPath, (d) => progress({ phase: 'download-progress', ...d }))
+      progress({ phase: 'download-complete', downloadPath })
+
+      // Integrity check is only enforced when the manifest supplies a sha256.
+      if (expectedSha) {
+        progress({ phase: 'verify-start' })
+        const actualSha = await sha256File(downloadPath)
+        const ok = (actualSha||'').toLowerCase() === expectedSha
+        if (!ok) {
+          progress({ phase: 'error', error: 'checksum-mismatch', expectedSha, actualSha })
+          return { success: false, error: 'checksum-mismatch', expectedSha, actualSha }
+        }
+        progress({ phase: 'verify-ok', sha256: actualSha })
+      } else {
+        progress({ phase: 'verify-skip', reason: 'no-sha256-in-manifest' })
+      }
+
+      progress({ phase: 'extract-start', to: versionDir, format })
+      await decompress(downloadPath, versionDir)
+      progress({ phase: 'extract-complete', to: versionDir })
+
+      const exePath = findInstalledExecutable(versionDir)
+      if (!exePath) {
+        progress({ phase: 'error', error: 'executable-not-found-in-extracted', versionDir })
+        return { success: false, error: 'executable-not-found-in-extracted', versionDir }
+      }
+      // Archives may not preserve the executable bit on POSIX systems.
+      try { if (process.platform !== 'win32') fs.chmodSync(exePath, 0o755) } catch {}
+
+      await saveLocalBackendPath(exePath)
+      // Keep at most 3 installed versions (best-effort cleanup).
+      try { await cleanupOldVersions(versionsDir, exePath, 3) } catch {}
+      progress({ phase: 'done', version: manifestVersion, path: exePath })
+      return { success: true, version: manifestVersion, path: exePath }
+    }
+
+    // New GitHub releases-based install
+    // NOTE(review): unauthenticated API call — subject to GitHub rate limits.
+    const defaultOwner = 'MEDomicsLab'
+    const defaultRepo = 'MEDomics'
+    progress({ phase: 'github-fetch-releases', owner: defaultOwner, repo: defaultRepo })
+    const { data: releases } = await axios.get(`https://api.github.com/repos/${defaultOwner}/${defaultRepo}/releases`, {
+      headers: { 'Accept': 'application/vnd.github+json', 'User-Agent': 'medomicslab-installer' },
+      timeout: 20000
+    })
+    if (!Array.isArray(releases) || releases.length === 0) {
+      progress({ phase: 'error', error: 'no-releases-found' })
+      return { success: false, error: 'no-releases-found' }
+    }
+
+    // Pick latest release with a tag indicating server (e.g., contains 'server')
+    const serverReleases = releases.filter(r => {
+      const tag = (r.tag_name||'').toLowerCase()
+      const name = (r.name||'').toLowerCase()
+      return tag.includes('server') || name.includes('server')
+    })
+    // Falls back to ALL releases (newest first) when no server-tagged one exists.
+    const sorted = (serverReleases.length ? serverReleases : releases).sort((a,b) => {
+      const pa = new Date(a.published_at||a.created_at||0).getTime()
+      const pb = new Date(b.published_at||b.created_at||0).getTime()
+      return pb - pa
+    })
+    const chosen = sorted[0]
+    if (!chosen) {
+      progress({ phase: 'error', error: 'no-suitable-release' })
+      return { success: false, error: 'no-suitable-release' }
+    }
+    progress({ phase: 'github-pick-release', tag: chosen.tag_name, name: chosen.name })
+
+    // Select asset for OS/arch
+    const asset = selectOsArchAsset(chosen.assets||[])
+    if (!asset) {
+      progress({ phase: 'error', error: 'no-asset-for-platform', details: { platform, arch } })
+      return { success: false, error: 'no-asset-for-platform', details: { platform, arch } }
+    }
+    const url = asset.browser_download_url
+    if (!url) {
+      progress({ phase: 'error', error: 'asset-missing-download-url' })
+      return { success: false, error: 'asset-missing-download-url' }
+    }
+    progress({ phase: 'github-select-asset', asset: asset.name, url })
+
+    // NOTE(review): the tag is used verbatim as a directory name — a tag
+    // containing path separators would nest directories; verify tags are safe.
+    const ver = chosen.tag_name || chosen.name || 'latest'
+    const versionDir = path.join(versionsDir, ver)
+    const existingExe = findInstalledExecutable(versionDir)
+    if (existingExe) {
+      await saveLocalBackendPath(existingExe)
+      progress({ phase: 'already-installed', version: ver, path: existingExe })
+      return { success: true, version: ver, path: existingExe, reused: true }
+    }
+
+    const fileName = path.basename(url).split('?')[0]
+    const downloadPath = path.join(downloadsDir, fileName)
+    progress({ phase: 'download-start', url, downloadPath })
+    await downloadWithProgress(url, downloadPath, (d) => progress({ phase: 'download-progress', ...d }))
+    progress({ phase: 'download-complete', downloadPath })
+
+    // Extract archive
+    // `format` is informational only (sent to the renderer); decompress autodetects.
+    const lower = fileName.toLowerCase()
+    const format = lower.endsWith('.zip') ? 'zip' : (lower.endsWith('.tar.gz') || lower.endsWith('.tgz') ? 'tar.gz' : 'unknown')
+    progress({ phase: 'extract-start', to: versionDir, format })
+    await decompress(downloadPath, versionDir)
+    progress({ phase: 'extract-complete', to: versionDir })
+
+    // Locate executable
+    const exePath = findInstalledExecutable(versionDir)
+    if (!exePath) {
+      progress({ phase: 'error', error: 'executable-not-found-in-extracted', versionDir })
+      return { success: false, error: 'executable-not-found-in-extracted', versionDir }
+    }
+    try { if (process.platform !== 'win32') fs.chmodSync(exePath, 0o755) } catch {}
+
+    await saveLocalBackendPath(exePath)
+    try { await cleanupOldVersions(versionsDir, exePath, 3) } catch {}
+    progress({ phase: 'done', version: ver, path: exePath })
+    return { success: true, version: ver, path: exePath }
+  } catch (e) {
+    const message = e?.message || String(e)
+    try { progress({ phase: 'error', error: message }) } catch {}
+    return { success: false, error: message }
+  }
+})
+
+// IPC: native file picker for the server executable.
+// Windows filters to .exe; other platforms accept any file (binaries have no extension).
+// Returns { success, path } or { success: false, error: 'canceled' }.
+ipcMain.handle('open-dialog-backend-exe', async () => {
+  const filters = process.platform === 'win32'
+    ? [{ name: 'Executable', extensions: ['exe'] }]
+    : [{ name: 'Executable', extensions: ['*'] }]
+  const { filePaths, canceled } = await dialog.showOpenDialog({
+    title: 'Select the server executable',
+    properties: ['openFile'],
+    filters
+  })
+  if (canceled || !filePaths || !filePaths[0]) return { success: false, error: 'canceled' }
+  return { success: true, path: filePaths[0] }
+})
+
//**** AUTO-UPDATER ****//
function sendStatusToWindow(text) {
@@ -179,7 +1291,9 @@ if (isProd) {
app.setPath("userData", `${app.getPath("userData")} (development)`)
}
-;(async () => {
+
+// Main async startup
+(async () => {
await app.whenReady()
protocol.registerFileProtocol("local", (request, callback) => {
@@ -196,34 +1310,43 @@ if (isProd) {
event.reply("get-file-path-reply", path.resolve(configPath))
})
- splashScreen = new BrowserWindow({
- icon: path.join(__dirname, "../resources/MEDomicsLabWithShadowNoText100.png"),
- width: 700,
- height: 700,
- transparent: true,
- frame: false,
- alwaysOnTop: true,
- center: true,
- show: true
- })
+ if (!isHeadless) {
+ splashScreen = new BrowserWindow({
+ icon: path.join(__dirname, "../resources/MEDomicsLabWithShadowNoText100.png"),
+ width: 700,
+ height: 700,
+ transparent: true,
+ frame: false,
+ alwaysOnTop: true,
+ center: true,
+ show: true
+ })
- mainWindow = createWindow("main", {
- width: 1500,
- height: 1000,
- show: false
- })
+ mainWindow = createWindow("main", {
+ width: 1500,
+ height: 1000,
+ show: false
+ })
- if (isProd) {
- splashScreen.loadFile(path.join(__dirname, "splash.html"))
+ if (isProd) {
+ splashScreen.loadFile(path.join(__dirname, "splash.html"))
+ } else {
+ splashScreen.loadFile(path.join(__dirname, "../main/splash.html"))
+ }
+ splashScreen.once("ready-to-show", () => {
+ splashScreen.show()
+ splashScreen.focus()
+ splashScreen.setAlwaysOnTop(true)
+ })
} else {
- splashScreen.loadFile(path.join(__dirname, "../main/splash.html"))
+ // Headless/server-only mode
+ mainWindow = undefined
+ splashScreen = undefined
+ console.log("Running in headless/server-only mode: no GUI will be created.")
}
- splashScreen.once("ready-to-show", () => {
- splashScreen.show()
- splashScreen.focus()
- splashScreen.setAlwaysOnTop(true)
- })
- const openRecentWorkspacesSubmenuOptions = getRecentWorkspacesOptions(null, mainWindow, hasBeenSet, serverPort)
+
+ // Use mainWindow only if not headless
+ const openRecentWorkspacesSubmenuOptions = getRecentWorkspacesOptions(null, !isHeadless ? mainWindow : null, hasBeenSet, serverPort)
console.log("openRecentWorkspacesSubmenuOptions", JSON.stringify(openRecentWorkspacesSubmenuOptions, null, 2))
const menuTemplate = [
{
@@ -295,20 +1418,22 @@ if (isProd) {
}
]
+ // Start backend server
+ startBackendServer()
console.log("running mode:", isProd ? "production" : "development")
console.log("process.resourcesPath: ", process.resourcesPath)
console.log(MEDconfig.runServerAutomatically ? "Server will start automatically here (in background of the application)" : "Server must be started manually")
- let bundledPythonPath = getBundledPythonEnvironment()
- if (MEDconfig.runServerAutomatically) {
- // Start the Go server – Python path is optional (passed if available)
- runServer(isProd, serverPort, serverProcess, serverState, bundledPythonPath)
- .then((process) => {
- serverProcess = process
- console.log("Server process started: ", serverProcess)
- })
- .catch((err) => {
- console.error("Failed to start server: ", err)
- })
+ let bundledPythonPath = await getBundledPythonEnvironment()
+ if (MEDconfig.runServerAutomatically && bundledPythonPath !== null) {
+ // Find the bundled python environment
+ if (bundledPythonPath !== null) {
+ // Request the backend to start its Go server (backend will spawn its own process)
+ runServerViaBackend()
+ .then((result) => {
+ console.log("Backend run-go-server result:", result)
+ })
+ .catch((err) => console.error("Failed to request backend run-go-server:", err))
+ }
} else {
//**** NO SERVER ****//
findAvailablePort(MEDconfig.defaultPort)
@@ -339,14 +1464,20 @@ if (isProd) {
})
ipcMain.handle("setWorkingDirectory", async (event, data) => {
+ const result = await setWorkspaceDirectory(data)
+ console.log("setWorkingDirectory result: ", result)
+ return result
+ })
+
+ const setWorkspaceDirectory = async (data) => {
app.setPath("sessionData", data)
+ console.log(`setWorkspaceDirectory : ${data}`)
createWorkingDirectory() // Create DATA & EXPERIMENTS directories
- console.log(`setWorkingDirectory : ${data}`)
createMedomicsDirectory(data)
hasBeenSet = true
try {
// Stop MongoDB if it's running
- await stopMongoDB(mongoProcess)
+ await stopMongoDB()
if (process.platform === "win32") {
// Kill the process on the port
// killProcessOnPort(serverPort)
@@ -364,7 +1495,7 @@ if (isProd) {
}
}
// Start MongoDB with the new configuration
- startMongoDB(data, mongoProcess)
+ startMongoDB(data)
return {
workingDirectory: dirTree(app.getPath("sessionData")),
hasBeenSet: hasBeenSet,
@@ -373,7 +1504,8 @@ if (isProd) {
} catch (error) {
console.error("Failed to change workspace: ", error)
}
- })
+ }
+
/**
* @description Returns the path of the specified directory of the app
@@ -432,6 +1564,7 @@ if (isProd) {
*/
ipcMain.handle("get-settings", async () => {
const userDataPath = app.getPath("userData")
+ console.log("userDataPath: ", userDataPath)
const settingsFilePath = path.join(userDataPath, "settings.json")
if (fs.existsSync(settingsFilePath)) {
const settings = JSON.parse(fs.readFileSync(settingsFilePath, "utf8"))
@@ -489,17 +1622,8 @@ if (isProd) {
}
console.log("Received Python path: ", pythonPath)
if (MEDconfig.runServerAutomatically) {
- runServer(isProd, serverPort, serverProcess, serverState, pythonPath)
- .then((process) => {
- serverProcess = process
- console.log(`success: ${serverState.serverIsRunning}`)
- return serverState.serverIsRunning
- })
- .catch((err) => {
- console.error("Failed to start server: ", err)
- serverState.serverIsRunning = false
- return false
- })
+ await runServerViaBackend()
+ return true
}
return serverState.serverIsRunning
})
@@ -538,11 +1662,34 @@ if (isProd) {
let recentWorkspaces = loadWorkspaces()
event.reply("recentWorkspaces", recentWorkspaces)
} else if (data === "updateWorkingDirectory") {
- event.reply("updateDirectory", {
- workingDirectory: dirTree(app.getPath("sessionData")),
- hasBeenSet: hasBeenSet,
- newPort: serverPort
- }) // Sends the folder structure to Next.js
+ const activeTunnel = getActiveTunnel()
+ const tunnel = getTunnelState()
+ if (activeTunnel && tunnel) {
+ // If an SSH tunnel is active, we set the remote workspace path
+ const remoteWorkspacePath = getRemoteWorkspacePath()
+ axios.get(`http://localhost:${tunnel.localExpressPort}/get-working-dir-tree`, { params: { requestedPath: remoteWorkspacePath } })
+ .then((response) => {
+ if (response.data.success && response.data.workingDirectory) {
+ event.reply("updateDirectory", {
+ workingDirectory: response.data.workingDirectory,
+ hasBeenSet: true,
+ newPort: tunnel.localExpressPort,
+ isRemote: true
+ }) // Sends the folder structure to Next.js
+ } else {
+ console.error("Failed to get remote working directory tree: ", response.data.error)
+ }
+ })
+ .catch((error) => {
+ console.error("Error getting remote working directory tree: ", error)
+ })
+ } else {
+ event.reply("updateDirectory", {
+ workingDirectory: dirTree(app.getPath("sessionData")),
+ hasBeenSet: hasBeenSet,
+ newPort: serverPort
+ }) // Sends the folder structure to Next.js
+ }
} else if (data === "getServerPort") {
event.reply("getServerPort", {
newPort: serverPort
@@ -557,17 +1704,18 @@ if (isProd) {
mainWindow.webContents.send("toggleDarkMode")
})
- if (isProd) {
- await mainWindow.loadURL("app://./index.html")
- } else {
- const port = process.argv[2]
- await mainWindow.loadURL(`http://localhost:${port}/`)
- mainWindow.webContents.openDevTools()
+ if (!isHeadless) {
+ if (isProd) {
+ await mainWindow.loadURL("app://./index.html")
+ } else {
+ const port = process.argv[2]
+ await mainWindow.loadURL(`http://localhost:${port}/`)
+ mainWindow.webContents.openDevTools()
+ }
+ splashScreen.destroy()
+ mainWindow.maximize()
+ mainWindow.show()
}
-
- splashScreen.destroy()
- mainWindow.maximize()
- mainWindow.show()
})()
ipcMain.handle("request", async (_, axios_request) => {
@@ -575,8 +1723,72 @@ ipcMain.handle("request", async (_, axios_request) => {
return { data: result.data, status: result.status }
})
+// General backend request handler used by the renderer via preload
+// Proxies an HTTP request to the local backend (127.0.0.1) and returns
+// { status, data, headers }. Throws a structured Error (code BAD_REQUEST or
+// BACKEND_ERROR, plus .status) so the renderer gets a useful rejection message.
+ipcMain.handle('express-request', async (_event, req) => {
+  // Reject malformed requests from the renderer early.
+  if (!req || typeof req.path !== 'string' || !req.path.startsWith('/')) {
+    const err = new Error('express-request: invalid request shape')
+    err.code = 'BAD_REQUEST'
+    throw err
+  }
+
+  const host = '127.0.0.1'
+  // Port resolution order: explicit request port -> express -> server -> config default.
+  const port = req.port || expressPort || serverPort || MEDconfig.defaultPort
+  const url = `http://${host}:${port}${req.path}`
+
+  try {
+    const axiosResp = await axios({
+      method: req.method || 'get',
+      url,
+      params: req.params || undefined,
+      data: req.body || undefined,
+      headers: req.headers || undefined,
+      timeout: req.timeout || 20000
+    })
+    return { status: axiosResp.status, data: axiosResp.data, headers: axiosResp.headers }
+  } catch (err) {
+    const status = err?.response?.status
+    // Include at most 500 chars of the backend's error body in the message.
+    const dataSnippet = (() => {
+      try {
+        const d = err?.response?.data
+        if (typeof d === 'string') return d.slice(0, 500)
+        return JSON.stringify(d).slice(0, 500)
+      } catch { return '' }
+    })()
+    const method = (req.method || 'GET').toUpperCase()
+    const msg = status
+      ? `express-request ${method} ${url} failed with status ${status}${dataSnippet ? `: ${dataSnippet}` : ''}`
+      : `express-request ${method} ${url} failed: ${err && err.message ? err.message : 'unknown error'}`
+    const e = new Error(msg)
+    e.code = 'BACKEND_ERROR'
+    e.status = status
+    throw e
+  }
+})
+
// Python environment handling
ipcMain.handle("getInstalledPythonPackages", async (event, pythonPath) => {
+ const activeTunnel = getActiveTunnel()
+ const tunnel = getTunnelState()
+ if (activeTunnel && tunnel) {
+ let pythonPackages = null
+ const forwardedPort = tunnel.localExpressPort || tunnel.remoteExpressPort
+ if (!forwardedPort) {
+ console.error("Remote Python packages request: no forwarded Express port available")
+ return null
+ }
+ await axios.get(`http://127.0.0.1:${forwardedPort}/get-installed-python-packages`, { params: { pythonPath: pythonPath } })
+ .then((response) => {
+ if (response.data.success && response.data.packages) {
+ pythonPackages = response.data.packages
+ } else {
+ console.error("Failed to get remote Python packages: ", response.data.error)
+ }
+ })
+ .catch((error) => {
+ console.error("Error getting remote Python packages: ", error)
+ })
+ return pythonPackages
+ }
return getInstalledPythonPackages(pythonPath)
})
@@ -592,56 +1804,103 @@ ipcMain.handle("installMongoDB", async (event) => {
})
ipcMain.handle("getBundledPythonEnvironment", async (event) => {
- return getBundledPythonEnvironment()
+ const activeTunnel = getActiveTunnel()
+ const tunnel = getTunnelState()
+ if (activeTunnel && tunnel) {
+ let pythonEnv = null
+ const forwardedPort = tunnel.localExpressPort || tunnel.remoteExpressPort
+ if (!forwardedPort) {
+ console.error("Remote bundled Python environment request: no forwarded Express port available")
+ return null
+ }
+ await axios.get(`http://127.0.0.1:${forwardedPort}/get-bundled-python-environment`)
+ .then((response) => {
+ if (response.data.success && response.data.pythonEnv) {
+ pythonEnv = response.data.pythonEnv
+ } else {
+ console.error("Failed to get remote bundled Python environment: ", response.data.error)
+ }
+ })
+ .catch((error) => {
+ console.error("Error getting remote bundled Python environment: ", error)
+ })
+ return pythonEnv
+ } else {
+ return await getBundledPythonEnvironment()
+ }
})
ipcMain.handle("installBundledPythonExecutable", async (event) => {
+ // Notification callback for Electron
+ const notify = (payload) => {
+ if (mainWindow && mainWindow.webContents) {
+ mainWindow.webContents.send("notification", payload)
+ }
+ }
// Check if Python is installed
- let pythonInstalled = getBundledPythonEnvironment()
+ let pythonInstalled = await getBundledPythonEnvironment()
if (pythonInstalled === null) {
- // If Python is not installed, install it
- return installBundledPythonExecutable(mainWindow)
+ // If Python is not installed, ask backend to install via its endpoint
+ return await httpPost("/install-bundled-python", { })
} else {
- // Check if the required packages are installed
- let requirementsInstalled = checkPythonRequirements()
- if (requirementsInstalled) {
+ // Check if required packages are installed via backend
+ const reqInstalled = await httpGet("/check-python-requirements", { pythonPath: pythonInstalled })
+ if (reqInstalled) {
return true
} else {
- await installRequiredPythonPackages(mainWindow)
+ await httpPost("/install-required-python-packages", { pythonPath: pythonInstalled })
return true
}
}
})
ipcMain.handle("checkRequirements", async (event) => {
- return checkRequirements()
+ return await checkRequirements()
})
ipcMain.handle("checkPythonRequirements", async (event) => {
- return checkPythonRequirements()
+ return await httpGet("/check-python-requirements")
})
ipcMain.handle("checkMongoDBisInstalled", async (event) => {
- return getMongoDBPath()
+ return await getMongoDBPath()
+})
+
+ipcMain.handle("startJupyterServer", async (event, workspacePath, port) => {
+ return await startJupyterServer(workspacePath, port)
})
+ipcMain.handle("stopJupyterServer", async () => {
+ return await stopJupyterServer()
+})
+
+ipcMain.handle("checkJupyterIsRunning", async () => {
+ return checkJupyterIsRunning()
+})
+
+
ipcMain.on("restartApp", (event, data, args) => {
app.relaunch()
app.quit()
})
ipcMain.handle("checkMongoIsRunning", async (event) => {
- // Check if something is running on the port MEDconfig.mongoPort
- let port = MEDconfig.mongoPort
+ const activeTunnel = getActiveTunnel()
+ const tunnel = getTunnelState()
let isRunning = false
- if (process.platform === "win32") {
- isRunning = exec(`netstat -ano | findstr :${port}`).toString().trim() !== ""
- } else if (process.platform === "darwin") {
- isRunning = exec(`lsof -i :${port}`).toString().trim() !== ""
+ if (activeTunnel && tunnel) {
+ isRunning = await checkRemotePortOpen(activeTunnel, tunnel.remoteDBPort)
} else {
- isRunning = exec(`netstat -tuln | grep ${port}`).toString().trim() !== ""
+ // Check if something is running on the port MEDconfig.mongoPort
+ let port = MEDconfig.mongoPort
+ if (process.platform === "win32") {
+ isRunning = exec(`netstat -ano | findstr :${port}`).toString().trim() !== ""
+ } else if (process.platform === "darwin") {
+ isRunning = exec(`lsof -i :${port}`).toString().trim() !== ""
+ } else {
+ isRunning = exec(`netstat -tuln | grep ${port}`).toString().trim() !== ""
+ }
}
-
return isRunning
})
@@ -812,171 +2071,22 @@ ipcMain.handle("terminal-get-available-shells", async () => {
* @returns {BrowserWindow} The new window
*/
function openWindowFromURL(url) {
- let window = new BrowserWindow({
- icon: path.join(__dirname, "../resources/MEDomicsLabWithShadowNoText100.png"),
- width: 700,
- height: 700,
- transparent: true,
- center: true
- })
-
- window.loadURL(url)
- window.once("ready-to-show", () => {
- window.show()
- window.focus()
- })
-}
-
-// Function to start MongoDB
-function startMongoDB(workspacePath) {
- const mongoConfigPath = path.join(workspacePath, ".medomics", "mongod.conf")
- if (fs.existsSync(mongoConfigPath)) {
- console.log("Starting MongoDB with config: " + mongoConfigPath)
- let mongod = getMongoDBPath()
- if (process.platform !== "darwin") {
- mongoProcess = spawn(mongod, [
- "--config",
- mongoConfigPath,
- "--port",
- MEDconfig.mongoPort
- ])
-
- } else {
- if (fs.existsSync(getMongoDBPath())) {
- mongoProcess = spawn(getMongoDBPath(), ["--config", mongoConfigPath])
- } else {
- mongoProcess = spawn("/opt/homebrew/Cellar/mongodb-community/7.0.12/bin/mongod", ["--config", mongoConfigPath], { shell: true })
- }
- }
- mongoProcess.stdout.on("data", (data) => {
- console.log(`MongoDB stdout: ${data}`)
+ const isHeadless = process.argv.some(arg => arg.includes('--no-gui'))
+ if (!isHeadless) {
+ let window = new BrowserWindow({
+ icon: path.join(__dirname, "../resources/MEDomicsLabWithShadowNoText100.png"),
+ width: 700,
+ height: 700,
+ transparent: true,
+ center: true
})
- mongoProcess.stderr.on("data", (data) => {
- console.error(`MongoDB stderr: ${data}`)
+ window.loadURL(url)
+ window.once("ready-to-show", () => {
+ window.show()
+ window.focus()
})
-
- mongoProcess.on("close", (code) => {
- console.log(`MongoDB process exited with code ${code}`)
- })
-
- mongoProcess.on("error", (err) => {
- console.error("Failed to start MongoDB: ", err)
- // reject(err)
- })
- } else {
- const errorMsg = `MongoDB config file does not exist: ${mongoConfigPath}`
- console.error(errorMsg)
}
}
-// Function to stop MongoDB
-async function stopMongoDB(mongoProcess) {
- return new Promise((resolve, reject) => {
- if (mongoProcess) {
- mongoProcess.on("exit", () => {
- mongoProcess = null
- resolve()
- })
- try {
- mongoProcess.kill()
- resolve()
- } catch (error) {
- console.log("Error while stopping MongoDB ", error)
- // reject()
- }
- } else {
- resolve()
- }
- })
-}
-export function getMongoDBPath() {
- if (process.platform === "win32") {
- // Check if mongod is in the process.env.PATH
- const paths = process.env.PATH.split(path.delimiter)
- for (let i = 0; i < paths.length; i++) {
- const binPath = path.join(paths[i], "mongod.exe")
- if (fs.existsSync(binPath)) {
- console.log("mongod found in PATH")
- return binPath
- }
- }
- // Check if mongod is in the default installation path on Windows - C:\Program Files\MongoDB\Server\\bin\mongod.exe
- const programFilesPath = process.env["ProgramFiles"]
- if (programFilesPath) {
- const mongoPath = path.join(programFilesPath, "MongoDB", "Server")
- // Check if the MongoDB directory exists
- if (!fs.existsSync(mongoPath)) {
- console.error("MongoDB directory not found")
- return null
- }
- const dirs = fs.readdirSync(mongoPath)
- for (let i = 0; i < dirs.length; i++) {
- const binPath = path.join(mongoPath, dirs[i], "bin", "mongod.exe")
- if (fs.existsSync(binPath)) {
- return binPath
- }
- }
- }
- console.error("mongod not found")
- return null
- } else if (process.platform === "darwin") {
- // Check if it is installed in the .medomics directory
- const binPath = path.join(process.env.HOME, ".medomics", "mongodb", "bin", "mongod")
- if (fs.existsSync(binPath)) {
- console.log("mongod found in .medomics directory")
- return binPath
- }
- if (process.env.NODE_ENV !== "production") {
- // Check if mongod is in the process.env.PATH
- const paths = process.env.PATH.split(path.delimiter)
- for (let i = 0; i < paths.length; i++) {
- const binPath = path.join(paths[i], "mongod")
- if (fs.existsSync(binPath)) {
- console.log("mongod found in PATH")
- return binPath
- }
- }
- // Check if mongod is in the default installation path on macOS - /usr/local/bin/mongod
- const binPath = "/usr/local/bin/mongod"
- if (fs.existsSync(binPath)) {
- return binPath
- }
- }
- console.error("mongod not found")
- return null
- } else if (process.platform === "linux") {
- // Check if mongod is in the process.env.PATH
- const paths = process.env.PATH.split(path.delimiter)
- for (let i = 0; i < paths.length; i++) {
- console.log(`Checking for mongod in: index ${i}, path ${paths[i]}`)
- const binPath = path.join(paths[i], "mongod")
- console.log(`Checking if mongod exists at: ${binPath}`)
- if (fs.existsSync(binPath)) {
- return binPath
- }
- }
- console.error("mongod not found in PATH" + paths)
- // Check if mongod is in the default installation path on Linux - /usr/bin/mongod
- if (fs.existsSync("/usr/bin/mongod")) {
- return "/usr/bin/mongod"
- }
-
- // Check the tarball install location used by after-install.sh
- if (fs.existsSync("/usr/local/bin/mongod")) {
- return "/usr/local/bin/mongod"
- }
-
- if (fs.existsSync("/usr/local/lib/mongodb/bin/mongod")) {
- return "/usr/local/lib/mongodb/bin/mongod"
- }
-
- if (fs.existsSync(process.env.HOME + "/.medomics/mongodb/bin/mongod")) {
- return process.env.HOME + "/.medomics/mongodb/bin/mongod"
- }
- return null
- } else {
- return "mongod"
- }
-}
diff --git a/main/helpers/create-window.js b/main/helpers/create-window.js
index 3e8b3fe5..5d2c1c55 100644
--- a/main/helpers/create-window.js
+++ b/main/helpers/create-window.js
@@ -1,6 +1,7 @@
-import { screen, BrowserWindow } from "electron"
+import { screen, BrowserWindow, app } from "electron"
import Store from "electron-store"
import path from "path"
+import fs from "fs"
export default function createWindow(windowName, options) {
const key = "window-state"
@@ -59,13 +60,33 @@ export default function createWindow(windowName, options) {
state = ensureVisibleOnSomeDisplay(restore())
+ // Resolve a robust preload path that works in dev and in the bundled app
+ // Possible layouts:
+ // - Dev: __dirname === /main/helpers -> ../preload.js
+ // - Bundled: __dirname may resolve under /app or asar; try multiple candidates
+ const preloadCandidates = [
+ path.join(__dirname, '../preload.js'),
+ path.join(__dirname, '../../main/preload.js'),
+ path.join(__dirname, './preload.js'),
+ path.join(process.resourcesPath || __dirname, 'preload.js'),
+ path.join(process.resourcesPath || __dirname, 'app', 'preload.js')
+ ]
+ const resolvedPreload = preloadCandidates.find(p => {
+ try { return fs.existsSync(p) } catch { return false }
+ }) || path.join(__dirname, '../preload.js')
+
+ const isProd = (process.env.NODE_ENV === 'production') || app.isPackaged
+
win = new BrowserWindow({
icon: path.join(__dirname, "../resources/MEDomicsLabWithShadowNoText100.png"),
...state,
...options,
webPreferences: {
+ // Preload to expose a minimal API to renderer (kept alongside current settings)
+ preload: resolvedPreload,
nodeIntegration: true,
- contextIsolation: false,
+ // Use contextIsolation in production; relax in dev to avoid brittle shims
+ contextIsolation: isProd,
...options.webPreferences
},
show: false
diff --git a/main/preload.js b/main/preload.js
new file mode 100644
index 00000000..4daf327e
--- /dev/null
+++ b/main/preload.js
@@ -0,0 +1,96 @@
+const { contextBridge, ipcRenderer, webFrame } = require('electron')
+
+const isIsolated = process.contextIsolated === true
+
+function buildBackendAPI() {
+ return {
+ requestExpress: (req) => ipcRenderer.invoke('express-request', req),
+ getExpressPort: () => ipcRenderer.invoke('get-express-port'),
+ startPortTunnel: (payload) => ipcRenderer.invoke('startPortTunnel', payload),
+ stopPortTunnel: (payload) => ipcRenderer.invoke('stopPortTunnel', payload),
+ listPortTunnels: () => ipcRenderer.invoke('listPortTunnels')
+ }
+}
+
+if (isIsolated) {
+ contextBridge.exposeInMainWorld('backend', buildBackendAPI())
+} else {
+ // In non-isolated (dev) mode, attach directly to window
+ try { window.backend = buildBackendAPI() } catch (_) {}
+}
+
+// Expose a minimal, controlled native require to the page for modules like 'electron'
+// used by legacy/imported code paths. Prefer using window.backend via contextBridge
+// instead of requiring 'electron' directly in renderer code.
+try {
+ if (isIsolated) {
+ contextBridge.exposeInMainWorld('nativeRequire', (mod) => {
+ try { return require(mod) } catch (e) { return undefined }
+ })
+ } else {
+ // In dev non-isolated mode, require is available directly
+ window.nativeRequire = (m) => { try { return require(m) } catch { return undefined } }
+ }
+} catch (_) { /* ignore */ }
+
+// Attempt to set critical shims as early as possible using webFrame,
+// which executes in the main world before most scripts run.
+try {
+ // Skip heavy shims when not isolated (dev mode) since Node globals are available
+ if (!isIsolated) { throw new Error('skip-shims') }
+ webFrame.executeJavaScript(
+ [
+ 'try{',
+ ' if(typeof window.global==="undefined"){ window.global = window; }',
+ ' if(typeof window.process==="undefined"){ window.process = { env: {}, browser: true }; }',
+ ' if(typeof window.require==="undefined" && typeof window.nativeRequire!=="undefined"){ window.require = function(m){ return window.nativeRequire(m); } }',
+ ' if(typeof require==="undefined" && typeof window.require!=="undefined"){ var require = window.require; }',
+ ' if(typeof window.Buffer==="undefined" && typeof window.nativeRequire!=="undefined"){ window.Buffer = window.nativeRequire("buffer").Buffer; }',
+ ' if(typeof __dirname==="undefined"){ var __dirname = "/"; }',
+ ' if(typeof __filename==="undefined"){ var __filename = "/index.js"; }',
+ '}catch(e){}'
+ ].join(' '),
+ true
+ )
+} catch (_) { /* ignore */ }
+
+// Inject shims into the main world for libraries expecting Node globals
+// like `global` (webpack/react-refresh) and sometimes `process` for
+// env reads. We inject when DOM is ready to ensure documentElement exists.
+function injectMainWorldShims() {
+ try {
+ const lines = []
+ if (typeof window.global === 'undefined') {
+ lines.push('window.global = window;')
+ }
+ if (typeof window.process === 'undefined') {
+ // Minimal process shim suitable for client-side checks
+ lines.push('window.process = { env: {}, browser: true };')
+ }
+ // Provide require fallback via nativeRequire if available
+ lines.push('if(typeof window.require==="undefined" && typeof window.nativeRequire!=="undefined"){ window.require = function(m){ return window.nativeRequire(m); } }')
+ lines.push('if(typeof require==="undefined" && typeof window.require!=="undefined"){ var require = window.require; }')
+ // Provide Buffer via native buffer module if available
+ lines.push('if(typeof window.Buffer==="undefined" && typeof window.nativeRequire!=="undefined"){ window.Buffer = window.nativeRequire("buffer").Buffer; }')
+ // Provide CommonJS dirname/filename fallbacks for dev overlays
+ lines.push('if(typeof __dirname==="undefined"){ var __dirname = "/"; }')
+ lines.push('if(typeof __filename==="undefined"){ var __filename = "/index.js"; }')
+ const code = `(function(){ ${lines.join(' ')} })();`
+ const script = document.createElement('script')
+ script.textContent = code
+ document.documentElement.appendChild(script)
+ script.remove()
+ } catch (_) {
+ // Non-fatal; continue without shims
+ }
+}
+
+try {
+ if (typeof window !== 'undefined' && isIsolated) {
+ if (document && (document.readyState === 'interactive' || document.readyState === 'complete')) {
+ injectMainWorldShims()
+ } else {
+ window.addEventListener('DOMContentLoaded', injectMainWorldShims, { once: true })
+ }
+ }
+} catch (_) { /* ignore */ }
diff --git a/main/server-package.json b/main/server-package.json
new file mode 100644
index 00000000..3dbc1ca5
--- /dev/null
+++ b/main/server-package.json
@@ -0,0 +1,3 @@
+{
+ "type": "module"
+}
diff --git a/main/sshKeygen.js b/main/sshKeygen.js
new file mode 100644
index 00000000..bec8773d
--- /dev/null
+++ b/main/sshKeygen.js
@@ -0,0 +1,20 @@
+// SSH key generation utility for Electron main process
+const forge = require('node-forge')
+
+/**
+ * Generate an RSA SSH key pair
+ * @param {string} comment - Comment to append to the public key
+ * @param {string} username - Username for the key (optional, for comment)
+ * @returns {Promise<{privateKey: string, publicKey: string}>}
+ */
+export async function generateSSHKeyPair(comment = '', username = '') {
+ return new Promise((resolve, reject) => {
+ forge.pki.rsa.generateKeyPair({ bits: 2048, workers: 2 }, (err, keypair) => {
+ if (err) return reject(err)
+ const privateKey = forge.pki.privateKeyToPem(keypair.privateKey)
+ // OpenSSH public key format
+ const sshPublic = forge.ssh.publicKeyToOpenSSH(keypair.publicKey, `${username || 'user'}@${comment}`)
+ resolve({ privateKey, publicKey: sshPublic })
+ });
+ });
+}
diff --git a/main/utils/installation.js b/main/utils/installation.js
deleted file mode 100644
index 08d1ad5f..00000000
--- a/main/utils/installation.js
+++ /dev/null
@@ -1,169 +0,0 @@
-import { app } from "electron"
-import { execCallbacksForChildWithNotifications } from "../utils/pythonEnv"
-import { mainWindow, getMongoDBPath } from "../background"
-import { getBundledPythonEnvironment } from "../utils/pythonEnv"
-import fs from "fs"
-
-//**** LOG ****// This is used to send the console.log messages to the main window
-const originalConsoleLog = console.log
-/**
- * @description Sends the console.log messages to the main window
- * @param {*} message The message to send
- * @summary We redefine the console.log function to send the messages to the main window
- */
-console.log = function () {
- try {
- originalConsoleLog(...arguments)
- if (mainWindow !== undefined) {
- mainWindow.webContents.send("log", ...arguments)
- }
- } catch (error) {
- console.error(error)
- }
-}
-
-
-export const checkIsBrewInstalled = async () => {
- let isBrewInstalled = false
- try {
- let { stdout, stderr } = await exec(`brew --version`)
- isBrewInstalled = stdout !== "" && stderr === ""
- } catch (error) {
- isBrewInstalled = false
- }
- return isBrewInstalled
-}
-
-export const checkIsXcodeSelectInstalled = async () => {
- let isXcodeSelectInstalled = false
- try {
- let { stdout, stderr } = await exec(`xcode-select -p`)
- isXcodeSelectInstalled = stdout !== "" && stderr === ""
- } catch (error) {
- isXcodeSelectInstalled = false
- }
-}
-
-export const installBrew = async () => {
- let installBrewPromise = exec(`/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"`)
- execCallbacksForChildWithNotifications(installBrewPromise.child, "Installing Homebrew", mainWindow)
- await installBrewPromise
- return true
-}
-
-export const installXcodeSelect = async () => {
- let installXcodeSelectPromise = exec(`xcode-select --install`)
- execCallbacksForChildWithNotifications(installXcodeSelectPromise.child, "Installing Xcode Command Line Tools", mainWindow)
- await installXcodeSelectPromise
- return true
-}
-
-
-var path = require("path")
-const util = require("util")
-const exec = util.promisify(require("child_process").exec)
-
-export const checkRequirements = async () => {
- // Check if .medomics directory exists
- let medomicsDirExists = fs.existsSync(path.join(app.getPath("home"), ".medomics"))
- if (!medomicsDirExists) {
- fs.mkdirSync(path.join(app.getPath("home"), ".medomics"))
- }
- let mongoDBInstalled = getMongoDBPath()
- let pythonInstalled = getBundledPythonEnvironment()
-
- console.log("MongoDB installed: " + mongoDBInstalled)
- console.log("Python installed: " + pythonInstalled)
- return { pythonInstalled: pythonInstalled, mongoDBInstalled: mongoDBInstalled }
-}
-
-export const installMongoDB = async () => {
- if (process.platform === "win32") {
- // Download MongoDB installer
- const downloadUrl = "https://fastdl.mongodb.org/windows/mongodb-windows-x86_64-7.0.12-signed.msi"
- const downloadPath = path.join(app.getPath("downloads"), "mongodb-windows-x86_64-7.0.12-signed.msi")
- let downloadMongoDBPromise = exec(`curl -o ${downloadPath} ${downloadUrl}`)
- execCallbacksForChildWithNotifications(downloadMongoDBPromise.child, "Downloading MongoDB installer", mainWindow)
- await downloadMongoDBPromise
- // Install MongoDB
- // msiexec.exe /l*v mdbinstall.log /qb /i mongodb-windows-x86_64-7.0.12-signed.msi ADDLOCAL="ServerNoService" SHOULD_INSTALL_COMPASS="0"
- let installMongoDBPromise = exec(`msiexec.exe /l*v mdbinstall.log /qb /i ${downloadPath} ADDLOCAL="ServerNoService" SHOULD_INSTALL_COMPASS="0"`)
- execCallbacksForChildWithNotifications(installMongoDBPromise.child, "Installing MongoDB", mainWindow)
- await installMongoDBPromise
-
- let removeMongoDBInstallerPromise = exec(`rm ${downloadPath}`, { shell: "powershell" })
- execCallbacksForChildWithNotifications(removeMongoDBInstallerPromise.child, "Removing MongoDB installer", mainWindow)
- await removeMongoDBInstallerPromise
-
- return getMongoDBPath() !== null
- } else if (process.platform === "darwin") {
- // Check if Homebrew is installed
- let isBrewInstalled = await checkIsBrewInstalled()
- if (!isBrewInstalled) {
- await installBrew()
- }
- // Check if Xcode Command Line Tools are installed
- let isXcodeSelectInstalled = await checkIsXcodeSelectInstalled()
- if (!isXcodeSelectInstalled) {
- await installXcodeSelect()
- }
-
- let installMongoDBPromise = exec(`brew tap mongodb/brew && brew install mongodb-community@7.0.12`)
- execCallbacksForChildWithNotifications(installMongoDBPromise.child, "Installing MongoDB", mainWindow)
-
- return getMongoDBPath() !== null
- } else if (process.platform === "linux") {
- const linuxURLDict = {
- "Ubuntu 24.04 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu2404-8.0.9.tgz",
- "Ubuntu 20.04 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu2004-7.0.15.tgz",
- "Ubuntu 22.04 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu2204-7.0.15.tgz",
- "Ubuntu 20.04 aarch64": "https://fastdl.mongodb.org/linux/mongodb-linux-aarch64-ubuntu2004-7.0.15.tgz",
- "Ubuntu 22.04 aarch64": "https://fastdl.mongodb.org/linux/mongodb-linux-aarch64-ubuntu2204-7.0.15.tgz",
- "Debian 10 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-debian10-7.0.15.tgz",
- "Debian 11 x86_64": "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-debian11-7.0.15.tgz",
- }
- // Check if MongoDB is installed
- if (getMongoDBPath() !== null) {
- return true
- }
- // Check which Linux distribution is being used
- let { stdout, stderr } = await exec(`cat /etc/os-release`)
- let osRelease = stdout
- let isUbuntu = osRelease.includes("Ubuntu")
- if (!isUbuntu) {
- return false
- } else {
- // osRelease is a string with the contents of /etc/os-release
- // Get the version of Ubuntu
- let ubuntuVersion = osRelease.match(/VERSION_ID="(.*)"/)[1]
- // Get the architecture of the system
- let architecture = "x86_64"
- if (process.arch === "arm64") {
- architecture = "aarch64"
- }
- // Get the download URL
- let downloadUrl = linuxURLDict[`Ubuntu ${ubuntuVersion} ${architecture}`]
- // Download MongoDB installer
- let mongoDBVersion = "7.0.15"
- if (ubuntuVersion === "24.04") {
- mongoDBVersion = "8.0.9"
- }
- const downloadPath = path.join(app.getPath("downloads"), `mongodb-linux-${architecture}-ubuntu${ubuntuVersion}-${mongoDBVersion}.tgz`)
- let downloadMongoDBPromise = exec(`curl -o ${downloadPath} ${downloadUrl}`)
- execCallbacksForChildWithNotifications(downloadMongoDBPromise.child, "Downloading MongoDB installer", mainWindow)
- await downloadMongoDBPromise
- // Install MongoDB in the .medomics directory in the user's home directory
- ubuntuVersion = ubuntuVersion.replace(".", "")
- let command = `tar -xvzf ${downloadPath} -C ${process.env.HOME}/.medomics/ && mv ${process.env.HOME}/.medomics/mongodb-linux-${architecture}-ubuntu${ubuntuVersion}-${mongoDBVersion} ${process.env.HOME}/.medomics/mongodb`
- let installMongoDBPromise = exec(command)
-
- // let installMongoDBPromise = exec(`tar -xvzf ${downloadPath} && mv mongodb-linux-${architecture}-ubuntu${ubuntuVersion}-7.0.15 ${process.env.HOME}/.medomics/mongodb`)
- execCallbacksForChildWithNotifications(installMongoDBPromise.child, "Installing MongoDB", mainWindow)
- await installMongoDBPromise
-
- const test = getMongoDBPath()
-
- return getMongoDBPath() !== null
- }
- }
-}
diff --git a/main/utils/remoteFunctions.js b/main/utils/remoteFunctions.js
new file mode 100644
index 00000000..0c99dce2
--- /dev/null
+++ b/main/utils/remoteFunctions.js
@@ -0,0 +1,2521 @@
+import { Client } from "ssh2"
+import { app, ipcMain } from "electron"
+import { mainWindow } from "../background.js"
+import { generateSSHKeyPair } from '../sshKeygen'
+const net = require("net")
+var path = require("path")
+const fs = require("fs")
+const axios = require("axios")
+
+// Global tunnel state for remote connection management
+let activeTunnel = null
+let activeTunnelServer = null
+
+let mongoDBLocalPort = null
+let mongoDBRemotePort = null
+
+let jupyterLocalPort = null
+let jupyterRemotePort = null
+
+let remoteWorkspacePath = null
+let remoteBackendExecutablePath = null
+
+export function setActiveTunnel(tunnel) {
+ activeTunnel = tunnel
+}
+export function setActiveTunnelServer(server) {
+ activeTunnelServer = server
+}
+export function getActiveTunnel() {
+ return activeTunnel
+}
+export function getActiveTunnelServer() {
+ return activeTunnelServer
+}
+export function setRemoteWorkspacePath(path) {
+ remoteWorkspacePath = path
+ try {
+ setTunnelState({ ...getTunnelState(), remoteWorkspacePath: path || null })
+ try { mainWindow.webContents.send('tunnelStateUpdate', { remoteWorkspacePath: path || null }) } catch {}
+ } catch {}
+}
+export function getRemoteWorkspacePath() {
+ return remoteWorkspacePath
+}
+
+export function setRemoteBackendExecutablePath(p) {
+ // Always store a plain string path
+ if (p && typeof p === 'object' && p.path) {
+ remoteBackendExecutablePath = p.path
+ } else {
+ remoteBackendExecutablePath = p
+ }
+ try {
+ setTunnelState({ ...getTunnelState(), remoteBackendExecutablePath: remoteBackendExecutablePath || null })
+ try { mainWindow.webContents.send('tunnelStateUpdate', { remoteBackendExecutablePath: remoteBackendExecutablePath || null }) } catch {}
+ } catch {}
+}
+export function getRemoteBackendExecutablePath() {
+ return remoteBackendExecutablePath
+}
+
+// Tunnel information and state management
+let tunnelInfo = {
+ host: null,
+ tunnelActive: false,
+ localAddress: "localhost",
+ // Express (backend) forwarding
+ localExpressPort: null, // local port forwarded to remote Express
+ remoteExpressPort: null, // remote Express port
+ // Optional GO direct forwarding
+ localGoPort: null,
+ remoteGoPort: null,
+ localDBPort: null,
+ remoteDBPort: null,
+ localJupyterPort: null,
+ remoteJupyterPort: null,
+ remotePort: null,
+ username: null,
+ // Additional statuses/flags
+ serverStartedRemotely: false,
+ expressStatus: 'unknown',
+ expressLogPath: null,
+ // Persisted remote context
+ remoteWorkspacePath: null,
+ remoteBackendExecutablePath: null,
+ requirementsMetRemote: false,
+ requirementsDetailsRemote: null,
+ requirementsCheckedAt: null,
+ // Generic list of active tunnels
+ tunnels: [] // [{ name: string, localPort: number, remotePort: number, status: 'forwarding'|'closed' }]
+}
+
+export function setTunnelState(info) {
+ // Exclude password
+ const { password, privateKey, ...safeInfo } = info
+ const hasFlag = Object.prototype.hasOwnProperty.call(safeInfo, 'tunnelActive')
+ const nextTunnelActive = hasFlag
+ ? !!safeInfo.tunnelActive
+ : (typeof tunnelInfo.tunnelActive === 'boolean' ? tunnelInfo.tunnelActive : false)
+ tunnelInfo = { ...tunnelInfo, ...safeInfo, tunnelActive: nextTunnelActive }
+}
+
+export function clearTunnelState() {
+ try { remoteWorkspacePath = null } catch {}
+ try { remoteBackendExecutablePath = null } catch {}
+ tunnelInfo = {
+ host: null,
+ tunnelActive: false,
+ localAddress: "localhost",
+ localExpressPort: null,
+ remoteExpressPort: null,
+ localGoPort: null,
+ remoteGoPort: null,
+ localDBPort: null,
+ remoteDBPort: null,
+ localJupyterPort: null,
+ remoteJupyterPort: null,
+ remotePort: null,
+ username: null,
+ serverStartedRemotely: false,
+ expressStatus: 'unknown',
+ expressLogPath: null,
+ remoteWorkspacePath: null,
+ remoteBackendExecutablePath: null,
+ requirementsMetRemote: false,
+ requirementsDetailsRemote: null,
+ requirementsCheckedAt: null,
+ tunnels: [],
+ }
+}
+
+export function getTunnelState() {
+ return tunnelInfo
+}
+
+ipcMain.handle('getTunnelState', () => {
+ return getTunnelState()
+})
+
+ipcMain.handle('getRemoteWorkspacePath', () => {
+ return getRemoteWorkspacePath()
+})
+
+ipcMain.handle('getRemoteBackendExecutablePath', () => {
+ return getRemoteBackendExecutablePath()
+})
+
+ipcMain.handle('setTunnelState', (_event, info) => {
+ setTunnelState(info)
+ mainWindow.webContents.send('tunnelStateUpdate', info)
+})
+
+ipcMain.handle('clearTunnelState', () => {
+ clearTunnelState()
+ mainWindow.webContents.send('tunnelStateClear')
+})
+
+// Helpers for managing remote backend (Express) server lifecycle
+async function execRemote(conn, cmd) {
+ return new Promise((resolve, reject) => {
+ conn.exec(cmd, (err, stream) => {
+ if (err) return reject(err)
+ let stdout = ''
+ let stderr = ''
+ stream.on('data', (d) => { stdout += d.toString() })
+ stream.stderr.on('data', (d) => { stderr += d.toString() })
+ stream.on('close', (code) => {
+ resolve({ code, stdout: stdout.trim(), stderr: stderr.trim() })
+ })
+ })
+ })
+}
+
+async function getRemoteHome(conn, remoteOS) {
+ if (remoteOS === 'win32') {
+ const r = await execRemote(conn, 'powershell -NoProfile -Command "$env:USERPROFILE"')
+ return r.stdout || 'C:\\Users\\Public'
+ } else {
+ const r = await execRemote(conn, 'printf "%s" "$HOME"')
+ return r.stdout || '/home'
+ }
+}
+
+async function findRemoteBackendExecutable(conn, remoteOS) {
+ try {
+ // If a path is already stored, verify it exists and is executable
+ if (remoteBackendExecutablePath) {
+ if (remoteOS === 'win32') {
+ const r = await execRemote(conn, `powershell -NoProfile -Command "If (Test-Path '${remoteBackendExecutablePath.replace(/'/g, "''")}') { Write-Output '${remoteBackendExecutablePath.replace(/'/g, "''")}' }"`)
+ if ((r.stdout||'').trim()) return { path: remoteBackendExecutablePath }
+ } else {
+ const r = await execRemote(conn, `bash -lc "[ -x '${remoteBackendExecutablePath.replace(/'/g, "'\\''")}' ] && echo '${remoteBackendExecutablePath.replace(/'/g, "'\\''")}' || true"`)
+ if ((r.stdout||'').trim()) return { path: remoteBackendExecutablePath }
+ }
+ }
+
+ // Look for medomics-server under the versions directory of ~/.medomics/medomics-server
+ const home = await getRemoteHome(conn, remoteOS)
+ const baseDir = remoteOS === 'win32' ? `${home}\\.medomics\\medomics-server` : `${home}/.medomics/medomics-server`
+ const versionsDir = remoteOS === 'win32' ? `${baseDir}\\versions` : `${baseDir}/versions`
+
+ if (remoteOS === 'win32') {
+ // Prefer newest medomics-server.exe found under versions/**/bin
+ const ps = `powershell -NoProfile -Command "if (Test-Path '${versionsDir.replace(/'/g, "''")}') { Get-ChildItem -Path '${versionsDir.replace(/'/g, "''")}' -Recurse -Filter medomics-server.exe | Sort-Object LastWriteTime -Descending | Select-Object -First 1 -ExpandProperty FullName }"`
+ const r = await execRemote(conn, ps)
+ const found = (r.stdout||'').trim()
+ if (found) return { path: found }
+ // Fallback: check typical bin path for latest version directory
+ const ls = await execRemote(conn, `powershell -NoProfile -Command "If (Test-Path '${versionsDir.replace(/'/g, "''")}') { Get-ChildItem -Path '${versionsDir.replace(/'/g, "''")}' -Directory | Sort-Object LastWriteTime -Descending | Select-Object -First 1 -ExpandProperty FullName }"`)
+ const latestDir = (ls.stdout||'').trim()
+ if (latestDir) {
+ const candidate = `${latestDir}\\bin\\medomics-server.exe`
+ const chk = await execRemote(conn, `powershell -NoProfile -Command "If (Test-Path '${candidate.replace(/'/g, "''")}') { Write-Output '${candidate.replace(/'/g, "''")}' }"`)
+ if ((chk.stdout||'').trim()) return { path: candidate }
+ }
+ } else {
+ // POSIX: prefer current/bin/medomics-server, else search under versions
+ const currentBin = `${baseDir}/current/bin/medomics-server`
+ const curChk = await execRemote(conn, `bash -lc "[ -x '${currentBin.replace(/'/g, "'\\''")}' ] && echo '${currentBin.replace(/'/g, "'\\''")}' || true"`)
+ const curFound = (curChk.stdout||'').trim()
+ if (curFound) return { path: currentBin }
+ const findCmd = `bash -lc "if [ -d '${versionsDir.replace(/'/g, "'\\''")}' ]; then find '${versionsDir.replace(/'/g, "'\\''")}' -type f -name 'medomics-server' -print -quit; fi || true"`
+ const r = await execRemote(conn, findCmd)
+ const found = (r.stdout||'').trim()
+ if (found) return { path: found }
+ // Fallback: check bin under latest version dir
+ const ls = await execRemote(conn, `bash -lc "ls -1dt '${versionsDir.replace(/'/g, "'\\''")}'/* 2>/dev/null | head -n1"`)
+ const latestDir = (ls.stdout||'').trim()
+ if (latestDir) {
+ const candidate = `${latestDir}/bin/medomics-server`
+ const chk = await execRemote(conn, `bash -lc "[ -x '${candidate.replace(/'/g, "'\\''")}' ] && echo '${candidate.replace(/'/g, "'\\''")}' || true"`)
+ if ((chk.stdout||'').trim()) return { path: candidate }
+ }
+ }
+ return null
+ } catch (e) {
+ return null
+ }
+}
+
+/**
+ * Launch the remote backend over an active ssh2 connection.
+ *
+ * For a packaged binary (not a .js/.mjs script) it first tries the shipped
+ * start script via startRemoteExpress(); if that fails it falls back to
+ * running the executable directly with `start --json`. Output is redirected
+ * to <baseDir>/logs/express.log when a base directory can be derived from
+ * exePath. Success is determined by polling the remote port.
+ *
+ * @param {object} conn       active ssh2 connection (used via execRemote/conn.exec)
+ * @param {string} remoteOS   Node-style remote OS id ('win32' vs. POSIX)
+ * @param {string} exePath    remote path to the server binary or Node entry script
+ * @param {number} remotePort port Express is expected to open remotely
+ * @returns {Promise<{success: boolean, status: string, port?: number, error?: string}>}
+ */
+async function startRemoteBackend(conn, remoteOS, exePath, remotePort) {
+  try {
+    if (!exePath) {
+      return { success: false, status: 'not-found', error: 'No remote Express path provided' }
+    }
+    // Script entries are launched with `node`; binaries get CLI args instead.
+    const isScript = exePath.endsWith('.js') || exePath.endsWith('.mjs')
+    // Derive versionDir and log path similarly to startRemoteExpress
+    let versionDir = getVersionDirFromExePath(exePath, remoteOS)
+    let baseDir = null
+    if (versionDir) {
+      const normalizedVersionDir = versionDir.replace(/\\/g, '/')
+      baseDir = normalizedVersionDir.includes('/versions/') ? normalizedVersionDir.split('/versions/')[0] : normalizedVersionDir
+    }
+    let logsDir = null
+    let logPath = null
+    if (baseDir) {
+      logsDir = remoteOS === 'win32' ? `${baseDir.replace(/\//g,'\\')}\\logs` : `${baseDir}/logs`
+      logPath = remoteOS === 'win32' ? `${logsDir}\\express.log` : `${logsDir}/express.log`
+      // Ensure logs dir exists and truncate previous log
+      if (remoteOS === 'win32') {
+        await execRemote(conn, `powershell -NoProfile -Command "New-Item -ItemType Directory -Force -Path '${logsDir.replace(/'/g, "''")}' | Out-Null; Clear-Content -Path '${logPath.replace(/'/g, "''")}' -ErrorAction SilentlyContinue; New-Item -ItemType File -Force -Path '${logPath.replace(/'/g, "''")}' | Out-Null"`)
+      } else {
+        await execRemote(conn, `bash -lc "mkdir -p '${logsDir.replace(/'/g, "'\\''")}' && : > '${logPath.replace(/'/g, "'\\''")}'"`)
+      }
+    }
+    // Publish the log path to tunnel state so the renderer can stream it.
+    if (logPath) {
+      try {
+        setTunnelState({ ...getTunnelState(), expressLogPath: logPath })
+        try { mainWindow.webContents.send('tunnelStateUpdate', { expressLogPath: logPath }) } catch {}
+      } catch {}
+    }
+
+    // If we're launching a packaged server binary, prefer using the shipped
+    // start script (start.bat/start.sh) so it can set NODE_ENV=production and
+    // any other required environment/config.
+    //
+    // IMPORTANT: If the script exists but fails (common case: Linux start.sh uses
+    // `set -e` and calls `ensure` before `start`), do not abort; fall back to
+    // directly running the executable with `start --json`.
+    let scriptFailure = null
+    if (!isScript) {
+      try { setRemoteBackendExecutablePath(exePath) } catch {}
+      try {
+        const viaScript = await startRemoteExpress(conn, remoteOS, remotePort)
+        if (viaScript && viaScript.success) {
+          return { success: true, status: 'express-running', port: remotePort }
+        }
+        if (viaScript && viaScript.status && viaScript.status !== 'script-not-found') {
+          scriptFailure = viaScript
+          console.warn('[remote] startRemoteBackend startRemoteExpress failed; attempting direct start fallback:', viaScript)
+        }
+      } catch (e) {
+        scriptFailure = { success: false, status: 'failed-to-start', error: e && e.message ? e.message : String(e) }
+        console.warn('[remote] startRemoteBackend startRemoteExpress threw; attempting direct start fallback:', e && e.message ? e.message : e)
+      }
+    }
+    let cmd
+    console.log('[remote] startRemoteBackend called', { remoteOS, exePath, remotePort, isScript })
+    if (remoteOS === 'win32') {
+      if (isScript) {
+        cmd = `powershell -NoProfile -Command "$env:NODE_ENV='production'; $env:MEDOMICS_EXPRESS_PORT=${remotePort}; Start-Process -FilePath 'node' -ArgumentList '${exePath.replace(/'/g, "''")}' -WindowStyle Hidden -PassThru | Out-Null"`
+      } else {
+        // If launching medomics-server.exe, pass explicit CLI args: start --json
+        // NOTE(review): .replace(/\\/g, '\\') replaces one backslash with one
+        // backslash (a no-op); likely intended to double them — confirm intent.
+        const workDir = (versionDir || path.dirname(exePath)).replace(/\\/g, '\\')
+        const exeBase = path.basename(exePath).replace(/\\/g, '\\')
+        if (logsDir && logPath) {
+          cmd = `cmd.exe /c "cd /d \"${workDir}\" && set NODE_ENV=production && set MEDOMICS_EXPRESS_PORT=${remotePort} && \"${exeBase}\" start --json >> \"${logPath.replace(/\\/g,'\\')}\" 2>&1"`
+        } else {
+          // Fallback without log redirection
+          cmd = `cmd.exe /c "cd /d \"${workDir}\" && set NODE_ENV=production && set MEDOMICS_EXPRESS_PORT=${remotePort} && \"${exeBase}\" start --json"`
+        }
+      }
+    } else {
+      if (isScript) {
+        cmd = `bash -lc 'export NODE_ENV=production; export MEDOMICS_EXPRESS_PORT=${remotePort}; nohup node "${exePath.replace(/"/g, '\\"')}" >/dev/null 2>&1 < /dev/null & echo $!'`
+      } else {
+        const exeEsc = exePath.replace(/"/g, '\\"')
+        if (logPath) {
+          cmd = `bash -lc 'export NODE_ENV=production; export MEDOMICS_EXPRESS_PORT=${remotePort}; nohup "${exeEsc}" start --json >> "${logPath.replace(/"/g, '\\"')}" 2>&1 < /dev/null & echo $!'`
+        } else {
+          cmd = `bash -lc 'export NODE_ENV=production; export MEDOMICS_EXPRESS_PORT=${remotePort}; nohup "${exeEsc}" start --json >/dev/null 2>&1 < /dev/null & echo $!'`
+        }
+      }
+    }
+    console.log('[remote] startRemoteBackend exec cmd', cmd)
+    let r = null
+    if (remoteOS === 'win32' && !isScript) {
+      // Fire-and-forget for the long-running medomics-server.exe so we can poll the port
+      try {
+        conn.exec(cmd, (err, stream) => {
+          if (err) {
+            console.log('[remote] startRemoteBackend exec error', err.message || String(err))
+            return
+          }
+          stream.on('data', (d) => {
+            try { console.log('[remote] startRemoteBackend stdout chunk', d.toString().slice(0, 200)) } catch {}
+          })
+          stream.stderr.on('data', (d) => {
+            try { console.log('[remote] startRemoteBackend stderr chunk', d.toString().slice(0, 200)) } catch {}
+          })
+          stream.on('close', (code) => {
+            console.log('[remote] startRemoteBackend cmd closed with code', code)
+          })
+        })
+      } catch (e) {
+        console.log('[remote] startRemoteBackend exec exception', e && e.message ? e.message : String(e))
+      }
+    } else {
+      // Awaited path (scripts and POSIX binaries background themselves via nohup/&).
+      r = await execRemote(conn, cmd)
+      console.log('[remote] startRemoteBackend exec result', r)
+      if (r && r.stderr && r.stderr.trim() && !r.stdout) {
+        return { success: false, status: 'failed-to-start', error: r.stderr.trim() }
+      }
+    }
+    // Poll for port to open
+    // Budget: 800ms initial wait + 30 × 600ms ≈ 19s total.
+    await sleep(800)
+    const maxAttempts = 30
+    for (let i = 0; i < maxAttempts; i++) {
+      const open = await checkRemotePortOpen(conn, remotePort)
+      if (open) return { success: true, status: 'express-running' }
+      await sleep(600)
+    }
+    console.log('[remote] startRemoteBackend timeout waiting for port', remotePort)
+    if (scriptFailure) {
+      const scriptMsg = scriptFailure.error ? String(scriptFailure.error) : 'unknown'
+      return { success: false, status: 'timeout', error: `Express did not open port ${remotePort} in time (start script failed first: ${scriptMsg})` }
+    }
+    return { success: false, status: 'timeout', error: `Express did not open port ${remotePort} in time` }
+  } catch (e) {
+    return { success: false, status: 'failed-to-start', error: e && e.message ? e.message : String(e) }
+  }
+}
+
+function sleep(ms){ return new Promise(r=>setTimeout(r, ms)) }
+
+// Derive extracted version directory from stored backend executable path
+function getVersionDirFromExePath(p, remoteOS) {
+ if (!p) return null
+ const normalized = p.replace(/\\/g, '/')
+ const parts = normalized.split('/')
+ // Typical: .../versions//bin/medomics-server[.exe]
+ const binIdx = parts.lastIndexOf('bin')
+ if (binIdx > 0) {
+ const versionParts = parts.slice(0, binIdx)
+ return versionParts.join('/')
+ }
+ // If points directly to medomics-server, use parent directory
+ if (normalized.toLowerCase().includes('medomics-server')) {
+ const idx = normalized.lastIndexOf('/')
+ if (idx > 0) return normalized.slice(0, idx)
+ }
+ // Unknown layout (e.g., legacy GO path) → let caller fall back to baseDir/current
+ return null
+}
+
+// Start Express using extracted start scripts under version directory
+async function startRemoteExpress(conn, remoteOS, remotePort) {
+ try {
+ const exePath = getRemoteBackendExecutablePath()
+ console.log('[remote] startRemoteExpress called', { remoteOS, remotePort, exePath })
+ let versionDir = getVersionDirFromExePath(exePath, remoteOS)
+ // Fallback: use ~/.medomics/medomics-server/{current|latest version}
+ if (!versionDir) {
+ const home = await getRemoteHome(conn, remoteOS)
+ const baseDir = remoteOS === 'win32' ? `${home}\\.medomics\\medomics-server` : `${home}/.medomics/medomics-server`
+ const versionsDir = remoteOS === 'win32' ? `${baseDir}\\versions` : `${baseDir}/versions`
+ // Prefer 'current' symlink on POSIX or latest version directory
+ if (remoteOS !== 'win32') {
+ const curCheck = await execRemote(conn, `bash -lc "[ -d '${baseDir.replace(/'/g, "'\\''")}/current' ] && readlink -f '${baseDir.replace(/'/g, "'\\''")}/current' || echo"`)
+ const curDir = (curCheck.stdout||'').trim()
+ if (curDir) versionDir = curDir
+ }
+ if (!versionDir) {
+ if (remoteOS === 'win32') {
+ const ls = await execRemote(conn, `powershell -NoProfile -Command "Get-ChildItem -Path '${versionsDir.replace(/'/g, "''")}' -Directory | Sort-Object LastWriteTime -Descending | Select-Object -First 1 -ExpandProperty FullName"`)
+ versionDir = (ls.stdout||'').trim()
+ } else {
+ const ls = await execRemote(conn, `bash -lc "ls -1dt '${versionsDir.replace(/'/g, "'\\''")}'/* 2>/dev/null | head -n1"`)
+ versionDir = (ls.stdout||'').trim()
+ }
+ }
+ if (!versionDir) {
+ return { success: false, status: 'script-not-found', error: 'Cannot resolve server version directory (no current or versions found)' }
+ }
+ }
+ // Determine baseDir from versionDir and construct logs dir + log file path
+ const normalizedVersionDir = versionDir.replace(/\\/g, '/');
+ const baseDir = normalizedVersionDir.includes('/versions/') ? normalizedVersionDir.split('/versions/')[0] : normalizedVersionDir;
+ const logsDir = remoteOS === 'win32' ? `${baseDir.replace(/\//g,'\\')}\\logs` : `${baseDir}/logs`;
+ const logPath = remoteOS === 'win32' ? `${logsDir}\\express.log` : `${logsDir}/express.log`;
+ console.log('[remote] startRemoteExpress resolved paths', { versionDir, baseDir, logsDir, logPath })
+ // Ensure logs dir exists and truncate previous log
+ if (remoteOS === 'win32') {
+ await execRemote(conn, `powershell -NoProfile -Command "New-Item -ItemType Directory -Force -Path '${logsDir.replace(/'/g, "''")}' | Out-Null; Clear-Content -Path '${logPath.replace(/'/g, "''")}' -ErrorAction SilentlyContinue; New-Item -ItemType File -Force -Path '${logPath.replace(/'/g, "''")}' | Out-Null"`)
+ } else {
+ await execRemote(conn, `bash -lc "mkdir -p '${logsDir.replace(/'/g, "'\\''")}' && : > '${logPath.replace(/'/g, "'\\''")}'"`)
+ }
+ let candidates
+ if (remoteOS === 'win32') {
+ candidates = [
+ `${versionDir}\\start.bat`,
+ `${versionDir}\\scripts\\start.bat`,
+ `${versionDir}\\bin\\start.bat`,
+ ]
+ } else {
+ candidates = [
+ `${versionDir}/start.sh`,
+ `${versionDir}/scripts/start.sh`,
+ `${versionDir}/bin/start.sh`,
+ ]
+ }
+ console.log("Candidates: ", candidates)
+ let scriptPath = null
+ for (const candidate of candidates) {
+ const checkCmd = remoteOS === 'win32'
+ // Use a well-formed PowerShell Test-Path invocation and keep backslashes
+ ? `powershell -NoProfile -Command "Test-Path '${candidate.replace(/'/g, "''")}'"`
+ : `bash -lc "[ -f '${candidate}' ] && echo yes || echo no"`
+ const r = await execRemote(conn, checkCmd)
+ const exists = remoteOS === 'win32' ? /True/i.test(r.stdout || '') : /yes/i.test(r.stdout || '')
+ if (exists) { scriptPath = candidate; break }
+ }
+ if (!scriptPath) {
+ return { success: false, status: 'script-not-found', error: 'start script not found in server directory' }
+ }
+ console.log('[remote] startRemoteExpress using scriptPath', scriptPath)
+ let cmd
+ if (remoteOS === 'win32') {
+ // Use cmd.exe directly: cd into the versionDir, set MEDOMICS_EXPRESS_PORT,
+ // and run the batch file, redirecting its output to express.log so we can see errors.
+ const workDir = versionDir.replace(/\\/g, '\\')
+ const batName = path.basename(scriptPath)
+ const winLogPath = logPath.replace(/\\/g, '\\')
+ cmd = `cmd.exe /c "cd /d \"${workDir}\" && set NODE_ENV=production && set MEDOMICS_EXPRESS_PORT=${remotePort} && echo [launcher] NODE_ENV=%NODE_ENV% MEDOMICS_EXPRESS_PORT=%MEDOMICS_EXPRESS_PORT% >> \"${winLogPath}\" && \"${batName}\" >> \"${winLogPath}\" 2>&1"`
+ console.log('[remote] startRemoteExpress exec cmd', cmd)
+ // Fire-and-forget: do not await completion of the batch; it runs the server and can stay running.
+ try {
+ conn.exec(cmd, (err, stream) => {
+ if (err) {
+ console.log('[remote] startRemoteExpress exec error', err.message || String(err))
+ return
+ }
+ stream.on('data', (d) => {
+ // Optionally log a small amount of stdout for debugging
+ try { console.log('[remote] startRemoteExpress stdout chunk', d.toString().slice(0, 200)) } catch {}
+ })
+ stream.stderr.on('data', (d) => {
+ try { console.log('[remote] startRemoteExpress stderr chunk', d.toString().slice(0, 200)) } catch {}
+ })
+ stream.on('close', (code) => {
+ console.log('[remote] startRemoteExpress cmd closed with code', code)
+ })
+ })
+ } catch (e) {
+ console.log('[remote] startRemoteExpress exec exception', e && e.message ? e.message : String(e))
+ }
+ } else {
+ const posixVersionDir = versionDir.replace(/'/g, "'\\''")
+ const posixScriptName = path.basename(scriptPath).replace(/'/g, "'\\''")
+ const posixLogPath = logPath.replace(/'/g, "'\\''")
+ cmd = `bash -lc "export NODE_ENV=production; export MEDOMICS_EXPRESS_PORT='${remotePort}'; echo '[launcher] NODE_ENV='\"$NODE_ENV\"' MEDOMICS_EXPRESS_PORT='\"$MEDOMICS_EXPRESS_PORT\" >> '${logPath.replace(/'/g, "'\\''")}'; nohup '${scriptPath}' >> '${logPath.replace(/'/g, "'\\''")}' 2>&1 &"`
+
+ console.log('[remote] startRemoteExpress exec cmd', cmd)
+ const r2 = await execRemote(conn, cmd)
+ console.log('[remote] startRemoteExpress exec result', r2)
+ }
+ // Poll for port open
+ await sleep(800)
+ const maxAttempts = 20
+ for (let i = 0; i < maxAttempts; i++) {
+ const open = await checkRemotePortOpen(conn, remotePort)
+ if (open) return { success: true, status: 'running', port: remotePort }
+ await sleep(500)
+ }
+ console.log('[remote] startRemoteExpress timeout waiting for port', remotePort)
+ return { success: false, status: 'timeout', error: `Express did not open port ${remotePort} in time` }
+ } catch (e) {
+ return { success: false, status: 'failed-to-start', error: e && e.message ? e.message : String(e) }
+ }
+}
+
+// Live log streaming state
+// Holds the ssh2 exec stream currently tailing express.log, or null when idle.
+let activeExpressLogStream = null
+
+// IPC: start streaming the remote express.log to the renderer.
+// Emits 'remoteServerLog:data' chunks and 'remoteServerLog:state' transitions.
+// Idempotent: returns success immediately if a stream is already active.
+ipcMain.handle('startRemoteServerLogStream', async () => {
+  const conn = getActiveTunnel()
+  if (!conn) return { success: false, error: 'No active SSH tunnel' }
+  if (activeExpressLogStream) return { success: true } // already streaming
+  try {
+    const { expressLogPath } = getTunnelState()
+    if (!expressLogPath) return { success: false, error: 'No expressLogPath available' }
+    const remoteOS = await detectRemoteOS()
+    let cmd
+    // Follow the log: Get-Content -Wait on Windows, tail -F on POSIX;
+    // both replay the last 200 lines first.
+    if (remoteOS === 'win32') {
+      cmd = `powershell -NoProfile -Command \"Get-Content -Path '${expressLogPath.replace(/'/g, "''")}' -Tail 200 -Wait\"`
+    } else {
+      cmd = `bash -lc "tail -n 200 -F '${expressLogPath.replace(/'/g, "'\\''")}'"`
+    }
+    return await new Promise((resolve) => {
+      conn.exec(cmd, (err, stream) => {
+        if (err) return resolve({ success: false, error: err.message })
+        activeExpressLogStream = stream
+        try { mainWindow.webContents.send('remoteServerLog:state', { streaming: true }) } catch {}
+        stream.on('data', (d) => {
+          try { mainWindow.webContents.send('remoteServerLog:data', d.toString()) } catch {}
+        })
+        // stderr is forwarded on the same channel so errors are visible too.
+        stream.stderr.on('data', (d) => {
+          try { mainWindow.webContents.send('remoteServerLog:data', d.toString()) } catch {}
+        })
+        stream.on('close', () => {
+          activeExpressLogStream = null
+          try { mainWindow.webContents.send('remoteServerLog:state', { streaming: false }) } catch {}
+        })
+        resolve({ success: true })
+      })
+    })
+  } catch (e) {
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+})
+
+ipcMain.handle('stopRemoteServerLogStream', async () => {
+ try {
+ if (activeExpressLogStream) {
+ try { activeExpressLogStream.close && activeExpressLogStream.close() } catch {}
+ activeExpressLogStream = null
+ }
+ try { mainWindow.webContents.send('remoteServerLog:state', { streaming: false }) } catch {}
+ return { success: true }
+ } catch (e) {
+ return { success: false, error: e && e.message ? e.message : String(e) }
+ }
+})
+
+function mapOsKey(remoteOS) {
+ // Map Node-like OS ids to manifest os keys
+ if (remoteOS === 'win32') return ['windows', 'win32']
+ if (remoteOS === 'darwin') return ['darwin', 'macos', 'osx']
+ return ['linux']
+}
+
+function selectAssetForRemote(manifest, remoteOS) {
+ const assets = (manifest && manifest.assets) || []
+ const osKeys = mapOsKey(remoteOS)
+ const first = assets.find(a => osKeys.includes(String(a.os||'').toLowerCase()))
+ return first || null
+}
+
+function sendInstallProgress(payload) {
+ try {
+ console.log(payload)
+ // Try sending to exported mainWindow if available
+ const bg = (() => { try { return require('../background.js') } catch { return null } })()
+ const win = bg && bg.mainWindow ? bg.mainWindow : (require('electron').BrowserWindow.getAllWindows()[0] || null)
+ if (win && win.webContents) {
+ win.webContents.send('remoteBackendInstallProgress', payload)
+ return
+ }
+ } catch {}
+}
+
+function extractVersionFolderNameFromExePath(exePath) {
+ if (!exePath || typeof exePath !== 'string') return null
+ const normalized = exePath.replace(/\\/g, '/')
+ const marker = '/versions/'
+ const idx = normalized.lastIndexOf(marker)
+ if (idx === -1) return null
+ const rest = normalized.slice(idx + marker.length)
+ const seg = rest.split('/')[0]
+ return seg ? String(seg).trim() : null
+}
+
+/**
+ * List the names (not full paths) of the immediate subdirectories of
+ * versionsDir on the remote host. Returns [] on any failure or when
+ * versionsDir is falsy/absent.
+ *
+ * @param {object} conn        active ssh2 connection
+ * @param {string} remoteOS    'win32' selects PowerShell; anything else uses bash
+ * @param {string} versionsDir remote directory containing version folders
+ * @returns {Promise<string[]>} trimmed, non-empty directory names
+ */
+async function listRemoteVersionDirs(conn, remoteOS, versionsDir) {
+  try {
+    if (!versionsDir) return []
+    if (remoteOS === 'win32') {
+      // Single quotes in the path are doubled for PowerShell quoting.
+      const ps = `powershell -NoProfile -Command "if (Test-Path '${versionsDir.replace(/'/g, "''")}') { Get-ChildItem -Path '${versionsDir.replace(/'/g, "''")}' -Directory | Select-Object -ExpandProperty Name }"`
+      const r = await execRemote(conn, ps)
+      return String(r.stdout || '')
+        .split(/\r?\n/)
+        .map(s => s.trim())
+        .filter(Boolean)
+    }
+
+    // POSIX: depth-1 find prints each child directory's basename.
+    const sh = `bash -lc "if [ -d '${versionsDir.replace(/'/g, "'\\''")}' ]; then find '${versionsDir.replace(/'/g, "'\\''")}' -maxdepth 1 -mindepth 1 -type d -exec basename {} \\; 2>/dev/null; fi"`
+    const r = await execRemote(conn, sh)
+    return String(r.stdout || '')
+      .split(/\r?\n/)
+      .map(s => s.trim())
+      .filter(Boolean)
+  } catch {
+    // Best-effort listing: any exec failure yields an empty list.
+    return []
+  }
+}
+
+/**
+ * Recursively delete a directory on the remote host.
+ *
+ * @param {object} conn     active ssh2 connection
+ * @param {string} remoteOS 'win32' selects PowerShell Remove-Item; else rm -rf
+ * @param {string} dirPath  remote directory to delete
+ * @returns {Promise<{success: boolean, error?: string}>}
+ */
+async function deleteRemoteDir(conn, remoteOS, dirPath) {
+  if (!dirPath) return { success: false, error: 'missing-path' }
+  try {
+    if (remoteOS === 'win32') {
+      // -LiteralPath avoids wildcard expansion; $ErrorActionPreference='Stop'
+      // turns cmdlet errors into a non-zero exit.
+      const ps = `powershell -NoProfile -Command "$ErrorActionPreference='Stop'; Remove-Item -LiteralPath '${dirPath.replace(/'/g, "''")}' -Recurse -Force"`
+      const r = await execRemote(conn, ps)
+      // Non-zero exit code from execRemote is treated as failure.
+      if (r && r.code && r.code !== 0) return { success: false, error: r.stderr || 'remove-item-failed' }
+      return { success: true }
+    }
+
+    // `--` guards against dirPath starting with a dash.
+    const sh = `bash -lc "rm -rf -- '${dirPath.replace(/'/g, "'\\''")}'"`
+    const r = await execRemote(conn, sh)
+    if (r && r.code && r.code !== 0) return { success: false, error: r.stderr || 'rm-failed' }
+    return { success: true }
+  } catch (e) {
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+}
+
+async function cleanupRemoteBackendVersions(conn, remoteOS, { versionsDir, keepVersionNames = [] } = {}) {
+ const keep = new Set((keepVersionNames || []).map(v => String(v || '').trim()).filter(Boolean))
+ const deleted = []
+ const failed = []
+ try {
+ const names = await listRemoteVersionDirs(conn, remoteOS, versionsDir)
+ for (const name of names) {
+ if (!name) continue
+ if (keep.has(name)) continue
+ const fullPath = remoteOS === 'win32' ? `${versionsDir}\\${name}` : `${versionsDir}/${name}`
+ const res = await deleteRemoteDir(conn, remoteOS, fullPath)
+ if (res && res.success) deleted.push(name)
+ else failed.push({ name, error: res && res.error ? res.error : 'delete-failed' })
+ }
+ } catch (e) {
+ failed.push({ name: '*', error: e && e.message ? e.message : String(e) })
+ }
+ return {
+ success: failed.length === 0,
+ versionsDir,
+ keep: Array.from(keep),
+ deleted,
+ failed,
+ }
+}
+
+// IPC: make sure the remote Express backend is reachable on the configured
+// remote port, starting it if needed (start script first, then direct
+// executable fallback), then set up the local port forward and publish the
+// resulting state ('tunnelStateUpdate') to the renderer.
+ipcMain.handle('ensureRemoteBackend', async (_event, { port } = {}) => {
+  const conn = getActiveTunnel()
+  if (!conn) return { success: false, status: 'tunnel-inactive', error: 'No active SSH tunnel' }
+
+  const tunnel = getTunnelState()
+  const targetPort = port || tunnel.remoteExpressPort
+  const localPort = tunnel.localExpressPort
+  if (!targetPort || !localPort) {
+    return { success: false, status: 'invalid-config', error: 'Missing local/remote backend port configuration' }
+  }
+  try {
+    // 1) Ensure Express is reachable on remote targetPort; if not, start it using start scripts
+    let isOpen = await checkRemotePortOpen(conn, targetPort)
+    if (!isOpen) {
+      const remoteOS = await detectRemoteOS()
+      // Prefer start scripts, but fall back to direct executable start if scripts fail.
+      const startRes = await startRemoteExpress(conn, remoteOS, targetPort)
+      if (!startRes.success) {
+        try {
+          const exe = await findRemoteBackendExecutable(conn, remoteOS)
+          // findRemoteBackendExecutable may return { path } or a bare string.
+          const exePath = (typeof exe === 'object' && exe && exe.path) ? exe.path : exe
+          if (exePath) {
+            const fallback = await startRemoteBackend(conn, remoteOS, exePath, targetPort)
+            if (!fallback.success) {
+              // Surface both failure causes so the user sees the full story.
+              return { success: false, status: 'failed-to-start', error: `start script failed: ${startRes.error || startRes.status || 'unknown'}; direct start failed: ${fallback.error || fallback.status || 'unknown'}` }
+            }
+          } else {
+            return startRes
+          }
+        } catch (e) {
+          return { success: false, status: startRes.status || 'failed-to-start', error: startRes.error || (e && e.message ? e.message : String(e)) }
+        }
+      }
+      isOpen = await checkRemotePortOpen(conn, targetPort)
+      if (!isOpen) return { success: false, status: 'timeout', error: `Express did not open port ${targetPort}` }
+    }
+
+    // 2) Express is up; set status, infer log path under baseDir/logs/express.log
+    // Try to compute log path similarly to startRemoteExpress
+    let info = { expressStatus: 'running', serverStartedRemotely: true }
+    try {
+      const exe = getRemoteBackendExecutablePath()
+      const normalized = (exe||'').replace(/\\/g,'/')
+      let baseDir = null
+      if (normalized.includes('/versions/')) baseDir = normalized.split('/versions/')[0]
+      if (!baseDir) {
+        const remoteOS = await detectRemoteOS()
+        const home = await getRemoteHome(getActiveTunnel(), remoteOS)
+        baseDir = remoteOS === 'win32' ? `${home}\\.medomics\\medomics-server` : `${home}/.medomics/medomics-server`
+      }
+      // NOTE(review): detectRemoteOS() is called again here (and possibly once
+      // above) — could be hoisted to a single call; harmless if it is cached.
+      const remoteOS = await detectRemoteOS()
+      const logPath = remoteOS === 'win32' ? `${baseDir}\\logs\\express.log` : `${baseDir}/logs/express.log`
+      info = { ...info, expressLogPath: logPath }
+    } catch {}
+    // 3) Ensure there is a local forward from localPort -> targetPort
+    try {
+      await startExpressForward({ localExpressPort: localPort, remoteExpressPort: targetPort })
+    } catch (e) {
+      // Forwarding failure is non-fatal: the backend itself is running.
+      console.warn('Failed to start Express forward after ensureRemoteBackend:', e && e.message ? e.message : e)
+    }
+    setTunnelState({ ...getTunnelState(), ...info })
+    try { mainWindow.webContents.send('tunnelStateUpdate', info) } catch {}
+    return { success: true, status: 'running', port: targetPort }
+  } catch (e) {
+    return { success: false, status: 'error', error: e && e.message ? e.message : String(e) }
+  }
+})
+
+function getLocalGoBinaryForOS(remoteOS) {
+ // Prefer packaged resources; fallback to repo path
+ try {
+ let base = process.resourcesPath ? path.join(process.resourcesPath, 'go_executables') : null
+ let repo = path.join(process.cwd(), 'go_executables')
+ if (remoteOS === 'win32') {
+ const cand = [base && path.join(base,'server_go_win32.exe'), path.join(repo,'server_go_win32.exe')].filter(Boolean)
+ return cand.find(p=>p && fs.existsSync(p)) || null
+ } else if (remoteOS === 'darwin') {
+ const cand = [base && path.join(base,'server_go'), path.join(repo,'server_go_mac')].filter(Boolean)
+ return cand.find(p=>p && fs.existsSync(p)) || null
+ } else {
+ // linux
+ const cand = [base && path.join(base,'server_go'), path.join(repo,'server_go_linux'), path.join(repo,'server_go')].filter(Boolean)
+ return cand.find(p=>p && fs.existsSync(p)) || null
+ }
+ } catch {
+ return null
+ }
+}
+
+ipcMain.handle('installRemoteBackend', async () => {
+ const conn = getActiveTunnel()
+ if (!conn) return { success: false, error: 'No active SSH tunnel' }
+ try {
+ const remoteOS = await detectRemoteOS()
+ const localBin = getLocalGoBinaryForOS(remoteOS)
+ if (!localBin) return { success: false, error: 'Local GO binary not found for remote OS' }
+ const home = await getRemoteHome(conn, remoteOS)
+ let remoteDir, remotePath
+ if (remoteOS === 'win32') {
+ remoteDir = `${home}\\.medomics\\MEDomicsLab\\go_executables`
+ remotePath = path.join(remoteDir, 'server_go_win32.exe')
+ } else {
+ remoteDir = `${home}/.medomics/MEDomicsLab/go_executables`
+ remotePath = `${remoteDir}/server_go`
+ }
+ // mkdir -p remoteDir
+ if (remoteOS === 'win32') {
+ await execRemote(conn, `powershell -NoProfile -Command "New-Item -ItemType Directory -Force -Path '${remoteDir.replace(/'/g, "''")}' | Out-Null"`)
+ } else {
+ await execRemote(conn, `bash -lc "mkdir -p '${remoteDir.replace(/'/g, "'\\''")}'"`)
+ }
+ // Upload file via SFTP
+ const sftp = await new Promise((resolve, reject) => conn.sftp((err, s) => err ? reject(err) : resolve(s)))
+ await new Promise((resolve, reject) => sftp.fastPut(localBin, remotePath, (err) => err ? reject(err) : resolve()))
+ if (remoteOS !== 'win32') {
+ await execRemote(conn, `bash -lc "chmod +x '${remotePath.replace(/'/g, "'\\''")}'"`)
+ }
+ setRemoteBackendExecutablePath(remotePath)
+ try { sftp.end && sftp.end() } catch {}
+ return { success: true, path: remotePath }
+ } catch (e) {
+ return { success: false, error: e.message }
+ }
+})
+
+ipcMain.handle('setRemoteBackendPath', async (_event, p) => {
+ setRemoteBackendExecutablePath(p)
+ return { success: true, path: p }
+})
+
+// Locate remote backend executable under default install folders and persist the path
+// IPC: searches the remote host via findRemoteBackendExecutable, stores the
+// discovered path, and (best-effort) infers baseDir/logs/express.log so the
+// renderer can stream logs. Returns { success, path } or { success: false, error }.
+ipcMain.handle('locateRemoteBackendExecutable', async () => {
+  const conn = getActiveTunnel()
+  if (!conn) return { success: false, error: 'No active SSH tunnel' }
+  try {
+    const remoteOS = await detectRemoteOS()
+    const exe = await findRemoteBackendExecutable(conn, remoteOS)
+    if (exe) {
+      // findRemoteBackendExecutable may return { path } or a bare string.
+      const pathValue = (typeof exe === 'object' && exe.path) ? exe.path : exe
+      setRemoteBackendExecutablePath(pathValue)
+      // Optionally infer and set express log path for convenience
+      try {
+        const normalized = (pathValue||'').replace(/\\/g,'/')
+        let baseDir = null
+        if (normalized.includes('/versions/')) baseDir = normalized.split('/versions/')[0]
+        if (!baseDir) {
+          // Fall back to the default install root under the remote home dir.
+          const home = await getRemoteHome(conn, remoteOS)
+          baseDir = remoteOS === 'win32' ? `${home}\\.medomics\\medomics-server` : `${home}/.medomics/medomics-server`
+        }
+        const logPath = remoteOS === 'win32' ? `${baseDir}\\logs\\express.log` : `${baseDir}/logs/express.log`
+        setTunnelState({ ...getTunnelState(), expressLogPath: logPath })
+        try { mainWindow.webContents.send('tunnelStateUpdate', { expressLogPath: logPath }) } catch {}
+      } catch {}
+      return { success: true, path: pathValue }
+    }
+    return { success: false, error: 'executable-not-found' }
+  } catch (e) {
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+})
+
+// IPC: start the remote backend from an explicit executable path supplied by
+// the renderer. On success, updates tunnel state (expressStatus/port) and
+// starts the local→remote Express port forward. Returns startRemoteBackend's
+// result object unchanged.
+ipcMain.handle('startRemoteBackendUsingPath', async (_event, { path: exePath, port }) => {
+  const conn = getActiveTunnel()
+  if (!conn) return { success: false, error: 'No active SSH tunnel' }
+  const remoteOS = await detectRemoteOS()
+  const state = getTunnelState()
+  const targetPort = port || state.remoteExpressPort
+  // Persist the chosen path so startRemoteExpress can resolve start scripts relative to it.
+  try { setRemoteBackendExecutablePath(exePath) } catch {}
+  const res = await startRemoteBackend(conn, remoteOS, exePath, targetPort)
+  if (res && res.success) {
+    // Mark Express as running, started via app, and ensure forward is active
+    try {
+      const info = {
+        expressStatus: 'running',
+        serverStartedRemotely: true,
+        remoteExpressPort: targetPort ? Number(targetPort) : state.remoteExpressPort,
+      }
+      setTunnelState({ ...getTunnelState(), ...info })
+      try { mainWindow.webContents.send('tunnelStateUpdate', info) } catch {}
+    } catch {}
+    try {
+      await startExpressForward({ localExpressPort: state.localExpressPort, remoteExpressPort: targetPort })
+    } catch (e) {
+      // Forwarding failure is logged but does not flip the success result.
+      console.warn('Failed to start Express forward after startRemoteBackendUsingPath:', e && e.message ? e.message : e)
+    }
+  }
+  return res
+})
+
+ipcMain.handle('installRemoteBackendFromURL', async (_event, { manifestUrl, version } = {}) => {
+ const conn = getActiveTunnel()
+ if (!conn) return { success: false, error: 'No active SSH tunnel' }
+ try {
+ const remoteOS = await detectRemoteOS()
+ let url, expectedSha = '', manifestVersion
+ if (manifestUrl) {
+ // Legacy manifest-based install
+ sendInstallProgress({ phase: 'fetch-manifest', manifestUrl })
+ const { data: manifest } = await axios.get(manifestUrl, { timeout: 20000 })
+ manifestVersion = version || manifest?.version
+ if (!manifestVersion) {
+ sendInstallProgress({ phase: 'error', step: 'manifest', error: 'no-version-in-manifest' })
+ return { success: false, error: 'no-version-in-manifest' }
+ }
+ const asset = selectAssetForRemote(manifest, remoteOS)
+ if (!asset) {
+ sendInstallProgress({ phase: 'error', step: 'manifest', error: 'no-asset-for-remote', details: { remoteOS } })
+ return { success: false, error: 'no-asset-for-remote', details: { remoteOS } }
+ }
+ url = asset.url
+ expectedSha = (asset.sha256||'').trim().toLowerCase()
+ if (!url) {
+ sendInstallProgress({ phase: 'error', step: 'manifest', error: 'asset-has-no-url' })
+ return { success: false, error: 'asset-has-no-url' }
+ }
+ } else {
+ // GitHub releases-based install (no manifest provided)
+ const defaultOwner = 'MEDomicsLab'
+ const defaultRepo = 'MEDomics'
+ sendInstallProgress({ phase: 'github-fetch-releases', owner: defaultOwner, repo: defaultRepo })
+ const { data: releases } = await axios.get(`https://api.github.com/repos/${defaultOwner}/${defaultRepo}/releases`, {
+ headers: { 'Accept': 'application/vnd.github+json', 'User-Agent': 'medomicslab-remote-installer' },
+ timeout: 20000
+ })
+ if (!Array.isArray(releases) || releases.length === 0) {
+ sendInstallProgress({ phase: 'error', step: 'github', error: 'no-releases-found' })
+ return { success: false, error: 'no-releases-found' }
+ }
+ const serverReleases = releases.filter(r => {
+ const tag = (r.tag_name||'').toLowerCase()
+ const name = (r.name||'').toLowerCase()
+ return tag.includes('server') || name.includes('server')
+ })
+ const sorted = (serverReleases.length ? serverReleases : releases).sort((a,b) => {
+ const pa = new Date(a.published_at||a.created_at||0).getTime()
+ const pb = new Date(b.published_at||b.created_at||0).getTime()
+ return pb - pa
+ })
+ const chosen = sorted[0]
+ if (!chosen) {
+ sendInstallProgress({ phase: 'error', step: 'github', error: 'no-suitable-release' })
+ return { success: false, error: 'no-suitable-release' }
+ }
+ sendInstallProgress({ phase: 'github-pick-release', tag: chosen.tag_name, name: chosen.name })
+ // Select asset by fixed naming pattern: MEDomicsLab-Server-[version]-.zip
+ const assets = chosen.assets || []
+ const suffix = remoteOS === 'win32' ? '-win32.zip' : (remoteOS === 'darwin' ? '-darwin.zip' : '-linux.zip')
+ let candidate = assets.find(a => (a.name||'').toLowerCase().endsWith(suffix))
+ if (!candidate) {
+ // Fallback: check browser_download_url
+ candidate = assets.find(a => (a.browser_download_url||'').toLowerCase().endsWith(suffix))
+ }
+ if (!candidate) {
+ sendInstallProgress({ phase: 'error', step: 'github', error: 'no-asset-for-platform', details: { remoteOS, expectedSuffix: suffix } })
+ return { success: false, error: 'no-asset-for-platform', details: { remoteOS, expectedSuffix: suffix } }
+ }
+ url = candidate.browser_download_url
+ if (!url) {
+ sendInstallProgress({ phase: 'error', step: 'github', error: 'asset-missing-download-url' })
+ return { success: false, error: 'asset-missing-download-url' }
+ }
+ manifestVersion = chosen.tag_name || chosen.name || 'latest'
+ sendInstallProgress({ phase: 'github-select-asset', asset: candidate.name, url })
+ }
+
+ const home = await getRemoteHome(conn, remoteOS)
+ const baseDir = remoteOS === 'win32' ? `${home}\\.medomics\\medomics-server` : `${home}/.medomics/medomics-server`
+ const versionsDir = remoteOS === 'win32' ? `${baseDir}\\versions` : `${baseDir}/versions`
+ const versionDir = remoteOS === 'win32' ? `${versionsDir}\\${manifestVersion}` : `${versionsDir}/${manifestVersion}`
+ const downloadsDir = remoteOS === 'win32' ? `${baseDir}\\downloads` : `${baseDir}/downloads`
+
+ // Ensure dirs exist
+ sendInstallProgress({ phase: 'prepare-dirs', baseDir, versionDir })
+ if (remoteOS === 'win32') {
+ await execRemote(conn, `powershell -NoProfile -Command "New-Item -ItemType Directory -Force -Path '${baseDir.replace(/'/g, "''")}' | Out-Null; New-Item -ItemType Directory -Force -Path '${versionsDir.replace(/'/g, "''")}' | Out-Null; New-Item -ItemType Directory -Force -Path '${versionDir.replace(/'/g, "''")}' | Out-Null; New-Item -ItemType Directory -Force -Path '${downloadsDir.replace(/'/g, "''")}' | Out-Null"`)
+ } else {
+ await execRemote(conn, `bash -lc "mkdir -p '${baseDir.replace(/'/g, "'\\''")}' '${versionsDir.replace(/'/g, "'\\''")}' '${versionDir.replace(/'/g, "'\\''")}' '${downloadsDir.replace(/'/g, "'\\''")}'"`)
+ }
+
+ // If already installed, try to reuse
+ const candidateExeWin = `${versionDir}\\bin\\medomics-server.exe`
+ const candidateExePosix = `${versionDir}/bin/medomics-server`
+ if (remoteOS === 'win32') {
+ const r = await execRemote(conn, `powershell -NoProfile -Command "If (Test-Path '${candidateExeWin.replace(/'/g, "''")}') { Write-Output 'FOUND' }"`)
+ if ((r.stdout||'').trim() === 'FOUND') {
+ setRemoteBackendExecutablePath(candidateExeWin)
+ sendInstallProgress({ phase: 'already-installed', version: manifestVersion, path: candidateExeWin })
+ return { success: true, version: manifestVersion, path: candidateExeWin, reused: true }
+ }
+ } else {
+ const r = await execRemote(conn, `bash -lc "[ -x '${candidateExePosix.replace(/'/g, "'\\''")}'] && echo FOUND || true"`)
+ if ((r.stdout||'').trim() === 'FOUND') {
+ setRemoteBackendExecutablePath(candidateExePosix)
+ sendInstallProgress({ phase: 'already-installed', version: manifestVersion, path: candidateExePosix })
+ return { success: true, version: manifestVersion, path: candidateExePosix, reused: true }
+ }
+ }
+
+ // Download
+ const fileName = url.split('/').pop().split('?')[0]
+ const remoteDownloadPath = remoteOS === 'win32' ? `${downloadsDir}\\${fileName}` : `${downloadsDir}/${fileName}`
+ // Try to get expected size to enable percent & speed reporting (final-only)
+ let expectedBytes = null
+ try {
+ const head = await axios.head(url, { timeout: 15000 })
+ const len = head?.headers?.['content-length'] || head?.headers?.['Content-Length']
+ if (len && !isNaN(Number(len))) expectedBytes = Number(len)
+ } catch {}
+ const t0 = Date.now()
+ sendInstallProgress({ phase: 'download-start', url, remoteDownloadPath })
+ if (remoteOS === 'win32') {
+ const ps = `powershell -NoProfile -Command "Invoke-WebRequest -Uri '${url.replace(/'/g, "''")}' -OutFile '${remoteDownloadPath.replace(/'/g, "''")}' -UseBasicParsing"`
+ const r = await execRemote(conn, ps)
+ if (r.code !== 0 && r.stderr) { sendInstallProgress({ phase: 'error', step: 'download', details: r.stderr }); return { success: false, error: 'download-failed', details: r.stderr } }
+ } else {
+ const sh = `bash -lc "curl -L --fail -o '${remoteDownloadPath.replace(/'/g, "'\\''")}' '${url.replace(/'/g, "'\\''")}'"`
+ const r = await execRemote(conn, sh)
+ if (r.code !== 0 && r.stderr) { sendInstallProgress({ phase: 'error', step: 'download', details: r.stderr }); return { success: false, error: 'download-failed', details: r.stderr } }
+ }
+ const dt = Math.max(1, Date.now() - t0) // ms
+ let speedBps = null
+ if (expectedBytes && dt > 0) {
+ speedBps = Math.round((expectedBytes / dt) * 1000) // bytes/sec
+ }
+ sendInstallProgress({ phase: 'download-complete', remoteDownloadPath, percent: 100, speed: speedBps || undefined })
+
+ // Verify SHA256
+ if (expectedSha) {
+ sendInstallProgress({ phase: 'verify-start' })
+ if (remoteOS === 'win32') {
+ const r = await execRemote(conn, `powershell -NoProfile -Command "(Get-FileHash -Algorithm SHA256 '${remoteDownloadPath.replace(/'/g, "''")}').Hash"`)
+ const actual = (r.stdout||'').trim().toLowerCase()
+ if (!actual || actual !== expectedSha) { sendInstallProgress({ phase: 'error', step: 'verify', expectedSha, actual }); return { success: false, error: 'checksum-mismatch', expectedSha, actual } }
+ } else {
+ // Prefer sha256sum, fallback to shasum
+ const r = await execRemote(conn, `bash -lc "if command -v sha256sum >/dev/null 2>&1; then sha256sum '${remoteDownloadPath.replace(/'/g, "'\\''")}' | awk '{print $1}'; else shasum -a 256 '${remoteDownloadPath.replace(/'/g, "'\\''")}' | awk '{print $1}'; fi"`)
+ const actual = (r.stdout||'').trim().toLowerCase()
+ if (!actual || actual !== expectedSha) { sendInstallProgress({ phase: 'error', step: 'verify', expectedSha, actual }); return { success: false, error: 'checksum-mismatch', expectedSha, actual } }
+ }
+ sendInstallProgress({ phase: 'verify-ok' })
+ } else {
+ sendInstallProgress({ phase: 'verify-skip', reason: 'no-sha256-in-manifest' })
+ }
+
+ // Extract
+ sendInstallProgress({ phase: 'extract-start', to: versionDir })
+ if (remoteOS === 'win32') {
+ if (fileName.toLowerCase().endsWith('.zip')) {
+ const r = await execRemote(conn, `powershell -NoProfile -Command "Expand-Archive -Path '${remoteDownloadPath.replace(/'/g, "''")}' -DestinationPath '${versionDir.replace(/'/g, "''")}' -Force"`)
+ if (r.code !== 0 && r.stderr) { sendInstallProgress({ phase: 'error', step: 'extract', details: r.stderr }); return { success: false, error: 'extract-failed', details: r.stderr } }
+ } else {
+ // Attempt tar if available (Windows 10+)
+ const r = await execRemote(conn, `tar -xf "${remoteDownloadPath}" -C "${versionDir}" 2>&1 || powershell -NoProfile -Command "throw 'Unsupported archive format'"`)
+ if (r.code !== 0 && r.stderr) { sendInstallProgress({ phase: 'error', step: 'extract', details: r.stderr }); return { success: false, error: 'extract-failed', details: r.stderr } }
+ }
+ } else {
+ if (fileName.toLowerCase().endsWith('.tar.gz') || fileName.toLowerCase().endsWith('.tgz')) {
+ const r = await execRemote(conn, `bash -lc "tar -xzf '${remoteDownloadPath.replace(/'/g, "'\\''")}' -C '${versionDir.replace(/'/g, "'\\''")}'"`)
+ if (r.code !== 0 && r.stderr) { sendInstallProgress({ phase: 'error', step: 'extract', details: r.stderr }); return { success: false, error: 'extract-failed', details: r.stderr } }
+ } else if (fileName.toLowerCase().endsWith('.zip')) {
+ const r = await execRemote(conn, `bash -lc "unzip -o '${remoteDownloadPath.replace(/'/g, "'\\''")}' -d '${versionDir.replace(/'/g, "'\\''")}'"`)
+ if (r.code !== 0 && r.stderr) { sendInstallProgress({ phase: 'error', step: 'extract', details: r.stderr }); return { success: false, error: 'extract-failed', details: r.stderr } }
+ } else {
+ sendInstallProgress({ phase: 'error', step: 'extract', error: 'unsupported-archive-format' })
+ return { success: false, error: 'unsupported-archive-format' }
+ }
+ }
+ sendInstallProgress({ phase: 'extract-complete' })
+
+ // Locate executable
+ let exePath
+ if (remoteOS === 'win32') {
+ const findExe = await execRemote(conn, `powershell -NoProfile -Command "Get-ChildItem -Path '${versionDir.replace(/'/g, "''")}' -Recurse -Filter medomics-server.exe | Select-Object -First 1 -ExpandProperty FullName"`)
+ exePath = (findExe.stdout || '').trim()
+ } else {
+ const findExe = await execRemote(conn, `bash -lc "( [ -x '${candidateExePosix.replace(/'/g, "'\\''")}' ] && echo '${candidateExePosix.replace(/'/g, "'\\''")}' ) || find '${versionDir.replace(/'/g, "'\\''")}' -type f -name 'medomics-server' -print -quit || true"`)
+ exePath = (findExe.stdout || '').trim()
+ }
+ if (!exePath) { sendInstallProgress({ phase: 'error', step: 'locate-exe' }); return { success: false, error: 'executable-not-found' } }
+ if (remoteOS !== 'win32') {
+ await execRemote(conn, `bash -lc "chmod +x '${exePath.replace(/'/g, "'\\''")}'"`)
+ }
+
+ // Optional: create 'current' symlink on posix
+ if (remoteOS !== 'win32') {
+ const currentLink = `${baseDir}/current`
+ await execRemote(conn, `bash -lc "ln -sfn '${versionDir.replace(/'/g, "'\\''")}' '${currentLink.replace(/'/g, "'\\''")}'"`)
+ }
+
+ const previousExePath = getRemoteBackendExecutablePath()
+ setRemoteBackendExecutablePath(exePath)
+ // Best-effort cleanup: delete older versions after downloading a new one.
+ // Keep the newly installed version AND the currently used version (if different), so we don't break a running server.
+ let cleanup = null
+ try {
+ const previousVersionName = extractVersionFolderNameFromExePath(previousExePath)
+ const keepVersionNames = [manifestVersion, previousVersionName].filter(Boolean)
+ sendInstallProgress({ phase: 'cleanup-start', versionsDir, keepVersionNames })
+ cleanup = await cleanupRemoteBackendVersions(conn, remoteOS, { versionsDir, keepVersionNames })
+ sendInstallProgress({ phase: 'cleanup-complete', cleanup })
+ } catch (e) {
+ cleanup = { success: false, error: e && e.message ? e.message : String(e) }
+ try { sendInstallProgress({ phase: 'cleanup-error', error: cleanup.error }) } catch {}
+ }
+
+ sendInstallProgress({ phase: 'done', version: manifestVersion, path: exePath, cleanup })
+ return { success: true, version: manifestVersion, path: exePath, cleanup }
+ } catch (e) {
+ try { sendInstallProgress({ phase: 'error', step: 'unexpected', details: e && e.message ? e.message : String(e) }) } catch {}
+ return { success: false, error: e && e.message ? e.message : String(e) }
+ }
+})
+
+
+/**
+ * Starts an SSH tunnel and creates the backend port forwarding server only.
+ * MongoDB tunnel can be created later by calling startMongoTunnel.
+ * @param {Object} params - SSH and port config.
+ * @param {string} params.host - Address of the remote host.
+ * @param {string} params.username - Username for SSH connection.
+ * @param {string} [params.privateKey] - Private key for SSH authentication.
+ * @param {string} [params.password] - Password for SSH authentication.
+ * @param {number|string} params.remotePort - Port of the SSH connection
+ * @param {number|string} params.localExpressPort - Local port forwarded to the remote Express server.
+ * @param {number|string} params.remoteExpressPort - Port on the remote host for the Express server.
+ * @param {number|string} params.localGoPort - (Optional) Local port forwarded to the remote GO server.
+ * @param {number|string} params.remoteGoPort - (Optional) Port on the remote host for the GO server.
+ * @param {number|string} params.localDBPort - Local port for the MongoDB server.
+ * @param {number|string} params.remoteDBPort - Port on the remote host for the MongoDB server.
+ * @param {number|string} params.localJupyterPort - Local port for the Jupyter server.
+ * @param {number|string} params.remoteJupyterPort - Port on the remote host for the Jupyter server.
+ * @param {number|string} [params.localBackendPort] - Deprecated alias for localExpressPort (backward compatibility).
+ * @param {number|string} [params.remoteBackendPort] - Deprecated alias for remoteExpressPort (backward compatibility).
+ * @returns {Promise<{success: boolean}>} Resolves on SSH "ready"; rejects on SSH connection error.
+ */
+export async function startSSHTunnel({ host, username, privateKey, password, remotePort, localExpressPort, remoteExpressPort, localGoPort, remoteGoPort, localDBPort, remoteDBPort, localJupyterPort, remoteJupyterPort, localBackendPort, remoteBackendPort }) {
+  return new Promise((resolve, reject) => {
+    // Stash DB/Jupyter ports in module-level variables for later tunnel creation.
+    mongoDBLocalPort = localDBPort
+    mongoDBRemotePort = remoteDBPort
+    jupyterLocalPort = localJupyterPort
+    jupyterRemotePort = remoteJupyterPort
+
+    // Best-effort teardown of any previously created forwarding servers
+    // before reconnecting; individual close failures are ignored.
+    if (activeTunnelServer) {
+      try {
+        activeTunnelServer.expressServer && activeTunnelServer.expressServer.close()
+      } catch {}
+      try {
+        activeTunnelServer.goServer && activeTunnelServer.goServer.close()
+      } catch {}
+      try {
+        activeTunnelServer.mongoServer && activeTunnelServer.mongoServer.close()
+      } catch {}
+      try {
+        activeTunnelServer.jupyterServer && activeTunnelServer.jupyterServer.close()
+      } catch {}
+      setActiveTunnelServer(null)
+    }
+    // Also end any previous SSH connection.
+    if (activeTunnel) {
+      try {
+        activeTunnel.end()
+      } catch {}
+      setActiveTunnel(null)
+    }
+    const connConfig = {
+      host,
+      port: parseInt(remotePort),
+      username
+    }
+    // Only attach the credentials that were actually supplied.
+    if (privateKey) connConfig.privateKey = privateKey
+    if (password) connConfig.password = password
+    const conn = new Client()
+    conn
+      .on("ready", () => {
+        console.log("SSH connection established to", host)
+        // Backward compatibility mapping
+        if (!localExpressPort && localBackendPort) localExpressPort = localBackendPort
+        if (!remoteExpressPort && remoteBackendPort) remoteExpressPort = remoteBackendPort
+
+        // Defer creating Express/Go forwards until remote /status confirms running.
+        // Initialize tunnel state with provided ports and mark services as closed.
+        try {
+          setTunnelState({
+            ...getTunnelState(),
+            localExpressPort: localExpressPort ? Number(localExpressPort) : null,
+            remoteExpressPort: remoteExpressPort ? Number(remoteExpressPort) : null,
+            localGoPort: localGoPort ? Number(localGoPort) : null,
+            remoteGoPort: remoteGoPort ? Number(remoteGoPort) : null,
+            expressStatus: 'closed'
+          })
+          mainWindow.webContents.send('tunnelStateUpdate', {
+            localExpressPort, remoteExpressPort, localGoPort, remoteGoPort, expressStatus: 'closed'
+          })
+        } catch {}
+
+        // Register the live connection and an empty server registry.
+        setActiveTunnel(conn)
+        setActiveTunnelServer({})
+        // Mark tunnel active and emit a consolidated state update
+        try {
+          setTunnelState({
+            ...getTunnelState(),
+            host,
+            username,
+            remotePort: Number(remotePort),
+            localDBPort: localDBPort ? Number(localDBPort) : null,
+            remoteDBPort: remoteDBPort ? Number(remoteDBPort) : null,
+            localJupyterPort: localJupyterPort ? Number(localJupyterPort) : null,
+            remoteJupyterPort: remoteJupyterPort ? Number(remoteJupyterPort) : null,
+            tunnelActive: true
+          })
+          mainWindow.webContents.send('tunnelStateChanged', getTunnelState())
+        } catch {}
+        resolve({ success: true })
+      })
+      .on("error", (err) => {
+        reject(new Error("SSH connection error: " + err.message))
+      })
+      .connect(connConfig)
+  })
+}
+
+// IPC to rebind the Express forward to a newly discovered remote port
+ipcMain.handle('rebindExpressForward', async (_event, { newRemoteExpressPort, newLocalExpressPort } = {}) => {
+ return rebindPortTunnel({ name: 'express', newRemotePort: Number(newRemoteExpressPort), newLocalPort: Number(newLocalExpressPort) })
+})
+
+// New: Explicit starters for Express and Go forwarding, invoked after /status confirmation
+export async function startExpressForward({ localExpressPort, remoteExpressPort }) {
+ try {
+ const state = getTunnelState()
+ const localPort = Number(localExpressPort || state.localExpressPort)
+ const remotePort = Number(remoteExpressPort || state.remoteExpressPort)
+ const res = await startPortTunnel({ name: 'express', localPort, remotePort, ensureRemoteOpen: true })
+ if (!res.success) return res
+ const updates = { localExpressPort: localPort, remoteExpressPort: remotePort, expressStatus: 'forwarding' }
+ setTunnelState({ ...getTunnelState(), ...updates })
+ try {
+ const full = getTunnelState()
+ mainWindow.webContents.send('tunnelStateChanged', full)
+ mainWindow.webContents.send('tunnelStateUpdate', full)
+ } catch {}
+ return { success: true, localPort, remotePort }
+ } catch (e) {
+ return { success: false, error: e && e.message ? e.message : String(e) }
+ }
+}
+
+// Deprecated wrapper: route to the generic startPortTunnel for GO
+export async function startGoForward({ localGoPort, remoteGoPort }) {
+ const state = getTunnelState()
+ const localPort = Number(localGoPort || state.localGoPort)
+ const remotePort = Number(remoteGoPort || state.remoteGoPort)
+ return startPortTunnel({ name: 'go', localPort, remotePort, ensureRemoteOpen: true })
+}
+
+// Generic port tunnel management
+/**
+ * Creates (or reuses) a local TCP server that forwards every connection over
+ * the active SSH tunnel to `remotePort` on the remote host.
+ * @param {Object} params
+ * @param {string} params.name - Logical tunnel name ('express', 'go', 'mongo', 'jupyter', or custom).
+ * @param {number|string} params.localPort - Desired local port; falls back to an ephemeral port when invalid or busy.
+ * @param {number|string} params.remotePort - Remote port to forward to (required).
+ * @param {boolean} [params.ensureRemoteOpen=false] - When true, verify the remote port is listening (with retries) before forwarding.
+ * @returns {Promise<{success: boolean, name?: string, localPort?: number, remotePort?: number, already?: boolean, error?: string}>}
+ */
+export async function startPortTunnel({ name, localPort, remotePort, ensureRemoteOpen = false }) {
+  try {
+    const conn = getActiveTunnel()
+    if (!conn) {
+      console.log('[startPortTunnel] No active SSH tunnel')
+      return { success: false, error: 'No active SSH tunnel' }
+    }
+    const servers = getActiveTunnelServer() || {}
+    const state = getTunnelState()
+    let lp = Number(localPort)
+    const rp = Number(remotePort)
+    console.log('[startPortTunnel] request', { name, localPort: lp, remotePort: rp, ensureRemoteOpen })
+    // If local port is invalid/missing, fall back to ephemeral (port 0)
+    if (!lp || isNaN(lp)) {
+      console.log('[startPortTunnel] localPort invalid, using ephemeral port')
+      lp = 0
+    }
+    if (!rp || isNaN(rp)) {
+      console.log('[startPortTunnel] invalid remote port', remotePort)
+      return { success: false, error: 'invalid-remote-port' }
+    }
+
+    // Idempotent no-op: if the tunnel already exists, is listening, and targets the same remote port,
+    // don't close/recreate it (heartbeat calls this frequently).
+    try {
+      const tunnels = Array.isArray(state.tunnels) ? state.tunnels : []
+      const existingEntry = tunnels.find(t => t && t.name === name && t.status === 'forwarding')
+      const existingServer = servers[name]
+      const requestedLocal = lp
+      const existingLocal = existingEntry ? Number(existingEntry.localPort) : null
+      const existingRemote = existingEntry ? Number(existingEntry.remotePort) : null
+      // A requested local port "matches" when unspecified/ephemeral (0) or equal to the current one.
+      const localOk = !requestedLocal || requestedLocal === 0 || (existingLocal && requestedLocal === existingLocal)
+      if (existingServer && existingServer.listening && existingEntry && existingRemote === rp && localOk) {
+        console.log('[startPortTunnel] already forwarding', { name, localPort: existingLocal, remotePort: existingRemote })
+        return { success: true, name, localPort: existingLocal, remotePort: existingRemote, already: true }
+      }
+    } catch (_) {
+      // best-effort; continue with normal setup
+    }
+
+    // Default ensure for canonical names; include retries
+    // NOTE(review): since ensureRemoteOpen defaults to boolean false, the
+    // canonical-name fallback branch is currently unreachable — confirm intent.
+    const canonical = ['express', 'go', 'mongo', 'jupyter']
+    const shouldEnsure = typeof ensureRemoteOpen === 'boolean' ? ensureRemoteOpen : canonical.includes(String(name || '').toLowerCase())
+    if (shouldEnsure) {
+      let open = false
+      const maxAttempts = 3
+      const delayMs = 3000
+      for (let i = 0; i < maxAttempts && !open; i++) {
+        try {
+          open = await checkRemotePortOpen(conn, rp)
+          console.log('[startPortTunnel] ensure check', { attempt: i + 1, remotePort: rp, open })
+        } catch (err) {
+          console.log('[startPortTunnel] ensure check error', err && err.message ? err.message : String(err))
+          open = false
+        }
+        // Sleep between attempts, but not after the last one.
+        if (!open && i < maxAttempts - 1) { await sleep(delayMs) }
+      }
+      if (!open) {
+        console.log('[startPortTunnel] remote port not open', rp)
+        return { success: false, error: 'remote-port-closed' }
+      }
+    }
+
+    // Close existing server under this name
+    if (servers[name]) {
+      try {
+        console.log('[startPortTunnel] closing existing server for', name)
+        await new Promise((resolve) => servers[name].close(() => resolve()))
+      } catch (err) {
+        console.log('[startPortTunnel] error closing existing server', err && err.message ? err.message : String(err))
+      }
+    }
+
+    // Each accepted local connection gets its own SSH forwardOut channel,
+    // then the socket and the SSH stream are piped into each other.
+    const createForwardServer = () => {
+      const server = net.createServer((socket) => {
+        conn.forwardOut(socket.localAddress || '127.0.0.1', socket.localPort || 0, '127.0.0.1', rp, (err, stream) => {
+          if (err) {
+            console.log('[startPortTunnel] forwardOut error', err && err.message ? err.message : String(err))
+            socket.destroy();
+            return
+          }
+          socket.pipe(stream).pipe(socket)
+        })
+      })
+      return server
+    }
+
+    let netServer = createForwardServer()
+    // Try requested local port; on EADDRINUSE, fall back to ephemeral port
+    try {
+      await new Promise((resolve, reject) => {
+        netServer.once('error', reject)
+        netServer.listen(lp, '127.0.0.1', () => resolve())
+      })
+    } catch (err) {
+      if (err && err.code === 'EADDRINUSE') {
+        // Recreate the server (the failed one may be in a bad state) and let
+        // the OS pick a free port, then record the port actually assigned.
+        try { netServer.close() } catch {}
+        netServer = createForwardServer()
+        await new Promise((resolve, reject) => {
+          netServer.once('error', reject)
+          netServer.listen(0, '127.0.0.1', () => resolve())
+        })
+        const addr = netServer.address()
+        if (addr && typeof addr === 'object' && addr.port) {
+          lp = Number(addr.port)
+        }
+      } else {
+        console.log('[startPortTunnel] listen error', err && err.message ? err.message : String(err))
+        return { success: false, error: err && err.message ? err.message : String(err) }
+      }
+    }
+    console.log('[startPortTunnel] listening', { name, localPort: lp, remotePort: rp })
+
+    // Track server by name
+    setActiveTunnelServer({ ...servers, [name]: netServer })
+
+    // Update generic tunnels list in state
+    const tunnels = Array.isArray(state.tunnels) ? state.tunnels.slice() : []
+    const idx = tunnels.findIndex(t => t.name === name || t.localPort === lp)
+    const entry = { name, localPort: lp, remotePort: rp, status: 'forwarding' }
+    if (idx >= 0) tunnels[idx] = entry
+    else tunnels.push(entry)
+
+    // Also reflect canonical service fields for UI/requests helpers
+    const updates = { tunnels }
+    const n = String(name || '').toLowerCase()
+    if (n === 'express') Object.assign(updates, { localExpressPort: lp, remoteExpressPort: rp, expressStatus: 'forwarding' })
+    if (n === 'go') Object.assign(updates, { localGoPort: lp, remoteGoPort: rp })
+    if (n === 'mongo') Object.assign(updates, { localDBPort: lp, remoteDBPort: rp })
+    if (n === 'jupyter') Object.assign(updates, { localJupyterPort: lp, remoteJupyterPort: rp })
+
+    setTunnelState({ ...state, ...updates })
+    // Best-effort renderer notification; ignore failures.
+    try {
+      const full = getTunnelState()
+      mainWindow.webContents.send('tunnelStateChanged', full)
+      mainWindow.webContents.send('tunnelStateUpdate', full)
+    } catch {}
+    console.log('[startPortTunnel] success', { name, localPort: lp, remotePort: rp })
+    return { success: true, name, localPort: lp, remotePort: rp }
+  } catch (e) {
+    console.log('[startPortTunnel] exception', e && e.message ? e.message : String(e))
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+}
+
+/**
+ * Stops a single forwarding server, looked up by name or by its local port.
+ * The close is bounded by a 2s timeout so Disconnect never hangs.
+ * @param {Object} params
+ * @param {string} [params.name] - Tunnel name; when omitted, resolved from localPort via state.tunnels.
+ * @param {number|string} [params.localPort] - Local port used to resolve the tunnel name.
+ * @returns {Promise<{success: boolean, name?: string, error?: string}>}
+ */
+export async function stopPortTunnel({ name, localPort }) {
+  try {
+    const servers = getActiveTunnelServer() || {}
+    const state = getTunnelState()
+    let serverName = name
+    // Resolve the tunnel name from the local port when only the port was given.
+    if (!serverName && localPort) {
+      const lp = Number(localPort)
+      const match = (state.tunnels || []).find(t => Number(t.localPort) === lp)
+      serverName = match ? match.name : undefined
+    }
+    if (!serverName || !servers[serverName]) return { success: false, error: 'tunnel-not-found' }
+    // Never hang forever on close (Disconnect relies on this).
+    // `finish` is idempotent so the timeout and the close callback can race safely.
+    await new Promise((resolve) => {
+      let done = false
+      const finish = () => {
+        if (done) return
+        done = true
+        resolve()
+      }
+      const timer = setTimeout(finish, 2000)
+      try {
+        servers[serverName].close(() => {
+          clearTimeout(timer)
+          finish()
+        })
+      } catch {
+        clearTimeout(timer)
+        finish()
+      }
+    })
+    // Drop the server from the registry and mark its state entry as closed.
+    const nextServers = { ...servers }
+    delete nextServers[serverName]
+    setActiveTunnelServer(nextServers)
+
+    const tunnels = Array.isArray(state.tunnels) ? state.tunnels.slice() : []
+    const idx = tunnels.findIndex(t => t.name === serverName)
+    if (idx >= 0) tunnels[idx] = { ...tunnels[idx], status: 'closed' }
+    setTunnelState({ ...state, tunnels })
+    // Best-effort renderer notification.
+    try { mainWindow.webContents.send('tunnelStateChanged', getTunnelState()) } catch {}
+    return { success: true, name: serverName }
+  } catch (e) {
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+}
+
+/**
+ * Stops every registered forwarding server (each close bounded by a 2s timeout).
+ * @param {Object} [params]
+ * @param {boolean} [params.clearList=true] - When true, empty state.tunnels;
+ *   otherwise keep the entries but mark each one as 'closed'.
+ * @returns {Promise<{success: boolean, stopped?: string[], cleared?: boolean, error?: string}>}
+ */
+export async function stopAllPortTunnels({ clearList = true } = {}) {
+  try {
+    const servers = getActiveTunnelServer() || {}
+    const names = Object.keys(servers)
+    for (const n of names) {
+      try {
+        // Same bounded-close pattern as stopPortTunnel: `finish` is idempotent
+        // so the 2s timeout and the close callback can race safely.
+        await new Promise((resolve) => {
+          let done = false
+          const finish = () => {
+            if (done) return
+            done = true
+            resolve()
+          }
+          const timer = setTimeout(finish, 2000)
+          try {
+            servers[n].close(() => {
+              clearTimeout(timer)
+              finish()
+            })
+          } catch {
+            clearTimeout(timer)
+            finish()
+          }
+        })
+      } catch (e) {
+        console.warn('[stopAllPortTunnels] close failed', n, e && e.message ? e.message : String(e))
+      }
+    }
+    setActiveTunnelServer({})
+
+    const state = getTunnelState()
+    // Either wipe the tunnels list or keep it with every entry marked closed.
+    const tunnels = clearList ? [] : (Array.isArray(state.tunnels) ? state.tunnels.map(t => ({ ...t, status: 'closed' })) : [])
+    setTunnelState({ ...state, tunnels })
+    // Best-effort renderer notification.
+    try {
+      const full = getTunnelState()
+      mainWindow.webContents.send('tunnelStateChanged', full)
+      mainWindow.webContents.send('tunnelStateUpdate', full)
+    } catch {}
+    return { success: true, stopped: names, cleared: !!clearList }
+  } catch (e) {
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+}
+
+ipcMain.handle('startPortTunnel', async (_event, payload = {}) => {
+ console.log("startPortTunnel IPC called with payload:", payload)
+ return startPortTunnel(payload)
+})
+ipcMain.handle('stopPortTunnel', async (_event, payload = {}) => {
+ return stopPortTunnel(payload)
+})
+ipcMain.handle('stopAllPortTunnels', async (_event, payload = {}) => {
+ return stopAllPortTunnels(payload)
+})
+ipcMain.handle('listPortTunnels', async () => {
+ return { success: true, tunnels: (getTunnelState().tunnels || []) }
+})
+
+// Generic rebind helper: stop existing tunnel by name and recreate with new ports
+/**
+ * @param {Object} params
+ * @param {string} params.name - Tunnel name ('express', 'go', 'mongo', 'jupyter', or custom).
+ * @param {number|string} params.newRemotePort - New remote port (required).
+ * @param {number|string} [params.newLocalPort] - New local port; falls back to
+ *   the existing tunnel entry, then to the canonical state field for the name.
+ * @returns {Promise<{success: boolean, name?: string, localPort?: number, remotePort?: number, error?: string}>}
+ */
+export async function rebindPortTunnel({ name, newRemotePort, newLocalPort }) {
+  try {
+    const state = getTunnelState()
+    const tunnels = Array.isArray(state.tunnels) ? state.tunnels : []
+    const entry = tunnels.find(t => t.name === name)
+    // Local port fallback chain: explicit arg -> current tunnel entry -> canonical state field.
+    // NOTE(review): non-canonical names fall through to 'localExpressPort' — confirm that default is intended.
+    const localPort = Number(newLocalPort || (entry ? entry.localPort : undefined) || state[
+      name === 'express' ? 'localExpressPort' :
+      name === 'go' ? 'localGoPort' :
+      name === 'mongo' ? 'localDBPort' :
+      name === 'jupyter' ? 'localJupyterPort' :
+      'localExpressPort'
+    ])
+    const remotePort = Number(newRemotePort)
+    if (!remotePort || isNaN(remotePort)) return { success: false, error: 'invalid-remote-port' }
+    // Stop the old forward first, then recreate it against the new remote port.
+    await stopPortTunnel({ name })
+    const res = await startPortTunnel({ name, localPort, remotePort, ensureRemoteOpen: true })
+    if (!res.success) return res
+    // Mirror the new ports into the canonical per-service state fields.
+    const updates = {}
+    if (name === 'express') Object.assign(updates, { remoteExpressPort: remotePort, localExpressPort: localPort, expressStatus: 'forwarding' })
+    if (name === 'go') Object.assign(updates, { remoteGoPort: remotePort, localGoPort: localPort })
+    if (name === 'mongo') Object.assign(updates, { remoteDBPort: remotePort, localDBPort: localPort })
+    if (name === 'jupyter') Object.assign(updates, { remoteJupyterPort: remotePort, localJupyterPort: localPort })
+    if (Object.keys(updates).length) {
+      setTunnelState({ ...getTunnelState(), ...updates })
+      // Best-effort renderer notification.
+      try {
+        const full = getTunnelState()
+        mainWindow.webContents.send('tunnelStateChanged', full)
+        mainWindow.webContents.send('tunnelStateUpdate', full)
+      } catch {}
+    }
+    return { success: true, name, localPort, remotePort }
+  } catch (e) {
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+}
+
+ipcMain.handle('rebindPortTunnel', async (_event, payload = {}) => {
+ return rebindPortTunnel(payload)
+})
+
+// IPC wrappers for starting forwards explicitly
+ipcMain.handle('startExpressForward', async (_event, payload = {}) => {
+ return startExpressForward(payload)
+})
+ipcMain.handle('startGoForward', async (_event, payload = {}) => {
+ // Compatibility: route to generic startPortTunnel for GO
+ const state = getTunnelState()
+ const localPort = Number(payload.localGoPort || state.localGoPort)
+ const remotePort = Number(payload.remoteGoPort || state.remoteGoPort)
+ return startPortTunnel({ name: 'go', localPort, remotePort, ensureRemoteOpen: true })
+})
+
+// Probe GO service reachability: checks remote port open via SSH and local forward HTTP reachability
+ipcMain.handle('probeGo', async () => {
+  try {
+    const state = getTunnelState()
+    const conn = getActiveTunnel()
+    if (!state || !state.tunnelActive) {
+      // Still allow local forward reachability check even if tunnelActive is false
+      const localPort = Number(state && state.localGoPort)
+      const result = { success: false, error: 'no-tunnel', tunnelActive: !!(state && state.tunnelActive), localPort: localPort || null }
+      if (localPort && Number.isFinite(localPort)) {
+        try {
+          // Hit the GO connection-test endpoint on the local forward.
+          const url = `http://127.0.0.1:${localPort}/connection/connection_test_request`
+          const resp = await axios.post(url, { message: JSON.stringify({ data: "" }) }, { timeout: 3000, headers: { 'Content-Type': 'application/json' } })
+          result.localReachable = !!resp
+          result.localResponse = resp && resp.data ? resp.data : null
+          result.success = true
+        } catch (e) {
+          result.localReachable = false
+          result.localError = e && e.message ? e.message : String(e)
+        }
+      }
+      return result
+    }
+    const remotePort = Number(state.remoteGoPort)
+    const localPort = Number(state.localGoPort)
+    const result = { success: true, tunnelActive: true, remotePort: remotePort || null, localPort: localPort || null }
+
+    // Remote port open check via SSH
+    let remoteOpen = null
+    if (remotePort && Number.isFinite(remotePort)) {
+      try { remoteOpen = await checkRemotePortOpen(conn, remotePort) }
+      catch { remoteOpen = false }
+    }
+    result.remoteOpen = !!remoteOpen
+
+    // Local forward reachability by hitting the GO verify endpoint (best-effort)
+    let localReachable = null
+    if (localPort && Number.isFinite(localPort)) {
+      try {
+        const url = `http://127.0.0.1:${localPort}/connection/connection_test_request`
+        const resp = await axios.post(url, { message: JSON.stringify({ data: "" }) }, { timeout: 3000, headers: { 'Content-Type': 'application/json' } })
+        result.localResponse = resp && resp.data ? resp.data : null
+        localReachable = !!resp
+      } catch (e) {
+        result.localError = e && e.message ? e.message : String(e)
+        localReachable = false
+      }
+    }
+    result.localReachable = !!localReachable
+    // Consider the GO service running if either probe succeeded.
+    result.running = !!(result.remoteOpen || result.localReachable)
+    return result
+  } catch (e) {
+    return { success: false, error: e && e.message ? e.message : String(e) }
+  }
+})
+
+/**
+ * Checks if a port is open on the remote host via SSH.
+ * @param {Client} conn - The active SSH2 Client connection.
+ * @param {number|string} port - The port to check.
+ * @returns {Promise}
+ */
+export async function checkRemotePortOpen(conn, port, loadBlocking = false) {
+ if (loadBlocking) {
+ mainWindow.webContents.send("setSidebarLoading", { processing: true, message: "Checking if MongoDB is running on server..." })
+ }
+ // Use detectRemoteOS to determine the remote OS and select the right command
+ const remoteOS = await detectRemoteOS()
+ let checkCmd
+ if (remoteOS === "win32") {
+ // Windows: only treat the port as open if it's in LISTENING state.
+ // This avoids counting TIME_WAIT/CLOSE_WAIT as "open" after a successful stop.
+ checkCmd = `netstat -an | findstr LISTENING | findstr :${port}`
+ } else {
+ // Linux/macOS: use ss or netstat/grep
+ checkCmd = `bash -c "command -v ss >/dev/null 2>&1 && ss -ltn | grep :${port} || netstat -an | grep LISTEN | grep :${port}" || netstat -an | grep :${port}`
+ }
+ console.log('[checkRemotePortOpen] remoteOS:', remoteOS, 'cmd:', checkCmd)
+ return new Promise((resolve, reject) => {
+ conn.exec(checkCmd, (err, stream) => {
+ if (err) {
+ console.log("[checkRemotePortOpen] SSH exec error:", err)
+ return reject(err)
+ }
+ let found = false
+ let stdout = ""
+ let stderr = ""
+ stream.on("data", (data) => {
+ stdout += data.toString()
+ if (data.toString().includes(port)) found = true
+ })
+ stream.stderr.on("data", (data) => {
+ stderr += data.toString()
+ })
+ stream.on("close", (code, signal) => {
+ console.log('[checkRemotePortOpen] close', { code, signal, found, stdout: stdout.trim(), stderr: stderr.trim() })
+ resolve(found)
+ })
+ })
+ })
+}
+
+// Detect the remote OS via SSH. Returns one of: 'win32' | 'linux' | 'darwin' | 'unix'
+// Defaults to 'win32' when there is no active tunnel or no probe produces output.
+export async function detectRemoteOS() {
+  const conn = getActiveTunnel()
+  if (!conn) return 'win32'
+  // Run one command and collect exit code / stdout / stderr; never rejects
+  // (exec errors are reported as code -1 with the message in stderr).
+  const tryExec = (cmd) => new Promise((resolve) => {
+    conn.exec(cmd, (err, stream) => {
+      if (err) return resolve({ code: -1, stdout: '', stderr: String(err && err.message || err) })
+      let stdout = ''
+      let stderr = ''
+      stream.on('data', (d) => { stdout += d.toString() })
+      stream.stderr.on('data', (d) => { stderr += d.toString() })
+      stream.on('close', (code) => resolve({ code: Number(code), stdout, stderr }))
+    })
+  })
+  // Prefer POSIX detection via bash/uname; fallback to Windows PowerShell; last resort: cmd ver
+  const candidates = [
+    "bash -lc 'uname -s'",
+    'uname -s',
+    'powershell -NoProfile -Command "$PSVersionTable.OS.ToString(); [System.Environment]::OSVersion.Platform"',
+    'cmd /c ver'
+  ]
+  for (const cmd of candidates) {
+    try {
+      const r = await tryExec(cmd)
+      const out = (r.stdout || r.stderr || '').trim().toLowerCase()
+      if (!out) continue
+      // Order matters: 'darwin' must be tested before the 'win' substring below,
+      // since 'darwin' itself contains 'win'.
+      if (out.includes('linux')) return 'linux'
+      if (out.includes('darwin') || out.includes('mac')) return 'darwin'
+      if (out.includes('bsd') || out.includes('unix')) return 'unix'
+      if (out.includes('windows') || out.includes('microsoft') || out.includes('version') || out.includes('win')) return 'win32'
+    } catch {}
+  }
+  return 'win32'
+}
+
+
/**
 * @description Confirms that the MongoDB tunnel is active and the remote server is listening.
 * Polls the tunnel state for a 'mongo' entry and checks the remote DB port every 3 s,
 * up to 10 attempts.
 * @param {boolean} [loadBlocking=false] - When true, show a sidebar loading message in the renderer.
 * @returns {Promise<{success: boolean, error?: string}>} Always resolves with a result object
 * (never rejects), matching the documented contract and the other tunnel helpers in this module.
 */
export async function confirmMongoTunnel(loadBlocking = false) {
  if (loadBlocking) {
    mainWindow.webContents.send("setSidebarLoading", { processing: true, message: "Confirming that the MongoDB tunnel is active..." })
  }
  console.log("Confirming MongoDB tunnel is active...")
  const conn = getActiveTunnel()
  if (!conn) {
    return { success: false, error: "No active SSH tunnel" }
  }

  return new Promise((resolve) => {
    // Check for a 'mongo' entry in tunnelState.tunnels and verify the remote DB port is listening.
    // Poll every 3000 ms, up to 10 times (keeps prior behavior).
    let attempts = 0
    const maxAttempts = 10
    const intervalMs = 3000
    let inFlight = false // prevents overlapping checks when a slow check outlives the interval
    let settled = false

    // Settle exactly once and stop polling.
    const finish = (result) => {
      if (settled) return
      settled = true
      clearInterval(interval)
      resolve(result)
    }

    const tick = async () => {
      if (inFlight || settled) return
      inFlight = true
      try {
        const state = getTunnelState()
        const tunnels = Array.isArray(state.tunnels) ? state.tunnels : []
        const mongoTunnel = tunnels.find(t => t && t.name === 'mongo')
        const remotePort = mongoTunnel && mongoTunnel.remotePort != null
          ? Number(mongoTunnel.remotePort)
          : (state.remoteDBPort != null ? Number(state.remoteDBPort) : null)

        if (!mongoTunnel) {
          attempts++
          if (attempts >= maxAttempts) {
            return finish({ success: false, error: "MongoDB tunnel is not present in tunnel state after multiple attempts." })
          }
          return
        }

        if (!remotePort || Number.isNaN(remotePort)) {
          return finish({ success: false, error: "MongoDB remote port is missing or invalid in tunnel state." })
        }

        const isRemoteListening = await checkRemotePortOpen(conn, remotePort, false)
        if (isRemoteListening) {
          console.log("MongoDB tunnel is active and the remote port is listening.")
          return finish({ success: true })
        }

        attempts++
        if (attempts >= maxAttempts) {
          return finish({ success: false, error: "MongoDB is not listening on the remote port after multiple attempts." })
        }
      } catch (e) {
        attempts++
        if (attempts >= maxAttempts) {
          return finish({ success: false, error: e && e.message ? e.message : String(e) })
        }
      } finally {
        inFlight = false
      }
    }

    const interval = setInterval(() => {
      tick()
    }, intervalMs)

    // Run an immediate check rather than waiting for the first interval.
    tick()
  })
}
+
/**
 * @description Stops the SSH tunnel and closes all forwarded servers.
 * Order of operations: close local port-forward servers via stopAllPortTunnels,
 * end the SSH client (waiting at most 1.5 s for its 'close' event), clear the
 * active-tunnel reference, then broadcast a reset tunnel state to the renderer.
 * @returns {Promise<{success: boolean, error?: string}>} success is true when at
 * least one cleanup step had something to do; otherwise the last error (or
 * 'No active tunnel') is reported.
 */
export async function stopSSHTunnel() {
  console.log('[stopSSHTunnel] begin')
  let error = null
  let didSomething = false

  // Close any local forward servers (legacy objects and the newer name->server map).
  try {
    const res = await stopAllPortTunnels({ clearList: true })
    if (res && res.success) didSomething = true
    else if (res && res.error) error = res.error
  } catch (e) {
    error = e && e.message ? e.message : String(e)
  }

  // Close SSH client.
  if (activeTunnel) {
    didSomething = true
    try {
      // end() should be non-blocking; add a best-effort timeout anyway.
      await new Promise((resolve) => {
        let done = false
        // Resolve exactly once, whether via 'close' event or timeout.
        const finish = () => {
          if (done) return
          done = true
          resolve()
        }
        const timer = setTimeout(finish, 1500)
        try {
          // Guard with `once &&` in case the client object lacks an emitter API.
          activeTunnel.once && activeTunnel.once('close', () => {
            clearTimeout(timer)
            finish()
          })
        } catch {}
        try { activeTunnel.end() } catch {}
        // If no close event, timer will resolve.
      })
    } catch {}
    // Clear the shared reference so other helpers see the tunnel as gone.
    try { setActiveTunnel(null) } catch {}
  }

  // Emit state change reflecting closed forwards and inactive tunnel
  try {
    const prev = getTunnelState()
    const next = {
      ...prev,
      tunnelActive: false,
      expressStatus: 'closed',
      serverStartedRemotely: false,
      tunnels: [],
    }
    setTunnelState(next)
    // Both channels are emitted; renderer listeners may subscribe to either.
    try {
      mainWindow.webContents.send('tunnelStateChanged', next)
      mainWindow.webContents.send('tunnelStateUpdate', next)
    } catch {}
  } catch {}

  console.log('[stopSSHTunnel] done', { didSomething, error })
  if (didSomething) return { success: true }
  return { success: false, error: error || 'No active tunnel' }
}
+
+
/**
 * @description Uses SFTP to check whether a file exists at the given remote path.
 * @param {string} filePath - The remote path of the file to check
 * @returns {Promise<string>} One of: "exists", "does not exist", "sftp error", or "tunnel inactive"
 */
export async function checkRemoteFileExists(filePath) {
  const conn = getActiveTunnel()
  if (!conn) return "tunnel inactive"

  // Promisified accessors around the callback-style SFTP API.
  const openSftp = () =>
    new Promise((resolve, reject) => {
      conn.sftp((err, sftp) => (err ? reject(err) : resolve(sftp)))
    })
  const fileExists = (sftp, target) =>
    new Promise((resolve) => {
      // A stat error is treated as "not found" rather than a failure.
      sftp.stat(target, (err, stats) => resolve(!err && Boolean(stats)))
    })

  try {
    const sftp = await openSftp()
    const present = await fileExists(sftp, filePath)
    // Release the SFTP channel; tolerate either API shape and any close error.
    if (typeof sftp.end === 'function') { try { sftp.end() } catch {} }
    else if (typeof sftp.close === 'function') { try { sftp.close() } catch {} }
    return present ? "exists" : "does not exist"
  } catch (error) {
    console.error("SFTP error:", error)
    return "sftp error"
  }
}
+
/**
 * @description Uses SFTP lstat to fetch stats for a remote path.
 * @param {string} filePath - The remote path to inspect
 * @returns {Promise<{isDir: boolean, isFile: boolean, stats: Object}|string>}
 * Stats summary on success, or "tunnel inactive" / "sftp error" on failure.
 */
export async function getRemoteLStat(filePath) {
  const conn = getActiveTunnel()
  if (!conn) return "tunnel inactive"

  // Promisified accessors around the callback-style SFTP API.
  const openSftp = () =>
    new Promise((resolve, reject) => {
      conn.sftp((err, sftp) => (err ? reject(err) : resolve(sftp)))
    })
  const lstatPath = (sftp, target) =>
    new Promise((resolve, reject) => {
      sftp.lstat(target, (err, stats) => (err ? reject(err) : resolve(stats)))
    })

  try {
    const sftp = await openSftp()
    const stats = await lstatPath(sftp, filePath)
    // Release the SFTP channel; tolerate either API shape and any close error.
    if (typeof sftp.end === 'function') { try { sftp.end() } catch {} }
    else if (typeof sftp.close === 'function') { try { sftp.close() } catch {} }
    return {
      isDir: stats && stats.isDirectory ? stats.isDirectory() : false,
      isFile: stats && stats.isFile ? stats.isFile() : false,
      stats
    }
  } catch (error) {
    console.error("SFTP error:", error)
    return "sftp error"
  }
}
+
/**
 * @description Read a remote file over SFTP and return text or base64 content.
 * @param {string} filePath - remote file path
 * @param {{ encoding?: BufferEncoding | 'base64' }} [opts]
 * @returns {Promise<{ success: boolean, content?: string, error?: string }>}
 */
export async function readRemoteFile(filePath, opts = {}) {
  const conn = getActiveTunnel()
  if (!conn) return { success: false, error: "tunnel inactive" }

  const encoding = opts && opts.encoding ? opts.encoding : "utf8"
  // Remote paths always use forward slashes.
  const remotePath = (filePath || "").replace(/\\/g, "/")

  const getSftp = () => new Promise((resolve, reject) => {
    conn.sftp((err, sftp) => err ? reject(err) : resolve(sftp))
  })
  // Close the SFTP channel, tolerating either API shape and any close error.
  const closeSftp = (sftp) => {
    if (!sftp) return
    if (typeof sftp.end === "function") { try { sftp.end() } catch {} }
    else if (typeof sftp.close === "function") { try { sftp.close() } catch {} }
  }

  let sftp = null
  try {
    sftp = await getSftp()
    const chunks = []
    const content = await new Promise((resolve, reject) => {
      const stream = sftp.createReadStream(remotePath)
      stream.on("data", (chunk) => chunks.push(chunk))
      stream.on("error", (err) => reject(err))
      stream.on("end", () => {
        const buffer = Buffer.concat(chunks)
        resolve(encoding === "base64" ? buffer.toString("base64") : buffer.toString(encoding))
      })
    })
    closeSftp(sftp)
    return { success: true, content }
  } catch (error) {
    // Fix: previously the SFTP channel leaked when the read stream errored,
    // because the close only happened on the success path.
    closeSftp(sftp)
    console.error("SFTP readRemoteFile error:", error)
    return { success: false, error: error && error.message ? error.message : String(error) }
  }
}
+
+
/**
 * @description This function uses SFTP to rename a remote file.
 * @param {string} oldPath - The remote path of the file to rename
 * @param {string} newPath - The new remote path of the file
 * @returns {Promise<{ success: boolean, error?: string }>} - Resolves with success or an error message.
 */
ipcMain.handle('renameRemoteFile', async (_event, { oldPath, newPath }) => {
  const conn = getActiveTunnel()
  if (!conn) return { success: false, error: 'No active SSH tunnel' }

  // Promisified wrapper around the callback-style sftp.rename.
  const renameOnSftp = (sftp, from, to) =>
    new Promise((resolve, reject) => {
      sftp.rename(from, to, (err) => (err ? reject(err) : resolve()))
    })

  return new Promise((resolve) => {
    conn.sftp(async (err, sftp) => {
      if (err) return resolve({ success: false, error: err.message })
      try {
        await renameOnSftp(sftp, oldPath, newPath)
        if (typeof sftp.end === 'function') sftp.end()
        resolve({ success: true })
      } catch (e) {
        if (typeof sftp.end === 'function') sftp.end()
        resolve({ success: false, error: e.message })
      }
    })
  })
})
+
+
/**
 * @description This function uses SFTP to delete a remote file or directory.
 * @param {string} path - The remote path of the file to delete
 * @param {boolean} [recursive=true] - Whether to also delete all contents if the path is a directory
 * @returns {Promise<{ success: boolean, error?: string }>} - Resolves with success or an error message.
 */
ipcMain.handle('deleteRemoteFile', async (_event, { path, recursive = true }) => {
  const activeTunnel = getActiveTunnel()
  if (!activeTunnel) return { success: false, error: 'No active SSH tunnel' }

  // Obtain an SFTP channel from whichever client shape is in use.
  function getSftp(callback) {
    if (!activeTunnel) return callback(new Error('No active SSH tunnel'))
    if (activeTunnel.sftp) {
      return activeTunnel.sftp(callback)
    } else if (activeTunnel.sshClient && activeTunnel.sshClient.sftp) {
      return activeTunnel.sshClient.sftp(callback)
    } else {
      return callback(new Error('No SFTP available'))
    }
  }

  // Helper: recursively delete files and folders
  async function sftpDeleteRecursive(sftp, targetPath) {
    // Use lstat (not stat) so a symlink is treated as the link itself: the
    // previous stat-based check followed symlinks and could recurse into —
    // and delete — the link target's contents.
    const stats = await new Promise((res, rej) => {
      sftp.lstat(targetPath, (err, stat) => {
        if (err) return rej(err)
        res(stat)
      })
    })
    if (stats.isDirectory()) {
      // List directory contents
      const entries = await new Promise((res, rej) => {
        sftp.readdir(targetPath, (err, list) => {
          if (err) return rej(err)
          res(list)
        })
      })
      // Recursively delete each entry
      for (const entry of entries) {
        if (entry.filename === '.' || entry.filename === '..') continue
        const entryPath = targetPath.replace(/[\\/]$/, '') + '/' + entry.filename
        await sftpDeleteRecursive(sftp, entryPath)
      }
      // Remove the directory itself
      await new Promise((res, rej) => {
        sftp.rmdir(targetPath, (err) => {
          if (err) return rej(err)
          res()
        })
      })
    } else {
      // Remove file (or symlink)
      await new Promise((res, rej) => {
        sftp.unlink(targetPath, (err) => {
          if (err) return rej(err)
          res()
        })
      })
    }
  }

  return new Promise((resolve) => {
    getSftp(async (err, sftp) => {
      if (err) return resolve({ success: false, error: err.message })
      let sftpClosed = false
      // Close the SFTP channel exactly once, tolerating either API shape.
      function closeSftp() {
        if (sftp && !sftpClosed) {
          if (typeof sftp.end === 'function') {
            try { sftp.end() } catch (e) {}
          } else if (typeof sftp.close === 'function') {
            try { sftp.close() } catch (e) {}
          }
          sftpClosed = true
        }
      }
      try {
        if (recursive) {
          await sftpDeleteRecursive(sftp, path)
        } else {
          // Non-recursive: try to delete as file, then as empty dir
          try {
            await new Promise((res, rej) => {
              sftp.unlink(path, (err) => err ? rej(err) : res())
            })
          } catch (e) {
            // If not a file, try as empty directory
            await new Promise((res, rej) => {
              sftp.rmdir(path, (err) => err ? rej(err) : res())
            })
          }
        }
        closeSftp()
        resolve({ success: true })
      } catch (e) {
        closeSftp()
        resolve({ success: false, error: e.message })
      }
    })
  })
})
+
/**
 * Cross-platform equivalent to path.dirname(): works for both '/' and '\\' separators.
 * @param {string} filePath - The path to extract the directory from.
 * @returns {string} Directory path ('' when there is no separator, '/' for root-level paths)
 */
export function remoteDirname(filePath) {
  if (!filePath) return ''
  // Remote paths are always treated with forward slashes.
  const unixPath = filePath.replace(/\\/g, '/')
  const lastSlash = unixPath.lastIndexOf('/')
  if (lastSlash < 0) return ''       // no directory component
  if (lastSlash === 0) return '/'    // direct child of the root
  return unixPath.slice(0, lastSlash)
}
+
/**
 * Helper function to create a directory recursively using SFTP (like `mkdir -p`).
 * @param {Object} sftp - The SFTP client instance.
 * @param {string} fullPath - The path of the lowest-level directory to create,
 *   including all parent directories.
 */
async function sftpMkdirRecursive(sftp, fullPath) {
  // Remote paths always use forward slashes.
  const normalized = fullPath.replace(/\\/g, '/')
  const segments = normalized.split('/').filter(Boolean)

  // Promisified probes around the callback-style SFTP API.
  const isExistingDir = (dir) =>
    new Promise((resolve) => {
      sftp.stat(dir, (err, stats) => resolve(!err && stats && stats.isDirectory()))
    })
  const makeDir = (dir) =>
    new Promise((resolve, reject) => {
      sftp.mkdir(dir, (err) => (err ? reject(err) : resolve()))
    })

  let current = normalized.startsWith('/') ? '/' : ''
  for (const segment of segments) {
    current = current === '/' ? current + segment : current + '/' + segment
    // Create each missing level in turn; existing directories are skipped.
    if (!(await isExistingDir(current))) {
      await makeDir(current)
    }
  }
}
+
/**
 * @description This request handler creates a new remote folder in the specified parent path.
 * @param {string} path - The parent path where the new folder will be created
 * @param {string} folderName - The name of the new folder to be created
 * @param {boolean} [recursive=false] - When true, also create missing parent directories (like `mkdir -p`)
 * @returns {Promise<{success: boolean, error?: string}>}
 */
ipcMain.handle('createRemoteFolder', async (_event, { path: parentPath, folderName, recursive = false }) => {
  const activeTunnel = getActiveTunnel()
  // Helper to get SFTP client (supports both a direct ssh2 Client and a wrapper exposing sshClient)
  function getSftp(cb) {
    if (!activeTunnel) return cb(new Error('No active SSH tunnel'))
    if (activeTunnel.sftp) {
      return activeTunnel.sftp(cb)
    } else if (activeTunnel.sshClient && activeTunnel.sshClient.sftp) {
      return activeTunnel.sshClient.sftp(cb)
    } else {
      return cb(new Error('No SFTP available'))
    }
  }
  // Normalize path for SFTP: always use absolute, default to home dir as '.'
  function normalizePath(p) {
    if (!p || p === '') return '.'
    if (p === '~') return '.'
    if (p.startsWith('~/')) return p.replace(/^~\//, '')
    // Always use forward slash for remote paths
    return p.replace(/\\/g, '/')
  }
  return new Promise((resolve) => {
    getSftp(async (err, sftp) => {
      if (err) return resolve({ success: false, error: err.message })
      let sftpClosed = false
      // Close the SFTP channel exactly once, tolerating either API shape
      function closeSftp() {
        if (sftp && !sftpClosed) {
          if (typeof sftp.end === 'function') {
            try { sftp.end() } catch (e) {}
          } else if (typeof sftp.close === 'function') {
            try { sftp.close() } catch (e) {}
          }
          sftpClosed = true
        }
      }
      try {
        console.log('Creating folder', folderName, 'in', parentPath)
        const parent = normalizePath(parentPath)
        // Step 1: resolve canonical parent path (falls back to the normalized input when realpath fails)
        let canonicalParent = await new Promise((res, rej) => {
          sftp.realpath(parent, (e, abs) => e ? res(parent) : res(abs))
        })
        // Step 2: build new folder path (no folderName means operate on the parent itself)
        let newFolderPath = folderName ? canonicalParent.replace(/\/$/, '') + '/' + folderName : canonicalParent
        // Step 3: create directory
        if (recursive) {
          await sftpMkdirRecursive(sftp, newFolderPath)
        } else {
          await new Promise((res, rej) => {
            sftp.mkdir(newFolderPath, (e) => e ? rej(e) : res())
          })
        }
        closeSftp()
        console.log('Folder created successfully')
        resolve({ success: true })
      } catch (e) {
        closeSftp()
        console.error('Error creating remote folder:', e)
        resolve({ success: false, error: e.message })
      }
    })
  })
})
+
+
/**
 * @description This request handler manages the remote navigation of folders on the server.
 * @param {string} action - 'list' to display files and folders, 'up' to go back a directory or 'into' to enter it
 * @param {string} path - The remote path to navigate
 * @param {string} dirName - The name of the directory to enter (only used for 'into' action)
 * @returns {Promise<{path: string, contents: Array<{name: string, type: string}>, error?: string}>}
 * The resolved canonical path and its entries; on failure, the original path with an empty listing and an error.
 */
ipcMain.handle('navigateRemoteDirectory', async (_event, { action, path: currentPath, dirName }) => {
  const activeTunnel = getActiveTunnel()
  // Helper to get SFTP client
  function getSftp(cb) {
    if (!activeTunnel) return cb(new Error('No active SSH tunnel'))
    if (activeTunnel.sftp) {
      // ssh2 v1.15+ attaches sftp method directly
      return activeTunnel.sftp(cb)
    } else if (activeTunnel.sshClient && activeTunnel.sshClient.sftp) {
      return activeTunnel.sshClient.sftp(cb)
    } else {
      return cb(new Error('No SFTP available'))
    }
  }

  // Promisified SFTP realpath
  function sftpRealpath(sftp, p) {
    return new Promise((resolve, reject) => {
      sftp.realpath(p, (err, absPath) => {
        if (err) return reject(err)
        resolve(absPath)
      })
    })
  }

  // Promisified SFTP readdir
  function sftpReaddir(sftp, p) {
    return new Promise((resolve, reject) => {
      sftp.readdir(p, (err, list) => {
        if (err) return reject(err)
        resolve(list)
      })
    })
  }

  // Normalize path for SFTP: always use absolute, default to home dir as '.'
  function normalizePath(p) {
    if (!p || p === '') return '.' // SFTP: '.' means home dir
    if (p === '~') return '.'
    if (p.startsWith('~/')) return p.replace(/^~\//, '')
    // Always use forward slash for remote paths
    return p.replace(/\\/g, '/')
  }

  return new Promise((resolve) => {
    getSftp(async (err, sftp) => {
      if (err) return resolve({ path: currentPath, contents: [], error: err.message })
      let targetPath = normalizePath(currentPath)
      let sftpClosed = false
      // Helper to close SFTP session safely (exactly once, either API shape)
      function closeSftp() {
        if (sftp && !sftpClosed) {
          if (typeof sftp.end === 'function') {
            try { sftp.end() } catch (e) {}
          } else if (typeof sftp.close === 'function') {
            try { sftp.close() } catch (e) {}
          }
          sftpClosed = true
        }
      }
      try {
        // Step 1: resolve canonical path (absolute); fall back to the normalized input if realpath fails
        let canonicalPath = await sftpRealpath(sftp, targetPath).catch(() => targetPath)
        // Step 2: handle navigation action
        if (action === 'up') {
          // Go up one directory
          if (canonicalPath === '/' || canonicalPath === '' || canonicalPath === '.') {
            // Already at root/home
            // List current
          } else {
            let parts = canonicalPath.split('/').filter(Boolean)
            if (parts.length > 1) {
              parts.pop()
              canonicalPath = '/' + parts.join('/')
            } else {
              canonicalPath = '/'
            }
          }
        } else if (action === 'into' && dirName) {
          // Always join using absolute path
          if (canonicalPath === '/' || canonicalPath === '') {
            canonicalPath = '/' + dirName
          } else if (canonicalPath === '.') {
            // Home dir: get its absolute path
            canonicalPath = await sftpRealpath(sftp, '.').catch(() => '/')
            canonicalPath = canonicalPath.replace(/\/$/, '') + '/' + dirName
          } else {
            canonicalPath = canonicalPath.replace(/\/$/, '') + '/' + dirName
          }
          // Re-resolve in case of symlinks
          canonicalPath = await sftpRealpath(sftp, canonicalPath).catch(() => canonicalPath)
        } else if (action === 'list') {
          // Just list current
        }
        // Step 3: list directory (an unreadable directory yields an empty listing rather than an error)
        let entries = await sftpReaddir(sftp, canonicalPath).catch(() => [])
        let contents = Array.isArray(entries)
          ? entries.filter(e => e.filename !== '.' && e.filename !== '..').map(e => ({
            name: e.filename,
            type: e.attrs.isDirectory() ? 'dir' : 'file'
          }))
          : []
        closeSftp()
        resolve({ path: canonicalPath, contents })
      } catch (e) {
        closeSftp()
        resolve({ path: currentPath, contents: [], error: e.message })
      }
    })
  })
})
+
// ---- IPC registrations: thin pass-throughs to the tunnel/SFTP helpers above ----

// Opens the SSH tunnel with the given connection parameters.
ipcMain.handle('startSSHTunnel', async (_event, params) => {
  return startSSHTunnel(params)
})

// Polls until the MongoDB tunnel is confirmed active (or times out).
ipcMain.handle('confirmMongoTunnel', async (_event, loadBlocking ) => {
  return confirmMongoTunnel(loadBlocking)
})

// Tears down the SSH tunnel and all forwarded ports.
ipcMain.handle('stopSSHTunnel', async () => {
  return stopSSHTunnel()
})

// lstat of a remote path.
ipcMain.handle('getRemoteLStat', async (_event, path) => {
  return getRemoteLStat(path)
})

// Existence check for a remote file.
ipcMain.handle('checkRemoteFileExists', async (_event, path) => {
  return checkRemoteFileExists(path)
})

// Reads a remote file as text (default utf8) or base64.
ipcMain.handle('readRemoteFile', async (_event, { path, encoding = 'utf8' } = {}) => {
  return readRemoteFile(path, { encoding })
})

// Records the remote workspace path used by other remote operations.
ipcMain.handle('setRemoteWorkspacePath', async (_event, path) => {
  return setRemoteWorkspacePath(path)
})

// Forwards the Jupyter port over the existing SSH connection.
ipcMain.handle('startJupyterTunnel', async () => {
  return startPortTunnel({ name: 'jupyter', localPort: jupyterLocalPort, remotePort: jupyterRemotePort, ensureRemoteOpen: true })
})
+
/**
 * @description This request handler lists the contents of a remote directory on the server.
 * @param {string} path - The remote path of the folder to list
 * @returns {Promise<{path: string, contents: Array<{name: string, type: string}>, error?: string}>}
 * The resolved canonical path and its entries; on failure, the original path with an empty listing and an error.
 */
ipcMain.handle('listRemoteDirectory', async (_event, { path: remotePath }) => {
  return new Promise((resolve, reject) => {
    const activeTunnel = getActiveTunnel()
    if (!activeTunnel) {
      return resolve({ path: remotePath, contents: [], error: 'No active SSH tunnel' })
    }
    try {
      activeTunnel.sftp((err, sftp) => {
        if (err || !sftp) return resolve({ path: remotePath, contents: [], error: err ? err.message : 'No SFTP' })
        // Normalize path for SFTP: always use absolute, default to home dir as '.'
        function normalizePath(p) {
          if (!p || p === '') return '.' // SFTP: '.' means home dir
          if (p === '~') return '.'
          if (p.startsWith('~/')) return p.replace(/^~\//, '')
          // Always use forward slash for remote paths
          return p.replace(/\\/g, '/')
        }
        const targetPath = normalizePath(remotePath)
        // First, resolve canonical/absolute path (fall back to the normalized input on failure)
        sftp.realpath(targetPath, (err2, absPath) => {
          const canonicalPath = (!err2 && absPath) ? absPath : targetPath
          sftp.readdir(canonicalPath, (err3, list) => {
            // Always close SFTP session after use (either API shape)
            if (sftp && typeof sftp.end === 'function') {
              try { sftp.end() } catch (e) {}
            } else if (sftp && typeof sftp.close === 'function') {
              try { sftp.close() } catch (e) {}
            }
            if (err3) return resolve({ path: canonicalPath, contents: [], error: err3.message })
            const contents = Array.isArray(list)
              ? list.filter(e => e.filename !== '.' && e.filename !== '..').map(e => ({
                name: e.filename,
                type: e.attrs.isDirectory() ? 'dir' : 'file'
              }))
              : []
            resolve({ path: canonicalPath, contents })
          })
        })
      })
    } catch (e) {
      resolve({ path: remotePath, contents: [], error: e.message })
    }
  })
})
+
// SSH key management
/**
 * @description Returns an SSH key pair for the given username, generating and
 * persisting one under the app's userData directory when none exists yet.
 * @returns {Promise<{privateKey?: string, publicKey?: string, error?: string}>}
 */
ipcMain.handle('generateSSHKey', async (_event, { comment, username }) => {
  try {
    const userDataPath = app.getPath('userData')
    const keyBaseName = `${username || 'user'}_id_rsa`
    const privKeyPath = path.join(userDataPath, keyBaseName)
    const pubKeyPath = path.join(userDataPath, `${keyBaseName}.pub`)

    // Reuse an existing key pair when both halves are present on disk.
    if (fs.existsSync(privKeyPath) && fs.existsSync(pubKeyPath)) {
      return {
        privateKey: fs.readFileSync(privKeyPath, 'utf8'),
        publicKey: fs.readFileSync(pubKeyPath, 'utf8')
      }
    }

    // Otherwise generate a fresh pair and persist it.
    const { privateKey, publicKey } = await generateSSHKeyPair(comment, username)
    fs.writeFileSync(privKeyPath, privateKey, { mode: 0o600 })  // private key: owner-only
    fs.writeFileSync(pubKeyPath, publicKey, { mode: 0o644 })
    return { privateKey, publicKey }
  } catch (err) {
    return { error: err.message }
  }
})
+
/**
 * @description Reads a previously generated SSH key pair for the given username
 * from the app's userData directory. Returns empty strings when no pair exists.
 * @returns {Promise<{privateKey?: string, publicKey?: string, error?: string}>}
 */
ipcMain.handle('getSSHKey', async (_event, { username }) => {
  try {
    const userDataPath = app.getPath('userData')
    const keyBaseName = `${username || 'user'}_id_rsa`
    const privKeyPath = path.join(userDataPath, keyBaseName)
    const pubKeyPath = path.join(userDataPath, `${keyBaseName}.pub`)

    // Both halves must be present to count as an existing pair.
    const hasKeyPair = fs.existsSync(privKeyPath) && fs.existsSync(pubKeyPath)
    if (!hasKeyPair) {
      return { privateKey: '', publicKey: '' }
    }
    return {
      privateKey: fs.readFileSync(privKeyPath, 'utf8'),
      publicKey: fs.readFileSync(pubKeyPath, 'utf8')
    }
  } catch (err) {
    return { error: err.message }
  }
})
+
+
+
+// ----- Unused -----
+// export function getRemoteMongoDBPath() {
+// const remotePlatform = detectRemoteOS()
+
+// if (remotePlatform === "win32") {
+// // Check if mongod is in the process.env.PATH
+// const paths = process.env.PATH.split(path.delimiter)
+// for (let i = 0; i < paths.length; i++) {
+// const binPath = path.join(paths[i], "mongod.exe")
+// if (fs.existsSync(binPath)) {
+// console.log("mongod found in PATH")
+// return binPath
+// }
+// }
+// // Check if mongod is in the default installation path on Windows - C:\Program Files\MongoDB\Server\\bin\mongod.exe
+// const programFilesPath = process.env["ProgramFiles"]
+// if (programFilesPath) {
+// const mongoPath = path.join(programFilesPath, "MongoDB", "Server")
+// // Check if the MongoDB directory exists
+// if (!fs.existsSync(mongoPath)) {
+// console.error("MongoDB directory not found")
+// return null
+// }
+// const dirs = fs.readdirSync(mongoPath)
+// for (let i = 0; i < dirs.length; i++) {
+// const binPath = path.join(mongoPath, dirs[i], "bin", "mongod.exe")
+// if (fs.existsSync(binPath)) {
+// return binPath
+// }
+// }
+// }
+// console.error("mongod not found")
+// return null
+// } else if (process.platform === "darwin") {
+// // Check if it is installed in the .medomics directory
+// const binPath = path.join(process.env.HOME, ".medomics", "mongodb", "bin", "mongod")
+// if (fs.existsSync(binPath)) {
+// console.log("mongod found in .medomics directory")
+// return binPath
+// }
+// if (process.env.NODE_ENV !== "production") {
+// // Check if mongod is in the process.env.PATH
+// const paths = process.env.PATH.split(path.delimiter)
+// for (let i = 0; i < paths.length; i++) {
+// const binPath = path.join(paths[i], "mongod")
+// if (fs.existsSync(binPath)) {
+// console.log("mongod found in PATH")
+// return binPath
+// }
+// }
+// // Check if mongod is in the default installation path on macOS - /usr/local/bin/mongod
+// const binPath = "/usr/local/bin/mongod"
+// if (fs.existsSync(binPath)) {
+// return binPath
+// }
+// }
+// console.error("mongod not found")
+// return null
+// } else if (process.platform === "linux") {
+// // Check if mongod is in the process.env.PATH
+// const paths = process.env.PATH.split(path.delimiter)
+// for (let i = 0; i < paths.length; i++) {
+// const binPath = path.join(paths[i], "mongod")
+// if (fs.existsSync(binPath)) {
+// return binPath
+// }
+// }
+// console.error("mongod not found in PATH" + paths)
+// // Check if mongod is in the default installation path on Linux - /usr/bin/mongod
+// if (fs.existsSync("/usr/bin/mongod")) {
+// return "/usr/bin/mongod"
+// }
+// console.error("mongod not found in /usr/bin/mongod")
+
+// if (fs.existsSync("/home/" + process.env.USER + "/.medomics/mongodb/bin/mongod")) {
+// return "/home/" + process.env.USER + "/.medomics/mongodb/bin/mongod"
+// }
+// return null
+// } else {
+// return "mongod"
+// }
+// }
+
+// export function checkRemoteFolderExists(folderPath) {
+// // Ensure tunnel is active and SSH client is available
+// const tunnelObject = getActiveTunnel()
+// if (!tunnelObject) {
+// const errMsg = "No active SSH tunnel for remote folder creation."
+// console.error(errMsg)
+// return Promise.resolve("tunnel inactive")
+// }
+
+// return new Promise((resolve, reject) => {
+// tunnelObject.sftp((err, sftp) => {
+// if (err) {
+// console.error("SFTP error:", err)
+// resolve("sftp error")
+// return
+// }
+
+// // Check if folder exists
+// sftp.stat(folderPath, (statErr, stats) => {
+// if (!statErr && stats && stats.isDirectory()) {
+// // Folder exists
+// sftp.end && sftp.end()
+// resolve("exists")
+// } else {
+// resolve("does not exist")
+// }
+// })
+// })
+// })
+// }
\ No newline at end of file
diff --git a/main/utils/workspace.js b/main/utils/workspace.js
deleted file mode 100644
index 0e0d38ad..00000000
--- a/main/utils/workspace.js
+++ /dev/null
@@ -1,221 +0,0 @@
-import { app, dialog, ipcRenderer } from "electron"
-import MEDconfig from "../../medomics.dev"
-
-const fs = require("fs")
-var path = require("path")
-const dirTree = require("directory-tree")
-
-/**
- * @description Set the working directory
- * @summary Opens the dialog to select the working directory and creates the folder structure if it does not exist
- * When the working directory is set, the function returns the folder structure of the working directory as a JSON object in a reply to Next.js
- * @param {*} event
- * @param {*} mainWindow
- * @param {*} hasBeenSet
- */
-export function setWorkingDirectory(event, mainWindow) {
- dialog
- .showOpenDialog(mainWindow, {
- // Opens the dialog to select the working directory (Select a folder window)
- properties: ["openDirectory"]
- })
- .then((result) => {
- if (result.canceled) {
- // If the user cancels the dialog
- console.log("Dialog was canceled")
- event.reply("messageFromElectron", "Dialog was canceled")
- } else {
- const file = result.filePaths[0]
- if (dirTree(file).children.length > 0) {
- // If the selected folder is not empty
- console.log("Selected folder is not empty")
- event.reply("messageFromElectron", "Selected folder is not empty")
- // Open a dialog to ask the user if he wants to still use the selected folder as the working directory or if he wants to select another folder
- dialog
- .showMessageBox(mainWindow, {
- type: "question",
- buttons: ["Yes", "No"],
- title: "Folder is not empty",
- message: "The selected folder is not empty. Do you want to use this folder as the working directory?"
- })
- .then((result) => {
- if (result.response === 0) {
- // If the user clicks on "Yes"
- mainWindow.webContents.send("setWorkingDirectoryInApp", file)
- mainWindow.webContents.send("setRecentWorkspacesInApp", file)
- } else if (result.response === 1) {
- // If the user clicks on "No"
- console.log("Dialog was canceled")
- event.reply("messageFromElectron", "Dialog was canceled")
- }
- })
- } else if (file === app.getPath("sessionData")) {
- // If the working directory is already set to the selected folder
- console.log("Working directory is already set to " + file)
- } else {
- // If the working directory is not set to the selected folder
- // The working directory is set to the selected folder and the folder structure is returned to Next.js
- mainWindow.webContents.send("setWorkingDirectoryInApp", file)
- }
- }
- })
- .catch((err) => {
- console.log(err)
- })
-}
-
-function getWorkingDirectory() {
- // Returns the working directory
- return app.getPath("sessionData")
-}
-
-/**
- * Loads the recent workspaces
- * @returns {Array} An array of workspaces
- */
-export function loadWorkspaces() {
- const userDataPath = app.getPath("userData")
- const workspaceFilePath = path.join(userDataPath, "workspaces.json")
- if (fs.existsSync(workspaceFilePath)) {
- const workspaces = JSON.parse(fs.readFileSync(workspaceFilePath, "utf8"))
- // Sort workspaces by date, most recent first
- let sortedWorkspaces = workspaces.sort((a, b) => new Date(b.last_time_it_was_opened) - new Date(a.last_time_it_was_opened))
- // Check if the workspaces still exist
- let workspacesThatStillExist = []
- sortedWorkspaces.forEach((workspace) => {
- if (fs.existsSync(workspace.path)) {
- workspacesThatStillExist.push(workspace)
- } else {
- console.log("Workspace does not exist anymore: ", workspace.path)
- }
- })
- return workspacesThatStillExist
- } else {
- return []
- }
-}
-
-/**
- * Saves the recent workspaces
- * @param {Array} workspaces An array of workspaces
- */
-function saveWorkspaces(workspaces) {
- const userDataPath = app.getPath("userData")
- const workspaceFilePath = path.join(userDataPath, "workspaces.json")
- fs.writeFileSync(workspaceFilePath, JSON.stringify(workspaces))
-}
-
-/**
- * Updates the recent workspaces
- * @param {String} workspacePath The path of the workspace to update
- */
-export function updateWorkspace(workspacePath) {
- const workspaces = loadWorkspaces()
- const workspaceIndex = workspaces.findIndex((workspace) => workspace.path === workspacePath)
- if (workspaceIndex !== -1) {
- // Workspace exists, update it
- workspaces[workspaceIndex].status = "opened"
- workspaces[workspaceIndex].last_time_it_was_opened = new Date().toISOString()
- } else {
- // Workspace doesn't exist, add it
- workspaces.push({
- path: workspacePath,
- status: "opened",
- last_time_it_was_opened: new Date().toISOString()
- })
- }
- app.setPath("sessionData", workspacePath)
- saveWorkspaces(workspaces)
-}
-
-/**
- * Generate recent workspaces options
- * @param {*} event The event
- * @param {*} mainWindow The main window
- * @param {*} hasBeenSet A boolean indicating if the workspace has been set
- * @param {*} workspacesArray The array of workspaces, if null, the function will load the workspaces
- * @returns {Array} An array of recent workspaces options
- */
-export function getRecentWorkspacesOptions(event, mainWindow, hasBeenSet, serverPort, workspacesArray = null) {
- let workspaces
- if (workspacesArray === null) {
- workspaces = loadWorkspaces()
- } else {
- workspaces = workspacesArray
- }
- const recentWorkspaces = workspaces.filter((workspace) => workspace.status === "opened")
- if (event !== null) {
- event.reply("recentWorkspaces", recentWorkspaces)
- }
- const recentWorkspacesOptions = recentWorkspaces.map((workspace) => {
- return {
- label: workspace.path,
- click() {
- updateWorkspace(workspace.path)
- let workspaceObject = {
- workingDirectory: dirTree(workspace.path),
- hasBeenSet: true,
- newPort: serverPort
- }
- hasBeenSet = true
- //mainWindow.webContents.send("openWorkspace", workspaceObject)
- }
- }
- })
- return recentWorkspacesOptions
-}
-
-// Function to create the working directory
-export function createWorkingDirectory() {
- // See the workspace menuTemplate in the repository
- createFolder("DATA")
- createFolder("EXPERIMENTS")
-}
-
-// Function to create a folder from a given path
-function createFolder(folderString) {
- // Creates a folder in the working directory
- const folderPath = path.join(app.getPath("sessionData"), folderString)
- // Check if the folder already exists
- if (!fs.existsSync(folderPath)) {
- fs.mkdir(folderPath, { recursive: true }, (err) => {
- if (err) {
- console.error(err)
- return
- }
- console.log("Folder created successfully!")
- })
- }
-}
-
-// Function to create the .medomics directory and necessary files
-export const createMedomicsDirectory = (directoryPath) => {
- const medomicsDir = path.join(directoryPath, ".medomics")
- const mongoDataDir = path.join(medomicsDir, "MongoDBdata")
- const mongoConfigPath = path.join(medomicsDir, "mongod.conf")
-
- if (!fs.existsSync(medomicsDir)) {
- // Create .medomicsDir
- fs.mkdirSync(medomicsDir)
- }
-
- if (!fs.existsSync(mongoDataDir)) {
- // Create MongoDB data dir
- fs.mkdirSync(mongoDataDir)
- }
-
- // Create mongod.conf
- const mongoConfig = `
- systemLog:
- destination: file
- path: ${path.join(medomicsDir, "mongod.log")}
- logAppend: true
- storage:
- dbPath: ${mongoDataDir}
- net:
- bindIp: localhost
- port: ${MEDconfig.mongoPort}
- `
- fs.writeFileSync(mongoConfigPath, mongoConfig)
-
-}
diff --git a/medomics.dev.js b/medomics.dev.js
index 491e335a..ea7c44bc 100644
--- a/medomics.dev.js
+++ b/medomics.dev.js
@@ -11,4 +11,4 @@ const config = {
portFindingMethod: PORT_FINDING_METHOD.FIX
}
-export default config
+export default config
\ No newline at end of file
diff --git a/package.json b/package.json
index 694bffa6..853d307e 100644
--- a/package.json
+++ b/package.json
@@ -6,16 +6,32 @@
"author": "MEDomicsLab Team",
"main": "app/background.js",
"repository": "https://github.com/MEDomicsLab/MEDomics",
+ "bin": {
+ "medomics-server": "./backend/cli/medomics-server.mjs"
+ },
"scripts": {
"dev": "nextron",
+ "dev:headless": "nextron --electron-options=\"--no-gui\"",
"dev:linux": "bash ./utilScripts/go_build.sh && nextron ",
+ "backend:start": "node ./backend/cli/medomics-server.mjs start --json",
+ "backend:status": "node ./backend/cli/medomics-server.mjs status --json",
+ "backend:ensure": "node ./backend/cli/medomics-server.mjs ensure --json --go --mongo --jupyter",
+ "backend:stop": "node ./backend/cli/medomics-server.mjs stop --json",
"build": "nextron build",
"build:linux": "bash ./utilScripts/build_preparation_linux.sh && nextron build --linux",
"build:win": ".\\utilScripts\\build_preparation_win.bat && nextron build --win",
"build:mac": "bash ./utilScripts/build_preparation_mac.sh && nextron build --mac",
+ "build:client": "nextron build --config electron-builder.client.yml",
+ "build:client:win": "nextron build --win --config electron-builder.client.yml",
+ "build:client:linux": "nextron build --linux --config electron-builder.client.yml",
+ "build:client:mac": "nextron build --mac --config electron-builder.client.yml",
"publish:linux": "bash ./utilScripts/build_preparation_linux.sh && nextron build --linux --publish always",
"publish:win": ".\\utilScripts\\build_preparation_win.bat && nextron build --win --publish always",
- "publish:mac": "nextron build --mac --publish always"
+ "publish:mac": "nextron build --mac --publish always",
+ "build:server:win": "npx nexe ./backend/cli/medomics-server.mjs -t windows-x64-14.5.0 -o build/server/medomics-server-win.exe",
+ "build:server:linux": "npx nexe ./backend/cli/medomics-server.mjs -t linux-x64-14.15.2 -o build/server/medomics-server-linux",
+ "build:server:mac": "npx nexe ./backend/cli/medomics-server.mjs -t mac-x64-14.9.0 -o build/server/medomics-server-mac",
+ "pack:server": "node ./tools/pack_server.js"
},
"dependencies": {
"@blueprintjs/core": "^5.3.0",
@@ -52,6 +68,7 @@
"eslint-config-next": "^13.5.3",
"eslint-config-prettier": "^8.10.0",
"eslint-plugin-prettier": "^5.2.3",
+ "express": "^5.2.1",
"file-saver": "^2.0.5",
"flexlayout-react": "^0.7.7",
"html-react-parser": "^3.0.12",
@@ -63,8 +80,10 @@
"jquery-csv": "^1.0.21",
"lodash": "^4.17.21",
"mongodb": "^6.6.2",
- "node-pty": "1.2.0-beta.11",
"mongodb-client-encryption": "^6.1.0",
+ "node-forge": "^1.3.1",
+ "node-loader": "^2.1.0",
+ "node-pty": "1.2.0-beta.11",
"node-sys": "^1.2.4",
"papaparse": "^5.4.1",
"primeicons": "^7.0.0",
@@ -96,6 +115,7 @@
"react-tooltip": "^5.10.0",
"react-zoom-pan-pinch": "^3.1.0",
"reactflow": "^11.5.6",
+ "ssh2": "^1.16.0",
"three": "^0.156.1",
"uuid": "^13.0.0",
"xlsx": "https://cdn.sheetjs.com/xlsx-0.20.0/xlsx-0.20.0.tgz",
@@ -113,6 +133,7 @@
"electron-builder": "^26.0.11",
"eslint": "^8.44.0",
"eslint-plugin-react": "^7.32.2",
+ "nexe": "^4.0.0-rc.7",
"next": "^13.5.3",
"nextron": "^8.5.0",
"prettier": "^3.4.2",
@@ -120,4 +141,4 @@
"react-bootstrap": "^2.7.1",
"react-dom": "^18.2.0"
}
-}
\ No newline at end of file
+}
diff --git a/pythonCode/med_libs/server_utils.py b/pythonCode/med_libs/server_utils.py
index 5b5938fd..16bbf5d9 100644
--- a/pythonCode/med_libs/server_utils.py
+++ b/pythonCode/med_libs/server_utils.py
@@ -1,3 +1,4 @@
+import json
import os
import sys
import traceback
diff --git a/pythonCode/modules/connection/__init__.py b/pythonCode/modules/connection/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pythonCode/modules/connection/connection_test_request.py b/pythonCode/modules/connection/connection_test_request.py
new file mode 100644
index 00000000..3dd15406
--- /dev/null
+++ b/pythonCode/modules/connection/connection_test_request.py
@@ -0,0 +1,56 @@
+import json
+import sys
+import os
+from pathlib import Path
+
+sys.path.append(
+ str(Path(os.path.dirname(os.path.abspath(__file__))).parent.parent))
+from med_libs.server_utils import go_print
+from med_libs.GoExecutionScript import GoExecutionScript, parse_arguments
+
+json_params_dict, id_ = parse_arguments()
+go_print("running script.py:" + id_)
+
+
+class GoExecScriptConnectionTest(GoExecutionScript):
+ """
+ This class is used to execute a process from Go
+
+ Args:
+ json_params: The input json params
+ _id: The id of the page that made the request if any
+ """
+
+ def __init__(self, json_params: dict, _id: str = None):
+ super().__init__(json_params, _id)
+ self.results = {"data": "nothing to return"}
+
+ def _custom_process(self, json_config: dict) -> dict:
+ """
+ This function is used to test if the connection to the server is working.
+
+ Args:
+ json_config: The input json params
+ """
+ go_print(json.dumps(json_config, indent=4))
+
+ # Get the directory where the current script is located
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+
+ # Define the path for your new text file
+ file_path = os.path.join(script_dir, "test.txt")
+
+ # Write something to the file
+ with open(file_path, "w") as f:
+ f.write("This is a test file created next to connection_test_request.py.\n")
+
+ self.results = {
+ "status": "success",
+ "message": "Connection test successful",
+ "data": "yippie"
+ }
+ return self.results
+
+
+script = GoExecScriptConnectionTest(json_params_dict, id_)
+script.start()
\ No newline at end of file
diff --git a/pythonCode/modules/connection/register_ssh_key.py b/pythonCode/modules/connection/register_ssh_key.py
new file mode 100644
index 00000000..b45d1f6c
--- /dev/null
+++ b/pythonCode/modules/connection/register_ssh_key.py
@@ -0,0 +1,44 @@
+import json
+import sys
+import os
+from pathlib import Path
+# add a .parent to the import if your script is in a subfolder of modules folder :
+# sys.path.append(
+ #str(Path(os.path.dirname(os.path.abspath(__file__))).parent.parent))
+sys.path.append(
+ str(Path(os.path.dirname(os.path.abspath(__file__))).parent))
+from med_libs.server_utils import go_print
+from med_libs.GoExecutionScript import GoExecutionScript, parse_arguments
+
+json_params_dict, id_ = parse_arguments()
+go_print("running script.py:" + id_)
+
+
+class GoExecScriptRegisterSSHKey(GoExecutionScript):
+ """
+ This class is used to execute a process from Go
+
+ Args:
+ json_params: The input json params
+ _id: The id of the page that made the request if any
+ """
+
+ def __init__(self, json_params: dict, _id: str = None):
+ super().__init__(json_params, _id)
+ self.results = {"data": "nothing to return"}
+
+ def _custom_process(self, json_config: dict) -> dict:
+ """
+ This function is used to register and validate an SSH key
+ sent through the established tunnel.
+
+ Args:
+ json_config: The input json params
+ """
+ go_print(json.dumps(json_config, indent=4))
+
+ return self.results
+
+
+script = GoExecScriptRegisterSSHKey(json_params_dict, id_)
+script.start()
\ No newline at end of file
diff --git a/pythonCode/modules/connection/test.txt b/pythonCode/modules/connection/test.txt
new file mode 100644
index 00000000..8adc6066
--- /dev/null
+++ b/pythonCode/modules/connection/test.txt
@@ -0,0 +1 @@
+This is a test file created next to connection_test_request.py.
diff --git a/pythonCode/modules/exploratory/start_dtale.py b/pythonCode/modules/exploratory/start_dtale.py
index 3d84f0bf..e4601e29 100644
--- a/pythonCode/modules/exploratory/start_dtale.py
+++ b/pythonCode/modules/exploratory/start_dtale.py
@@ -33,6 +33,7 @@ def __init__(self, json_params: dict, _id: str = "default_id"):
self.row_count = 45
self.json_config = json_params
self.is_calculating = True
+ self.start_error = None
self.progress_thread = threading.Thread(
target=self._update_progress_periodically, args=())
self.progress_thread.daemon = True
@@ -50,6 +51,10 @@ def _custom_process(self, json_config: dict) -> dict:
self.web_server_thread.start()
self.progress_thread.join()
self.web_server_thread.join()
+ if self.start_error:
+ raise RuntimeError(self.start_error)
+ if self._progress.get("error"):
+ raise RuntimeError(str(self._progress.get("error")))
return {"results_html": "html"}
def _update_progress_periodically(self):
@@ -58,6 +63,12 @@ def _update_progress_periodically(self):
"""
while self.is_calculating:
go_print(str(dtale.instances()))
+ if self.start_error:
+ self._progress["error"] = str(self.start_error)
+ self._progress["currentLabel"] = "D-Tale startup failed"
+ self.push_progress()
+ self.is_calculating = False
+ break
if self.port is not None:
if is_port_in_use(self.port):
self._progress["web_server_url"] = f"http://localhost:{self.port}/"
@@ -76,17 +87,24 @@ def _server_process(self):
"""
This function is used to run the dashboard
"""
- # MongoDB setup
- mongo_client = pymongo.MongoClient("mongodb://localhost:54017/")
- database = mongo_client["data"]
- collection = database[self.json_config["dataset"]["id"]]
- collection_data = collection.find({}, {'_id': False})
- df = pd.DataFrame(list(collection_data))
+ try:
+ # MongoDB setup
+ mongo_client = pymongo.MongoClient("mongodb://localhost:54017/")
+ database = mongo_client["data"]
+ collection = database[self.json_config["dataset"]["id"]]
+ collection_data = collection.find({}, {'_id': False})
+ df = pd.DataFrame(list(collection_data))
- # DTale
- self.dataset = self.json_config['dataset']
- d = dtale.show(df, subprocess=False, port=self.port, force=True)
- self.is_calculating = False
+ # DTale
+ self.dataset = self.json_config['dataset']
+ dtale.show(df, subprocess=False, port=self.port, force=True)
+ self.is_calculating = False
+ except Exception as error:
+ self.start_error = str(error)
+ self._progress["error"] = str(error)
+ self._progress["currentLabel"] = "D-Tale startup failed"
+ self.push_progress()
+ self.is_calculating = False
script = GoExecScriptDTale(json_params_dict, id_)
diff --git a/renderer/components/dbComponents/dataTableFromDB.jsx b/renderer/components/dbComponents/dataTableFromDB.jsx
index 766f5119..100b8def 100644
--- a/renderer/components/dbComponents/dataTableFromDB.jsx
+++ b/renderer/components/dbComponents/dataTableFromDB.jsx
@@ -12,7 +12,9 @@ import { Message } from "primereact/message"
import { Skeleton } from "primereact/skeleton"
import React, { useEffect, useState } from "react"
import { toast } from "react-toastify"
-import { connectToMongoDB, getCollectionTags, insertMEDDataObjectIfNotExists } from "../mongoDB/mongoDBUtils"
+import { connectToMongoDB, getCollectionTags, getPathFromMEDDataObject, insertMEDDataObjectIfNotExists, insertObjectIntoCollection, overwriteMEDDataObjectProperties } from "../mongoDB/mongoDBUtils"
+import { getTunnelState } from "../../utilities/tunnelState"
+import { lstat } from "../../utilities/fileManagement/fileOps"
import { MEDDataObject } from "../workspace/NewMedDataObject"
import InputToolsComponent from "./InputToolsComponent"
import { collectionExists, getCollectionData } from "./utils"
@@ -90,10 +92,38 @@ const DataTableFromDB = ({ data, tablePropsData, tablePropsColumn, isReadOnly })
// Fetch data from MongoDB on component mount
useEffect(() => {
const getData = async () => {
- // Get total count of documents in the collection
const db = await connectToMongoDB()
const collection = db.collection(data.id)
- const count = await collection.countDocuments()
+
+ let count = await collection.countDocuments()
+ const objectPath = await resolveObjectPath()
+ const tunnel = getTunnelState()
+ const isRemoteMode = !!(tunnel && tunnel.tunnelActive)
+ const medDataObject = await db.collection("medDataObjects").findOne({ id: data.id })
+ const currentMTimeMs = await getSourceMTimeMs(objectPath, isRemoteMode)
+ const lastIngestedMTimeMs = medDataObject?.sourceMTimeMs
+ const shouldReingestForChange =
+ !!objectPath &&
+ !!currentMTimeMs &&
+ !!lastIngestedMTimeMs &&
+ Number(lastIngestedMTimeMs) !== Number(currentMTimeMs)
+ const shouldReingestForEmpty = !!objectPath && count === 0
+
+ if (shouldReingestForChange || shouldReingestForEmpty) {
+ try {
+ await reingestFromSourceFile({
+ silent: true,
+ resolvedPath: objectPath,
+ sourceMTimeMs: currentMTimeMs
+ })
+ count = await collection.countDocuments()
+ } catch (error) {
+ console.error("Auto re-ingest failed:", error)
+ }
+ } else if (objectPath && currentMTimeMs && !lastIngestedMTimeMs && count > 0) {
+ await overwriteMEDDataObjectProperties(data.id, { sourceMTimeMs: currentMTimeMs })
+ }
+
setCollectionSize(count)
console.log("Fetching data with:", data)
let collectionName = data.extension === "view" ? data.name : data.id
@@ -379,6 +409,85 @@ const DataTableFromDB = ({ data, tablePropsData, tablePropsColumn, isReadOnly })
setLoadingData(false)
}
+ const resolveObjectPath = async () => {
+ if (data?.path) {
+ return data.path
+ }
+ if (!data?.id) {
+ return null
+ }
+ try {
+ const dbPath = await getPathFromMEDDataObject(data.id)
+ return dbPath || null
+ } catch (error) {
+ console.error("Failed to resolve object path:", error)
+ return null
+ }
+ }
+
+ const getSourceMTimeMs = async (objectPath, isRemoteMode) => {
+ if (!objectPath) return null
+ try {
+ const fileInfo = await lstat(objectPath, { isRemote: isRemoteMode })
+ if (!fileInfo || !fileInfo.stats) return null
+ const stats = fileInfo.stats
+ if (typeof stats.mtimeMs === "number") return stats.mtimeMs
+ if (stats.mtime instanceof Date) return stats.mtime.getTime()
+ if (typeof stats.mtime === "number") return stats.mtime * 1000
+ return null
+ } catch (error) {
+ console.error("Failed to read source file mtime:", error)
+ return null
+ }
+ }
+
+
+ const reingestFromSourceFile = async ({ silent = false, resolvedPath = null, sourceMTimeMs = null } = {}) => {
+ if (!data?.id) {
+ if (!silent) toast.error("No collection ID available for re-ingest.")
+ return
+ }
+ setLoadingData(true)
+ try {
+ const objectPath = resolvedPath || (await resolveObjectPath())
+ if (!objectPath) {
+ if (!silent) toast.error("Could not find source file path for this dataset.")
+ return
+ }
+
+ const inferredType = (data?.extension || data?.name?.split(".")?.pop() || "csv").toLowerCase()
+ const medDataObject = { id: data.id, type: inferredType }
+ const tunnel = getTunnelState()
+
+ if (tunnel && tunnel.tunnelActive && tunnel.localExpressPort) {
+ const response = await window.backend.requestExpress({
+ method: "post",
+ path: "/insert-object-into-collection",
+ host: tunnel.host,
+ port: tunnel.localExpressPort,
+ body: { objectPath, medDataObject }
+ })
+ if (!response?.data?.success) {
+ throw new Error(response?.data?.error || "Remote re-ingest failed")
+ }
+ } else {
+ await insertObjectIntoCollection({ objectPath, medDataObject })
+ }
+
+ if (sourceMTimeMs) {
+ await overwriteMEDDataObjectProperties(data.id, { sourceMTimeMs })
+ }
+
+ await refreshData()
+ if (!silent) toast.success("Re-ingest completed. Data refreshed.")
+ } catch (error) {
+ console.error("Re-ingest failed:", error)
+ if (!silent) toast.error("Re-ingest failed. See console for details.")
+ } finally {
+ setLoadingData(false)
+ }
+ }
+
// Function to generate a random UUID
const usePersistentUUID = () => {
const [id, setId] = useState("")
diff --git a/renderer/components/dbComponents/utils.js b/renderer/components/dbComponents/utils.js
index 677b880e..70c7a51d 100644
--- a/renderer/components/dbComponents/utils.js
+++ b/renderer/components/dbComponents/utils.js
@@ -1,5 +1,12 @@
+import { getTunnelState } from "../../utilities/tunnelState"
+
const MongoClient = require("mongodb").MongoClient
-const mongoUrl = "mongodb://127.0.0.1:54017"
+function getMongoUrl() {
+ // Use tunnel state if available
+ const tunnel = getTunnelState()
+ console.log("tunnel in getMongoUrl:", tunnel)
+ return "mongodb://127.0.0.1:" + (tunnel && tunnel.tunnelActive && tunnel.localDBPort ? tunnel.localDBPort : "54017")
+}
/**
* @description Check if a database exists
@@ -7,7 +14,8 @@ const mongoUrl = "mongodb://127.0.0.1:54017"
* @param {String} dbname
*/
export const collectionExists = async (collectionName, dbname = "data") => {
- const client = new MongoClient(mongoUrl)
+ const client = new MongoClient(getMongoUrl())
+ console.log("Checking if collection exists with URL:", getMongoUrl())
try {
await client.connect()
const db = client.db(dbname)
@@ -30,7 +38,8 @@ export const collectionExists = async (collectionName, dbname = "data") => {
* @returns {Array} fetchedData
*/
export const getCollectionData = async (collectionName, first = null, rows = null, dbname = "data") => {
- const client = new MongoClient(mongoUrl)
+ const client = new MongoClient(getMongoUrl())
+ console.log("Fetching data from collection with URL:", getMongoUrl())
let fetchedData = []
try {
await client.connect()
@@ -81,7 +90,7 @@ export const getCollectionData = async (collectionName, first = null, rows = nul
* @returns {Array} fetchedDataFiltered
*/
export const getCollectionDataFilterd = async (collectionName, filter, first = null, rows = null, sortCriteria = null, dbname = "data") => {
- const client = new MongoClient(mongoUrl)
+ const client = new MongoClient(getMongoUrl())
let fetchedData = []
try {
await client.connect()
@@ -126,7 +135,7 @@ export const getCollectionDataFilterd = async (collectionName, filter, first = n
* @description Get documents count with filter use
*/
export const getCollectionDataCount = async (collectionName, filter, dbname = "data") => {
- const client = new MongoClient(mongoUrl)
+ const client = new MongoClient(getMongoUrl())
try {
await client.connect()
const db = client.db(dbname)
@@ -148,7 +157,7 @@ export const getCollectionDataCount = async (collectionName, filter, dbname = "d
* @returns columnTypes
*/
export const getCollectionColumnTypes = async (collectionName, dbname = "data") => {
- const client = new MongoClient(mongoUrl, { useNewUrlParser: true, useUnifiedTopology: true })
+ const client = new MongoClient(getMongoUrl(), { useNewUrlParser: true, useUnifiedTopology: true })
try {
await client.connect()
const db = client.db(dbname)
diff --git a/renderer/components/exploratory/dtale.jsx b/renderer/components/exploratory/dtale.jsx
index eac15a43..9e1d906b 100644
--- a/renderer/components/exploratory/dtale.jsx
+++ b/renderer/components/exploratory/dtale.jsx
@@ -1,15 +1,15 @@
import React, { useState, useContext, useEffect } from "react"
import { LayoutModelContext } from "../layout/layoutContext"
-import { requestBackend } from "../../utilities/requests"
import { Tag } from "primereact/tag"
import { Tooltip } from "primereact/tooltip"
import { Button } from "primereact/button"
-import ProgressBarRequests from "../generalPurpose/progressBarRequests"
import { IoClose } from "react-icons/io5"
import { getId } from "../../utilities/staticFunctions"
import { Stack } from "react-bootstrap"
import { Card } from "primereact/card"
import Input from "../learning/input"
+import { WorkspaceContext } from "../workspace/workspaceContext"
+import { getTunnelState } from "../../utilities/tunnelState"
/**
*
@@ -21,30 +21,107 @@ import Input from "../learning/input"
*
* @returns A card with the D-Tale module
*/
-const DTaleProcess = ({ uniqueId, pageId, port, setError, onDelete }) => {
+const DTaleProcess = ({ uniqueId, pageId, setError, onDelete }) => {
const [mainDataset, setMainDataset] = useState()
const [mainDatasetHasWarning, setMainDatasetHasWarning] = useState({ state: false, tooltip: "" })
const [isCalculating, setIsCalculating] = useState(false)
- const [progress, setProgress] = useState({ now: 0, currentLabel: 0 })
const [serverPath, setServerPath] = useState("")
const { dispatchLayout } = useContext(LayoutModelContext)
+ const { workspace } = useContext(WorkspaceContext)
const [name, setName] = useState("")
+ const [currentRouteId, setCurrentRouteId] = useState("")
+ const [progressPercent, setProgressPercent] = useState(null)
+
+ const parseProgressPayload = (payload) => {
+ if (payload && typeof payload === "object") {
+ return payload
+ }
+ if (typeof payload !== "string") {
+ return null
+ }
+
+ let candidate = payload
+ for (let i = 0; i < 4; i++) {
+ if (typeof candidate !== "string") {
+ return candidate && typeof candidate === "object" ? candidate : null
+ }
+ try {
+ const parsed = JSON.parse(candidate)
+ if (typeof parsed === "string") {
+ candidate = parsed
+ continue
+ }
+ return parsed && typeof parsed === "object" ? parsed : null
+ } catch (_) {
+ const startIdx = candidate.indexOf("{")
+ const endIdx = candidate.lastIndexOf("}")
+ if (startIdx >= 0 && endIdx > startIdx) {
+ const trimmed = candidate.substring(startIdx, endIdx + 1)
+ if (trimmed !== candidate) {
+ candidate = trimmed
+ continue
+ }
+ }
+ break
+ }
+ }
+ return null
+ }
+
+ const resolveExpressPort = async (isRemoteMode) => {
+ const tunnel = getTunnelState()
+ if (isRemoteMode && tunnel?.tunnelActive && tunnel.localExpressPort) {
+ return Number(tunnel.localExpressPort)
+ }
+ const expressPort = await window.backend.getExpressPort()
+ return Number(expressPort)
+ }
+
+ const resolveLocalDtaleUrl = async (requestId, remotePort, isRemoteMode) => {
+ if (!isRemoteMode) {
+ return `http://127.0.0.1:${remotePort}/`
+ }
+
+ const tunnelName = `dtale-${requestId}`
+ const startRes = await window.backend.startPortTunnel({
+ name: tunnelName,
+ localPort: 0,
+ remotePort: Number(remotePort),
+ ensureRemoteOpen: true
+ })
+
+ let localPort = startRes?.localPort
+ if (!localPort) {
+ const tunnel = getTunnelState()
+ const existing = (tunnel?.tunnels || []).find((entry) => entry.name === tunnelName && entry.status === "forwarding")
+ localPort = existing?.localPort
+ }
+ if (!localPort) {
+ throw new Error("Failed to resolve local D-Tale tunnel port")
+ }
+ return `http://127.0.0.1:${localPort}/`
+ }
/**
*
* @param {String} serverPath The server path
* @description This function is used to shutdown the dtale server
*/
- const shutdownDTale = (serverPath) => {
- console.log("shutting down dtale: ", serverPath)
- if (serverPath != "") {
- fetch(serverPath + "/shutdown", {
- mode: "no-cors",
- credentials: "include",
- method: "GET"
+ const shutdownDTale = async () => {
+ try {
+ const isRemoteMode = !!workspace?.isRemote
+ const expressPort = await resolveExpressPort(isRemoteMode)
+ await window.backend.requestExpress({
+ method: "post",
+ port: expressPort,
+ path: "/exploratory/dtale/stop",
+ body: { requestId: uniqueId }
})
- .then((response) => console.log(response))
- .catch((error) => console.log(error))
+ if (isRemoteMode) {
+ await window.backend.stopPortTunnel({ name: `dtale-${uniqueId}` })
+ }
+ } catch (error) {
+ console.warn("Error while stopping D-Tale service:", error)
}
}
@@ -52,49 +129,89 @@ const DTaleProcess = ({ uniqueId, pageId, port, setError, onDelete }) => {
* @description This function is used to open the html viewer with the given file path
*/
const generateReport = () => {
- shutdownDTale(serverPath)
- requestBackend(
- port,
- "removeId/" + uniqueId + "/" + pageId + "-" + mainDataset.value.name,
- { dataset: mainDataset.value },
- (response) => {
- console.log(response)
- setIsCalculating(true)
- setServerPath("")
- requestBackend(
- port,
- "exploratory/start_dtale/" + uniqueId + "/" + pageId + "-" + mainDataset.value.name,
- { dataset: mainDataset.value },
- (response) => {
- console.log(response)
- if (response.error) {
- setError(response.error)
- }
- setServerPath("")
+ setIsCalculating(true)
+ setServerPath("")
+ setProgressPercent(0)
+ ;(async () => {
+ try {
+ await shutdownDTale()
+ const isRemoteMode = !!workspace?.isRemote
+ const expressPort = await resolveExpressPort(isRemoteMode)
+ const routeId = `${uniqueId}/${pageId}-${mainDataset.value.name}`
+ setCurrentRouteId(routeId)
+ const response = await window.backend.requestExpress({
+ method: "post",
+ port: expressPort,
+ path: "/exploratory/dtale/start",
+ body: {
+ requestId: uniqueId,
+ pageId,
+ dataset: mainDataset.value
},
- (error) => {
- console.log(error)
- setIsCalculating(false)
- }
- )
- },
- (error) => {
- console.log(error)
+ timeout: 180000
+ })
+ const payload = response?.data || {}
+ if (!payload.success) {
+ throw new Error(payload.error || "Failed to start D-Tale")
+ }
+ setProgressPercent(100)
+ const localUrl = await resolveLocalDtaleUrl(uniqueId, payload.remotePort, isRemoteMode)
+ setServerPath(localUrl)
+ setName(payload.name || mainDataset.value.name)
+ } catch (error) {
+ console.error(error)
+ setProgressPercent(null)
+ setError(error?.message || "Failed to start D-Tale")
+ } finally {
+ setIsCalculating(false)
+ setCurrentRouteId("")
}
- )
+ })()
}
- /**
- *
- * @param {Object} data Data received from the server on progress update
- */
- const onProgressDataReceived = (data) => {
- if (data.web_server_url) {
- setServerPath(data.web_server_url)
- setName(data.name)
- setIsCalculating(false)
+ useEffect(() => {
+ if (!isCalculating || !currentRouteId) {
+ return
}
- }
+
+ let isDisposed = false
+
+ const pollProgress = async () => {
+ try {
+ const isRemoteMode = !!workspace?.isRemote
+ const expressPort = await resolveExpressPort(isRemoteMode)
+ const response = await window.backend.requestExpress({
+ method: "post",
+ port: expressPort,
+ path: "/exploratory/dtale/progress",
+ body: { routeId: currentRouteId },
+ timeout: 10000
+ })
+
+ if (isDisposed) return
+ const payload = response?.data || {}
+ const progressData = parseProgressPayload(payload?.progress)
+ const nowValue = progressData?.now
+ const progressNumber = Number(nowValue)
+ if (Number.isFinite(progressNumber)) {
+ const bounded = Math.max(0, Math.min(100, Math.round(progressNumber)))
+ setProgressPercent(bounded)
+ }
+ } catch (error) {
+ if (!isDisposed) {
+ console.warn("Failed to fetch D-Tale progress:", error)
+ }
+ }
+ }
+
+ pollProgress()
+ const intervalId = setInterval(pollProgress, 1000)
+
+ return () => {
+ isDisposed = true
+ clearInterval(intervalId)
+ }
+ }, [isCalculating, currentRouteId, workspace?.isRemote])
/**
*
@@ -129,23 +246,12 @@ const DTaleProcess = ({ uniqueId, pageId, port, setError, onDelete }) => {
className="btn-close-output-card"
onClick={() => {
onDelete(uniqueId)
- shutdownDTale(serverPath)
+ shutdownDTale()
}}
/>
- {isCalculating && (
-
- )}
+ {isCalculating && Starting D-Tale service{Number.isFinite(progressPercent) ? ` (${progressPercent}%)` : ""}...
}
>
)
}
@@ -158,7 +264,7 @@ const DTaleProcess = ({ uniqueId, pageId, port, setError, onDelete }) => {
*
* @returns the exploratory page with the module page
*/
-const DTale = ({ pageId, port, setError }) => {
+const DTale = ({ pageId, setError }) => {
const [processes, setProcesses] = useState([])
// when the component is mounted, add a new process
@@ -216,7 +322,7 @@ const DTale = ({ pageId, port, setError }) => {
>
{processes.map((id) => (
-
+
))}
diff --git a/renderer/components/exploratory/exploratoryRemoteUtils.js b/renderer/components/exploratory/exploratoryRemoteUtils.js
new file mode 100644
index 00000000..f19bc195
--- /dev/null
+++ b/renderer/components/exploratory/exploratoryRemoteUtils.js
@@ -0,0 +1,30 @@
+import { getTunnelState } from "../../utilities/tunnelState"
+
+export const resolveExploratoryExpressPort = async (isRemoteMode) => {
+ const tunnel = getTunnelState()
+ if (isRemoteMode && tunnel?.tunnelActive && tunnel.localExpressPort) {
+ return Number(tunnel.localExpressPort)
+ }
+ const expressPort = await window.backend.getExpressPort()
+ return Number(expressPort)
+}
+
+export const resolveExploratoryReportUrl = ({ reportPath, localExpressPort, isRemoteMode, reportType = "Exploratory" }) => {
+ if (!reportPath) return ""
+ const tunnel = getTunnelState()
+
+ if (isRemoteMode) {
+ const expressTunnel = (tunnel?.tunnels || []).find((entry) => entry.name === "express" && entry.status === "forwarding")
+ const port = Number(expressTunnel?.localPort || tunnel?.localExpressPort || localExpressPort)
+ if (!Number.isFinite(port)) {
+ throw new Error(`No local Express tunnel port available for ${reportType} report`)
+ }
+ return `http://127.0.0.1:${port}${reportPath}`
+ }
+
+ return `http://127.0.0.1:${localExpressPort}${reportPath}`
+}
+
+export const openExploratoryReportInIframe = ({ dispatchLayout, url, name, idPrefix }) => {
+ dispatchLayout({ type: "openInIFrame", payload: { path: url, name, id: `${idPrefix}-${Date.now()}` } })
+}
diff --git a/renderer/components/exploratory/sweetViz.jsx b/renderer/components/exploratory/sweetViz.jsx
index d28da24d..32da7a5a 100644
--- a/renderer/components/exploratory/sweetViz.jsx
+++ b/renderer/components/exploratory/sweetViz.jsx
@@ -1,7 +1,5 @@
import React, { useState, useContext } from "react"
import { LayoutModelContext } from "../layout/layoutContext"
-import { MEDDataObject } from "../workspace/NewMedDataObject"
-import { requestBackend } from "../../utilities/requests"
import { Stack } from "react-bootstrap"
import { Tag } from "primereact/tag"
import { Tooltip } from "primereact/tooltip"
@@ -11,10 +9,9 @@ import { Button } from "primereact/button"
import { ToggleButton } from "primereact/togglebutton"
import ProgressBarRequests from "../generalPurpose/progressBarRequests"
import { getCollectionColumns } from "../mongoDB/mongoDBUtils"
-import { randomUUID } from "crypto"
-import { insertMEDDataObjectIfNotExists } from "../mongoDB/mongoDBUtils"
-import { DataContext } from "../workspace/dataContext"
import { toast } from "react-toastify"
+import { WorkspaceContext } from "../workspace/workspaceContext"
+import { openExploratoryReportInIframe, resolveExploratoryExpressPort, resolveExploratoryReportUrl } from "./exploratoryRemoteUtils"
/**
*
@@ -24,7 +21,7 @@ import { toast } from "react-toastify"
*
* @returns A card with the sweetviz module
*/
-const SweetViz = ({ pageId, port, setError }) => {
+const SweetViz = ({ pageId, setError }) => {
const [mainDataset, setMainDataset] = useState()
const [compDataset, setCompDataset] = useState()
const [mainDatasetHasWarning, setMainDatasetHasWarning] = useState({ state: false, tooltip: "" })
@@ -35,10 +32,8 @@ const SweetViz = ({ pageId, port, setError }) => {
const [progress, setProgress] = useState({ now: 0, currentLabel: 0 })
const [mainDatasetTarget, setMainDatasetTarget] = useState()
const [mainDatasetTargetChoices, setMainDatasetTargetChoices] = useState()
- const [report, setReport] = useState(null)
- const { globalData } = useContext(DataContext)
-
- var path = require("path")
+ const [reportUrl, setReportUrl] = useState("")
+ const { workspace } = useContext(WorkspaceContext)
/**
* @description Change the selected target
@@ -65,88 +60,47 @@ const SweetViz = ({ pageId, port, setError }) => {
}
}
- /**
- * @description Load the generated report in database
- */
- const setReportInDB = async (htmlFileID) => {
- let globalDataCopy = { ...globalData }
- const sweetvizFolder = new MEDDataObject({
- id: randomUUID(),
- name: "sweetviz_reports",
- type: "directory",
- parentID: "DATA",
- childrenIDs: [],
- inWorkspace: false
- })
- const parentId = await insertMEDDataObjectIfNotExists(sweetvizFolder)
- // Append the new object to a local global data copy to avoid calling MEDDataObject.updateWorkspaceDataObject() twice
- if (parentId == sweetvizFolder.id) {
- globalDataCopy[parentId] = sweetvizFolder
- console.log("COPY", globalDataCopy)
- }
- let medObjectName =
- compDataset && compareChecked ?
- path.basename(mainDataset.value.name, ".csv") +
- "_" + path.basename(compDataset.name, ".csv") +
- ".html" : path.basename(mainDataset.value.name, ".csv") +
- ".html"
- medObjectName = MEDDataObject.getUniqueNameForCopy(globalDataCopy, medObjectName, parentId)
- const newReport = new MEDDataObject({
- id: htmlFileID,
- name: medObjectName,
- type: "html",
- parentID: parentId,
- childrenIDs: [],
- inWorkspace: false
- })
- console.log("htmlFileID", htmlFileID)
- await insertMEDDataObjectIfNotExists(newReport)
- setReport(newReport)
- MEDDataObject.updateWorkspaceDataObject()
- }
-
- /**
- *
- * @param {String} filePath The file path to open
- * @returns The file path to open
- */
- const handleOpenFile = (localReport) => () => {
- const objectToRet = {
- index: localReport.id,
- data: localReport.name,
- extension: localReport.type
- }
- dispatchLayout({ type: "openHtmlViewer", payload: objectToRet })
+ const openReportInIframe = (url) => {
+ openExploratoryReportInIframe({ dispatchLayout, url, name: "SweetViz", idPrefix: "sweetviz" })
}
/**
* @description This function is used to open the html viewer with the given file path
*/
const generateReport = () => {
- const htmlFileID = randomUUID()
setIsCalculating(true)
- setReport(null)
- requestBackend(
- port,
- "exploratory/start_sweetviz/" + pageId,
- { mainDataset: mainDataset.value, compDataset: compDataset && compareChecked ? compDataset : "", htmlFileID: htmlFileID, target: mainDatasetTarget },
- (response) => {
- console.log(response)
- if (response.error) {
- setError(response.error)
- toast.error("Error generating report")
- } else {
- setReportInDB(htmlFileID)
- toast.success("Report generated successfully")
+ setReportUrl("")
+ ;(async () => {
+ try {
+ const isRemoteMode = !!workspace?.isRemote
+ const expressPort = await resolveExploratoryExpressPort(isRemoteMode)
+ const response = await window.backend.requestExpress({
+ method: "post",
+ port: expressPort,
+ path: "/exploratory/sweetviz/start",
+ body: {
+ pageId,
+ mainDataset: mainDataset.value,
+ compDataset: compDataset && compareChecked ? compDataset : "",
+ target: mainDatasetTarget
+ },
+ timeout: 180000
+ })
+ const payload = response?.data || {}
+ if (!payload.success) {
+ throw new Error(payload.error || "Error generating report")
}
- setIsCalculating(false)
- },
- (error) => {
- console.log(error)
- setError(error)
+ const url = resolveExploratoryReportUrl({ reportPath: payload.reportPath, localExpressPort: expressPort, isRemoteMode, reportType: "SweetViz" })
+ setReportUrl(url)
+ toast.success("Report generated successfully")
+ } catch (error) {
+ console.error(error)
+ setError(error?.message || "Error generating report")
toast.error("Error generating report")
+ } finally {
+ setIsCalculating(false)
}
- )
+ })()
}
return (
@@ -238,7 +192,7 @@ const SweetViz = ({ pageId, port, setError }) => {
)}
- {isCalculating && !report && (
+ {isCalculating && !reportUrl && (
{
requestTopic={"exploratory/progress/" + pageId}
/>
)}
- {report && (
+ {reportUrl && (
-
+
)}
diff --git a/renderer/components/exploratory/yDataProfiling.jsx b/renderer/components/exploratory/yDataProfiling.jsx
index e5bcd33d..ca07d294 100644
--- a/renderer/components/exploratory/yDataProfiling.jsx
+++ b/renderer/components/exploratory/yDataProfiling.jsx
@@ -5,15 +5,12 @@ import { Button } from "primereact/button"
import { ToggleButton } from "primereact/togglebutton"
import { Card } from "primereact/card"
import { LayoutModelContext } from "../layout/layoutContext"
-import { requestBackend } from "../../utilities/requests"
import { Stack } from "react-bootstrap"
import { toast } from "react-toastify"
import Input from "../learning/input"
import ProgressBarRequests from "../generalPurpose/progressBarRequests"
-import { randomUUID } from "crypto"
-import { insertMEDDataObjectIfNotExists } from "../mongoDB/mongoDBUtils"
-import { DataContext } from "../workspace/dataContext"
-import { MEDDataObject } from "../workspace/NewMedDataObject"
+import { WorkspaceContext } from "../workspace/workspaceContext"
+import { openExploratoryReportInIframe, resolveExploratoryExpressPort, resolveExploratoryReportUrl } from "./exploratoryRemoteUtils"
/**
*
@@ -22,7 +19,7 @@ import { MEDDataObject } from "../workspace/NewMedDataObject"
* @param {Function} setError The function to set the error
* @returns
*/
-const YDataProfiling = ({ pageId, port, setError }) => {
+const YDataProfiling = ({ pageId, setError }) => {
const [mainDataset, setMainDataset] = useState()
const [compDataset, setCompDataset] = useState()
const [mainDatasetHasWarning, setMainDatasetHasWarning] = useState({ state: false, tooltip: "" })
@@ -30,92 +27,50 @@ const YDataProfiling = ({ pageId, port, setError }) => {
const [compareChecked, setCompareChecked] = useState(false)
const { dispatchLayout } = useContext(LayoutModelContext)
const [isCalculating, setIsCalculating] = useState(false)
- const [report, setReport] = useState(null)
+ const [reportUrl, setReportUrl] = useState("")
const [progress, setProgress] = useState({ now: 0, currentLabel: 0 })
- const { globalData } = useContext(DataContext)
+ const { workspace } = useContext(WorkspaceContext)
- var path = require("path")
-
- /**
- *
- * @param {String} filePath The file path to open
- * @returns The file path to open
- */
- const handleOpenFile = (localReport) => () => {
- const objectToRet = {
- index: localReport.id,
- data: localReport.name,
- extension: localReport.type
- }
- dispatchLayout({ type: "openHtmlViewer", payload: objectToRet })
- }
-
- /**
- * @description Load the generated report in database
- */
- const setReportInDB = async (htmlFileID) => {
- let globalDataCopy = { ...globalData }
- const ydataprofilingFolder = new MEDDataObject({
- id: randomUUID(),
- name: "ydataProfiling_reports",
- type: "directory",
- parentID: "DATA",
- childrenIDs: [],
- inWorkspace: false
- })
- const parentId = await insertMEDDataObjectIfNotExists(ydataprofilingFolder)
- // Append the new object to a local global data copy to avoid calling MEDDataObject.updateWorkspaceDataObject() twice
- if (parentId == ydataprofilingFolder.id) {
- globalDataCopy[parentId] = ydataprofilingFolder
- }
- let medObjectName =
- compDataset && compareChecked ?
- path.basename(mainDataset.value.name, ".csv") +
- "_" + path.basename(compDataset.name, ".csv") +
- ".html" : path.basename(mainDataset.value.name, ".csv") +
- ".html"
- medObjectName = MEDDataObject.getUniqueNameForCopy(globalDataCopy, medObjectName, parentId)
- const newReport = new MEDDataObject({
- id: htmlFileID,
- name: medObjectName,
- type: "html",
- parentID: parentId,
- childrenIDs: [],
- inWorkspace: false
- })
- await insertMEDDataObjectIfNotExists(newReport)
- setReport(newReport)
- MEDDataObject.updateWorkspaceDataObject()
+ const openReportInIframe = (url) => {
+ openExploratoryReportInIframe({ dispatchLayout, url, name: "YData Profiling", idPrefix: "ydata" })
}
/**
* @description This function is used to open the html viewer with the given file path
*/
const generateReport = () => {
- const htmlFileID = randomUUID()
setIsCalculating(true)
- setReport(null)
- requestBackend(
- port,
- "exploratory/start_ydata_profiling/" + pageId,
- { mainDataset: mainDataset.value, compDataset: compDataset && compareChecked ? compDataset : "", htmlFileID: htmlFileID },
- (response) => {
- console.log(response)
- if (response.error) {
- setError(response.error)
- toast.error("Error generating report")
- } else {
- setReportInDB(htmlFileID)
- toast.success("Report generated successfully")
+ setReportUrl("")
+ ;(async () => {
+ try {
+ const isRemoteMode = !!workspace?.isRemote
+ const expressPort = await resolveExploratoryExpressPort(isRemoteMode)
+ const response = await window.backend.requestExpress({
+ method: "post",
+ port: expressPort,
+ path: "/exploratory/ydata/start",
+ body: {
+ pageId,
+ mainDataset: mainDataset.value,
+ compDataset: compDataset && compareChecked ? compDataset : ""
+ },
+ timeout: 180000
+ })
+ const payload = response?.data || {}
+ if (!payload.success) {
+ throw new Error(payload.error || "Error generating report")
}
- setIsCalculating(false)
- },
- (error) => {
- console.log(error)
- setError(error)
+ const url = resolveExploratoryReportUrl({ reportPath: payload.reportPath, localExpressPort: expressPort, isRemoteMode, reportType: "YData" })
+ setReportUrl(url)
+ toast.success("Report generated successfully")
+ } catch (error) {
+ console.error(error)
+ setError(error?.message || "Error generating report")
toast.error("Error generating report")
+ } finally {
+ setIsCalculating(false)
}
- )
+ })()
}
return (
@@ -193,7 +148,7 @@ const YDataProfiling = ({ pageId, port, setError }) => {
)}
- {isCalculating && !report && (
+ {isCalculating && !reportUrl && (
{
requestTopic={"exploratory/progress/" + pageId}
/>
)}
- {report && (
+ {reportUrl && (
-
+
)}
diff --git a/renderer/components/extractionMEDimage/flowCanvas.jsx b/renderer/components/extractionMEDimage/flowCanvas.jsx
index ead8b6f4..18edf89b 100644
--- a/renderer/components/extractionMEDimage/flowCanvas.jsx
+++ b/renderer/components/extractionMEDimage/flowCanvas.jsx
@@ -4,7 +4,7 @@ import { toast } from "react-toastify"
// Import utilities
import uuid from "react-native-uuid"
-import { downloadFile, loadJsonSync } from "../../utilities/fileManagementUtils"
+import { downloadFile, loadJsonSync } from "../../utilities/fileManagement/fileOps"
import { requestJson } from "../../utilities/requests"
@@ -647,7 +647,7 @@ const FlowCanvas = ({ workflowType, setWorkflowType }) => {
const restoreFlow = async () => {
try {
// Ask user for the json file to open
- const flow = await loadJsonSync() // wait for the json file to be loaded (see /utilities/fileManagementUtils.js)
+ const flow = await loadJsonSync() // wait for the json file to be loaded (via fileOps)
console.log("loaded flow", flow)
// TODO : should have conditions regarding json file used for import!
diff --git a/renderer/components/flow/JupyterNoteBookViewer.jsx b/renderer/components/flow/JupyterNoteBookViewer.jsx
index abef99cf..b31226d7 100644
--- a/renderer/components/flow/JupyterNoteBookViewer.jsx
+++ b/renderer/components/flow/JupyterNoteBookViewer.jsx
@@ -1,19 +1,20 @@
-import React, { useContext, useEffect, useState } from "react"
+import { useEffect, useState } from "react"
import path from "node:path"
import Iframe from "react-iframe"
import { defaultJupyterPort } from "../layout/flexlayout/mainContainerClass"
-import { LayoutModelContext } from "../layout/layoutContext"
import { ipcRenderer } from "electron"
+import { useTunnel } from "../tunnel/TunnelContext"
/**
* Jupyter Notebook viewer
* @param {string} filePath - the path of the file to edit
+ * @param {function} startJupyterServer - Function to start the Jupyter server
+ * @param {boolean} isRemote - whether the file is remote or local
+ * @param {object} jupyterStatus - status of the Jupyter server (running, error)
+ * @param {function} setJupyterStatus - function to set the Jupyter server status
* @returns {JSX.Element} - A Jupyter Notebook viewer
*/
-const JupyterNotebookViewer = ({ filePath, startJupyterServer }) => {
- const exec = require("child_process").exec
- const {jupyterStatus, setJupyterStatus} = useContext(LayoutModelContext)
- const [jupyterURL, setJupyterURL] = useState("")
+const JupyterNotebookViewer = ({ filePath, startJupyterServer, isRemote = false, jupyterStatus, setJupyterStatus }) => {
const [loading, setLoading] = useState(true)
const fileName = path.basename(filePath) // Get the file name from the path
// Get the relative path after "DATA" in the filePath
@@ -28,67 +29,68 @@ const JupyterNotebookViewer = ({ filePath, startJupyterServer }) => {
}
const relativePath = match ? match[0] : filePath
- const getPythonPath = async () => {
- let pythonPath = ""
- await ipcRenderer.invoke("getBundledPythonEnvironment").then((res) => {
- pythonPath = res
- })
- // Check if pythonPath is set
- if (pythonPath === "") {
- return null
- }
- return pythonPath
- }
+ const tunnel = useTunnel()
const checkJupyterServerRunning = async () => {
- try {
- const pythonPath = await getPythonPath()
- if (!pythonPath) {
- console.error("Python path is not set. Cannot check Jupyter server status.")
- return false
- }
- const result = await exec(`${pythonPath} -m jupyter notebook list`)
- if (result.stderr) {
- return false
- }
- return result.stdout.includes(defaultJupyterPort.toString())
- } catch (error) {
- console.error("Error checking Jupyter server status:", error)
- return false
- }
+ return await ipcRenderer.invoke("checkJupyterIsRunning")
}
+ ipcRenderer.on("jupyterReady", () => {
+ if (filePath) {
+ refreshIframe()
+ }
+ })
+
useEffect(() => {
+ console.log("JupyterNoteBookViewer mounted, checking Jupyter server status...")
+
const runJupyter = async () => {
const isRunning = await checkJupyterServerRunning()
- if (!isRunning) {
+ console.log("Jupyter server running status:", isRunning)
+ if (!isRunning.running) {
// Start the Jupyter server
setJupyterStatus({ running: false, error: null })
setLoading(true)
try{
await startJupyterServer()
- setJupyterStatus({ running: true, error: null })
+ if (isRemote) {
+ let tunnelSuccess = await ipcRenderer.invoke('startJupyterTunnel')
+ console.log("SSH Tunnel start result:", tunnelSuccess, jupyterStatus)
+ if (!tunnelSuccess) {
+ setJupyterStatus({ running: false, error: "Failed to start SSH tunnel for Jupyter. Please check the tunnel settings." })
+ setLoading(false)
+ return
+ }
+ }
setLoading(false)
+ // Start polling every 2 seconds to check if Jupyter is running, and refresh iframe when ready
+ let hasRefreshed = false
+ const intervalId = setInterval(async () => {
+ const status = await checkJupyterServerRunning()
+ if (status && status.running && !hasRefreshed) {
+ refreshIframe()
+ hasRefreshed = true
+ clearInterval(intervalId)
+ }
+ }, 2000);
} catch (error) {
setLoading(false)
setJupyterStatus({ running: false, error: "Failed to start Jupyter server. Please check the logs." })
console.error("Error starting Jupyter server:", error)
return
}
- setLoading(false)
- }
+ }
+ setLoading(false)
}
runJupyter()
- }
- , [])
+ }, [])
- useEffect(() => {
- if (jupyterStatus.running) {
- const url = "http://localhost:" + defaultJupyterPort + "/notebooks/" + relativePath
- setJupyterURL(url)
- setLoading(false)
+ const getJupyterURL = () => {
+ if (isRemote) {
+ return "http://localhost:" + tunnel.localJupyterPort + "/notebooks/" + relativePath
}
- }, [filePath, jupyterStatus, relativePath])
+ return "http://localhost:" + defaultJupyterPort + "/notebooks/" + relativePath
+ }
const refreshIframe = () => {
document.getElementById("iframe-" + fileName).src += ''
@@ -114,7 +116,7 @@ const JupyterNotebookViewer = ({ filePath, startJupyterServer }) => {
) : (
<>
{!jupyterStatus.running && {jupyterStatus.error}
}
-
+
+
+ )
+}
+
+export default ConnectionModal
diff --git a/renderer/components/mainPages/exploratory.jsx b/renderer/components/mainPages/exploratory.jsx
index 2ff61d59..7ccfa0de 100644
--- a/renderer/components/mainPages/exploratory.jsx
+++ b/renderer/components/mainPages/exploratory.jsx
@@ -1,6 +1,5 @@
import React, { useContext } from "react"
import ModulePage from "./moduleBasics/modulePage"
-import { WorkspaceContext } from "../workspace/workspaceContext"
import { ErrorRequestContext } from "../generalPurpose/errorRequestContext"
import DTale from "../exploratory/dtale"
import SweetViz from "../exploratory/sweetViz"
@@ -12,7 +11,6 @@ import { shell } from 'electron'
* @returns the exploratory page
*/
const ExploratoryPage = () => {
- const { port } = useContext(WorkspaceContext)
const { setError } = useContext(ErrorRequestContext)
return (
@@ -42,9 +40,9 @@ const ExploratoryPage = () => {
-
-
-
+
+
+
>
)
diff --git a/renderer/components/mainPages/home.jsx b/renderer/components/mainPages/home.jsx
index 92de8951..1611e75d 100644
--- a/renderer/components/mainPages/home.jsx
+++ b/renderer/components/mainPages/home.jsx
@@ -1,7 +1,9 @@
-import React, { useContext, useEffect, useState } from "react"
+import React, { useContext, useEffect, useRef, useState } from "react"
import Image from "next/image"
import myimage from "../../../resources/medomics_transparent_bg.png"
import { Button, Stack } from "react-bootstrap"
+import { ProgressBar } from "primereact/progressbar"
+import { ProgressSpinner } from "primereact/progressspinner"
import { WorkspaceContext } from "../workspace/workspaceContext"
import { ipcRenderer } from "electron"
import FirstSetupModal from "../generalPurpose/installation/firstSetupModal"
@@ -11,8 +13,8 @@ import { randomUUID } from "crypto"
import { requestBackend } from "../../utilities/requests"
import { ServerConnectionContext } from "../serverConnection/connectionContext"
import { toast } from "react-toastify"
-import { FaRegQuestionCircle } from "react-icons/fa"
-
+import { FaRegQuestionCircle } from "react-icons/fa";
+import ConnectionModal from "./connectionModal"
/**
* @returns the home page component
@@ -23,7 +25,25 @@ const HomePage = () => {
const [appVersion, setAppVersion] = useState("")
const [sampleGenerated, setSampleGenerated] = useState(false)
const { port } = useContext(ServerConnectionContext)
+ const [showConnectionModal, setShowConnectionModal] = useState(false)
+
+
const [requirementsMet, setRequirementsMet] = useState(true)
+ // Local backend presence (Express/GO orchestration) check
+ const [localBackend, setLocalBackend] = useState({ checking: true, installed: false, detail: null })
+ const localBackendPollRef = useRef(null)
+ const [installState, setInstallState] = useState({ active: false, phase: '', percent: 0, speed: 0 })
+
+ const checkLocalBackendNow = async () => {
+ try {
+ const res = await ipcRenderer.invoke('checkLocalBackend')
+ setLocalBackend({ checking: false, installed: !!(res && res.installed), detail: res })
+ } catch {
+ // If the check fails, clear the "checking" flag but keep the previous installed value so the UI isn't locked into a blocked state
+ toast.error('Error checking local server installation status')
+ setLocalBackend(prev => ({ ...prev, checking: false }))
+ }
+ }
async function handleWorkspaceChange() {
ipcRenderer.send("messageFromNext", "requestDialogFolder")
@@ -52,14 +72,12 @@ const HomePage = () => {
"/input/generate_sample_data/",
jsonToSend,
async (jsonResponse) => {
- console.log("jsonResponse", jsonResponse)
if (jsonResponse.error) {
- console.log("Sample data error")
if (jsonResponse.error.message) {
- console.error(jsonResponse.error.message)
+ console.error("Sample data generating error: ", jsonResponse.error.message)
toast.error(jsonResponse.error.message)
} else {
- console.error(jsonResponse.error)
+ console.error("Sample data generating error: ", jsonResponse.error)
toast.error(jsonResponse.error)
}
} else {
@@ -72,16 +90,19 @@ const HomePage = () => {
},
(error) => {
console.log(error)
- toast.error("Error generating sample data " + error)
+ toast.error("Error generating sample data :", error)
}
)
}
// Check if the requirements are met
useEffect(() => {
+ // Initial local backend presence check (stub-aware)
+ checkLocalBackendNow()
+
ipcRenderer.invoke("checkRequirements").then((data) => {
console.log("Requirements: ", data)
- if (data.pythonInstalled && data.mongoDBInstalled) {
+ if (data && data.result && data.result.pythonInstalled && data.result.mongoDBInstalled) {
setRequirementsMet(true)
} else {
setRequirementsMet(false)
@@ -96,6 +117,118 @@ const HomePage = () => {
})
}, [])
+ // Auto-refresh local install status: poll until installed, and refresh on window focus
+ useEffect(() => {
+ // Listen for installer progress events to surface success toasts
+ const onProgress = (_event, payload) => {
+ try {
+ if (!payload || !payload.phase) return
+ // Map phases to progress bands
+ const bands = {
+ download: { start: 0, end: 70 },
+ verify: { start: 70, end: 80 },
+ extract: { start: 80, end: 95 },
+ finalize: { start: 95, end: 100 }
+ }
+
+ const setActive = () => setInstallState(prev => ({ ...prev, active: true }))
+ const setPhase = (phase) => setInstallState(prev => ({ ...prev, phase }))
+
+ switch (payload.phase) {
+ case 'fetch-manifest':
+ case 'github-fetch-releases':
+ case 'github-pick-release':
+ case 'github-select-asset':
+ case 'download-start':
+ setActive(); setPhase('download')
+ setInstallState(prev => ({ ...prev, percent: bands.download.start }))
+ break
+ case 'download-progress': {
+ setActive(); setPhase('download')
+ const raw = Number(payload.percent || 0)
+ const pct = Math.max(0, Math.min(100, raw))
+ const mapped = bands.download.start + (bands.download.end - bands.download.start) * (pct / 100)
+ const speed = Number(payload.speed || 0)
+ setInstallState(prev => ({ ...prev, percent: mapped, speed }))
+ break
+ }
+ case 'download-complete':
+ setActive(); setPhase('verify')
+ setInstallState(prev => ({ ...prev, percent: bands.download.end }))
+ break
+ case 'verify-start':
+ setActive(); setPhase('verify')
+ setInstallState(prev => ({ ...prev, percent: bands.verify.start }))
+ break
+ case 'verify-ok':
+ case 'verify-skip':
+ setActive(); setPhase('extract')
+ setInstallState(prev => ({ ...prev, percent: bands.verify.end }))
+ break
+ case 'extract-start':
+ setActive(); setPhase('extract')
+ setInstallState(prev => ({ ...prev, percent: bands.extract.start }))
+ break
+ case 'extract-complete':
+ setActive(); setPhase('finalize')
+ setInstallState(prev => ({ ...prev, percent: bands.extract.end }))
+ break
+ case 'done':
+ toast.success('Local server installed and ready.')
+ // Refresh local backend status immediately
+ checkLocalBackendNow()
+ setInstallState({ active: false, phase: '', percent: 0, speed: 0 })
+ break
+ case 'already-installed':
+ // Backend was already present on disk; treat like success
+ toast.success('Local server already installed and ready.')
+ checkLocalBackendNow()
+ setInstallState({ active: false, phase: '', percent: 0, speed: 0 })
+ break
+ case 'error':
+ // Any terminal error from the installer should clear the progress UI
+ // The invoking handler shows a toast based on the IPC result.
+ setInstallState({ active: false, phase: '', percent: 0, speed: 0 })
+ break
+ }
+ } catch(e) {
+ console.error("Error handling localBackendInstallProgress event:", e)
+ }
+ }
+ ipcRenderer.on('localBackendInstallProgress', onProgress)
+
+ // Always clear any existing poller first
+ if (localBackendPollRef.current) {
+ clearInterval(localBackendPollRef.current)
+ localBackendPollRef.current = null
+ }
+
+ const onFocus = () => {
+ // On focus, do a quick re-check (useful if user completed install outside the app)
+ checkLocalBackendNow()
+ }
+
+ window.addEventListener('focus', onFocus)
+
+ if (!localBackend.installed) {
+ // Poll every 5s until installed; lightweight IPC call
+ localBackendPollRef.current = setInterval(() => {
+ checkLocalBackendNow()
+ }, 5000)
+ }
+
+ return () => {
+ window.removeEventListener('focus', onFocus)
+ if (localBackendPollRef.current) {
+ clearInterval(localBackendPollRef.current)
+ localBackendPollRef.current = null
+ }
+ try { ipcRenderer.removeListener('localBackendInstallProgress', onProgress) } catch(e) {
+ console.error("Error removing localBackendInstallProgress listener:", e)
+ }
+ }
+ }, [localBackend.installed])
+
// We set the workspace hasBeenSet state
useEffect(() => {
const checkDataSampleExists = async () => {
@@ -122,7 +255,10 @@ const HomePage = () => {
// We set the recent workspaces -> We send a message to the main process to get the recent workspaces, the workspace context will be updated by the main process in _app.js
useEffect(() => {
ipcRenderer.invoke("checkRequirements").then((data) => {
- setRequirementsMet(data.pythonInstalled && data.mongoDBInstalled)
+ if (!data) {
+ return
+ }
+ setRequirementsMet(data.result.pythonInstalled && data.result.mongoDBInstalled)
})
}, [])
@@ -134,6 +270,41 @@ const HomePage = () => {
ipcRenderer.send("messageFromNext", "getRecentWorkspaces")
}, [])
+ const handleRemoteConnect = () => {
+ toast.success("Connected to remote workspace!");
+ };
+
+ const handleInstallLocalBackend = async () => {
+ try {
+ const res = await ipcRenderer.invoke('installLocalBackendFromURL', { version: null })
+ if (res && res.success) {
+ toast.success('Local server installed.')
+ } else {
+ toast.info(res?.error || 'Installer not available yet')
+ }
+ } catch (e) {
+ toast.error(e?.message || String(e))
+ } finally {
+ const chk = await ipcRenderer.invoke('checkLocalBackend')
+ setLocalBackend({ checking: false, installed: !!(chk && chk.installed), detail: chk })
+ }
+ }
+
+ const handleLocateLocalBackend = async () => {
+ try {
+ const pick = await ipcRenderer.invoke('open-dialog-backend-exe')
+ if (pick && pick.success && pick.path) {
+ const setRes = await ipcRenderer.invoke('setLocalBackendPath', pick.path)
+ if (setRes && setRes.success) toast.success('Server path saved.')
+ }
+ } catch (e) {
+ toast.error(e?.message || String(e))
+ } finally {
+ const chk = await ipcRenderer.invoke('checkLocalBackend')
+ setLocalBackend({ checking: false, installed: !!(chk && chk.installed), detail: chk })
+ }
+ }
+
return (
<>
{
{hasBeenSet ? (
<>
-
Set up your workspace to get started
-
+ {!requirementsMet && process.platform !=="darwin" && }
+ {showConnectionModal && setShowConnectionModal(false)}
+ onConnect={handleRemoteConnect}
+ />}
{!requirementsMet && process.platform !== "darwin" && (
diff --git a/renderer/components/mainPages/htmlViewer.jsx b/renderer/components/mainPages/htmlViewer.jsx
index 99cffbe4..0850e672 100644
--- a/renderer/components/mainPages/htmlViewer.jsx
+++ b/renderer/components/mainPages/htmlViewer.jsx
@@ -4,7 +4,7 @@ import Iframe from "react-iframe"
import { DataContext } from "../workspace/dataContext"
import { MEDDataObject } from "../workspace/NewMedDataObject"
import { WorkspaceContext } from "../workspace/workspaceContext"
-import { toLocalPath } from "../../utilities/fileManagementUtils"
+import { toLocalPath } from "../../utilities/fileManagement/fileOps"
/**
* @param config currently a MEDDataObject
diff --git a/renderer/components/mainPages/remoteServer.tsx b/renderer/components/mainPages/remoteServer.tsx
new file mode 100644
index 00000000..1fc868ad
--- /dev/null
+++ b/renderer/components/mainPages/remoteServer.tsx
@@ -0,0 +1,134 @@
+import React from "react"
+import { useTunnel } from "../tunnel/TunnelContext"
+import { ipcRenderer } from "electron"
+
+const StatusPill = ({ label, value }: { label: string; value: string }) => {
+ const color = value === "running" || value === "forwarding" ? "#22c55e" : value === "error" ? "#ef4444" : value === "timeout" ? "#f59e0b" : "#64748b"
+ return (
+
+ {label}:
+ {value || "unknown"}
+
+ )
+}
+
+export default function RemoteServerPage() {
+ const tunnel = useTunnel() as any
+ const {
+ host,
+ username,
+ serverStartedRemotely,
+ expressStatus,
+ expressLogPath,
+ tunnels
+ } = tunnel
+
+ // Derive GO/Mongo statuses from tunnels list to avoid brittle flags
+ const goDerived = React.useMemo(() => {
+ const t = Array.isArray(tunnels) ? tunnels.find((x: any) => String(x?.name).toLowerCase() === 'go') : null
+ return t && t.status ? t.status : 'unknown'
+ }, [tunnels])
+ const mongoDerived = React.useMemo(() => {
+ const t = Array.isArray(tunnels) ? tunnels.find((x: any) => String(x?.name).toLowerCase() === 'mongo') : null
+ return t && t.status ? t.status : 'unknown'
+ }, [tunnels])
+
+ const [log, setLog] = React.useState("")
+ const [streaming, setStreaming] = React.useState(false)
+ const logRef = React.useRef(null)
+
+ const appendLog = React.useCallback((chunk: string) => {
+ setLog((prev) => {
+ const next = prev + chunk
+ // Keep last ~5000 lines to avoid memory growth
+ const lines = next.split(/\r?\n/)
+ const max = 5000
+ return lines.length > max ? lines.slice(lines.length - max).join("\n") : next
+ })
+ }, [])
+
+ React.useEffect(() => {
+ const onData = (_e: any, data: string) => appendLog(data)
+ const onState = (_e: any, s: { streaming?: boolean }) => setStreaming(!!s?.streaming)
+ ipcRenderer.on('remoteServerLog:data', onData)
+ ipcRenderer.on('remoteServerLog:state', onState)
+ return () => {
+ ipcRenderer.removeListener('remoteServerLog:data', onData)
+ ipcRenderer.removeListener('remoteServerLog:state', onState)
+ }
+ }, [appendLog])
+
+ React.useEffect(() => {
+ // Auto-start log streaming when server started remotely and we have a path
+ if (serverStartedRemotely && expressLogPath) {
+ ipcRenderer.invoke('startRemoteServerLogStream')
+ }
+ return () => {
+ ipcRenderer.invoke('stopRemoteServerLogStream')
+ }
+ }, [serverStartedRemotely, expressLogPath])
+
+ React.useEffect(() => {
+ // autoscroll to bottom on new data
+ if (logRef.current) {
+ logRef.current.scrollTop = logRef.current.scrollHeight
+ }
+ }, [log])
+
+ return (
+
+
+ 🖥️
+
Remote Server
+
+
+
+
Host: {host || "-"}
+
User: {username || "-"}
+
Started Via App: {serverStartedRemotely ? "Yes" : "No"}
+
+
+
+
+
Statuses
+
+
+
+
+
+ {Array.isArray(tunnels) && tunnels.length > 0 && (
+
+
Active Tunnels
+
+
+
Name
+
Local
+
Remote
+
Status
+
+ {tunnels.map((t: any, i: number) => (
+
+
{t.name || '-'}
+
{t.localPort ?? '-'}
+
{t.remotePort ?? '-'}
+
{t.status || 'unknown'}
+
+ ))}
+
+
+ )}
+
+
+
+
+
Live Output
+ {expressLogPath ? `(${expressLogPath})` : ''}
+ {streaming ? 'streaming' : 'idle'}
+ setLog("")} style={{ border: '1px solid #cbd5e1', background: '#f8fafc', borderRadius: 6, padding: '2px 8px', cursor: 'pointer' }}>Clear
+
+
+
{log || 'No output yet.'}
+
+
+ )
+}
diff --git a/renderer/components/mainPages/settings.jsx b/renderer/components/mainPages/settings.jsx
index a02badfe..fddc96fb 100644
--- a/renderer/components/mainPages/settings.jsx
+++ b/renderer/components/mainPages/settings.jsx
@@ -17,6 +17,9 @@ import { Column } from "primereact/column"
import { WorkspaceContext } from "../workspace/workspaceContext"
import FirstSetupModal from "../generalPurpose/installation/firstSetupModal"
import { requestBackend } from "../../utilities/requests"
+import { useTunnel } from "../tunnel/TunnelContext"
+import axios from "axios"
+import { toast } from "react-toastify"
const util = require("util")
const exec = util.promisify(require("child_process").exec)
@@ -24,18 +27,19 @@ const exec = util.promisify(require("child_process").exec)
* Settings page
* @returns {JSX.Element} Settings page
*/
-const SettingsPage = ({pageId = "settings", checkJupyterIsRunning, startJupyterServer, stopJupyterServer}) => {
+const SettingsPage = ({pageId = "settings", checkJupyterIsRunning, startJupyterServer, stopJupyterServer, jupyterStatus, setJupyterStatus}) => {
const { workspace, port } = useContext(WorkspaceContext)
const [settings, setSettings] = useState(null) // Settings object
const [serverIsRunning, setServerIsRunning] = useState(false) // Boolean to know if the server is running
const [mongoServerIsRunning, setMongoServerIsRunning] = useState(false) // Boolean to know if the server is running
- const [jupyterServerIsRunning, setjupyterServerIsRunning] = useState(false) // Boolean to know if Jupyter Noteobok is running
const [activeIndex, setActiveIndex] = useState(0) // Index of the active tab
const [condaPath, setCondaPath] = useState("") // Path to the conda environment
const [seed, setSeed] = useState(54288) // Seed for random number generation
const [pythonEmbedded, setPythonEmbedded] = useState({}) // Boolean to know if python is embedded
const [showPythonPackages, setShowPythonPackages] = useState(false) // Boolean to know if python packages are shown
+ const tunnel = useTunnel()
+
/**
* Check if the mongo server is running and set the state
* @returns {void}
@@ -130,7 +134,7 @@ const SettingsPage = ({pageId = "settings", checkJupyterIsRunning, startJupyterS
})
}
})
- }, 5000)
+ }, workspace.isRemote ? 10000 : 5000) // Greater interval if remote workspace since requests take longer
return () => clearInterval(interval)
})
@@ -148,33 +152,68 @@ const SettingsPage = ({pageId = "settings", checkJupyterIsRunning, startJupyterS
const getJupyterStatus = async () => {
console.log("Checking jupyter status")
- const running = await checkJupyterIsRunning()
- setjupyterServerIsRunning(running)
+ let running = false
+ if (workspace.isRemote) {
+ window.backend.requestExpress({ method: 'get', path: '/check-jupyter-status', host: tunnel.host })
+ .then((response) => {
+ console.log("Jupyter status on remote server: ", response)
+ if (response.status == 200 && response.data.running) {
+ console.log("Jupyter is running on remote server")
+ setJupyterStatus(response.data)
+ } else {
+ console.error("Jupyter check on server failed: ", response.data.error)
+ setJupyterStatus(response.data)
+ }
+ })
+ .catch((error) => {
+ console.error("Error checking Jupyter status on remote server: ", error)
+ setJupyterStatus({ running: false, error: error.message })
+ })
+ } else {
+ await checkJupyterIsRunning()
+ }
}
const startMongo = () => {
let workspacePath = workspace.workingDirectory.path
- const mongoConfigPath = path.join(workspacePath, ".medomics", "mongod.conf")
- let mongod = getMongoDBPath()
- let mongoResult = spawn(mongod, ["--config", mongoConfigPath])
-
- mongoResult.stdout.on("data", (data) => {
- console.log(`MongoDB stdout: ${data}`)
- })
-
- mongoResult.stderr.on("data", (data) => {
- console.error(`MongoDB stderr: ${data}`)
- })
-
- mongoResult.on("close", (code) => {
- console.log(`MongoDB process exited with code ${code}`)
- })
-
- mongoResult.on("error", (err) => {
- console.error("Failed to start MongoDB: ", err)
- // reject(err)
- })
- console.log("Mongo result from start ", mongoResult)
+ if (workspace.isRemote) {
+ window.backend.requestExpress({ method: 'post', path: '/start-mongo', host: tunnel.host, body: { workspacePath: workspacePath } })
+ .then((response) => {
+ if (response.data.success) {
+ toast.success("MongoDB started successfully on remote server")
+ console.log("MongoDB started successfully on remote server")
+ } else {
+ toast.error(`Failed to start MongoDB on remote server: ${response.data.error}`)
+ console.error("Failed to start MongoDB on remote server: ", response.data.error)
+ }
+ })
+ .catch((error) => {
+ console.error("Error starting MongoDB on remote server: ", error)
+ toast.error(`Error starting MongoDB on remote server: ${error?.message || String(error)}`)
+ })
+ } else {
+ const mongoConfigPath = path.join(workspacePath, ".medomics", "mongod.conf")
+ let mongod = getMongoDBPath()
+ let mongoResult = spawn(mongod, ["--config", mongoConfigPath])
+
+ mongoResult.stdout.on("data", (data) => {
+ console.log(`MongoDB stdout: ${data}`)
+ })
+
+ mongoResult.stderr.on("data", (data) => {
+ console.error(`MongoDB stderr: ${data}`)
+ })
+
+ mongoResult.on("close", (code) => {
+ console.log(`MongoDB process exited with code ${code}`)
+ })
+
+ mongoResult.on("error", (err) => {
+ console.error("Failed to start MongoDB: ", err)
+ // reject(err)
+ })
+ console.log("Mongo result from start ", mongoResult)
+ }
}
const installMongoDB = () => {
@@ -285,21 +324,21 @@ const SettingsPage = ({pageId = "settings", checkJupyterIsRunning, startJupyterS
Jupyter Notebook server status :
- {jupyterServerIsRunning ? "Running" : "Stopped"}
- {jupyterServerIsRunning ? : }
+ {jupyterStatus.running ? "Running" : "Stopped"}
+ {jupyterStatus.running ? : }
{startJupyterServer()}}
- style={{ backgroundColor: jupyterServerIsRunning ? "grey" : "#54a559", borderColor: jupyterServerIsRunning ? "grey" : "#54a559", marginRight: "1rem" }}
- disabled={jupyterServerIsRunning}
+ style={{ backgroundColor: jupyterStatus.running ? "grey" : "#54a559", borderColor: jupyterStatus.running ? "grey" : "#54a559", marginRight: "1rem" }}
+ disabled={jupyterStatus.running}
/>
{stopJupyterServer()}}
- style={{ backgroundColor: jupyterServerIsRunning ? "#d55757" : "grey", borderColor: jupyterServerIsRunning ? "#d55757" : "grey" }}
- disabled={!jupyterServerIsRunning}
+ style={{ backgroundColor: jupyterStatus.running ? "#d55757" : "grey", borderColor: jupyterStatus.running ? "#d55757" : "grey" }}
+ disabled={!jupyterStatus.running}
/>
diff --git a/renderer/components/mongoDB/mongoDBUtils.js b/renderer/components/mongoDB/mongoDBUtils.js
index dac2da0d..2d5e3083 100644
--- a/renderer/components/mongoDB/mongoDBUtils.js
+++ b/renderer/components/mongoDB/mongoDBUtils.js
@@ -4,8 +4,16 @@ import { toast } from "react-toastify"
const { MongoClient } = require("mongodb")
const fs = require("fs")
const Papa = require("papaparse")
+import { getTunnelState } from "../../utilities/tunnelState"
+
+function getMongoUri() {
+ const tunnel = getTunnelState()
+ if (tunnel && tunnel.tunnelActive && tunnel.localDBPort) {
+ return `mongodb://localhost:${tunnel.localDBPort}`
+ }
+ return "mongodb://localhost:54017"
+}
-const uri = "mongodb://localhost:54017" // Remplacez par votre URI MongoDB
const dbName = "data" // Remplacez par le nom de votre base de données
let client
@@ -22,7 +30,7 @@ function stripIds(doc = {}) {
export async function connectToMongoDB() {
if (!client) {
- client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true })
+ client = new MongoClient(getMongoUri(), { useNewUrlParser: true, useUnifiedTopology: true })
await client.connect()
}
return client.db(dbName)
@@ -357,6 +365,11 @@ async function insertCSVIntoCollection(filePath, collectionName) {
},
complete: async (results) => {
try {
+ if (results.data.length == 0) {
+ console.warn("CSV file is empty or has no valid rows.")
+ resolve(null)
+ return
+ }
// Additional cleanup for any remaining NaN values
const cleanedData = results.data.map(row => {
const cleanRow = {}
@@ -744,6 +757,28 @@ export async function getAllCollections() {
return await db.listCollections().toArray()
}
+
+export async function insertObjectIntoCollection(data) {
+ switch (data.medDataObject.type) {
+ case "csv":
+ await insertCSVIntoCollection(data.objectPath, data.medDataObject.id)
+ break
+ case "html":
+ await insertHTMLIntoCollection(data.objectPath, data.medDataObject.id)
+ break
+ case "png":
+ await insertPNGIntoCollection(data.objectPath, data.medDataObject.id)
+ break
+ case "pkl":
+ await insertPKLIntoCollection(data.objectPath, data.medDataObject.id)
+ break
+ case "jpg":
+ await insertJPGIntoCollection(data.objectPath, data.medDataObject.id)
+ break
+ default:
+ break
+ }
+}
/**
* @description Compute class imbalance statistics for a dataset
* @param {String} collectionId MongoDB collection id
diff --git a/renderer/components/tunnel/TunnelContext.jsx b/renderer/components/tunnel/TunnelContext.jsx
new file mode 100644
index 00000000..82571a1b
--- /dev/null
+++ b/renderer/components/tunnel/TunnelContext.jsx
@@ -0,0 +1,104 @@
+import { createContext, useState, useContext } from "react";
+
+// TunnelContext will store info about the active SSH tunnel
+export const TunnelContext = createContext({
+ host: null,
+ tunnelActive: false,
+ localAddress: "localhost",
+ localExpressPort: null,
+ remoteExpressPort: null,
+ localGoPort: null,
+ remoteGoPort: null,
+ localDBPort: null,
+ remoteDBPort: null,
+ localJupyterPort: null,
+ remoteJupyterPort: null,
+ remotePort: null,
+ username: null,
+ serverStartedRemotely: false,
+ expressStatus: "unknown",
+ expressLogPath: null,
+ remoteWorkspacePath: null,
+ remoteBackendExecutablePath: null,
+ requirementsMetRemote: false,
+ requirementsDetailsRemote: null,
+ requirementsCheckedAt: null,
+ tunnels: [],
+ setTunnelInfo: () => {},
+ clearTunnelInfo: () => {},
+})
+
+export const TunnelProvider = ({ children }) => {
+ const [tunnelInfo, setTunnelInfo] = useState({
+ host: null,
+ tunnelActive: false,
+ localAddress: "localhost",
+ localExpressPort: null,
+ remoteExpressPort: null,
+ localGoPort: null,
+ remoteGoPort: null,
+ localDBPort: null,
+ remoteDBPort: null,
+ localJupyterPort: null,
+ remoteJupyterPort: null,
+ remotePort: null,
+ username: null,
+ serverStartedRemotely: false,
+ expressStatus: "unknown",
+ expressLogPath: null,
+ remoteWorkspacePath: null,
+ remoteBackendExecutablePath: null,
+ requirementsMetRemote: false,
+ requirementsDetailsRemote: null,
+ requirementsCheckedAt: null,
+ tunnels: [],
+ })
+
+ const setTunnel = (info) => {
+ if (!info || typeof info !== 'object') return
+ const safeInfo = { ...info }
+ delete safeInfo.password
+ delete safeInfo.privateKey
+ const hasFlag = Object.prototype.hasOwnProperty.call(safeInfo, 'tunnelActive')
+ setTunnelInfo((prev) => {
+ const nextTunnelActive = hasFlag ? !!safeInfo.tunnelActive : !!prev.tunnelActive
+ return { ...prev, ...safeInfo, tunnelActive: nextTunnelActive }
+ })
+ }
+
+ const clearTunnel = () => {
+ setTunnelInfo({
+ host: null,
+ tunnelActive: false,
+ localAddress: "localhost",
+ localExpressPort: null,
+ remoteExpressPort: null,
+ localGoPort: null,
+ remoteGoPort: null,
+ localDBPort: null,
+ remoteDBPort: null,
+ localJupyterPort: null,
+ remoteJupyterPort: null,
+ remotePort: null,
+ username: null,
+ serverStartedRemotely: false,
+ expressStatus: "unknown",
+ expressLogPath: null,
+ remoteWorkspacePath: null,
+ remoteBackendExecutablePath: null,
+ requirementsMetRemote: false,
+ requirementsDetailsRemote: null,
+ requirementsCheckedAt: null,
+ tunnels: [],
+ })
+ }
+
+ return (
+
+ {children}
+
+ )
+}
+
+// Custom hook for easy access
+export const useTunnel = () => useContext(TunnelContext)
diff --git a/renderer/components/workspace/NewMedDataObject.js b/renderer/components/workspace/NewMedDataObject.js
index f320b73f..cde6dad3 100644
--- a/renderer/components/workspace/NewMedDataObject.js
+++ b/renderer/components/workspace/NewMedDataObject.js
@@ -3,7 +3,7 @@ import { ipcRenderer } from "electron"
import fs from "fs-extra"
import path from "path"
import { toast } from "react-toastify"
-import { getPathSeparator } from "../../utilities/fileManagementUtils"
+import { getPathSeparator, mkdirp, pathExists, remoteDirname, renamePath, rmrf } from "../../utilities/fileManagement/fileOps"
import {
deleteMEDDataObject,
downloadCollectionToFile,
@@ -12,6 +12,7 @@ import {
updateMEDDataObjectName,
updateMEDDataObjectPath
} from "../mongoDB/mongoDBUtils"
+import { getTunnelState } from "../../utilities/tunnelState"
/**
* @description class definition of a MEDDataObject
@@ -154,7 +155,7 @@ export class MEDDataObject {
pathParts.unshift(object.name)
}
}
- return path.join(workspacePath, ...pathParts)
+ return path.posix.join(workspacePath, ...pathParts)
}
/**
@@ -191,8 +192,8 @@ export class MEDDataObject {
* @returns {String} pathToCreate the path where the file was saved
*/
static writeFileSync(exportObj, path, name, extension) {
- let newPath = typeof path === "string" ? path : path.join(getPathSeparator())
- const pathToCreate = `${newPath}${getPathSeparator()}${name}.${extension}`
+ let newPath = typeof path === "string" ? path : path.join(getPathSeparator({ isRemote: false }))
+ const pathToCreate = `${newPath}${getPathSeparator({ isRemote: false })}${name}.${extension}`
if (!fs.existsSync(newPath)) {
this.createFolderFSsync(newPath).then(() => {
let convertedExportObj = typeof exportObj === "string" ? exportObj : JSON.stringify(exportObj, null, 2)
@@ -230,13 +231,13 @@ export class MEDDataObject {
}
/**
- * @description Delete a MEDDataObject and its children from the dictionary and the local workspace
+ * @description Delete a MEDDataObject and its children from the dictionary and the workspace
* @param {Dictionary} dict - dictionary of all MEDDataObjects
* @param {String} id - the id of the object to delete
* @param {String} workspacePath - the root path of the workspace
* @returns {Promise}
*/
- static async deleteObjectAndChildren(dict, id, workspacePath) {
+ static async deleteObjectAndChildren(dict, id, workspacePath, isRemote = false) {
// Get the object to delete
const objectToDelete = dict[id]
@@ -249,7 +250,13 @@ export class MEDDataObject {
if (objectToDelete.inWorkspace) {
// Get the full path of the object in the workspace
const fullPath = this.getFullPath(dict, id, workspacePath)
- fs.rmSync(fullPath, { recursive: true, force: true })
+ try {
+ await rmrf(fullPath, { isRemote })
+ } catch (error) {
+ console.error(`Failed to delete ${fullPath} from ${isRemote ? "remote" : "local"} workspace: ${error.message}`)
+ toast.error(`Failed to delete ${objectToDelete.name}: ${error.message}`)
+ return
+ }
console.log(`Deleted ${fullPath} from workspace`)
}
@@ -266,7 +273,7 @@ export class MEDDataObject {
* @param {Boolean} notify - Wether to display a toast message while success
* @returns {Promise}
*/
- static async deleteObjectAndChildrenFromWorkspace(dict, id, workspacePath, notify = true) {
+ static async deleteObjectAndChildrenFromWorkspace(dict, id, workspacePath, notify = true, isRemote = false) {
// Get the object to delete
const objectToDelete = dict[id]
@@ -279,7 +286,13 @@ export class MEDDataObject {
if (objectToDelete.inWorkspace) {
// Get the full path of the object in the workspace
const fullPath = this.getFullPath(dict, id, workspacePath)
- fs.rmSync(fullPath, { recursive: true, force: true })
+ try {
+ await rmrf(fullPath, { isRemote })
+ } catch (error) {
+ console.error(`Failed to delete ${fullPath} from ${isRemote ? "remote" : "local"} workspace: ${error.message}`)
+ toast.error(`Failed to delete ${objectToDelete.name}: ${error.message}`)
+ return
+ }
console.log(`Deleted ${fullPath} from workspace`)
const success = await overwriteMEDDataObjectProperties(id, { inWorkspace: false })
if (success) {
@@ -395,9 +408,10 @@ export class MEDDataObject {
* @param {String} id - the id of the MEDDataObject to rename
* @param {String} newName - the new name for the MEDDataObject
* @param {String} workspacePath - the root path of the workspace
+ * @param {Boolean} isRemote - A flag indicating if the workspace is remote
* @returns {void}
*/
- static async rename(dict, id, newName, workspacePath) {
+ static async rename(dict, id, newName, workspacePath, isRemote = false) {
const object = dict[id]
if (!object) {
@@ -426,8 +440,20 @@ export class MEDDataObject {
toast.error(`Failed to rename ${object.name}`)
return
}
- fs.renameSync(oldPath, newPath)
+ if (isRemote) {
+ try {
+ await renamePath(oldPath, newPath, { isRemote: true })
+ console.log(`Renamed ${oldPath} to ${newPath} on remote`)
+ } catch (error) {
+ console.error(`Failed to rename ${oldPath} to ${newPath} on remote: ${error.message}`)
+ toast.error(`Failed to rename ${object.name} on remote: ${error.message}`)
+ return
+ }
+ } else {
+ fs.renameSync(oldPath, newPath)
+ }
console.log(`Renamed ${oldPath} to ${newPath}`)
+ toast.success(`Renamed ${oldPath} to ${newPath}`)
}
// Notify the system to update the workspace
@@ -442,8 +468,9 @@ export class MEDDataObject {
* @param {Boolean} notify - Wether to display a toast message while success
* @param {Set} syncedObjects - A set to track already synced objects to avoid infinite loops
*/
- static async sync(dict, id, workspacePath, notify = true, syncedObjects = new Set()) {
+ static async sync(dict, id, workspacePath, notify = true, syncedObjects = new Set(), isRemote) {
const medDataObject = dict[id]
+ console.log(`Syncing MEDDataObject with id ${id}, name ${medDataObject ? medDataObject.name : "unknown"}, isRemote: ${isRemote}`)
if (!medDataObject) {
console.log(`MEDDataObject with id ${id} not found`)
@@ -451,7 +478,7 @@ export class MEDDataObject {
}
// Check if the object is already in workspace
- if (medDataObject.type == "directory" && medDataObject.inWorkspace && fs.existsSync(this.getFullPath(dict, id, workspacePath))) {
+ if (medDataObject.type == "directory" && medDataObject.inWorkspace && (await pathExists(this.getFullPath(dict, id, workspacePath), { isRemote }))) {
console.log(`MEDDataObject with id ${id} is already saved locally in workspace`)
toast.info(`${medDataObject.name} is already saved locally in workspace`)
return
@@ -465,35 +492,59 @@ export class MEDDataObject {
// Recursively sync parent objects
if (medDataObject.parentID && medDataObject.parentID !== "ROOT") {
- await this.sync(dict, medDataObject.parentID, workspacePath, notify, syncedObjects)
+ await this.sync(dict, medDataObject.parentID, workspacePath, notify, syncedObjects, isRemote)
}
// Define the file path where the content will be downloaded
const filePath = this.getFullPath(dict, id, workspacePath)
-
// Ensure the directory exists
- const directoryPath = path.dirname(filePath)
- if (!fs.existsSync(directoryPath)) {
- fs.mkdirSync(directoryPath, { recursive: true })
+ if (isRemote) {
+ const directoryPath = remoteDirname(filePath, { isRemote: true })
+ console.log(`Creating directory at ${directoryPath} for remote sync`)
+ const dirExists = await pathExists(directoryPath, { isRemote: true })
+ if (!dirExists) {
+ await mkdirp(directoryPath, { isRemote: true })
+ }
+ } else {
+ const directoryPath = path.dirname(filePath)
+ const dirExists = await pathExists(directoryPath, { isRemote: false })
+ if (!dirExists) {
+ await mkdirp(directoryPath, { isRemote: false })
+ }
}
// Download the content based on the type
try {
if (medDataObject.type != "directory" && medDataObject.type != "medml" && medDataObject.type != "medeval" && medDataObject.type != "medmlres" && medDataObject.type != "medmodel") {
- await downloadCollectionToFile(id, filePath, medDataObject.type)
- if (medDataObject.inWorkspace && notify) {
- toast.success(`Sync ${medDataObject.name} successfully`)
- }
+ if (isRemote) {
+ const tunnelState = getTunnelState()
+ window.backend.requestExpress({ method: 'post', path: '/download-collection-to-file', host: tunnelState.host, body: { collectionId: id, filePath: filePath, type: medDataObject.type } })
+ .then((response) => {
+ if (response.data.success) {
+ console.log(`Downloaded collection ${id} to remote file successfully`)
+ } else {
+ toast.error(`Failed to download collection ${id} to remote file: ${response.data.error}`)
+ }
+ })
+ .catch((err) => {
+ toast.error(`Failed to download collection ${id} to remote file: ${err && err.message ? err.message : String(err)}`)
+ })
+ } else {
+ await downloadCollectionToFile(id, filePath, medDataObject.type)
+ if (medDataObject.inWorkspace && notify) {
+ toast.success(`Sync ${medDataObject.name} successfully`)
+ }
+ }
}
// Sync child objects for specific types
if (medDataObject.type === "medml" || medDataObject.type === "medeval" || medDataObject.type === "medmlres" || medDataObject.type === "medmodel") {
- await this.syncChildren(dict, medDataObject.childrenIDs, workspacePath, notify, syncedObjects)
+ await this.syncChildren(dict, medDataObject.childrenIDs, workspacePath, notify, syncedObjects, isRemote)
}
// If the object is a directory, create it in the workspace and sync its children
if (medDataObject.type === "directory") {
- await this.syncChildren(dict, medDataObject.childrenIDs, workspacePath, notify, syncedObjects)
+ await this.syncChildren(dict, medDataObject.childrenIDs, workspacePath, notify, syncedObjects, isRemote)
}
// Update inWorkspace property to true after successful download
@@ -522,16 +573,17 @@ export class MEDDataObject {
* @param {String} workspacePath - the root path of the workspace
* @param {Boolean} notify - Whether to display a toast message while success
* @param {Set} syncedObjects - A set to track already synced objects to avoid infinite loops
+ * @param {Boolean} isRemote - A flag indicating if the sync is for a remote workspace
*/
- static async syncChildren(dict, childrenIDs, workspacePath, notify, syncedObjects) {
+ static async syncChildren(dict, childrenIDs, workspacePath, notify, syncedObjects, isRemote) {
for (const childID of childrenIDs) {
const child = dict[childID]
if (!child) {
console.log(`Child MEDDataObject with id ${childID} not found`)
continue
}
- await this.sync(dict, childID, workspacePath, notify, syncedObjects)
- await this.syncChildren(dict, child.childrenIDs, workspacePath, notify, syncedObjects)
+ await this.sync(dict, childID, workspacePath, notify, syncedObjects, isRemote)
+ await this.syncChildren(dict, child.childrenIDs, workspacePath, notify, syncedObjects, isRemote)
}
}
diff --git a/renderer/components/workspace/workspaceContext.jsx b/renderer/components/workspace/workspaceContext.jsx
index 68b915ea..e9441496 100644
--- a/renderer/components/workspace/workspaceContext.jsx
+++ b/renderer/components/workspace/workspaceContext.jsx
@@ -19,7 +19,7 @@ const WorkspaceContext = createContext(null)
* @summary The workspace object is the object that contains the workspace information. It is an object that contains the following properties:'hasBeenSet' and 'workspaceObject'.
* The 'hasBeenSet' property is a boolean that indicates if the workspace has been set. The 'workspaceObject' property is the workspace containing information about all the files and folders in the workspace.
*/
-function WorkspaceProvider({ workspace, setWorkspace, port, setPort, recentWorkspaces, setRecentWorkspaces, children }) {
+function WorkspaceProvider({ workspace, setWorkspace, port, setPort, recentWorkspaces, setRecentWorkspaces, isRemoteWorkspace, setIsRemoteWorkspace, children }) {
/**
*
* @param {string} name The name of the folder to get the path of
@@ -37,7 +37,7 @@ function WorkspaceProvider({ workspace, setWorkspace, port, setPort, recentWorks
}
return (
<>
- {children}
+ {children}
>
)
}
diff --git a/renderer/next-env.d.ts b/renderer/next-env.d.ts
new file mode 100644
index 00000000..4f11a03d
--- /dev/null
+++ b/renderer/next-env.d.ts
@@ -0,0 +1,5 @@
+/// <reference types="next" />
+/// <reference types="next/image-types/global" />
+
+// NOTE: This file should not be edited
+// see https://nextjs.org/docs/basic-features/typescript for more information.
diff --git a/renderer/pages/_app.js b/renderer/pages/_app.js
index 77aac8c0..d894530c 100644
--- a/renderer/pages/_app.js
+++ b/renderer/pages/_app.js
@@ -14,7 +14,12 @@ import { MEDDataObject } from "../components/workspace/NewMedDataObject"
import { WorkspaceProvider } from "../components/workspace/workspaceContext"
import { loadMEDDataObjects, updateGlobalData } from "../utilities/appUtils/globalDataUtils"
import { NotificationContextProvider } from "../components/generalPurpose/notificationContext"
+import { TunnelProvider } from "../components/tunnel/TunnelContext";
+import { setTunnelState, clearTunnelState } from "../utilities/tunnelState"
+import { downloadCollectionToFile, insertObjectIntoCollection } from "../components/mongoDB/mongoDBUtils"
import { ThemeProvider } from "../components/theme/themeContext"
+import { SidebarLoadingProvider } from "../components/layout/sidebarTools/SidebarLoadingContext"
+import SidebarLoadingController from "../components/layout/sidebarTools/SidebarLoadingController"
import { SupersetRequestProvider } from "../components/mainPages/superset/supersetRequestContext"
// CSS
@@ -124,7 +129,8 @@ function App() {
const [layoutModel, setLayoutModel] = useState(initialLayout)
const [workspaceObject, setWorkspaceObject] = useState({
hasBeenSet: false,
- workingDirectory: ""
+ workingDirectory: "",
+ isRemote: false
})
const [recentWorkspaces, setRecentWorkspaces] = useState([]) // The list of recent workspaces
const [port, setPort] = useState() // The port of the server
@@ -183,6 +189,32 @@ function App() {
setRecentWorkspaces(data)
})
+ ipcRenderer.on("tunnelStateUpdate", (event, state) => {
+ setTunnelState(state)
+ })
+
+ ipcRenderer.on("tunnelStateClear", () => {
+ clearTunnelState()
+ })
+
+ ipcRenderer.on("insertObjectIntoCollection", (event, data) => {
+ if (process.platform === "win32") {
+ if (data.objectPath.startsWith("/")) {
+ data.objectPath = data.objectPath.slice(1)
+ }
+ }
+ insertObjectIntoCollection(data)
+ })
+
+ ipcRenderer.on("downloadCollectionToFile", (event, data) => {
+ if (process.platform === "win32") {
+ if (data.filePath.startsWith("/")) {
+ data.filePath = data.filePath.slice(1)
+ }
+ }
+ downloadCollectionToFile(data.collectionId, data.filePath, data.type)
+ })
+
/**
* This is to log messages from the main process in the console
*/
@@ -198,10 +230,16 @@ function App() {
}
}, []) // Here, we specify that the hook should only be called at the launch of the app
+ // Helper to dispatch custom sidebar loading event
+ function setSidebarLoadingCustom(processing, message) {
+ window.dispatchEvent(new CustomEvent("sidebarLoading", { detail: { processing, message } }))
+ }
+
// This useEffect hook is called whenever the `globalData` state changes.
useEffect(() => {
console.log("globalData changed", globalData)
MEDDataObject.verifyLockedObjects(globalData)
+ setSidebarLoadingCustom(false, "")
}, [globalData])
// This useEffect hook is called whenever the `layoutModel` state changes.
@@ -213,13 +251,24 @@ function App() {
// This useEffect hook is called whenever the `workspaceObject` state changes.
useEffect(() => {
async function getGlobalData() {
- await updateGlobalData(workspaceObject)
- const newGlobalData = await loadMEDDataObjects()
- setGlobalData(newGlobalData)
+ let result
+ if (workspaceObject.isRemote) {
+ result = await ipcRenderer.invoke("confirmMongoTunnel", true)
+ }
+ setSidebarLoadingCustom(true, "Loading workspace data...")
+ if (!result || (result && result.success)) {
+ await updateGlobalData(workspaceObject)
+ const newGlobalData = await loadMEDDataObjects(workspaceObject.isRemote)
+ setGlobalData(newGlobalData)
+ }
+ setSidebarLoadingCustom(false, "")
}
if (workspaceObject.hasBeenSet == true) {
console.log("workspaceObject changed", workspaceObject)
getGlobalData()
+ } else {
+ console.log("Clearing global data because workspaceObject has not been set yet", workspaceObject)
+ setGlobalData({})
}
}, [workspaceObject])
@@ -252,15 +301,20 @@ function App() {
setSupersetPort={setSupersetPort}
>
-
- {/* This is the WorkspaceProvider, which provides the workspace model to all the children components of the LayoutManager */}
- {/* This is the LayoutContextProvider, which provides the layout model to all the children components of the LayoutManager */}
-
- {/** We pass the initialLayout as a parameter */}
-
+
+
+
+
+ {/* This is the WorkspaceProvider, which provides the workspace model to all the children components of the LayoutManager */}
+ {/* This is the LayoutContextProvider, which provides the layout model to all the children components of the LayoutManager */}
+
+ {/** We pass the initialLayout as a parameter */}
+
+
+
diff --git a/renderer/tsconfig.json b/renderer/tsconfig.json
new file mode 100644
index 00000000..8d7eb0ea
--- /dev/null
+++ b/renderer/tsconfig.json
@@ -0,0 +1,25 @@
+{
+ "compilerOptions": {
+ "target": "ES2020",
+ "lib": ["dom", "dom.iterable", "esnext"],
+ "allowJs": true,
+ "skipLibCheck": true,
+ "strict": false,
+ "forceConsistentCasingInFileNames": true,
+ "noEmit": true,
+ "esModuleInterop": true,
+ "module": "esnext",
+ "moduleResolution": "node",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "jsx": "preserve",
+ "incremental": true
+ },
+ "include": [
+ "next-env.d.ts",
+ "types/**/*.d.ts",
+ "**/*.ts",
+ "**/*.tsx"
+ ],
+ "exclude": ["node_modules"]
+}
diff --git a/renderer/types/global.d.ts b/renderer/types/global.d.ts
new file mode 100644
index 00000000..c0057b05
--- /dev/null
+++ b/renderer/types/global.d.ts
@@ -0,0 +1,12 @@
+/* eslint-disable */
+export {}
+
+declare global {
+  interface Window {
+    backend: {
+      requestExpress: (req: any) => Promise<any>
+      request: (req: any) => Promise<any>
+      getExpressPort: () => Promise<any>
+    }
+  }
+}
diff --git a/renderer/utilities/README.md b/renderer/utilities/README.md
index ae9a6758..8120b5c3 100644
--- a/renderer/utilities/README.md
+++ b/renderer/utilities/README.md
@@ -6,7 +6,8 @@ Here are some utility files containing javascript functions for general purposes
| File | Description |
| ------------- | ---------------------------------------------------------------------------------- |
-| /fileManagementUtils.js | File management functions for reading/writing files|
+| /fileManagement/fileOps.js | Preferred public file management API (routes local vs remote operations) |
+| /fileManagementUtils.js | Local workspace implementation + compatibility helpers (still used by fileOps and older call sites) |
| /requests.js | Functions for communications between frontend and backend |
-| /storageManager.js | Class for file management purposes (same thing as fileManagementUtils.js but in a class approach) |
+| /storageManager.js | Class-based file management utilities (older alternative approach) |
diff --git a/renderer/utilities/appUtils/globalDataUtils.js b/renderer/utilities/appUtils/globalDataUtils.js
index 8a5ec01e..a578d73f 100644
--- a/renderer/utilities/appUtils/globalDataUtils.js
+++ b/renderer/utilities/appUtils/globalDataUtils.js
@@ -1,6 +1,7 @@
import { MEDDataObject } from "../../components/workspace/NewMedDataObject"
import { recursivelyRecenseWorkspaceTree } from "./workspaceUtils"
import { connectToMongoDB, insertMEDDataObjectIfNotExists } from "../../components/mongoDB/mongoDBUtils"
+import { pathExists } from "../fileManagement/fileOps"
/**
* @description Used to update the data present in the DB with local files not present in the database
@@ -24,36 +25,34 @@ export const updateGlobalData = async (workspaceObject) => {
usedIn: null
})
await insertMEDDataObjectIfNotExists(rootDataObject, rootPath)
- await recursivelyRecenseWorkspaceTree(rootChildren, rootParentID)
+ await recursivelyRecenseWorkspaceTree(rootChildren, rootParentID, workspaceObject.isRemote)
}
/**
* @descritption load the MEDDataObjects from the MongoDB database
* @returns medDataObjectsDict dict containing the MEDDataObjects in the Database
*/
-export async function loadMEDDataObjects() {
+export async function loadMEDDataObjects(isRemote = false) {
   let medDataObjectsDict = {}
   try {
     // Get global data
-    const fs = require("fs")
     const db = await connectToMongoDB()
     const collection = db.collection("medDataObjects")
     const medDataObjectsArray = await collection.find().toArray()
     // Format data
-    medDataObjectsArray.forEach((data) => {
+    for (const data of medDataObjectsArray) {
       const medDataObject = new MEDDataObject(data)
       // Check if local objects still exist
       if (medDataObject.inWorkspace && medDataObject.path) {
-        try {
-          fs.accessSync(medDataObject.path)
+        const exists = await pathExists(medDataObject.path, { isRemote })
+        if (exists) {
           medDataObjectsDict[medDataObject.id] = medDataObject
-        } catch (error) {
-          console.error(`${medDataObject.name}: not found locally, path will be set to null`, medDataObject)
+        } else {
+          console.error(`${medDataObject.name}: not found ${isRemote ? "remotely" : "locally"}, path will be set to null`, medDataObject)
           medDataObject.path = null
           medDataObject.inWorkspace = false
           medDataObjectsDict[medDataObject.id] = medDataObject
-          // Update database
-          collection.updateOne(
+          // Await the update: the loop was converted to for...of precisely so
+          // async work can be awaited; a fire-and-forget promise here would
+          // escape the surrounding try/catch as an unhandled rejection
+          await collection.updateOne(
             { id: medDataObject.id },
             { $set: { path: null, inWorkspace: false } }
@@ -66,7 +65,7 @@ export async function loadMEDDataObjects() {
} else {
medDataObjectsDict[medDataObject.id] = medDataObject
}
- })
+ }
} catch (error) {
console.error("Failed to load MEDDataObjects: ", error)
}
diff --git a/renderer/utilities/appUtils/workspaceUtils.js b/renderer/utilities/appUtils/workspaceUtils.js
index 7a0ebe55..0138a3a0 100644
--- a/renderer/utilities/appUtils/workspaceUtils.js
+++ b/renderer/utilities/appUtils/workspaceUtils.js
@@ -1,9 +1,7 @@
import { MEDDataObject } from "../../components/workspace/NewMedDataObject"
import { randomUUID } from "crypto"
import { insertMEDDataObjectIfNotExists } from "../../components/mongoDB/mongoDBUtils"
-
-// Import fs and path
-const fs = require("fs")
+import { lstat } from "../fileManagement/fileOps"
const path = require("path")
/**
@@ -15,23 +13,27 @@ const path = require("path")
* @description This function is used to recursively recense the directory tree and add the files and folders to the global data object
* It is called when the working directory is set
*/
-export async function recursivelyRecenseWorkspaceTree(children, parentID) {
+export async function recursivelyRecenseWorkspaceTree(children, parentID, isRemote = false) {
+  let childType
   for (const child of children) {
-    let isDirect = false
-    // Check if the path exists
-    if (fs.existsSync(child.path)) {
-      const stats = fs.lstatSync(child.path)
-      isDirect = stats.isDirectory()
+    let fileInfo
+    try {
+      fileInfo = await lstat(child.path, { isRemote })
+    } catch (error) {
+      console.error(`Error getting file info for ${child.path}:`, error)
+      continue
     }
-    let uuid = child.name == "DATA" || child.name == "EXPERIMENTS" ? child.name : randomUUID()
-    let childType =
-      isDirect &&
-      path.extname(child.path).slice(1) != "medml" &&
-      path.extname(child.path).slice(1) != "medmlres" &&
-      path.extname(child.path).slice(1) != "medeval" &&
-      path.extname(child.path).slice(1) != "medmodel"
+    if (!fileInfo) continue // skip this child instead of aborting the walk for all remaining siblings
+    // For remote entries derive the extension from the name using the LAST dot,
+    // so multi-dot names ("data.v1.csv") resolve correctly; dotfiles and
+    // extensionless names yield "" (mirrors path.extname for local paths)
+    const fileExt = isRemote ? (child.name.lastIndexOf(".") > 0 ? child.name.slice(child.name.lastIndexOf(".")) : "") : path.extname(child.path)
+    childType = fileInfo.isDir &&
+      fileExt.slice(1) != "medml" &&
+      fileExt.slice(1) != "medmlres" &&
+      fileExt.slice(1) != "medeval" &&
+      fileExt.slice(1) != "medmodel"
       ? "directory"
-      : path.extname(child.path).slice(1)
+      : fileExt.slice(1)
+    let uuid = child.name == "DATA" || child.name == "EXPERIMENTS" ? child.name : randomUUID()
+
let childObject = new MEDDataObject({
id: uuid,
name: child.name,
diff --git a/renderer/utilities/fileManagement/fileOps.js b/renderer/utilities/fileManagement/fileOps.js
new file mode 100644
index 00000000..2e615952
--- /dev/null
+++ b/renderer/utilities/fileManagement/fileOps.js
@@ -0,0 +1,370 @@
+import { ipcRenderer } from "electron"
+import {
+ downloadFile as localDownloadFile,
+ loadCSVFromPath as localLoadCSVFromPath,
+ loadCSVPath as localLoadCSVPath,
+ loadJSONFromPath as localLoadJSONFromPath,
+ loadJsonPath as localLoadJsonPath,
+ loadJsonSync as localLoadJsonSync,
+ loadXLSXFromPath as localLoadXLSXFromPath,
+ toLocalPath as localToLocalPath
+} from "../fileManagementUtils"
+
+// Local-only deps (safe in Electron renderer; must be gated by isRemote)
+const fs = require("fs")
+const Papa = require("papaparse")
+const dfd = require("../danfo.js")
+const XLSX = require("xlsx")
+
+/**
+ * Build a danfo `drop` configuration flagging near-empty trailing rows.
+ * Inspects the last `numberOfRowToCheck` rows of `df` and collects indexes of
+ * rows with at most one non-null value. NOTE: this does not drop anything
+ * itself — callers pass the returned object to `df.drop(...)`.
+ * @param {object} df danfo DataFrame
+ * @param {number} numberOfRowToCheck how many trailing rows to inspect
+ * @returns {{ index: any[], inplace: boolean }} config for `DataFrame.drop`
+ */
+function removeEmptyRows(df, numberOfRowToCheck) {
+  let dfLastRows = df.tail(numberOfRowToCheck)
+  let indexToDrop = []
+  // assumes count() returns per-row non-null counts and exposes $index —
+  // TODO confirm against the bundled danfo build in ../danfo.js
+  let rowCounts = dfLastRows.count()
+  rowCounts.values.forEach((rowCount, index) => {
+    if (rowCount <= 1) {
+      indexToDrop.push(rowCounts.$index[index])
+    }
+  })
+  return { index: indexToDrop, inplace: true }
+}
+
+/**
+ * Read a file from the remote workspace via IPC (SFTP in the main process).
+ * @param {string} filePath remote path (normalized to POSIX before the call)
+ * @param {string} [encoding="utf8"] encoding forwarded to the main process
+ * @returns {Promise<string>} file content
+ * @throws {Error} when the IPC call returns nothing or reports failure
+ */
+async function readRemoteFile(filePath, encoding = "utf8") {
+  const result = await ipcRenderer.invoke("readRemoteFile", {
+    path: normalizeRemotePath(filePath),
+    encoding
+  })
+  if (!result || !result.success) {
+    throw new Error(result?.error || "readRemoteFile failed")
+  }
+  return result.content
+}
+
+/**
+ * @param {object} workspace WorkspaceContext.workspace
+ * @returns {boolean} true when the workspace is flagged as remote
+ */
+export function isRemoteWorkspace(workspace) {
+  return !!workspace?.isRemote
+}
+
+/**
+ * Remote paths should be treated as POSIX-style for SFTP.
+ * Converts every backslash to a forward slash; null/undefined yield "".
+ * @param {string} filePath
+ * @returns {string} POSIX-normalized path
+ */
+export function normalizeRemotePath(filePath) {
+  return (filePath || "").replace(/\\/g, "/")
+}
+
+/**
+ * Returns the path separator based on local vs remote workspace.
+ * Remote paths should always use POSIX separator.
+ * @param {{ isRemote?: boolean }} [opts]
+ * @returns {string} "/" for remote or non-Windows hosts, "\\" on local Windows
+ */
+export function getPathSeparator(opts = {}) {
+  if (opts.isRemote) return "/"
+  return process.platform === "win32" ? "\\" : "/"
+}
+
+/**
+ * Cross-platform dirname helper that works for both local and remote path styles.
+ * For remote, returns POSIX-style dirname.
+ * NOTE(review): the separator is inferred from the presence of ANY backslash,
+ * so a local POSIX path that happens to contain '\' would be split on the
+ * wrong separator — confirm inputs never mix separators.
+ * @param {string} filePath
+ * @param {{ isRemote?: boolean }} [opts]
+ * @returns {string} parent directory ("" when no separator; the separator itself for root)
+ */
+export function remoteDirname(filePath, opts = {}) {
+  if (!filePath) return ""
+  const normalized = opts.isRemote ? normalizeRemotePath(filePath) : filePath
+  const separator = normalized.includes("\\") ? "\\" : "/"
+  const idx = normalized.lastIndexOf(separator)
+  if (idx === -1) return ""
+  if (idx === 0) return separator
+  return normalized.slice(0, idx)
+}
+
+/**
+ * Check if a path exists on local disk or remote workspace.
+ * Remote requires active SSH tunnel (handled in main).
+ * NOTE(review): in remote mode any non-"exists" status (including "tunnel
+ * inactive" and SFTP errors) is reported as false — confirm callers tolerate
+ * false negatives when the tunnel is down.
+ * @param {string} filePath
+ * @param {{ isRemote?: boolean }} [opts]
+ * @returns {Promise<boolean>} true when the path exists
+ */
+export async function pathExists(filePath, opts = {}) {
+  const isRemote = !!opts.isRemote
+  if (!filePath) return false
+
+  if (isRemote) {
+    const status = await ipcRenderer.invoke("checkRemoteFileExists", normalizeRemotePath(filePath))
+    return status === "exists"
+  }
+
+  return fs.existsSync(filePath)
+}
+
+/**
+ * lstat for local/remote.
+ * Local errors and remote sentinel responses ("tunnel inactive", "sftp error")
+ * are all mapped to null, giving callers one uniform "no info" signal.
+ * @param {string} filePath
+ * @param {{ isRemote?: boolean }} [opts]
+ * @returns {Promise<{ isDir: boolean, isFile: boolean, stats?: any } | null>}
+ */
+export async function lstat(filePath, opts = {}) {
+  const isRemote = !!opts.isRemote
+  if (!filePath) return null
+
+  if (isRemote) {
+    // main returns either the info object or an error-string sentinel
+    const result = await ipcRenderer.invoke("getRemoteLStat", normalizeRemotePath(filePath))
+    if (!result || result === "tunnel inactive" || result === "sftp error") return null
+    return result
+  }
+
+  try {
+    const stats = fs.lstatSync(filePath)
+    return { isDir: stats.isDirectory(), isFile: stats.isFile(), stats }
+  } catch {
+    return null
+  }
+}
+
+/**
+ * mkdir -p for local/remote.
+ * For remote, this delegates to main's `createRemoteFolder` which can create recursively.
+ * @param {string} dirPath
+ * @param {{ isRemote?: boolean }} [opts]
+ * @returns {Promise<string>} the created path (POSIX-normalized in remote mode)
+ * @throws {Error} when dirPath is missing or the remote creation fails
+ */
+export async function mkdirp(dirPath, opts = {}) {
+  const isRemote = !!opts.isRemote
+  if (!dirPath) throw new Error("mkdirp: dirPath is required")
+
+  if (isRemote) {
+    const result = await ipcRenderer.invoke("createRemoteFolder", {
+      path: normalizeRemotePath(dirPath),
+      recursive: true
+    })
+    if (!result || !result.success) {
+      throw new Error(`mkdirp(remote) failed: ${result?.error || "unknown error"}`)
+    }
+    return normalizeRemotePath(dirPath)
+  }
+
+  await fs.promises.mkdir(dirPath, { recursive: true })
+  return dirPath
+}
+
+/**
+ * rm -rf for local/remote.
+ * Local mode uses { force: true }, so a missing path is not an error.
+ * @param {string} targetPath
+ * @param {{ isRemote?: boolean }} [opts]
+ * @throws {Error} when targetPath is missing or the remote deletion fails
+ */
+export async function rmrf(targetPath, opts = {}) {
+  const isRemote = !!opts.isRemote
+  if (!targetPath) throw new Error("rmrf: targetPath is required")
+
+  if (isRemote) {
+    const result = await ipcRenderer.invoke("deleteRemoteFile", { path: normalizeRemotePath(targetPath), recursive: true })
+    if (!result || !result.success) {
+      throw new Error(`rmrf(remote) failed: ${result?.error || "unknown error"}`)
+    }
+    return
+  }
+
+  fs.rmSync(targetPath, { recursive: true, force: true })
+}
+
+/**
+ * rename/move for local/remote.
+ * NOTE(review): local mode uses fs.renameSync, which fails with EXDEV when
+ * oldPath and newPath live on different filesystems — confirm callers only
+ * move within the workspace volume.
+ * @param {string} oldPath
+ * @param {string} newPath
+ * @param {{ isRemote?: boolean }} [opts]
+ * @throws {Error} when a path is missing or the remote rename fails
+ */
+export async function renamePath(oldPath, newPath, opts = {}) {
+  const isRemote = !!opts.isRemote
+  if (!oldPath || !newPath) throw new Error("renamePath: oldPath and newPath are required")
+
+  if (isRemote) {
+    const result = await ipcRenderer.invoke("renameRemoteFile", {
+      oldPath: normalizeRemotePath(oldPath),
+      newPath: normalizeRemotePath(newPath)
+    })
+    if (!result || !result.success) {
+      throw new Error(`renamePath(remote) failed: ${result?.error || "unknown error"}`)
+    }
+    return
+  }
+
+  fs.renameSync(oldPath, newPath)
+}
+
+/**
+ * Load JSON object from path.
+ * Local mode delegates to compatibility wrappers for existing behavior.
+ * Remote mode loads file over SFTP.
+ * NOTE(review): the return shape differs by mode — local returns the parsed
+ * object synchronously, remote returns a Promise resolving to it (null on
+ * error). Callers must handle both; confirm before awaiting unconditionally.
+ * @param {string} absPath
+ * @param {{ isRemote?: boolean }} [opts]
+ * @param {(data: any) => void} [whenLoaded] invoked with the parsed JSON (null on remote failure)
+ */
+export function loadJsonPath(absPath, opts = {}, whenLoaded) {
+  const isRemote = !!opts.isRemote
+  if (!isRemote) {
+    const json = localLoadJsonPath(absPath)
+    if (typeof whenLoaded === "function") whenLoaded(json)
+    return json
+  }
+
+  return readRemoteFile(absPath, "utf8")
+    .then((content) => {
+      const parsed = JSON.parse(content)
+      if (typeof whenLoaded === "function") whenLoaded(parsed)
+      return parsed
+    })
+    .catch((error) => {
+      console.error("loadJsonPath(remote) error:", error)
+      if (typeof whenLoaded === "function") whenLoaded(null)
+      return null
+    })
+}
+
+/**
+ * Load CSV rows from path and invoke callback.
+ * Remote mode reads the file over SFTP and parses with PapaParse
+ * (header: true, skipEmptyLines: true); on error the callback receives [].
+ * @param {string} absPath
+ * @param {(rows: any[]) => void} whenLoaded
+ * @param {{ isRemote?: boolean }} [opts]
+ */
+export function loadCSVPath(absPath, whenLoaded, opts = {}) {
+  const isRemote = !!opts.isRemote
+  if (!isRemote) {
+    return localLoadCSVPath(absPath, whenLoaded)
+  }
+
+  return readRemoteFile(absPath, "utf8")
+    .then((content) => {
+      const parsed = Papa.parse(content, { header: true, skipEmptyLines: true })
+      whenLoaded(parsed?.data || [])
+      return parsed?.data || []
+    })
+    .catch((error) => {
+      console.error("loadCSVPath(remote) error:", error)
+      whenLoaded([])
+      return []
+    })
+}
+
+/**
+ * Load CSV and normalize similarly to existing loadCSVFromPath behavior.
+ * Remote mode parses over SFTP, converts rows to dataframe JSON and trims
+ * near-empty trailing rows — matching the remote JSON/XLSX loaders, which
+ * already apply removeEmptyRows.
+ * @param {string} filePath
+ * @param {(rows: any[]) => void} whenLoaded
+ * @param {{ isRemote?: boolean }} [opts]
+ */
+export function loadCSVFromPath(filePath, whenLoaded, opts = {}) {
+  const isRemote = !!opts.isRemote
+  if (!isRemote) {
+    return localLoadCSVFromPath(filePath, whenLoaded)
+  }
+
+  return loadCSVPath(
+    filePath,
+    (rows) => {
+      try {
+        const df = new dfd.DataFrame(rows)
+        // Trim near-empty trailing rows for parity with loadJSONFromPath/loadXLSXFromPath
+        df.drop(removeEmptyRows(df, 5))
+        const dfJSON = dfd.toJSON(df)
+        whenLoaded(dfJSON)
+      } catch {
+        // Fall back to the raw parsed rows if dataframe conversion fails
+        whenLoaded(rows)
+      }
+    },
+    { isRemote: true }
+  )
+}
+
+/**
+ * Load JSON dataset from path and invoke callback with dataframe-like JSON.
+ * @param {string} filePath
+ * @param {(rows: any[]) => void} whenLoaded
+ * @param {{ isRemote?: boolean }} [opts]
+ */
+export function loadJSONFromPath(filePath, whenLoaded, opts = {}) {
+ const isRemote = !!opts.isRemote
+ if (!isRemote) {
+ return localLoadJSONFromPath(filePath, whenLoaded)
+ }
+
+ return readRemoteFile(filePath, "utf8")
+ .then((content) => {
+ const result = JSON.parse(content)
+ try {
+ const df = new dfd.DataFrame(result)
+ df.drop(removeEmptyRows(df, 5))
+ const dfJSON = dfd.toJSON(df)
+ whenLoaded(dfJSON)
+ return dfJSON
+ } catch {
+ whenLoaded(result)
+ return result
+ }
+ })
+ .catch((error) => {
+ console.error("loadJSONFromPath(remote) error:", error)
+ whenLoaded([])
+ return []
+ })
+}
+
+/**
+ * Load XLSX from path and invoke callback with dataframe-like JSON.
+ * @param {string} filePath
+ * @param {(rows: any[]) => void} whenLoaded
+ * @param {{ isRemote?: boolean }} [opts]
+ */
+export function loadXLSXFromPath(filePath, whenLoaded, opts = {}) {
+ const isRemote = !!opts.isRemote
+ if (!isRemote) {
+ return localLoadXLSXFromPath(filePath, whenLoaded)
+ }
+
+ return readRemoteFile(filePath, "base64")
+ .then((base64) => {
+ const workbook = XLSX.read(Buffer.from(base64, "base64"), { type: "buffer" })
+ const firstSheetName = workbook.SheetNames && workbook.SheetNames.length > 0 ? workbook.SheetNames[0] : null
+ const rows = firstSheetName ? XLSX.utils.sheet_to_json(workbook.Sheets[firstSheetName], { defval: null }) : []
+ try {
+ const df = new dfd.DataFrame(rows)
+ df.drop(removeEmptyRows(df, 5))
+ const dfJSON = dfd.toJSON(df)
+ whenLoaded(dfJSON)
+ return dfJSON
+ } catch {
+ whenLoaded(rows)
+ return rows
+ }
+ })
+ .catch((error) => {
+ console.error("loadXLSXFromPath(remote) error:", error)
+ whenLoaded([])
+ return []
+ })
+}
+
+/**
+ * Convert a path into a local URI when needed by embedded viewers.
+ * Delegates to compatibility wrapper implementation (local-only; no remote variant).
+ * @param {string} path
+ */
+export function toLocalPath(path) {
+  return localToLocalPath(path)
+}
+
+/**
+ * Trigger browser download of a JSON-serializable object.
+ * Delegates to compatibility wrapper implementation.
+ * @param {any} exportObj
+ * @param {string} exportName
+ */
+export function downloadFile(exportObj, exportName) {
+  return localDownloadFile(exportObj, exportName)
+}
+
+/**
+ * Open file dialog and load selected JSON.
+ * Delegates to compatibility wrapper implementation.
+ * @returns {Promise} resolves with the parsed JSON from the chosen file
+ */
+export function loadJsonSync() {
+  return localLoadJsonSync()
+}
diff --git a/renderer/utilities/fileManagementUtils.js b/renderer/utilities/fileManagementUtils.js
index 5695dfcb..0f3ac8ce 100644
--- a/renderer/utilities/fileManagementUtils.js
+++ b/renderer/utilities/fileManagementUtils.js
@@ -5,9 +5,9 @@ const fs = require("fs")
const Path = require("path")
const { parse } = require("csv-parse")
// Replace danfojs imports with local implementation
-const dfd = require("../utilities/danfo.js")
+const dfd = require("./danfo.js")
// Using the same implementation for both browser and Node environments
-const dfdNode = require("../utilities/danfo.js")
+const dfdNode = require("./danfo.js")
var Papa = require("papaparse")
import { ipcRenderer } from "electron"
@@ -451,6 +451,21 @@ const getFileReadingMethodFromExtension = {
xlsx: (path, whenLoaded) => loadXLSXFromPath(path, whenLoaded)
}
+/**
+ * Cross-platform equivalent to path.dirname(): works for both '/' and '\\' separators.
+ * NOTE(review): near-duplicate of remoteDirname in fileManagement/fileOps.js —
+ * consider keeping a single implementation to avoid drift.
+ * @param {string} filePath - The path to extract the directory from.
+ * @returns {string} Directory path
+ */
+function remoteDirname(filePath) {
+  if (!filePath) return ''
+  // Normalize to handle both separators
+  const separator = filePath.includes('\\') ? '\\' : '/'
+  const idx = filePath.lastIndexOf(separator)
+  if (idx === -1) return ''
+  if (idx === 0) return separator
+  return filePath.slice(0, idx)
+}
+
export {
getPathSeparator,
splitStringAtTheLastSeparator,
@@ -472,5 +487,6 @@ export {
createFolderSync,
loadJSONFromPath,
loadXLSXFromPath,
- getFileReadingMethodFromExtension
+ getFileReadingMethodFromExtension,
+ remoteDirname
}
\ No newline at end of file
diff --git a/renderer/utilities/requests.js b/renderer/utilities/requests.js
index cc5120d2..484fd59a 100644
--- a/renderer/utilities/requests.js
+++ b/renderer/utilities/requests.js
@@ -1,17 +1,25 @@
import { ipcRenderer } from "electron"
import axios from "axios"
import { toast } from "react-toastify"
+import { getTunnelState } from "./tunnelState"
/**
*
- * @param {int} port server port
+ * @param {int} port server port (optional, can be null if using tunnel)
* @param {string} topic route to send the request to
* @param {Object} json2send json to send
* @param {Function} jsonReceivedCB executed when the json is received
* @param {Function} onError executed when an error occurs
*/
 export const requestBackend = (port, topic, json2send, jsonReceivedCB, onError) => {
-  axiosPostJsonGo(port, topic, json2send, jsonReceivedCB, onError)
+  // Use tunnel state if available
+  const tunnel = getTunnelState()
+  let finalPort = port
+  // Prefer direct GO tunnel when available
+  // NOTE(review): this tunnel-port override is duplicated in requestJson —
+  // consider extracting a shared resolveBackendPort() helper
+  if (tunnel && tunnel.tunnelActive && tunnel.localGoPort) {
+    finalPort = tunnel.localGoPort
+  }
+  axiosPostJsonGo(finalPort, topic, json2send, jsonReceivedCB, onError)
 }
/**
@@ -24,7 +32,13 @@ export const requestBackend = (port, topic, json2send, jsonReceivedCB, onError)
* @param {Function} onError executed when an error occurs
*/
export const requestJson = (port, topic, json2send, jsonReceivedCB, onError) => {
- let url = "http://localhost:" + port + (topic[0] != "/" ? "/" : "") + topic
+ const tunnel = getTunnelState()
+ let finalPort = port
+ // Prefer direct GO tunnel when available
+ if (tunnel && tunnel.tunnelActive && tunnel.localGoPort) {
+ finalPort = tunnel.localGoPort
+ }
+ let url = "http://localhost:" + finalPort + (topic[0] != "/" ? "/" : "") + topic
if (topic.includes("http")) {
url = topic
}
@@ -67,11 +81,11 @@ export const requestJson = (port, topic, json2send, jsonReceivedCB, onError) =>
*/
export const axiosPostJsonGo = async (port, topic, json2send, jsonReceivedCB, onError) => {
try {
- let url = "http://localhost:" + port + (topic[0] != "/" ? "/" : "") + topic
+ let url = "http://localhost" + ":" + port + (topic[0] != "/" ? "/" : "") + topic
if (topic.includes("http")) {
url = topic
}
- console.log(url)
+ console.log("[axiosPostJsonGo] Request URL:", url)
const response = await axios.post(url, { message: JSON.stringify(json2send) }, { headers: { "Content-Type": "application/json" } })
if (response.data.type == "toParse") {
let cleanResponse = {}
diff --git a/renderer/utilities/tunnelState.js b/renderer/utilities/tunnelState.js
new file mode 100644
index 00000000..a8146eee
--- /dev/null
+++ b/renderer/utilities/tunnelState.js
@@ -0,0 +1,70 @@
+// Simple tunnel state for use outside React (e.g., in .js files)
+
+/**
+ * Build the default (cleared) tunnel state. Shared by the module initializer
+ * and clearTunnelState() so the two copies cannot drift apart.
+ * @returns {object} fresh default tunnel info
+ */
+function createDefaultTunnelInfo() {
+  return {
+    host: null,
+    tunnelActive: false,
+    localAddress: "localhost",
+    localExpressPort: null,
+    remoteExpressPort: null,
+    localGoPort: null,
+    remoteGoPort: null,
+    localDBPort: null,
+    remoteDBPort: null,
+    localJupyterPort: null,
+    remoteJupyterPort: null,
+    remotePort: null,
+    username: null,
+    // Service statuses and flags
+    serverStartedRemotely: false,
+    expressStatus: "unknown",
+    expressLogPath: null,
+    // Optional persisted remote context
+    remoteWorkspacePath: null,
+    remoteBackendExecutablePath: null,
+    requirementsMetRemote: false,
+    requirementsDetailsRemote: null,
+    requirementsCheckedAt: null,
+    tunnels: []
+  }
+}
+
+let tunnelInfo = createDefaultTunnelInfo()
+
+/**
+ * Merge new tunnel info into the module-level state.
+ * Secrets (password, privateKey) are stripped and never stored.
+ * `tunnelActive` only changes when the caller provides it explicitly.
+ * @param {object} info partial tunnel info to merge
+ */
+export function setTunnelState(info) {
+  // Exclude password
+  const { password, privateKey, ...safeInfo } = info
+  const hasFlag = Object.prototype.hasOwnProperty.call(safeInfo, 'tunnelActive')
+  const nextTunnelActive = hasFlag
+    ? !!safeInfo.tunnelActive
+    : (typeof tunnelInfo.tunnelActive === 'boolean' ? tunnelInfo.tunnelActive : false)
+  tunnelInfo = { ...tunnelInfo, ...safeInfo, tunnelActive: nextTunnelActive }
+}
+
+/**
+ * Reset the tunnel state to defaults (e.g., after disconnecting).
+ */
+export function clearTunnelState() {
+  tunnelInfo = createDefaultTunnelInfo()
+}
+
+/**
+ * @returns {object} current tunnel state snapshot (treat as read-only)
+ */
+export function getTunnelState() {
+  return tunnelInfo
+}
+
diff --git a/tools/call_check_requirements.js b/tools/call_check_requirements.js
new file mode 100644
index 00000000..bcfa45f0
--- /dev/null
+++ b/tools/call_check_requirements.js
@@ -0,0 +1,6 @@
+// Dev helper: call the local Express backend's /check-requirements route.
+// NOTE(review): port 5000 is hardcoded — assumes the backend's default port;
+// see tools/fork_and_call.js for a version that discovers the port via IPC.
+const axios = require('axios')
+axios.get('http://localhost:5000/check-requirements').then(res => {
+  console.log('check-requirements response:', res.data)
+}).catch(err => {
+  console.error('error calling check-requirements:', err.message)
+})
diff --git a/tools/fork_and_call.js b/tools/fork_and_call.js
new file mode 100644
index 00000000..79f6e94f
--- /dev/null
+++ b/tools/fork_and_call.js
@@ -0,0 +1,24 @@
+// Dev helper: fork the Express backend, wait for its EXPRESS_PORT message,
+// hit /check-requirements on the reported port, then kill the child.
+const { fork } = require('child_process')
+const path = require('path')
+const axios = require('axios')
+
+const backend = path.join(__dirname, '..', 'backend', 'expressServer.mjs')
+console.log('Forking backend:', backend)
+const child = fork(backend, { silent: true })
+child.on('message', async (msg) => {
+  console.log('Parent received message from backend:', msg)
+  if (msg && msg.type === 'EXPRESS_PORT') {
+    const port = msg.expressPort
+    try {
+      const res = await axios.get(`http://localhost:${port}/check-requirements`)
+      console.log('check-requirements response:', res.data)
+    } catch (err) {
+      console.error('error calling check-requirements:', err.message)
+    } finally {
+      // Always terminate the forked backend, success or failure
+      child.kill()
+    }
+  }
+})
+
+child.on('exit', (code) => console.log('Child exited', code))
+child.on('error', (err) => console.error('Child error', err))
diff --git a/tools/pack_server.js b/tools/pack_server.js
new file mode 100644
index 00000000..b807d933
--- /dev/null
+++ b/tools/pack_server.js
@@ -0,0 +1,268 @@
+/*
+ Packs the standalone Node server distribution with backend (Express),
+ Go binary, Python code, and PythonEnv helpers. Produces a ZIP per platform.
+*/
+const fs = require('fs');
+const fsp = fs.promises;
+const path = require('path');
+const cp = require('child_process');
+const zipLocal = require('zip-local');
+
+/** Run a shell command synchronously, inheriting stdio; throws on non-zero exit. */
+function sh(cmd, opts = {}) {
+  cp.execSync(cmd, { stdio: 'inherit', ...opts });
+}
+
+/**
+ * If `candidatePath` is a symlink, remove it. Missing paths are ignored;
+ * regular files/directories are left untouched.
+ */
+async function removeRecursiveSymlink(candidatePath) {
+  try {
+    const stats = await fsp.lstat(candidatePath);
+    if (!stats.isSymbolicLink()) {
+      return;
+    }
+  } catch (err) {
+    if (err?.code === 'ENOENT') {
+      return;
+    }
+    throw err;
+  }
+
+  // Remove the symlink target to avoid infinite recursion while zipping
+  await fsp.rm(candidatePath, { recursive: true, force: true });
+}
+
+/** Recursive, overwriting copy (cp -rf equivalent) via fs.promises.cp. */
+async function cpRecursive(src, dest) {
+  await fsp.cp(src, dest, { recursive: true, force: true });
+}
+
+/** mkdir -p equivalent. */
+async function ensureDir(dir) {
+  await fsp.mkdir(dir, { recursive: true });
+}
+
+/** True when `p` is accessible (exists); false otherwise. */
+async function fileExists(p) {
+  try {
+    await fsp.access(p);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+// Remove dependency specs that tend to create symlinks/junctions (file:/link:/workspace:)
+/**
+ * @param {string} stagedBackendDir staging folder holding the copied backend's package.json
+ */
+async function sanitizeStagedBackend(stagedBackendDir) {
+  const stagedPkgPath = path.join(stagedBackendDir, 'package.json');
+  if (!(await fileExists(stagedPkgPath))) return;
+
+  const pkg = JSON.parse(await fsp.readFile(stagedPkgPath, 'utf8'));
+  const deps = pkg.dependencies || {};
+  const badSpecs = new Set(['file:', 'link:', 'workspace:']);
+  const removed = [];
+  for (const [name, spec] of Object.entries(deps)) {
+    if (typeof spec !== 'string') continue;
+    for (const prefix of badSpecs) {
+      if (spec.startsWith(prefix)) {
+        removed.push([name, spec]);
+        delete deps[name];
+        break;
+      }
+    }
+  }
+  if (removed.length) {
+    console.warn('Sanitizing staged backend dependencies that can create recursive symlinks:');
+    for (const [name, spec] of removed) console.warn(`  - removed ${name}: ${spec}`);
+    pkg.dependencies = deps;
+    await fsp.writeFile(stagedPkgPath, JSON.stringify(pkg, null, 2));
+  }
+
+  // Ensure installs are driven from the sanitized package.json
+  // (package-lock/shrinkwrap can reintroduce local/file deps even with flags)
+  await fsp.rm(path.join(stagedBackendDir, 'package-lock.json'), { force: true }).catch(() => {});
+  await fsp.rm(path.join(stagedBackendDir, 'npm-shrinkwrap.json'), { force: true }).catch(() => {});
+}
+
+// Copy a directory tree while excluding specified names. NOTE: the exclusion
+// set is passed down recursively, so matching names are skipped at EVERY
+// depth (not just the top level); symlinks are skipped to avoid EPERM on Windows.
+async function copyDirExcluding(src, dest, excludeNames = new Set()) {
+  await ensureDir(dest);
+  const entries = await fsp.readdir(src, { withFileTypes: true });
+  for (const entry of entries) {
+    if (excludeNames.has(entry.name)) continue;
+    const srcPath = path.join(src, entry.name);
+    const destPath = path.join(dest, entry.name);
+    if (entry.isDirectory()) {
+      await copyDirExcluding(srcPath, destPath, excludeNames);
+    } else if (entry.isSymbolicLink()) {
+      // Skip symlinks to avoid EPERM on Windows
+      continue;
+    } else {
+      await fsp.copyFile(srcPath, destPath);
+    }
+  }
+}
+
+async function main() {
+  const args = process.argv.slice(2);
+  const platformArg = args.find(a => a.startsWith('--platform='));
+  const platform = platformArg ? platformArg.split('=')[1] : process.platform;
+  if (!['win32','linux','darwin'].includes(platform)) {
+    console.error('Unsupported or missing --platform. Use win32 | linux | darwin');
+    process.exit(1);
+  }
+
+  const root = process.cwd();
+  const version = require(path.join(root, 'package.json')).version;
+  const outBase = path.join(root, 'build', 'server', platform);
+  const distDir = path.join(root, 'build', 'dist');
+  await ensureDir(outBase);
+  await ensureDir(distDir);
+
+  // 0) Build Node server binary for the target platform
+  console.log('Building Node server binary with nexe for', platform);
+  const serverBinDir = path.join(root, 'build', 'server');
+  await ensureDir(serverBinDir);
+  if (platform === 'win32') {
+    sh('npm run build:server:win', { shell: true });
+  } else if (platform === 'linux') {
+    sh('npm run build:server:linux', { shell: true });
+  } else {
+    sh('npm run build:server:mac', { shell: true });
+  }
+
+  // 1) Build Go server for the target platform
+  console.log('Building Go server for', platform);
+  const goServerDir = path.join(root, 'go_server');
+  const goOutDir = path.join(root, 'go_executables');
+  await ensureDir(goOutDir);
+  if (platform === 'win32') {
+    sh(`go build -o "${path.join(goOutDir, 'server_go_win32.exe')}" main.go`, { cwd: goServerDir, shell: true });
+  } else if (platform === 'linux') {
+    sh(`CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o "${path.join(goOutDir, 'server_go')}" main.go`, { cwd: goServerDir, shell: true });
+  } else {
+    sh(`go build -o "${path.join(goOutDir, 'server_go')}" main.go`, { cwd: goServerDir, shell: true });
+  }
+
+  // 2) Stage files
+  console.log('Staging files...');
+  // Clean staging dir
+  const entries = await fsp.readdir(outBase).catch(() => []);
+  for (const e of entries) {
+    await fsp.rm(path.join(outBase, e), { recursive: true, force: true });
+  }
+
+  // Copy backend, pythonCode, pythonEnv
+  // Copy backend excluding node_modules to avoid Windows symlink EPERM
+  const stagedBackendDir = path.join(outBase, 'backend');
+  await copyDirExcluding(path.join(root, 'backend'), stagedBackendDir, new Set(['node_modules']));
+  await sanitizeStagedBackend(stagedBackendDir);
+  await cpRecursive(path.join(root, 'pythonCode'), path.join(outBase, 'pythonCode'));
+  await cpRecursive(path.join(root, 'pythonEnv'), path.join(outBase, 'pythonEnv'));
+
+  // Copy Go executable (platform-specific)
+  await ensureDir(path.join(outBase, 'go_executables'));
+  if (platform === 'win32') {
+    await fsp.copyFile(path.join(goOutDir, 'server_go_win32.exe'), path.join(outBase, 'go_executables', 'server_go_win32.exe'));
+  } else {
+    await fsp.copyFile(path.join(goOutDir, 'server_go'), path.join(outBase, 'go_executables', 'server_go'));
+  }
+
+  // Copy Node server binary into bundle root
+  if (platform === 'win32') {
+    await fsp.copyFile(
+      path.join(serverBinDir, 'medomics-server-win.exe'),
+      path.join(outBase, 'medomics-server.exe')
+    );
+  } else if (platform === 'linux') {
+    await fsp.copyFile(
+      path.join(serverBinDir, 'medomics-server-linux'),
+      path.join(outBase, 'medomics-server')
+    );
+    await fsp.chmod(path.join(outBase, 'medomics-server'), 0o755);
+  } else {
+    await fsp.copyFile(
+      path.join(serverBinDir, 'medomics-server-mac'),
+      path.join(outBase, 'medomics-server')
+    );
+    await fsp.chmod(path.join(outBase, 'medomics-server'), 0o755);
+  }
+
+  // 3) Create helper scripts
+  // Install backend production dependencies into the staged backend
+  console.log('Installing backend production dependencies...');
+  // Ensure a clean node_modules in staging
+  await fsp.rm(path.join(stagedBackendDir, 'node_modules'), { recursive: true, force: true }).catch(() => {});
+  // Use npm with --prefix to install into the staged backend folder
+  // Add --no-bin-links to avoid symlink creation on Windows (EPERM without admin/dev-mode)
+  // Also disable lockfile creation; we already removed lockfiles in the staged backend.
+  sh(`npm install --omit=dev --no-bin-links --no-package-lock --prefix "${stagedBackendDir}"`, { shell: true });
+
+  // If the staged backend depends on the parent repo via file:/link:/workspace:,
+  // npm can create a self-referencing symlink/junction that points back into the repo.
+  // zip-local follows symlinks when stat'ing and will recurse until it hits ELOOP.
+  // Remove known self-link candidates proactively.
+  await removeRecursiveSymlink(path.join(stagedBackendDir, 'node_modules', 'medomics-platform'));
+  await removeRecursiveSymlink(path.join(stagedBackendDir, 'node_modules', 'medomicslab-application'));
+
+  // Extra safety: if a published package ever includes build artifacts, drop them.
+  await fsp.rm(path.join(stagedBackendDir, 'node_modules', 'medomics-platform', 'build', 'server'), { recursive: true, force: true }).catch(() => {});
+
+  // NOTE(review): the "Node.js 18+" line below may be stale now that a
+  // self-contained server binary is bundled — confirm before next release.
+  // The "Start script runs" section is corrected to match the actual
+  // start.bat/start.sh generated further down (they run the packaged binary,
+  // not `node ./backend/cli/medomics-server.mjs`).
+  const readme = `MEDomicsLab Server Bundle (v${version})\n\n` +
+`Quick start:\n` +
+`- Ensure Node.js 18+ is installed and on PATH.\n` +
+`- Run the appropriate start script for your OS.\n\n` +
+`Scripts:\n` +
+`- Windows: start.bat, stop.bat\n` +
+`- Linux/mac: start.sh, stop.sh (chmod +x *.sh)\n\n` +
+`Start script runs:\n` +
+`./medomics-server start --json (medomics-server.exe on Windows)\n` +
+`Stop script runs:\n` +
+`./medomics-server stop --json (medomics-server.exe on Windows)\n`;
+  await fsp.writeFile(path.join(outBase, 'README.txt'), readme, 'utf8');
+
+  if (platform === 'win32') {
+    await fsp.writeFile(path.join(outBase, 'start.bat'), [
+      '@echo off',
+      'net session >nul 2>&1',
+      'if %errorLevel% == 0 (',
+      '  echo Running as admin.',
+      '  goto :main',
+      ') else (',
+      '  powershell Start-Process "%~f0" -Verb RunAs',
+      '  exit /b',
+      ')',
+      ':main',
+      'set NODE_ENV=production',
+      'medomics-server.exe start --json',
+      ''
+    ].join('\r\n'), 'utf8');
+    await fsp.writeFile(path.join(outBase, 'stop.bat'), [
+      '@echo off',
+      'medomics-server.exe stop --json',
+      ''
+    ].join('\r\n'), 'utf8');
+  } else {
+    await fsp.writeFile(path.join(outBase, 'start.sh'), [
+      '#!/usr/bin/env bash',
+      'set -e',
+      'export NODE_ENV=production',
+      './medomics-server start --json',
+      ''
+    ].join('\n'), 'utf8');
+    await fsp.chmod(path.join(outBase, 'start.sh'), 0o755);
+    await fsp.writeFile(path.join(outBase, 'stop.sh'), [
+      '#!/usr/bin/env bash',
+      'set -e',
+      './medomics-server stop --json',
+      ''
+    ].join('\n'), 'utf8');
+    await fsp.chmod(path.join(outBase, 'stop.sh'), 0o755);
+  }
+
+  // 4) Zip
+  const zipName = `MEDomicsLab-Server-${version}-${platform}.zip`;
+  const zipPath = path.join(distDir, zipName);
+  console.log('Creating zip:', zipPath);
+  zipLocal.sync.zip(outBase).compress().save(zipPath);
+
+  console.log('Server package created at', zipPath);
+}
+
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
diff --git a/tools/test_spawn_backend.js b/tools/test_spawn_backend.js
new file mode 100644
index 00000000..5ca54dba
--- /dev/null
+++ b/tools/test_spawn_backend.js
@@ -0,0 +1,12 @@
+// small helper to fork the backend and listen for EXPRESS_PORT message
+// NOTE(review): unlike tools/fork_and_call.js, the child is never killed, so
+// this script keeps the backend running until interrupted (Ctrl+C) — appears
+// intentional for manual inspection; confirm before reusing in CI
+const { fork } = require('child_process')
+const path = require('path')
+
+const backend = path.join(__dirname, '..', 'backend', 'expressServer.mjs')
+console.log('Forking backend:', backend)
+const child = fork(backend, { silent: false })
+child.on('message', (msg) => {
+  console.log('Parent received message from backend:', msg)
+})
+child.on('exit', (code) => console.log('Child exited', code))
+child.on('error', (err) => console.error('Child error', err))
diff --git a/utilScripts/install_locally_remote_backend.sh b/utilScripts/install_locally_remote_backend.sh
new file mode 100755
index 00000000..7490b044
--- /dev/null
+++ b/utilScripts/install_locally_remote_backend.sh
@@ -0,0 +1,149 @@
+#!/usr/bin/env bash
+#
+# install_locally_remote_backend.sh
+#
+# Builds the MEDomicsLab server bundle from the local repo and installs it
+# on the current machine (i.e. you are already on the server). After running
+# this script you can start the server with:
+#
+# ~/.medomics/medomics-server/current/start.sh
+#
+# Usage:
+# ./utilScripts/install_locally_remote_backend.sh \
+# [--platform linux|darwin] \
+# [--version