diff --git a/config.json b/config.json index 5ff49378..c1bfb37c 100644 --- a/config.json +++ b/config.json @@ -12,7 +12,7 @@ "BucketName":"zetaforge", "Database":"./zetaforge.db", "SetupVersion":"1", - + "KubeContext": "docker-desktop", "Local": { "BucketPort": 8333, "Driver": "docker-desktop" diff --git a/deploy_app.py b/deploy_app.py index 36d56445..e4a163bb 100644 --- a/deploy_app.py +++ b/deploy_app.py @@ -37,16 +37,7 @@ def get_download_file(client_version, system, arch): bucket_key += ".tar.gz" return bucket_key -def upload_to_s3(file_path, object_key): - s3_client = boto3.client('s3') - bucket_name = 'forge-executables-test' - try: - with open(file_path, 'rb') as file: - s3_client.upload_fileobj(file, bucket_name, object_key) - print(f"File {file_path} uploaded to S3 bucket {bucket_name} as {object_key}") - except Exception as e: - print(f"Error uploading file to S3: {str(e)}") def get_package_version(): # Specify the path to the package.json file @@ -82,7 +73,8 @@ def main(): frontend = os.path.join("frontend", "server2") os.makedirs(frontend, exist_ok=True) res = subprocess.run('npm install', cwd="frontend", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - + + for os_ in os_list: print(f"Compiling {version} for {os_}..") for goarch in args.arch: diff --git a/executor.go b/executor.go index d69f2d6d..486ae304 100644 --- a/executor.go +++ b/executor.go @@ -512,7 +512,7 @@ func terminateArgo(ctx context.Context, cfg Config, db *sql.DB, name string, id func buildImage(ctx context.Context, source string, tag string, logger *log.Logger, cfg Config) error { if cfg.Local.Driver == "minikube" { - minikubeBuild := cmd.NewCmd("minikube", "-p", "zetaforge", "image", "build", "-t", tag, source) + minikubeBuild := cmd.NewCmd("minikube", "-p", cfg.KubeContext, "image", "build", "-t", tag, source) minikubeChan := minikubeBuild.Start() lineCount := 0 done := false @@ -542,7 +542,7 @@ func buildImage(ctx context.Context, source string, tag string, logger *log.Logg } } - minikubeImage := cmd.NewCmd("minikube", "-p", "zetaforge", "image", "ls") + minikubeImage := cmd.NewCmd("minikube", "-p", cfg.KubeContext, "image", "ls") <-minikubeImage.Start() for _, line := range minikubeImage.Status().Stdout { if "docker.io/"+tag == line { diff --git a/frontend/agents/gpt-4_python_compute/generate/computations.mjs b/frontend/agents/gpt-4_python_compute/generate/computations.mjs new file mode 100644 index 00000000..a8258fc7 --- /dev/null +++ b/frontend/agents/gpt-4_python_compute/generate/computations.mjs @@ -0,0 +1,74 @@ +import { Configuration, OpenAIApi } from "openai"; +const openaiSystemContent = `You are an assistant that generates python code and returns it in a way that must follow the template below. +You absolutely need to give a python code section without abbreviation that follows the template. Do not put code lines at the root, but give only the functions and imports. + +By default, when requested to do change or add to the code, modify the latest code section. But when the user ask to do it on another section, do so. + +In the template, the function compute contains the code and the function test contains a series of call to compute that runs and prints multiple tests. + +Don't insert a __main__ section. + +Template: +import ... + +def compute(in1, in2, in3,...): + '''A textual description of the compute function.''' + + #some code + return {{'out1': out1, 'out2': out2, ...}} + +def test(): + # Call compute multiple times based on a series of inputs. 
The outputs are then compare with the expected outputs. Print the results and indicate if the tests passed. +`; + +function extractPythonCode(response) { + const patternBackticks = /```python\n(.*?)```/gs; + const matchesBackticks = [...response.matchAll(patternBackticks)]; + if (matchesBackticks.length === 0) { + return response; + } + const processedCodeBlocks = matchesBackticks.map((match) => { + let codeBlock = match[1]; + codeBlock = codeBlock.replace(/^\s*compute\(.*?\)\s*$/gm, ""); + codeBlock = codeBlock.replace(/^\s*test\(.*?\)\s*$/gm, ""); + return codeBlock.trim(); + }); + return processedCodeBlocks.join("\n\n"); +} + +export async function computeAgent( + userPrompt, + modelVersion, + conversationHistory, + apiKey, +) { + if (conversationHistory.length > 0) { + conversationHistory = [conversationHistory[conversationHistory.length - 1]]; + } + + const escapedHistory = []; + for (const entry of conversationHistory) { + const prompt = entry.prompt.replace(/{/g, "{{").replace(/}/g, "}}"); + const response = entry.response.replace(/{/g, "{{").replace(/}/g, "}}"); + escapedHistory.push({ role: "user", content: prompt }); + escapedHistory.push({ role: "assistant", content: response }); + } + + const messages = [ + { role: "system", content: openaiSystemContent }, + ...escapedHistory, + ]; + messages.push({ role: "user", content: userPrompt }); + + const configuration = new Configuration({ apiKey: apiKey }); + const openai = new OpenAIApi(configuration); + + const response = await openai.createChatCompletion({ + model: modelVersion, + messages: messages, + }); + + const code = extractPythonCode(response.data.choices[0].message.content); + + return { response: code, model: modelVersion }; +} diff --git a/frontend/agents/gpt-4_python_view/generate/computations.mjs b/frontend/agents/gpt-4_python_view/generate/computations.mjs new file mode 100644 index 00000000..f3edc73b --- /dev/null +++ b/frontend/agents/gpt-4_python_view/generate/computations.mjs @@ -0,0 +1,96 @@ +import { Configuration, OpenAIApi } from "openai"; +const openaiSystemContent = `You are an assistant that generates python code and returns it in a way that must follow the template below. Your goal is to generate a view.html file that satisfy the user requirement. +Most importantly, you must rely on the prompt to generate the html_template file that satisfy the user request. The html should contain everything to display in a browser and must rely on CDN or skypack when needed. +You absolutely need to give a python code section without abbreviation that follows the template. Do not put code lines at the root, but give only the functions and imports. + +By default, when requested to do change or add to the code, modify the latest code section. But when the user asks to do it on another section, do so. + +In the template, the function compute contains the code and the function test contains a series of call to compute that runs and prints multiple tests. + +Don't insert a __main__ section. 
+ +Template: +from string import Template + +def compute(in1): + '''Generates an HTML file with a unique name and returns the file name.''' + + html_template = Template(\` + + + + Hello Block View + + + \${in1} + + + \`) + + # Build and save the html file + html_path = f"view.html" + html_code = html_template.substitute(in1=in1) + with open(html_path, "w") as file: + file.write(html_code) + + return {{'html': f"view.html"}} + +def test(): + '''Test the compute function.''' + + print('Running test') + result = compute('Hello view block') + print(f"Generated HTML file: {{result['html']}}") +`; + +function extractPythonCode(response) { + const patternBackticks = /```python\n(.*?)```/gs; + const matchesBackticks = [...response.matchAll(patternBackticks)]; + if (matchesBackticks.length === 0) { + return response; + } + const processedCodeBlocks = matchesBackticks.map((match) => { + let codeBlock = match[1]; + codeBlock = codeBlock.replace(/^\s*compute\(.*?\)\s*$/gm, ""); + codeBlock = codeBlock.replace(/^\s*test\(.*?\)\s*$/gm, ""); + return codeBlock.trim(); + }); + return processedCodeBlocks.join("\n\n"); +} + +export async function computeViewAgent( + userPrompt, + modelVersion, + conversationHistory, + apiKey, +) { + if (conversationHistory.length > 0) { + conversationHistory = [conversationHistory[conversationHistory.length - 1]]; + } + + const escapedHistory = []; + for (const entry of conversationHistory) { + const prompt = entry.prompt.replace(/{/g, "{{").replace(/}/g, "}}"); + const response = entry.response.replace(/{/g, "{{").replace(/}/g, "}}"); + escapedHistory.push({ role: "user", content: prompt }); + escapedHistory.push({ role: "assistant", content: response }); + } + + const messages = [ + { role: "system", content: openaiSystemContent }, + ...escapedHistory, + ]; + messages.push({ role: "user", content: userPrompt }); + + const configuration = new Configuration({ apiKey: apiKey }); + const openai = new OpenAIApi(configuration); + + const response = await openai.createChatCompletion({ + model: modelVersion, + messages: messages, + }); + + const code = extractPythonCode(response.data.choices[0].message.content); + + return { response: code, model: modelVersion }; +} diff --git a/frontend/core/blocks/canny-edge/computations.py b/frontend/core/blocks/canny-edge/computations.py index dcaa1a58..60c26491 100644 --- a/frontend/core/blocks/canny-edge/computations.py +++ b/frontend/core/blocks/canny-edge/computations.py @@ -1,9 +1,7 @@ import os - import cv2 from PIL import Image - def compute(image_path, range_min, range_max, step): """ Computes the Canny edge detection on an image at various thresholds and saves the results. 
diff --git a/frontend/electron-builder.json5 b/frontend/electron-builder.json5 index b562e31c..7395499b 100644 --- a/frontend/electron-builder.json5 +++ b/frontend/electron-builder.json5 @@ -2,19 +2,21 @@ * @see https://www.electron.build/configuration/configuration */ { + afterPack: "./macPacker.cjs", appId: "com.zetane.zetaforge", productName: "ZetaForge", asar: true, directories: { output: "release/${version}", }, - files: ["dist-electron", "dist"], + files: ["dist-electron", "dist", "server"], mac: { artifactName: "${productName}-${version}-darwin-${arch}.${ext}", target: ["tar.gz"], + binaries: ["launch.command"], }, win: { - target: ["tar.gz"], + target: ["tar.gz", "zip"], artifactName: "${productName}-${version}-windows-${arch}.${ext}", }, linux: { @@ -47,6 +49,11 @@ to: "resources", filter: ["**/*"], }, + { + from: "./logs", + to: "logs", + filter: ["**/*"], + }, ], nsis: { oneClick: false, diff --git a/frontend/electron/electron-env.d.ts b/frontend/electron/electron-env.d.ts index 683a8658..ce5d6577 100644 --- a/frontend/electron/electron-env.d.ts +++ b/frontend/electron/electron-env.d.ts @@ -8,4 +8,5 @@ declare namespace NodeJS { /** /dist/ or /public/ */ VITE_PUBLIC: string } -} \ No newline at end of file +} + diff --git a/frontend/electron/main/index.ts b/frontend/electron/main/index.ts index 8589a583..8b68276e 100644 --- a/frontend/electron/main/index.ts +++ b/frontend/electron/main/index.ts @@ -13,9 +13,10 @@ import { release } from "node:os"; import { dirname, join } from "node:path"; import { fileURLToPath } from "node:url"; import "../../polyfill/crypto"; -import { startExpressServer } from "../../server/express.mjs"; -import { update } from "./update"; -import sourcemap from "source-map-support"; +import {gracefullyStopAnvil, startExpressServer } from "../../server/express.mjs"; +import sourcemap from 'source-map-support'; +import path from "path"; +import fs from 'fs'; Sentry.init({ dsn: "https://7fb18e8e487455a950298625457264f3@o1096443.ingest.us.sentry.io/4507031960223744", @@ -26,6 +27,8 @@ const __dirname = dirname(__filename); import { appRouter } from "../../server/router"; import { absoluteCachePath } from "../../server/cache"; +let router_created = false + sourcemap.install(); // The built directory structure @@ -38,8 +41,38 @@ sourcemap.install(); // ├─┬ dist // │ └── index.html > Electron-Renderer // -process.env.DIST_ELECTRON = join(__dirname, "../"); -process.env.DIST = join(process.env.DIST_ELECTRON, "../dist"); + + + + + +ipcMain.handle('request-latest-logs', async () => { + try { + const logContent = fs.readFileSync(path.join(logFilePath, 'app.log'), 'utf-8'); + const logEntries = logContent.split('\n').filter(Boolean).map(line => JSON.parse(line)); + return logEntries; + } catch (error) { + console.error('Error reading log file:', error); + return []; + } +}); + + +const logFilePath = app.isPackaged + ? path.join(process.resourcesPath, 'logs') + : path.join(process.cwd(), 'logs'); + +fs.mkdirSync(path.dirname(logFilePath), { recursive: true }); + +const appLogs = path.join(logFilePath, 'app.log') +fs.writeFileSync(appLogs, "") //reset log file + + + + + +process.env.DIST_ELECTRON = join(__dirname, '../') +process.env.DIST = join(process.env.DIST_ELECTRON, '../dist') process.env.VITE_PUBLIC = process.env.VITE_DEV_SERVER_URL ? 
join(process.env.DIST_ELECTRON, "../public") : process.env.DIST; @@ -53,6 +86,16 @@ if (targetIndex !== -1) { process.env.VITE_ZETAFORGE_IS_DEV = "False"; } +const isPip = '--is_pip' + +const targetPipIndex = process.argv.indexOf(isPip) + +if(targetPipIndex !== -1) { + process.env.VITE_IS_PIP = 'True' +} else { + process.env.VITE_IS_PIP = 'False' +} + // Disable GPU Acceleration for Windows 7 if (release().startsWith("6.1")) app.disableHardwareAcceleration(); @@ -133,15 +176,40 @@ async function createWindow() { }, }); - createIPCHandler({ router: appRouter, windows: [win] }); + //Creating multiple IPCHandlers causes errors on darwin, when closing and opening windows + if(!router_created) { + + createIPCHandler({ router: appRouter, windows: [win] }); + router_created = true + } // Pass the menuTemplate const menu = Menu.buildFromTemplate(menuTemplate); Menu.setApplicationMenu(menu); - ipcMain.handle("get-cache", () => { - return absoluteCachePath; - }); + try{ + ipcMain.handle("get-cache", () => { + return absoluteCachePath; + }); + } catch { + ipcMain.removeHandler("get-cache") + ipcMain.handle("get-cache", () => { + return absoluteCachePath + }) + } + + + try{ + ipcMain.handle('get-path', (_, arg) => { + return app.getPath(arg) + }) + } catch(err) { + ipcMain.removeHandler('get-path') + ipcMain.handle('get-path', (_, arg) => { + return app.getPath(arg) + }) + } + if (url) { // electron-vite-vue#298 @@ -189,7 +257,15 @@ app.on("window-all-closed", () => { if (process.platform !== "darwin") app.quit(); }); -app.on("second-instance", () => { +app.on('will-quit', async () => { + await gracefullyStopAnvil() +}) + + + + + +app.on('second-instance', () => { if (win) { // Focus on the main window if the user tried to open another if (win.isMinimized()) win.restore(); diff --git a/frontend/electron/main/update.ts b/frontend/electron/main/update.ts index f69bcd1d..587d5476 100644 --- a/frontend/electron/main/update.ts +++ b/frontend/electron/main/update.ts @@ -26,6 +26,7 @@ export function update(win: Electron.BrowserWindow) { win.webContents.send('update-can-available', { update: false, version: app.getVersion(), newVersion: arg?.version }) }) + // Checking for updates ipcMain.handle('check-update', async () => { if (!app.isPackaged) { @@ -39,6 +40,7 @@ export function update(win: Electron.BrowserWindow) { return { message: 'Network error', error } } }) + // Start downloading and feedback on progress ipcMain.handle('start-download', (event: Electron.IpcMainInvokeEvent) => { diff --git a/frontend/electron/preload/index.ts b/frontend/electron/preload/index.ts index 98bef3a1..64825266 100644 --- a/frontend/electron/preload/index.ts +++ b/frontend/electron/preload/index.ts @@ -3,13 +3,33 @@ import { exposeElectronTRPC } from 'electron-trpc/main'; import path from "path"; import * as Sentry from "@sentry/electron"; + Sentry.init({ dsn: "https://7fb18e8e487455a950298625457264f3@o1096443.ingest.us.sentry.io/4507031960223744" }); + + + + // --------- Expose API to the Renderer process --------- contextBridge.exposeInMainWorld('cache', { local: async () => path.join(await ipcRenderer.invoke('get-cache') + path.sep), }) + +contextBridge.exposeInMainWorld('systemLogs', { + onUpdateLogs: (callback: any) => { + // Safely expose the event listener + const handler = (_:any, logs: any) => callback(logs); + ipcRenderer.on('update-logs', handler); + + // Return a function to remove the listener, which can be called later + return () => ipcRenderer.removeListener('update-logs', handler); + }, + requestLatestLogs: 
() => ipcRenderer.invoke('request-latest-logs'), + +}); + + // --------- Preload scripts loading --------- function domReady(condition: DocumentReadyState[] = ['complete', 'interactive']) { return new Promise(resolve => { diff --git a/frontend/launch.command b/frontend/launch.command new file mode 100755 index 00000000..78ec2c17 --- /dev/null +++ b/frontend/launch.command @@ -0,0 +1,20 @@ +#!/bin/bash + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +source ~/.zshrc +source ~/.bash_profile + +while read -r line; do + if [[ ":$PATH:" != *":$line:"* ]]; then + PATH="$line:$PATH" + fi +done < /etc/paths + +export PATH + + + + +./ZetaForge diff --git a/frontend/logs/README.md b/frontend/logs/README.md new file mode 100644 index 00000000..e69de29b diff --git a/frontend/macPacker.cjs b/frontend/macPacker.cjs new file mode 100644 index 00000000..0dd9214a --- /dev/null +++ b/frontend/macPacker.cjs @@ -0,0 +1,44 @@ +exports.default = async function (context) { + console.log(`\n- [INFO] building for ${context.electronPlatformName} \n`); + if (context.electronPlatformName !== "darwin") { + return; + } + const { readFileSync, writeFileSync, copyFileSync } = await import("fs"); + const path = (await import("path")).default; + + const appOutDir = context.appOutDir; + const appName = context.packager.appInfo.productFilename; + // const { readFileSync, writeFileSync, copyFileSync} = require('fs'); + // const path = require('path'); + + // Path to the Info.plist file + const plistPath = path.join( + appOutDir, + `${appName}.app`, + "Contents", + "Info.plist", + ); + + // // Modify Info.plist + const plist = readFileSync(plistPath, "utf8"); + const modifiedPlist = plist.replace( + /CFBundleExecutable<\/key>\s*[^<]*<\/string>/, + "CFBundleExecutablelaunch.command", + ); + writeFileSync(plistPath, modifiedPlist, "utf8"); + + // // Copy launch.sh to the .app bundle + const scriptSourcePath = path.join(__dirname, "launch.command"); + const scriptDestinationPath = path.join( + appOutDir, + `${appName}.app`, + "Contents", + "MacOS", + "launch.command", + ); + copyFileSync(scriptSourcePath, scriptDestinationPath); + + // // Make sure the script is executable + // chmodSync(scriptDestinationPath, '0755'); + console.log("\n- [INFO] Testing afterPack functionality\n"); +}; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 63709303..fe165391 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,14 +1,15 @@ { "name": "zetaforge", - "version": "0.3.2", + "version": "0.4.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "zetaforge", - "version": "0.3.2", + "version": "0.4.0", "license": "AGPL-3.0-only", "dependencies": { + "@aws-sdk/client-s3": "^3.525.0", "@carbon/icons-react": "^11.46.0", "@sentry/electron": "^4.22.0", "@sentry/react": "^7.112.2", @@ -17,18 +18,22 @@ "allotment": "^1.20.2", "electron-trpc": "^0.5.2", "electron-updater": "^6.1.1", + "express": "^4.18.2", "getmac": "^6.6.0", "js-sha1": "^0.7.0", "localstorage-migrator": "^1.0.8", "pino": "^9.2.0", "pino-caller": "^3.4.0", "pino-pretty": "^11.2.1", + "pyodide": "^0.26.2", + "python-shell": "^5.0.0", "rfdc": "^1.3.1", "s3-sync-client": "^4.3.1", "source-map-support": "^0.5.21", "use-debounce": "^10.0.3", "use-immer": "^0.9.0", - "uuidv7": "^0.6.3" + "uuidv7": "^0.6.3", + "vite-plugin-top-level-await": "^1.4.2" }, "devDependencies": { "@aws-sdk/client-s3": "^3.525.0", @@ -44,6 +49,8 @@ "@fortawesome/fontawesome-svg-core": "^6.5.1", 
"@tanstack/react-query": "^4.36.1", "@trpc/react-query": "^10.45.1", + "@types/app-root-dir": "^0.1.4", + "@types/express": "^4.17.21", "@types/react": "^18.2.20", "@types/react-dom": "^18.2.7", "@types/sha256": "^0.2.2", @@ -3485,6 +3492,22 @@ "@lezer/lr": "^1.0.0" } }, + "node_modules/@rollup/plugin-virtual": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@rollup/plugin-virtual/-/plugin-virtual-3.0.2.tgz", + "integrity": "sha512-10monEYsBp3scM4/ND4LNH5Rxvh3e/cVeL3jWTgZ2SrQ+BmUoQcopVQvnaMcOnykb1VkxUFuDAN+0FnpTFRy2A==", + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, "node_modules/@rollup/rollup-android-arm-eabi": { "version": "4.21.3", "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.3.tgz", @@ -3492,7 +3515,6 @@ "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "android" @@ -3505,7 +3527,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "android" @@ -3518,7 +3539,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "darwin" @@ -3531,7 +3551,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "darwin" @@ -3544,7 +3563,6 @@ "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3557,7 +3575,6 @@ "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3570,7 +3587,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3583,7 +3599,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3596,7 +3611,6 @@ "cpu": [ "ppc64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3609,7 +3623,6 @@ "cpu": [ "riscv64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3622,7 +3635,6 @@ "cpu": [ "s390x" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3635,7 +3647,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3648,7 +3659,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -3661,7 +3671,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "win32" @@ -3674,7 +3683,6 @@ "cpu": [ "ia32" ], - "dev": true, "optional": true, "os": [ "win32" @@ -3687,7 +3695,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "win32" @@ -4677,6 +4684,206 @@ "node": ">=16.0.0" } }, + "node_modules/@swc/core": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.7.22.tgz", + "integrity": "sha512-Asn79WKqyjEuO2VEeSnVjn2YiRMToRhFJwOsQeqftBvwWMn1FGUuzVcXtkQFBk37si8Gh2Vkk/+p0u4K5NxDig==", + "hasInstallScript": true, + "dependencies": { + "@swc/counter": "^0.1.3", + "@swc/types": "^0.1.12" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/swc" + }, + "optionalDependencies": { + "@swc/core-darwin-arm64": "1.7.22", + "@swc/core-darwin-x64": "1.7.22", + "@swc/core-linux-arm-gnueabihf": "1.7.22", + "@swc/core-linux-arm64-gnu": "1.7.22", + "@swc/core-linux-arm64-musl": "1.7.22", + "@swc/core-linux-x64-gnu": "1.7.22", + "@swc/core-linux-x64-musl": "1.7.22", + "@swc/core-win32-arm64-msvc": "1.7.22", + "@swc/core-win32-ia32-msvc": "1.7.22", + "@swc/core-win32-x64-msvc": "1.7.22" + }, + "peerDependencies": { + "@swc/helpers": "*" + }, + "peerDependenciesMeta": { + "@swc/helpers": { + "optional": true + } + } + }, + "node_modules/@swc/core-darwin-arm64": { + "version": "1.7.22", + "resolved": 
"https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.7.22.tgz", + "integrity": "sha512-B2Bh2W+C7ALdGwDxRWAJ+UtNExfozvwyayGiNkbR3wmDKXXeQfhGM5MK+QYUWKu7UQ6ATq69OyZrxofDobKUug==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-darwin-x64": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.7.22.tgz", + "integrity": "sha512-s34UQntnQ6tL9hS9aX3xG7OfGhpmy05FEEndbHaooGO8O+L5k8uWxhE5KhYCOC0N803sGdZg6YZmKtYrWN/YxA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm-gnueabihf": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.7.22.tgz", + "integrity": "sha512-SE69+oos1jLOXx5YdMH//Qc5zQc2xYukajB+0BWmkcFd/S/cCanGWYtdSzYausm8af2Fw1hPJMNIfndJLnBDFw==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-gnu": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.7.22.tgz", + "integrity": "sha512-59FzDW/ojgiTj4dlnv3Z3ESuVlzhSAq9X12CNYh4/WTCNA8BoJqOnWMRQKspWtoNlnVviFLMvpek0pGXHndEBA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-musl": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.7.22.tgz", + "integrity": "sha512-cMQMI8YRO/XR3OrYuiUlWksNsJOZSkA6gSLNyH6eHTw+FOAzv05oJ4SFYe6s1WesrOqRwhpez6y5H6OIP/EKzg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-gnu": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.7.22.tgz", + "integrity": "sha512-639kA7MXrWqWYfwuSJ+XTg21VYb/5o99R1zJrndoEjEX6m7Wza/sXssQKU5jbbkPoSEKVKNP3n/gazLWiUKgiQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-musl": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.7.22.tgz", + "integrity": "sha512-f3zfGgY8EJQUOk3ve25ZTkNkhB/kHo9QlN2r+0exaE1g9W7X8IS6J8pWzF3hJrV2P9dBi6ofMOt+opVA89JKHA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-arm64-msvc": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.7.22.tgz", + "integrity": "sha512-p/Fav5U+LtTJD/tbbS0dKK8SVVAhXo5Jdm1TDeBPJ4BEIVguYBZEXgD3CW9wY4K34g1hscpiz2Q2rktfhFj1+A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-ia32-msvc": { + "version": "1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.7.22.tgz", + "integrity": "sha512-HbmfasaCNTqeCTvDjleYj+jJZQ6MlraiVOdhW55KtbA9mAVQdPBq6DDAvR7VOero3wUNYUM/e36otFKgEJI5Rg==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-x64-msvc": { + "version": 
"1.7.22", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.7.22.tgz", + "integrity": "sha512-lppIveE+hpe7WXny/9cUT+T6sBM/ND0E+dviKWJ5jFBISj2KWomlSJGUjYEsRGJVPnTEc8uOlKK7etmXBhQx9A==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==" + }, + "node_modules/@swc/types": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.12.tgz", + "integrity": "sha512-wBJA+SdtkbFhHjTMYH+dEH1y4VpfGdAc2Kw/LK09i9bXd/K6j6PkDcFCEzb6iVfZMkPRrl/q0e3toqTAJdkIVA==", + "dependencies": { + "@swc/counter": "^0.1.3" + } + }, "node_modules/@szmarczak/http-timer": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", @@ -4771,6 +4978,12 @@ ], "peer": true }, + "node_modules/@types/app-root-dir": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@types/app-root-dir/-/app-root-dir-0.1.4.tgz", + "integrity": "sha512-KQDMeSvfgmzVfXFzzEGYRdGxokYVrpfuB2Zr2Mbt5G5MQKiiPH2OTNGv9XRqoXN+9vVHcDercrmqWVGRuGWY+A==", + "dev": true + }, "node_modules/@types/babel__core": { "version": "7.20.5", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", @@ -4812,6 +5025,16 @@ "@babel/types": "^7.20.7" } }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "dev": true, + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, "node_modules/@types/cacheable-request": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", @@ -4823,6 +5046,15 @@ "@types/responselike": "^1.0.0" } }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/css-font-loading-module": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/@types/css-font-loading-module/-/css-font-loading-module-0.0.7.tgz", @@ -4841,8 +5073,31 @@ "node_modules/@types/estree": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", - "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", - "dev": true + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==" + }, + "node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dev": true, + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.5", + "resolved": 
"https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.5.tgz", + "integrity": "sha512-y6W03tvrACO72aijJ5uF02FRq5cgDR9lUxddQ8vyF+GvmjJQqbzDcJngEjURc+ZsG31VI3hODNZJ2URj86pzmg==", + "dev": true, + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } }, "node_modules/@types/fs-extra": { "version": "9.0.13", @@ -4868,6 +5123,12 @@ "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==" }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "dev": true + }, "node_modules/@types/keyv": { "version": "3.1.4", "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz", @@ -4876,6 +5137,12 @@ "@types/node": "*" } }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "dev": true + }, "node_modules/@types/minimatch": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", @@ -4919,6 +5186,18 @@ "integrity": "sha512-hCZTSvwbzWGvhqxp/RqVqwU999pBf2vp7hzIjiYOsl8wqOmUxkQ6ddw1cV3l8811+kdUFus/q4d1Y3E3SyEifA==", "dev": true }, + "node_modules/@types/qs": { + "version": "6.9.16", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.16.tgz", + "integrity": "sha512-7i+zxXdPD0T4cKDuxCUXJ4wHcsJLwENa6Z3dCu8cfCK743OGy5Nu1RmAGqDPsoTDINVEcdXKRvR/zre+P2Ku1A==", + "dev": true + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true + }, "node_modules/@types/react": { "version": "18.3.7", "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.7.tgz", @@ -4946,6 +5225,27 @@ "@types/node": "*" } }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "dev": true, + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dev": true, + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, "node_modules/@types/sha256": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/@types/sha256/-/sha256-0.2.2.tgz", @@ -6664,7 +6964,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.0.tgz", "integrity": "sha512-mxIojEAQcuEvT/lyXq+jf/3cO/KoA6z4CeNDGGevTybECPOMFCnQy3OPahluUkbqgPNGw5Bi78UC7Po6Lhy+NA==", - "dev": true, + "devOptional": true, "dependencies": { "readdirp": "^4.0.1" }, @@ -9296,7 +9596,6 @@ "version": "2.3.3", "resolved": 
"https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, "hasInstallScript": true, "optional": true, "os": [ @@ -9894,7 +10193,7 @@ "version": "4.3.7", "resolved": "https://registry.npmjs.org/immutable/-/immutable-4.3.7.tgz", "integrity": "sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw==", - "dev": true + "devOptional": true }, "node_modules/import-fresh": { "version": "3.3.0", @@ -11743,8 +12042,7 @@ "node_modules/picocolors": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", - "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", - "dev": true + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==" }, "node_modules/picomatch": { "version": "2.3.1", @@ -11963,7 +12261,6 @@ "version": "8.4.47", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.47.tgz", "integrity": "sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ==", - "dev": true, "funding": [ { "type": "opencollective", @@ -12147,7 +12444,6 @@ "version": "3.3.7", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", - "dev": true, "funding": [ { "type": "github", @@ -12378,6 +12674,25 @@ "node": ">=6" } }, + "node_modules/pyodide": { + "version": "0.26.2", + "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.26.2.tgz", + "integrity": "sha512-8VCRdFX83gBsWs6XP2rhG8HMaB+JaVyyav4q/EMzoV8fXH8HN6T5IISC92SNma6i1DRA3SVXA61S1rJcB8efgA==", + "dependencies": { + "ws": "^8.5.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/python-shell": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/python-shell/-/python-shell-5.0.0.tgz", + "integrity": "sha512-RUOOOjHLhgR1MIQrCtnEqz/HJ1RMZBIN+REnpSUrfft2bXqXy69fwJASVziWExfFXsR1bCY0TznnHooNsCo0/w==", + "engines": { + "node": ">=0.10" + } + }, "node_modules/qs": { "version": "6.13.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", @@ -12612,7 +12927,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.0.1.tgz", "integrity": "sha512-GkMg9uOTpIWWKbSsgwb5fA4EavTR+SG/PMPoAY8hkhHfEEY0/vqljY+XHqtDf2cr2IJtoNRDbrrEpZUiZCkYRw==", - "dev": true, + "devOptional": true, "engines": { "node": ">= 14.16.0" }, @@ -12801,7 +13116,6 @@ "version": "4.21.3", "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.3.tgz", "integrity": "sha512-7sqRtBNnEbcBtMeRVc6VRsJMmpI+JU1z9VTvW8D4gXIYQFz0aLcsE6rRkyghZkLfEgUZgVvOG7A5CVz/VW5GIA==", - "dev": true, "dependencies": { "@types/estree": "1.0.5" }, @@ -12974,7 +13288,7 @@ "version": "1.79.1", "resolved": "https://registry.npmjs.org/sass/-/sass-1.79.1.tgz", "integrity": "sha512-+mA7svoNKeL0DiJqZGeR/ZGUu8he4I8o3jyUcOFyo4eBJrwNgIMmAEwCMo/N2Y3wdjOBcRzoNxZIOtrtMX8EXg==", - "dev": true, + "devOptional": true, "dependencies": { "chokidar": "^4.0.0", "immutable": "^4.0.0", @@ -13350,7 +13664,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -14500,7 +14813,6 @@ 
"version": "5.4.6", "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.6.tgz", "integrity": "sha512-IeL5f8OO5nylsgzd9tq4qD2QqI0k2CQLGrWD0rCN0EQJZpBK5vJAx0I+GDkMOXxQX/OfFHMuLIx6ddAxGX/k+Q==", - "dev": true, "dependencies": { "esbuild": "^0.21.3", "postcss": "^8.4.43", @@ -14597,6 +14909,31 @@ "integrity": "sha512-EQ7ORuPp8vFPCqfuGnVo7d36fXS0IFH4/RUlKb1drseix3TQEPcgwEuFADdXBxRgqMp70njz/1m0kdf5lEsm8w==", "dev": true }, + "node_modules/vite-plugin-top-level-await": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/vite-plugin-top-level-await/-/vite-plugin-top-level-await-1.4.4.tgz", + "integrity": "sha512-QyxQbvcMkgt+kDb12m2P8Ed35Sp6nXP+l8ptGrnHV9zgYDUpraO0CPdlqLSeBqvY2DToR52nutDG7mIHuysdiw==", + "dependencies": { + "@rollup/plugin-virtual": "^3.0.2", + "@swc/core": "^1.7.0", + "uuid": "^10.0.0" + }, + "peerDependencies": { + "vite": ">=2.8" + } + }, + "node_modules/vite-plugin-top-level-await/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/vite/node_modules/@esbuild/aix-ppc64": { "version": "0.21.5", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", @@ -14604,7 +14941,6 @@ "cpu": [ "ppc64" ], - "dev": true, "optional": true, "os": [ "aix" @@ -14620,7 +14956,6 @@ "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "android" @@ -14636,7 +14971,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "android" @@ -14652,7 +14986,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "android" @@ -14668,7 +15001,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "darwin" @@ -14684,7 +15016,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "darwin" @@ -14700,7 +15031,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "freebsd" @@ -14716,7 +15046,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "freebsd" @@ -14732,7 +15061,6 @@ "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14748,7 +15076,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14764,7 +15091,6 @@ "cpu": [ "ia32" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14780,7 +15106,6 @@ "cpu": [ "loong64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14796,7 +15121,6 @@ "cpu": [ "mips64el" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14812,7 +15136,6 @@ "cpu": [ "ppc64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14828,7 +15151,6 @@ "cpu": [ "riscv64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14844,7 +15166,6 @@ "cpu": [ "s390x" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14860,7 +15181,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "linux" @@ -14876,7 +15196,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "netbsd" @@ -14892,7 +15211,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "openbsd" @@ -14908,7 +15226,6 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "sunos" @@ -14924,7 +15241,6 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "win32" @@ -14940,7 +15256,6 @@ "cpu": [ "ia32" ], - "dev": true, "optional": true, "os": [ "win32" @@ -14956,7 +15271,6 @@ "cpu": [ "x64" ], - "dev": 
true, "optional": true, "os": [ "win32" @@ -14969,7 +15283,6 @@ "version": "0.21.5", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", - "dev": true, "hasInstallScript": true, "bin": { "esbuild": "bin/esbuild" @@ -15304,6 +15617,26 @@ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, + "node_modules/ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/xmlbuilder": { "version": "15.1.1", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-15.1.1.tgz", diff --git a/frontend/package.json b/frontend/package.json index a08ed6fb..c34cbb93 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "zetaforge", - "version": "0.3.2", + "version": "0.4.0", "main": "dist-electron/main/index.js", "description": "ZetaForge", "author": "Zetane ", @@ -17,9 +17,9 @@ "build:all": "tsc && vite build && electron-builder --x64 --arm64 -mwl", "build:darwin-amd64": "tsc && vite build && electron-builder --mac --x64", "build:darwin-arm64": "tsc && vite build && electron-builder --mac --arm64", - "build:windows": "tsc && vite build && electron-builder --win --x64", - "build:linux-amd64": "tsc && vite build && electron-builder --linux --x64", - "build:linux-arm64": "tsc && vite build && electron-builder --linux --arm64", + "build:windows": "tsc && vite build && electron-builder --win --x64", + "build:linux-amd64": "tsc && vite build && electron-builder --linux --x64", + "build:linux-arm64": "tsc && vite build && electron-builder --linux --arm64", "preview": "vite preview", "pree2e": "vite build --mode=test", "lint": "eslint .", @@ -30,6 +30,7 @@ }, "dependencies": { "@carbon/icons-react": "^11.46.0", + "@aws-sdk/client-s3": "^3.525.0", "@sentry/electron": "^4.22.0", "@sentry/react": "^7.112.2", "@types/source-map-support": "^0.5.10", @@ -37,18 +38,22 @@ "allotment": "^1.20.2", "electron-trpc": "^0.5.2", "electron-updater": "^6.1.1", + "express": "^4.18.2", "getmac": "^6.6.0", "js-sha1": "^0.7.0", "localstorage-migrator": "^1.0.8", "pino": "^9.2.0", "pino-caller": "^3.4.0", "pino-pretty": "^11.2.1", + "pyodide": "^0.26.2", + "python-shell": "^5.0.0", "rfdc": "^1.3.1", "s3-sync-client": "^4.3.1", "source-map-support": "^0.5.21", "use-debounce": "^10.0.3", "use-immer": "^0.9.0", - "uuidv7": "^0.6.3" + "uuidv7": "^0.6.3", + "vite-plugin-top-level-await": "^1.4.2" }, "devDependencies": { "@aws-sdk/client-s3": "^3.525.0", @@ -64,6 +69,8 @@ "@fortawesome/fontawesome-svg-core": "^6.5.1", "@tanstack/react-query": "^4.36.1", "@trpc/react-query": "^10.45.1", + "@types/app-root-dir": "^0.1.4", + "@types/express": "^4.17.21", "@types/react": "^18.2.20", "@types/react-dom": "^18.2.7", "@types/sha256": "^0.2.2", diff --git a/frontend/resources/compileComputation.mjs b/frontend/resources/compileComputation.mjs new file mode 100644 index 00000000..6e87e20f --- /dev/null +++ b/frontend/resources/compileComputation.mjs @@ 
-0,0 +1,156 @@ +import { parser } from "@lezer/python"; + +async function validateSource(source) { + // await PythonShell.checkSyntax(source); + //PYODIDE REQUIRES TO INSTALL DISTRIBUTIONS FOR NODEJS, + //SO IT MIGHT NOT BE FEASIBLE. + //BELOW IS THE ENCHANCED VERSION OF THE SYNTAX CHECKER + try { + const tree = parser.parse(source); + const cursor = tree.cursor(); + + do { + if (cursor.type.name === "⚠") { + throw new Error("Syntax error found at: ", cursor.type.name); + } + } while (cursor.next()); + } catch (err) { + throw new Error("Please check your python syntax: ", err.message); + } +} + +export async function compileComputationFunction(source) { + const result = await extractIO(source); + const docstring = getDocstring(source); + if (docstring) { + result["description"] = docstring; + } + return result; +} + +async function extractIO(source) { + await validateSource(source); + const tree = parser.parse(source); + const cursor = tree.cursor(); + + const functionInfo = {}; + while (cursor.next()) { + if (cursor.node.name === "FunctionDefinition") { + cursor.firstChild(); + cursor.nextSibling(); + const functionName = source.slice(cursor.node.from, cursor.node.to); + if (functionName === "compute") { + cursor.nextSibling(); + const inputs = {}; + cursor.firstChild(); + cursor.nextSibling(); + while (cursor.node.name !== ")") { + const param = source.slice(cursor.node.from, cursor.node.to); + inputs[param] = { + type: "Any", // Type inference is complex in Python + connections: [], + relays: [], + }; + cursor.nextSibling(); + if (cursor.node.name === ",") cursor.nextSibling(); + } + + functionInfo["inputs"] = inputs; + cursor.parent(); + + const outputs = {}; + while (cursor.nextSibling()) { + if (cursor.node.name === "Body") { + cursor.firstChild(); + while (cursor.nextSibling()) { + if (cursor.node.name === "ReturnStatement") { + cursor.firstChild(); + cursor.nextSibling(); + if (cursor.node.name === "DictionaryExpression") { + cursor.firstChild(); + cursor.nextSibling(); + while (cursor.node.name !== "}") { + if (cursor.node.name === "String") { + cursor.nextSibling(); + if (cursor.node.name === ":") { + cursor.prevSibling(); + const outputName = source.slice( + cursor.node.from + 1, + cursor.node.to - 1, + ); + outputs[outputName] = { + type: "Any", + connections: [], + relays: [], + }; + } + } + cursor.nextSibling(); + } + } + } + } + } + } + functionInfo["outputs"] = outputs; + break; + } + } + } + return functionInfo; +} + +function getDocstring(source) { + const tree = parser.parse(source); + const cursor = tree.cursor(); + + const docstrings = {}; + + while (cursor.next()) { + if (cursor.node.name === "FunctionDefinition") { + cursor.firstChild(); // Move to the 'def' keyword + cursor.nextSibling(); // Move to the function name + + const functionName = source.slice(cursor.node.from, cursor.node.to); + + if (functionName === "compute") { + cursor.firstChild(); + while (cursor.node.name !== "Body") { + cursor.nextSibling(); + } + cursor.firstChild(); + while (cursor.nextSibling()) { + if (cursor.node.name === "ExpressionStatement") { + const docstring = source.slice( + cursor.node.from + 3, + cursor.node.to - 3, + ); + return docstring; + } + } + } + + cursor.nextSibling(); // Move to the parameters + cursor.nextSibling(); // Move to the colon or the body + + while (cursor.nextSibling()) { + if (cursor.node.name === "Body") { + cursor.firstChild(); // Enter the body + // Check if the first statement is a docstring + if (cursor.node.name === "ExpressionStatement") { + 
cursor.firstChild(); // Move to the string + if (cursor.node.name === "String") { + let docstring = source.slice(cursor.node.from, cursor.node.to); + docstring = docstring.slice(1, -1); // Remove the quotes from the string + docstrings[functionName] = docstring; + } + cursor.parent(); // Move back to the ExpressionStatement + } + break; + } + } + } + } + + return docstrings; +} diff --git a/frontend/resources/compileComputation.py b/frontend/resources/compileComputation.py index e7737321..a415486c 100644 --- a/frontend/resources/compileComputation.py +++ b/frontend/resources/compileComputation.py @@ -2,11 +2,9 @@ import json import sys - def extract_io(source): tree = ast.parse(source) function_info = {} - for node in ast.walk(tree): if isinstance(node, ast.FunctionDef) and node.name == 'compute': # Extract docstring @@ -49,8 +47,8 @@ def extract_io(source): return function_info + if __name__ == '__main__': source = sys.stdin.read() io = extract_io(source) - print(json.dumps(io)) \ No newline at end of file diff --git a/frontend/resources/runTest.mjs b/frontend/resources/runTest.mjs new file mode 100644 index 00000000..8927d7aa --- /dev/null +++ b/frontend/resources/runTest.mjs @@ -0,0 +1,75 @@ +import { spawn } from "child_process"; +import fs from "fs"; +import path from "path"; + +function runCommand(command, args, logFile) { + return new Promise((resolve, reject) => { + const process = spawn(command, args); + + const logStream = fs.createWriteStream(logFile, { flags: "a" }); + + process.stdout.on("data", (data) => { + const output = data.toString("utf-8"); + process.stdout.write(output); + logStream.write(data); + }); + + process.stderr.on("data", (data) => { + const output = data.toString("utf-8"); + process.stderr.write(output); + logStream.write(data); + }); + + process.on("close", (code) => { + logStream.end(); + if (code !== 0) { + reject( + new Error( + `Command '${command} ${args.join(" ")}' failed with error code ${code}`, + ), + ); + } else { + resolve(); + } + }); + }); +} + +export async function runTestContainer(blockDir, blockKey) { + const containerDir = "/app"; // The directory inside the container where you want to mount + const imageName = blockKey; + const containerName = blockKey; + + const commands = [ + ["docker", ["rm", containerName]], + ["docker", ["rmi", imageName]], + ["docker", ["build", "-t", imageName, blockDir]], + [ + "docker", + [ + "run", + "--name", + containerName, + "-v", + `${blockDir}:${containerDir}`, + imageName, + "python", + "-B", + "-c", + "from computations import test; test()", + ], + ], + ]; + + const logFile = path.join(blockDir, "logs.txt"); + fs.writeFileSync(logFile, ""); // Clear the log file + + for (const [command, args] of commands) { + console.log(`Executing: ${command} ${args.join(" ")}`); + try { + await runCommand(command, args, logFile); + } catch (error) { + console.log(`Command '${command}' failed with error`); + } + } +} diff --git a/frontend/server/blockSerialization.js b/frontend/server/blockSerialization.js index a1ab00d2..e7882af6 100644 --- a/frontend/server/blockSerialization.js +++ b/frontend/server/blockSerialization.js @@ -1,7 +1,7 @@ -import { spawnAsync } from "./spawnAsync"; import { app } from "electron"; import fs from "fs/promises"; import path from "path"; +import { logger } from "./logger"; import { BLOCK_SPECS_FILE_NAME, SUPPORTED_FILE_EXTENSIONS, @@ -9,8 +9,11 @@ import { CHAT_HISTORY_FILE_NAME, } from "../src/utils/constants"; import { fileExists, getDirectoryTree } from "./fileSystem"; -import { logger } 
from "./logger"; import { HttpStatus, ServerError } from "./serverError"; +import { compileComputationFunction } from "../resources/compileComputation.mjs"; +import { runTestContainer } from "../resources/runTest.mjs"; +import { computeAgent } from "../agents/gpt-4_python_compute/generate/computations.mjs"; +import { computeViewAgent } from "../agents/gpt-4_python_view/generate/computations.mjs"; const READ_ONLY_FILES = [BLOCK_SPECS_FILE_NAME, CHAT_HISTORY_FILE_NAME]; @@ -20,8 +23,8 @@ export async function compileComputation(pipelinePath, blockId) { const source = await fs.readFile(sourcePath, { encoding: "utf8" }); const scriptPath = app.isPackaged - ? path.join(process.resourcesPath, "resources", "compileComputation.py") - : path.join("resources", "compileComputation.py"); + ? path.join(process.resourcesPath, "resources", "compileComputation.mjs") + : path.join("resources", "compileComputation.mjs"); if (!(await fileExists(scriptPath))) { throw new ServerError( `Could not find script for compilation: ${scriptPath}`, @@ -29,13 +32,9 @@ export async function compileComputation(pipelinePath, blockId) { ); } + let io; try { - const stdout = await spawnAsync("python", [scriptPath], { - input: source, - encoding: "utf8", - }); - const io = JSON.parse(stdout); - console.log(io); + io = await compileComputationFunction(source); return io; } catch (error) { const message = `Compilation failed for block \nblock path: ${blockPath} \nscript path: ${scriptPath}`; @@ -44,47 +43,6 @@ export async function compileComputation(pipelinePath, blockId) { } } -export async function saveBlockSpecs(pipelinePath, blockId, specs) { - const specsPath = path.join(pipelinePath, blockId, BLOCK_SPECS_FILE_NAME); - - removeConnections(specs.inputs); - removeConnections(specs.outputs); - - specs.views.node.pos_x = 0; - specs.views.node.pos_y = 0; - - await fs.writeFile(specsPath, JSON.stringify(specs, null, 2)); -} - -function removeConnections(io) { - for (const key in io) { - io[key].connections = []; - } - - return io; -} - -export async function runTest(pipelinePath, blockId) { - const blockPath = path.join(pipelinePath, blockId); - const scriptPath = app.isPackaged - ? 
path.join(process.resourcesPath, "resources", "run_test.py") - : path.join("resources", "run_test.py"); - if (!(await fileExists(scriptPath))) { - throw new ServerError( - `Could not find script for running tests: ${scriptPath}`, - HttpStatus.INTERNAL_SERVER_ERROR, - ); - } - - try { - await spawnAsync("python", [scriptPath, blockPath, blockId]); - } catch (error) { - const message = `Failed to run tests for block \nblock path: ${blockPath} \nscript path: ${scriptPath}`; - logger.error(error, message); - throw new ServerError(message, HttpStatus.INTERNAL_SERVER_ERROR, error); - } -} - export async function getBlockDirectory(pipelinePath, blockId) { const blockDirectory = path.join(pipelinePath, blockId); @@ -137,6 +95,41 @@ export async function updateBlockFile( await fs.writeFile(absoluteFilePath, content); } +export async function saveBlockSpecs(pipelinePath, blockId, specs) { + const specsPath = path.join(pipelinePath, blockId, BLOCK_SPECS_FILE_NAME); + + removeConnections(specs.inputs); + removeConnections(specs.outputs); + + specs.views.node.pos_x = 0; + specs.views.node.pos_y = 0; + + await fs.writeFile(specsPath, JSON.stringify(specs, null, 2)); +} + +function removeConnections(io) { + for (const key in io) { + io[key].connections = []; + } + + return io; +} + +export async function runTest(pipelinePath, blockId) { + const blockPath = path.join(pipelinePath, blockId); + const scriptPath = app.isPackaged + ? path.join(process.resourcesPath, "resources", "runTest.mjs") + : path.join("resources", "runTest.mjs"); + if (!(await fileExists(scriptPath))) { + throw new ServerError( + `Could not find script for running tests: ${scriptPath}`, + HttpStatus.INTERNAL_SERVER_ERROR, + ); + } + + return await runTestContainer(blockPath, blockId); +} + function getFilePermissions(name) { const readOnly = READ_ONLY_FILES.includes(name); const supported = isFileSupported(name); @@ -163,28 +156,52 @@ export async function callAgent( conversationHistory, apiKey, ) { - let agents = "agents"; - if (app.isPackaged) { - agents = path.join(process.resourcesPath, "agents"); - } - const scriptPath = path.join( - agents, - agentName, - "generate", - "computations.py", - ); + + //KEEPING THOSE FOR REFERENCE, IF WE EVER DECIDE TO MIGRATE BACK TO PYTHON AGENTS + + + // let agents = "agents"; + // if (app.isPackaged) { + // agents = path.join(process.resourcesPath, "agents"); + // } + // const scriptPath = path.join( + // agents, + // agentName, + // "generate", + // "computations.py", + // ); try { - const stdout = await spawnAsync("python", [scriptPath], { - input: JSON.stringify({ + //KEEPING THOSE FOR REFERENCE, IF WE EVER DECIDE TO MIGRATE BACK TO PYTHON AGENTS + + // const stdout = await spawnAsync("python", [scriptPath], { + // input: JSON.stringify({ + // apiKey, + // userMessage, + // conversationHistory, + // }), + // encoding: "utf8", + // }); + // const response = JSON.parse(stdout).response; + // return response; + + if (agentName === "gpt-4_python_compute") { + const result = await computeAgent( + userMessage, + "gpt-4o", + conversationHistory, apiKey, + ); + return result.response; + } else { + const result = await computeViewAgent( userMessage, + "gpt-4o", conversationHistory, - }), - encoding: "utf8", - }); - const response = JSON.parse(stdout).response; - return response; + apiKey, + ); + return result.response; + } } catch (error) { const message = `Unable to call agent ${agentName} with message ${userMessage}`; logger.error(error, message); diff --git a/frontend/server/express.mjs 
b/frontend/server/express.mjs index 62c35f1f..422a7efb 100644 --- a/frontend/server/express.mjs +++ b/frontend/server/express.mjs @@ -1,4 +1,5 @@ import bodyParser from "body-parser"; +import { spawn, exec, spawnSync } from "child_process"; import compression from "compression"; import cors from "cors"; import "dotenv/config"; @@ -9,12 +10,53 @@ import multer from "multer"; import path from "path"; import sha256 from "sha256"; import getMAC from "getmac"; +import { logger } from "./logger"; + +let anvilProcess = null; + +const logFile = electronApp.isPackaged + ? path.join(process.resourcesPath, "logs", "app.log") + : path.join(process.cwd(), "logs", "app.log"); + +function sleep(time) { + return new Promise((resolve) => setTimeout(resolve, time)); +} + +async function gracefullyStopAnvil() { + if (anvilProcess !== null) { + anvilProcess.kill("SIGINT"); + async function checkAnvil() { + if (anvilProcess.exitCode === null) { + await sleep(3000); + await checkAnvil(); + } + } + await checkAnvil(); + } +} + +//for some reason, the logs inside child processes are not written to log file, hence, I am adding the logs here. +function saveChildProcessLog(log) { + const logDict = { + childProcess: log, + }; + const formattedLog = JSON.stringify(logDict) + "\n"; + try { + fs.appendFileSync(logFile, formattedLog); + } catch (err) { + console.error("FAILED TO APPEND LOG"); + } +} function startExpressServer() { const app = express(); const port = 3330; - app.use(cors()); + app.use( + cors({ + origin: "*", + }), + ); app.use(compression()); // http://expressjs.com/en/advanced/best-practice-security.html#at-a-minimum-disable-x-powered-by-header @@ -38,7 +80,7 @@ function startExpressServer() { ); app.use(bodyParser.json()); - const upload = multer({ dest: "_temp_import" }); + const upload = multer(); app.get("/distinct-id", async (req, res) => { try { @@ -109,9 +151,391 @@ function startExpressServer() { res.send("Folder imported successfully"); }); + app.get("/get-kube-contexts", async (req, res) => { + exec("kubectl version --client", (error, stdout, stderr) => { + if (error) { + res + .status(500) + .json({ message: "kubectl is not installed.", error: error.message }); + return; + } + if (stderr) { + res.status(500).json({ + message: "kubectl is installed but there was an error.", + stderr: stderr, + }); + return; + } + }); + + async function getKubectlContexts() { + return new Promise((resolve, reject) => { + exec("kubectl config get-contexts -o name", (error, stdout, stderr) => { + if (error) { + reject(new Error("kubectl error: " + error.message)); + return; + } + if (stderr) { + reject(new Error("kubectl error: " + stderr)); + return; + } + + const contexts = stdout + .trim() + .split("\n") + .filter((context) => context.trim() !== ""); + if (contexts.length === 0) { + reject( + new Error( + "Kubectl cannot find any contexts. Please check your kubectl path, Docker driver, and running kubecontexts. If you're running on Windows, please run zetaforge in admin mode. 
You can ignore this message if you'll use cloud settings.", + ), + ); + return; + } + + resolve(contexts); + }); + }); + } + + try { + const contexts = await getKubectlContexts(); + res.status(200).json(contexts); + } catch (err) { + console.log(err); + res.status(500).send({ kubeErr: err.message }); + } + }); +
+ app.get("/get-anvil-config", (req, res) => { + if (!electronApp.isPackaged) { + const response = { has_config: false }; + return res.status(200).send(response); + } + const anvilDir = path.join(process.resourcesPath, "server2"); + const anvilFiles = fs.readdirSync(anvilDir); + if (anvilFiles.includes("config.json")) { + const configFile = path.join(anvilDir, "config.json"); + const configStr = fs.readFileSync(configFile); + const config = JSON.parse(configStr); + const response = { + has_config: true, + config: config, + }; + res.status(200).send(response); + } else { + const response = { has_config: false }; + res.status(200).send(response); + } + }); +
+ app.post("/launch-anvil-from-config", async (req, res) => { + if (!electronApp.isPackaged) { + return res.sendStatus(200); + } + + const anvilDir = path.join(process.resourcesPath, "server2"); + + const anvilTimeoutPromise = new Promise((resolve, reject) => { + setTimeout( + () => { + reject(new Error("Anvil Timeout Error")); + }, + 3 * 60 * 1000, + ); + }); + + if (anvilProcess !== null) { + try { + const anvilFiles = fs.readdirSync(anvilDir); + const anvilExec = anvilFiles.filter((file) => + file.startsWith("s2-"), + )[0]; + spawnSync(anvilExec, ["--uninstall"]); + } catch (err) { + logger.error(err.message); + } + + await gracefullyStopAnvil(); + anvilProcess = null; + } + const configPath = path.join(anvilDir, "config.json"); + const configStr = fs.readFileSync(configPath); + const config = JSON.parse(configStr); + + const context = config.KubeContext; + const kubeConfig = ["config", "use-context", context]; + const kubeExec = spawn("kubectl", kubeConfig); + + kubeExec.stderr.on("data", (data) => { + saveChildProcessLog(`stderr: ${data}`); + res + .status(500) + .send({ err: "CAN'T SET KUBECONTEXT", kubeErr: data.toString() }); + }); + + const anvilFiles = fs.readdirSync(anvilDir); + const anvilExec = anvilFiles.filter((file) => file.startsWith("s2-"))[0]; + const runAnvil = new Promise((resolve, reject) => { + anvilProcess = spawn(path.join(anvilDir, anvilExec), { + cwd: anvilDir, + detached: true, + stdio: ["ignore", "pipe", "pipe"], // Ignore stdin, use pipes for stdout and stderr + }); + + anvilProcess.stdout.on("data", (data) => { + saveChildProcessLog(`[server] stdout: ${data}`); + if ( + data.toString().includes("[GIN-debug] Listening and serving HTTP on") + ) { + resolve(); + } + }); + const regex = /listen tcp :\d+: bind: address already in use/; + anvilProcess.stderr.on("data", (data) => { + saveChildProcessLog(`[server] stderr: ${data}`); + if ( + data + .toString() + .toLowerCase() + .includes("failed to fetch kubernetes resources;") || + data + .toString() + .toLowerCase() + .includes("failed to get client config;") || + data.toString().toLowerCase().includes("failed to install argo;") || + data + .toString() + .toLowerCase() + .includes("failed to check for minikube profile;") || + data + .toString() + .toLowerCase() + .includes("failed to marshall minikube profile;") || + data + .toString() + .toLowerCase() + .includes("failed to find the profile;") || + data + .toString() + .toLowerCase() + .includes("failed to check minikube status;") || + data + .toString() + .toLowerCase() + .includes("failed to parse profile status;") || + regex.test(data.toString().toLowerCase()) + ) { + reject(new Error(`Kubeservices not found: ${data.toString()}`)); + } + }); + }); + + const runAnvilPromise = Promise.race([anvilTimeoutPromise, runAnvil]); + + runAnvilPromise + .then((response) => { + res.status(200).send({ success: response }); + }) + .catch((err) => { + res + .status(500) + .send({ err: "Error while launching anvil", kubeErr: err.message }); + }); + }); +
+ app.post("/launch-anvil", async (req, res) => { + const anvilDir = path.join(process.resourcesPath, "server2"); + + const anvilTimeoutPromise = new Promise((resolve, reject) => { + setTimeout( + () => { + reject(new Error("Anvil Timeout Error")); + }, + 3 * 60 * 1000, + ); + }); + + if (!electronApp.isPackaged) { + return res.sendStatus(200); + } + + if (anvilProcess !== null) { + try { + const anvilFiles = fs.readdirSync(anvilDir); + const anvilExec = anvilFiles.filter((file) => + file.startsWith("s2-"), + )[0]; + + spawnSync(anvilExec, ["--uninstall"]); + } catch (err) { + logger.error(err); + } + + await gracefullyStopAnvil(); + } + anvilProcess = null; + + const body = req.body; + + const config = { + IsLocal: true, + IsDev: process.env.VITE_ZETAFORGE_IS_DEV === "True" ? true : false, + ServerPort: parseInt(body.anvilPort), + KanikoImage: "gcr.io/kaniko-project/executor:latest", + WorkDir: "/app", + FileDir: "/files", + ComputationFile: "computations.py", + EntrypointFile: "entrypoint.py", + ServiceAccount: "executor", + Bucket: "forge-bucket", + BucketName: "zetaforge", + Database: "./zetaforge.db", + KubeContext: body.KubeContext, + SetupVersion: "1", + Local: { + BucketPort: parseInt(body.s3Port), + Driver: body.driver, + }, + Cloud: { + Provider: "debug", + Debug: { + RegistryPort: 5000, + }, + }, + }; + const configDir = path.join(anvilDir, "config.json"); + const configStr = JSON.stringify(config); + try { + fs.writeFileSync(configDir, configStr); + } catch (err) { + res.status(500).send("Error happened while writing config.json"); + } + const kubeConfig = ["config", "use-context", body.KubeContext]; + const kubeExec = spawn("kubectl", kubeConfig); + kubeExec.stderr.on("data", (data) => { + saveChildProcessLog(`stderr: ${data.toString()}`); + res + .status(500) + .send({ err: "CAN'T SET KUBECONTEXT", kubeErr: data.toString() }); + }); + const anvilFiles = fs.readdirSync(anvilDir); + + const anvilExec = anvilFiles.filter((file) => file.startsWith("s2-"))[0]; + const runAnvil = new Promise((resolve, reject) => { + anvilProcess = spawn(path.join(anvilDir, anvilExec), { + cwd: anvilDir, + // detached: true, + stdio: ["ignore", "pipe", "pipe"], // Ignore stdin, use pipes for stdout and stderr + }); + + anvilProcess.stdout.on("data", (data) => { + saveChildProcessLog(`[server] stdout: ${data.toString()}`); + + if ( + data.toString().includes("[GIN-debug] Listening and serving HTTP on") + ) { + resolve(); + } + }); + const regex = /listen tcp :\d+: bind: address already in use/; + + anvilProcess.stderr.on("data", (data) => { + saveChildProcessLog(`[server] stderr: ${data.toString()}`); + if ( + data + .toString() + .toLowerCase() + .includes("failed to fetch kubernetes resources;") || + data + .toString() + .toLowerCase() + .includes("failed to get client config;") || + data.toString().toLowerCase().includes("failed to install argo;") || + data + .toString() + .toLowerCase() + .includes("failed to check for minikube profile;") || + data + .toString() + .toLowerCase() + .includes("failed to marshall minikube profile;") || + data + .toString() + .toLowerCase() + .includes("failed to find the profile;") || + data + .toString() + .toLowerCase() + .includes("failed to check minikube status;") || + data + .toString() + .toLowerCase() + .includes("failed to parse profile status;") || + regex.test(data.toString().toLowerCase()) + ) { + reject(new Error(`Kubeservices not found: ${data.toString()}`)); + } + }); + }); + + const runAnvilPromise = Promise.race([anvilTimeoutPromise, runAnvil]); + + runAnvilPromise + .then(() => { + res.sendStatus(200); + }) + .catch((err) => { + if (anvilProcess !== null) { + anvilProcess.kill("SIGINT"); + + anvilProcess = null; + } + res + .status(500) + .send({ err: "Error while launching anvil", kubeErr: err.message }); + }); + }); +
+ app.get("/isPackaged", (req, res) => { + const isPip = process.env.VITE_IS_PIP === "True" ? true : false; + return res.status(200).json(electronApp.isPackaged && !isPip); + }); + app.listen(port, () => { console.log(`Server running at http://localhost:${port}`); }); + + process.on("SIGINT", () => { + console.log("SIGINT ANVIL"); + if (anvilProcess !== null) { + console.log("KILLING ANVIL..."); + anvilProcess.kill("SIGINT"); + } + }); + + process.on("SIGTERM", () => { + console.log("SIGTERM ANVIL"); + if (anvilProcess !== null) { + console.log("KILLING ANVIL..."); + anvilProcess.kill("SIGINT"); + } + }); } -export { startExpressServer }; +export { startExpressServer, gracefullyStopAnvil };
diff --git a/frontend/server/fileSystem.js b/frontend/server/fileSystem.js index 5cdc3351..5666b186 100644 --- a/frontend/server/fileSystem.js +++ b/frontend/server/fileSystem.js @@ -80,6 +80,7 @@ export async function fileExists(filePath) { await fs.access(filePath); return true; } catch { + return false; } }
diff --git a/frontend/server/logger.js b/frontend/server/logger.js index 10855025..cf285863 100644 --- a/frontend/server/logger.js +++ b/frontend/server/logger.js @@ -2,9 +2,64 @@ import pino from "pino"; import config from "../config"; import pinoCaller from "pino-caller"; import process from "process"; +import { app as electronApp } from "electron"; +import path from "path"; +import fs from "fs"; let configuredLogger; -if (config.logger.pretty.toLowerCase() === "true") { +try { + // Determine the log directory + const logFilePath = electronApp.isPackaged + ? 
path.join(process.resourcesPath, "logs") + : path.join(process.cwd(), "logs"); + + fs.mkdirSync(logFilePath, { recursive: true }); + + if (config.logger.pretty.toLowerCase() === "true") { + configuredLogger = pinoCaller( + pino({ + level: config.logger.level, + transport: { + targets: [ + { target: "pino-pretty" }, // Pretty print to console + { + target: "pino/file", + options: { destination: path.join(logFilePath, "app.log") }, + }, // Log to file + ], + }, + }), + ); + } else { + configuredLogger = pinoCaller( + pino({ + level: config.logger.level, + transport: { + targets: [ + { + target: "pino/file", + options: { destination: path.join(logFilePath, "app.log") }, + }, + { target: "pino-pretty" }, + ], + }, + }), + ); + } + pino.destination(path.join(logFilePath, "app2.log")); + + // Override console.log to use Pino + if (electronApp.isPackaged) { + console.log = (...args) => { + configuredLogger.info(...args); + }; + console.error = (...args) => { + configuredLogger.error(...args); + }; + } +} catch (err) { + console.log(err); + configuredLogger = pinoCaller( pino({ level: config.logger.level, @@ -12,14 +67,6 @@ if (config.logger.pretty.toLowerCase() === "true") { target: "pino-pretty", }, }), - { - relativeTo: process.cwd(), - }, ); -} else { - configuredLogger = pino({ - level: config.logger.level, - }); } - export const logger = configuredLogger;
diff --git a/frontend/server/pipelineSerialization.js b/frontend/server/pipelineSerialization.js index 9b9c8606..846668f4 100644 --- a/frontend/server/pipelineSerialization.js +++ b/frontend/server/pipelineSerialization.js @@ -6,7 +6,6 @@ import { BLOCK_SPECS_FILE_NAME, PIPELINE_SPECS_FILE_NAME, } from "../src/utils/constants"; -import { setDifference } from "../utils/set.js"; import { fileExists, filterDirectories, @@ -16,10 +15,8 @@ import { checkAndUpload, checkAndCopy, uploadDirectory } from "./s3.js"; import { createExecution, getBuildContextStatus, - getPipelinesByUuid, } from "./anvil"; import { logger } from "./logger"; -import { computeMerkleTreeForDirectory } from "./merkle.js"; export async function saveSpec(spec, writePath) { const pipelineSpecsPath = path.join(writePath, PIPELINE_SPECS_FILE_NAME); @@ -50,7 +47,7 @@ function hasContainer(blockSpec) { export async function copyPipeline(pipelineSpecs, fromDir, toDir) { const bufferPath = path.resolve(process.cwd(), fromDir); - + // Takes existing pipeline + spec const writePipelineDirectory = toDir; const pipelineSpecsPath = path.join( @@ -60,16 +57,22 @@ export async function copyPipeline(pipelineSpecs, fromDir, toDir) { const fromBlockIndex = await getBlockIndex([bufferPath]); - let toBlockIndex = {}; + + if (await fileExists(writePipelineDirectory)) { - toBlockIndex = await getBlockIndex([writePipelineDirectory]); + + await getBlockIndex([writePipelineDirectory]); } else { + await fs.mkdir(writePipelineDirectory, { recursive: true }); } // Gets pipeline specs from the specs coming from the graph // Submitted by the client + const newPipelineBlocks = getPipelineBlocks(pipelineSpecs); + + //this trips the lint/format check, but it is kept because it is also on master (f/client-launch-anvil) const existingPipelineBlocks = (await fileExists(pipelineSpecsPath)) ? 
await readPipelineBlocks(pipelineSpecsPath) : new Set(); @@ -105,13 +108,13 @@ export async function copyPipeline(pipelineSpecs, fromDir, toDir) { ); } } - await fs.writeFile(pipelineSpecsPath, JSON.stringify(pipelineSpecs, null, 2)); return { specs: PIPELINE_SPECS_FILE_NAME, dirPath: writePipelineDirectory }; } async function getBlockIndex(blockDirectories) { + const blockIndex = {}; for (const directory of blockDirectories) { try { @@ -195,8 +198,7 @@ export async function executePipeline( specs["name"] = name; specs["id"] = id; - //const merkle = await computeMerkleTreeForDirectory(path); - //const pipelines = await getPipelinesByUuid(anvilHostConfiguration, id); + await uploadBuildContexts( anvilHostConfiguration, @@ -226,7 +228,6 @@ async function uploadBlocks( const parameters = node.action?.parameters; const container = node.action?.container; - if (parameters) { for (const paramKey in parameters) { const param = parameters[paramKey]; @@ -238,6 +239,7 @@ async function uploadBlocks( if (filePath && filePath.trim()) { await checkAndUpload(awsKey, filePath, anvilConfiguration); + param.value = `"${fileName}"`; param.type = "blob"; } @@ -275,6 +277,7 @@ async function uploadBuildContexts( pipelineSpecs, rebuild, ); + await Promise.all( buildContextStatuses .filter((status) => !status.isUploaded) diff --git a/frontend/server/s3.js b/frontend/server/s3.js index 215764ee..d8304bff 100644 --- a/frontend/server/s3.js +++ b/frontend/server/s3.js @@ -80,6 +80,7 @@ export async function checkAndUpload(key, filePath, anvilConfiguration) { export async function uploadDirectory(key, diretoryPath, anvilConfiguration) { const files = await getDirectoryFilesRecursive(diretoryPath); + await Promise.all( files.map((f) => upload( diff --git a/frontend/server2/entrypoint.py b/frontend/server2/entrypoint.py deleted file mode 100644 index 8fff0d8a..00000000 --- a/frontend/server2/entrypoint.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -import ast -import inspect -import shutil -import json -from computations import compute - -def main(): - original_path = os.getcwd() - - # Check for all the files in the /files directory - files_dir = os.path.join("/files") - if not os.path.exists(files_dir): - os.makedirs(files_dir, exist_ok=True) - - if os.path.exists(files_dir): - for item in os.listdir(files_dir): - src_path = os.path.join(files_dir, item) - dst_path = os.path.join(os.getcwd(), item) - shutil.move(src_path, dst_path) - - # Get current dir files and folders - initial_files_and_folders = set(os.listdir()) - #print("Initial files: ", initial_files_and_folders) - - params = list() - inputs = dict() - debug_inputs = dict() - - block_id = os.getenv("_blockid_") - #print("block id: ", block_id) - - for key in inspect.signature(compute).parameters.keys(): - value = os.getenv(key) - debug_inputs[key] = value - - print("debug|||", debug_inputs) - - for key in inspect.signature(compute).parameters.keys(): - value = os.getenv(key) - debug_inputs[key] = value - params.append(ast.literal_eval(value)) - inputs[key] = value - - json_inputs = json.dumps(inputs) - print("inputs|||", json_inputs) - - outputs = compute(*params) - - # the "|||" are used for parsing - json_outputs = json.dumps(outputs) - print("outputs|||", json_outputs) - - os.chdir(original_path) - for key, value in outputs.items(): - with open(block_id + "-" + key + ".txt", "w") as file: - file.write(json.dumps(value)) - - # Check the current execution directory for files and folders after the compute function executes - current_files_and_folders = 
set(os.listdir()) - #print("Current dir: ", current_files_and_folders) - - # Diff the new exec dir files and folders from the 1st exec dir files and folders state - new_items = current_files_and_folders - initial_files_and_folders - #print("New items: ", new_items) - - # 6. Copy any new files in the current execution directory to /files - if os.path.exists(files_dir): - for item in new_items: - src_path = os.path.join(os.getcwd(), item) - dst_path = os.path.join(files_dir, item) - if os.path.isdir(src_path): - shutil.copytree(src_path, dst_path, dirs_exist_ok=True) - else: - shutil.copy2(src_path, dst_path) - - # Moving offers no noticeable speedup in execution time -# if os.path.exists(files_dir): -# for item in new_items: -# src_path = os.path.join(os.getcwd(), item) -# dst_path = os.path.join(files_dir, item) -# -# print(f"Moving {item} from {src_path} to {dst_path}") -# shutil.move(src_path, dst_path) - -if __name__ == "__main__": - main() diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index 141b3ef4..b124cc21 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -13,8 +13,137 @@ import SocketFetcher from "@/components/ui/SocketFetcher"; import ServiceInitializer from "@/components/ServiceInitializer"; import "./styles/globals.scss"; +import AnvilConfigurationsModal from "./components/ui/modal/AnvilConfigurationsModal"; +import { useState, useEffect } from "react"; +import axios from "axios"; +import { useAtom } from "jotai"; +import { availableKubeContexts } from "@/atoms/kubecontextAtom"; +import { ping } from "./client/anvil"; +import { activeConfigurationAtom } from "./atoms/anvilConfigurationsAtom"; +import AnvilLauncherStatus from "./components/ui/AnvilLauncherStatus"; + +const serverAddress = import.meta.env.VITE_EXPRESS; export default function App() { + const [appIsPackaged, setIsPackaged] = useState(false); + const [_, setAvailableKubeContexts] = useAtom(availableKubeContexts); + const [configOpen, setConfigOpen] = useState(false); + const [confirmationOpen, setConfirmationIsOpen] = useState(false); + const [confirmationText, setConfirmationText] = useState([]); + const [errModalOpen, setErrModalOpen] = useState(false); + const [errText, setErrText] = useState([]); + const [loading, setIsLoading] = useState(false); + const [currentConfig] = useAtom(activeConfigurationAtom); + const [logViewerOpen, SystemLogViewerOpen] = useState(false); + + useEffect(() => { + async function initialLaunch() { + let isPackaged = false; + try { + const res = await axios.get(`${serverAddress}/isPackaged`); + isPackaged = res.data; + setIsPackaged(res.data); + } catch (err) { + console.log(err.message); + } + + if (isPackaged) { + const canPing = await ping(currentConfig); + if (!canPing) { + try { + const kubeResponse = await axios.get( + `${serverAddress}/get-kube-contexts`, + ); + setAvailableKubeContexts(kubeResponse.data); + } catch (err) { + //once user set their cloud settings, this message won't display again. + setErrText([ + "Cannot find kubectl or there's an error with kubectl command. Please try again or check if kubectl is in your path or check if your Docker daemon is running. 
You can ignore this message if you'll use a cloud Anvil.", + err.response?.data?.kubeErr, + ]); + setErrModalOpen(true); + } + + try { + const anvilRes = await axios.get( + `${serverAddress}/get-anvil-config`, + ); + const data = anvilRes.data; + if (data.has_config) { + console.log("HERE ON SECOND RUN"); + const config = data.config; + const bucketPort = config.Local.BucketPort; + const driver = config.Local.Driver; + const serverPort = config.ServerPort; + const context = config.KubeContext; + //re-check this, in case the user manually changed something, which could cause bugs + + const openConfig = + currentConfig?.anvil?.port === serverPort.toString(); + if (config.IsLocal === true && openConfig === true) { + console.log("MUST REACH HERE"); + setConfirmationIsOpen(true); + const configText = [ + "Are you sure you want to run Anvil locally with the following configurations?", + "If you are using the minikube driver, please make sure you've set up your minikube cluster and that it's running.", + `HOST: 127.0.0.1`, + `PORT: ${serverPort}`, + `Context: ${context}`, + `Driver: ${driver}`, + ]; + setConfirmationText(configText); + } else { + setConfigOpen(true); + } + } else { + setConfigOpen(true); + setConfirmationIsOpen(false); + } + } catch (err) { + //once the user sets their cloud settings, this message won't display again. + setErrText([ + "An error occurred while trying to find your local Anvil configuration. Please set your configuration again and relaunch. If you want to use cloud settings, point your configuration at a cloud instance and ignore this message.", + err.response?.data?.err, + err.response?.data?.kubeErr, + ]); + setErrModalOpen(true); + } + } else { + //this handles the case where the user wants to change an already-pinging config. 
+ try { + const kubeResponse = await axios.get( + `${serverAddress}/get-kube-contexts`, + ); + setAvailableKubeContexts(kubeResponse.data); + } catch (err) {} + } + } + } + + initialLaunch(); + }, []); + + const confirmSettings = async () => { + SystemLogViewerOpen(true); + try { + setIsLoading(true); + const res = await axios.post(`${serverAddress}/launch-anvil-from-config`); + setConfirmationIsOpen(false); + setIsLoading(false); + setConfigOpen(false); + setIsLoading(false); + } catch (err) { + setConfirmationIsOpen(false); + setIsLoading(false); + setConfigOpen(true); + } + }; + + const closeConfirmation = () => { + setConfirmationIsOpen(false); + setConfigOpen(true); + }; + return ( @@ -24,6 +153,34 @@ export default function App() { + setConfigOpen(false)} + isInitial={true} + appIsPackaged={appIsPackaged} + initialLoading={loading} + /> + + { + setErrModalOpen(false); + setConfigOpen(true); + }} + errorOpen={errModalOpen} + errorMessage={errText} + logViewerTitle={"System Logs"} + logViewerOpen={logViewerOpen} + logViewerClose={() => { + SystemLogViewerOpen(false); + }} + /> + diff --git a/frontend/src/atoms/anvilConfigurationsAtom.js b/frontend/src/atoms/anvilConfigurationsAtom.js index 22b9223f..a64d1396 100644 --- a/frontend/src/atoms/anvilConfigurationsAtom.js +++ b/frontend/src/atoms/anvilConfigurationsAtom.js @@ -58,3 +58,13 @@ export const editConfigurationAtom = atom( set(userAnvilConfigurationsAtom, (prev) => prev.with(index, configuration)); }, ); + +export const updateContextAndDriver = (newConfig) => { + return (oldState) => ({ + ...oldState, + anvil: { + ...oldState.anvil, + ...newConfig, + }, + }); +}; diff --git a/frontend/src/atoms/kubecontextAtom.js b/frontend/src/atoms/kubecontextAtom.js new file mode 100644 index 00000000..5209111f --- /dev/null +++ b/frontend/src/atoms/kubecontextAtom.js @@ -0,0 +1,10 @@ +import { atom } from "jotai"; + +export const availableKubeContexts = atom([]); +export const choosenKubeContexts = atom(""); +export const isPackaged = atom(false); +export const runningKubeContext = atom(""); +export const kubeErrorModalIsOpen = atom(false); +export const kubeErrors = atom([]); +export const drivers = atom(["docker", "minikube"]); +export const chosenDriver = atom(""); diff --git a/frontend/src/atoms/pipelineAtom.js b/frontend/src/atoms/pipelineAtom.js index 86af9a90..60649b3c 100644 --- a/frontend/src/atoms/pipelineAtom.js +++ b/frontend/src/atoms/pipelineAtom.js @@ -40,6 +40,7 @@ const defaultWorkspace = { fetchInterval: 5 * 1000, offset: 0, limit: 15, + //executions: {}, connected: false, }; diff --git a/frontend/src/client/anvil.js b/frontend/src/client/anvil.js index 853f6848..370b86e2 100644 --- a/frontend/src/client/anvil.js +++ b/frontend/src/client/anvil.js @@ -63,7 +63,7 @@ export async function terminateExecution(configuration, executionId) { return body; } -export async function getAllPipelines(configuration, limit, offset) { +export async function getAllPipelines(configuration) { const response = await handleRequest( buildUrl( getScheme(configuration.anvil.host), @@ -82,19 +82,18 @@ export async function getAllPipelines(configuration, limit, offset) { } export async function ping(configuration) { - const response = await handleRequest( - buildUrl( - getScheme(configuration.anvil.host), - configuration.anvil.host, - configuration.anvil.port, - "ping", - ), - HttpMethod.GET, - configuration.anvil.token, - {}, - ); - try { + const response = await handleRequest( + buildUrl( + getScheme(configuration.anvil.host), + 
configuration.anvil.host, + configuration.anvil.port, + "ping", + ), + HttpMethod.GET, + configuration.anvil.token, + {}, + ); return response.ok; } catch { return false; diff --git a/frontend/src/components/ZetaneDrawflowEditor.js b/frontend/src/components/ZetaneDrawflowEditor.js index 02c501fd..d5c13a37 100644 --- a/frontend/src/components/ZetaneDrawflowEditor.js +++ b/frontend/src/components/ZetaneDrawflowEditor.js @@ -78,6 +78,9 @@ export default class Drawflow { this.container.addEventListener("mouseup", this.dragEnd.bind(this)); this.container.addEventListener("mousemove", this.position.bind(this)); this.container.addEventListener("mousedown", this.click.bind(this)); + this.container.addEventListener("mouseleave", (_) => { + this.editor_selected = false; + }); this.container.addEventListener("touchend", this.dragEnd.bind(this)); this.container.addEventListener("touchmove", this.position.bind(this)); @@ -89,7 +92,7 @@ export default class Drawflow { this.container.addEventListener("keydown", this.key.bind(this)); /* Zoom Mouse */ - this.container.addEventListener("wheel", this.zoom_enter.bind(this)); + this.container.addEventListener("wheel", this.handleZoom.bind(this)); /* Update data Nodes */ this.container.addEventListener("dblclick", this.dblclick.bind(this)); /* Mobile zoom */ @@ -121,12 +124,11 @@ export default class Drawflow { if (this.prevDiff > 100) { if (curDiff > this.prevDiff) { // The distance between the two pointers has increased - - this.zoom_in(); + //this.zoom_in(); } if (curDiff < this.prevDiff) { // The distance between the two pointers has decreased - this.zoom_out(); + //this.zoom_out(); } } this.prevDiff = curDiff; @@ -625,21 +627,8 @@ export default class Drawflow { } } - zoom_enter(event) { - event.preventDefault(); - if (event.deltaY > 0) { - // Zoom Out - this.zoom_out(event); - } else { - // Zoom In - this.zoom_in(event); - } - } - zoom_refresh() { - this.canvas_x = (this.canvas_x / this.zoom_last_value) * this.zoom; - this.canvas_y = (this.canvas_y / this.zoom_last_value) * this.zoom; - this.zoom_last_value = this.zoom; + this.precanvas.style.transformOrigin = "0 0"; this.precanvas.style.transform = "translate(" + this.canvas_x + @@ -650,59 +639,53 @@ export default class Drawflow { ")"; } - zoom_in(event) { - if (this.zoom < this.zoom_max) { - const rect = this.precanvas.getBoundingClientRect(); - const pointerX = (event.clientX - rect.left) / this.zoom; - const pointerY = (event.clientY - rect.top) / this.zoom; - - const centerPoint = { x: pointerX, y: pointerY }; - let newZoom = this.zoom + this.zoom_value; - - if (newZoom > this.zoom_max) newZoom = this.zoom_max; - if (newZoom < this.zoom_min) newZoom = this.zoom_min; - - const deltaZoom = newZoom - this.zoom; - - const offsetX = 33; // keeps the zoom - left - const offsetY = 13; // keeps the zoom - top - - this.canvas_x -= deltaZoom * centerPoint.x - offsetX; - this.canvas_y -= deltaZoom * centerPoint.y - offsetY; - - this.zoom = newZoom; - this.zoom_refresh(); - } + applyTransform() { + this.precanvas.style.transformOrigin = "0 0"; + this.precanvas.style.transform = `translate(${this.canvas_x}px, ${this.canvas_y}px) scale(${this.zoom})`; } - zoom_out(event) { - if (this.zoom > this.zoom_min) { - const rect = this.precanvas.getBoundingClientRect(); - const pointerX = (event.clientX - rect.left) / this.zoom; - const pointerY = (event.clientY - rect.top) / this.zoom; - - const centerPoint = { x: pointerX, y: pointerY }; - let newZoom = this.zoom - this.zoom_value; - - if (newZoom > this.zoom_max) 
newZoom = this.zoom_max; - if (newZoom < this.zoom_min) newZoom = this.zoom_min; - - const deltaZoom = this.zoom - newZoom; - - const offsetX = 33; - const offsetY = 13; - - this.canvas_x += deltaZoom * centerPoint.x - offsetX; - this.canvas_y += deltaZoom * centerPoint.y - offsetY; + handleZoom(event) { + event.preventDefault(); - this.zoom = newZoom; - this.zoom_refresh(); - } + const mouseX = event.clientX; + const mouseY = event.clientY; + + // Calculate new zoom level + const delta = event.deltaY; + let newZoom = this.zoom * 0.999 ** delta; + // Limit zoom level + newZoom = Math.min(Math.max(newZoom, 0.01), 20); + + // Calculate the point in canvas coordinates + const pointX = (mouseX - this.canvas_x) / this.zoom; + const pointY = (mouseY - this.canvas_y) / this.zoom; + + // Calculate new canvas position to keep the point fixed + this.canvas_x = mouseX - pointX * newZoom; + this.canvas_y = mouseY - pointY * newZoom; + + // Update zoom + this.zoom = newZoom; + + this.applyTransform(); + + console.log( + "Zoom:", + this.zoom, + "Canvas Position:", + { x: this.canvas_x, y: this.canvas_y }, + "Mouse:", + { x: mouseX, y: mouseY }, + "Fixed Point:", + { x: pointX, y: pointY }, + ); } zoom_reset() { if (this.zoom != 1) { this.zoom = 1; + this.canvas_x = 0; + this.canvas_y = 0; this.zoom_refresh(); } } diff --git a/frontend/src/components/ui/AnvilLauncherStatus.jsx b/frontend/src/components/ui/AnvilLauncherStatus.jsx new file mode 100644 index 00000000..a6622a09 --- /dev/null +++ b/frontend/src/components/ui/AnvilLauncherStatus.jsx @@ -0,0 +1,34 @@ +import ClosableModal from "./modal/ClosableModal"; +import StatusModal from "./modal/StatusModal"; +import SystemLogViewer from "./SystemLogViewer"; +export default function AnvilLauncherStatus(props) { + return ( + <> + + + + + + + + + ); +} diff --git a/frontend/src/components/ui/LogsButton.jsx b/frontend/src/components/ui/LogsButton.jsx index daf42bbf..54aa321f 100644 --- a/frontend/src/components/ui/LogsButton.jsx +++ b/frontend/src/components/ui/LogsButton.jsx @@ -8,7 +8,6 @@ import { logsAtom } from "@/atoms/logsAtom"; export default function LogsButton() { const [modalContent, setModalContent] = useAtom(modalContentAtom); const [logs, _] = useAtom(logsAtom); - const modalPopper = (content) => { setModalContent({ ...modalContent, diff --git a/frontend/src/components/ui/Navbar.jsx b/frontend/src/components/ui/Navbar.jsx index c16428e9..266e62a3 100644 --- a/frontend/src/components/ui/Navbar.jsx +++ b/frontend/src/components/ui/Navbar.jsx @@ -25,12 +25,14 @@ import SavePipelineButton from "./SavePipelineButton"; import AnvilConfigurationsModal from "./modal/AnvilConfigurationsModal"; import { activeConfigurationAtom } from "@/atoms/anvilConfigurationsAtom"; import { PipelineStopButton } from "./PipelineStopButton"; +import { useState } from "react"; export default function Navbar({ children }) { const [darkMode, setDarkMode] = useAtom(darkModeAtom); const [modalContent, setModalContent] = useAtom(modalContentAtom); const [pipeline, setPipeline] = useImmerAtom(pipelineAtom); const [configuration] = useAtom(activeConfigurationAtom); + const [disable, setDisable] = useState(false); const modalPopper = (content) => { setModalContent({ @@ -111,7 +113,11 @@ export default function Navbar({ children }) { {runButton} - + diff --git a/frontend/src/components/ui/PipelineLogs.jsx b/frontend/src/components/ui/PipelineLogs.jsx index 351a4edb..da6aba1a 100644 --- a/frontend/src/components/ui/PipelineLogs.jsx +++ b/frontend/src/components/ui/PipelineLogs.jsx 
@@ -4,6 +4,8 @@ import { useAtom } from "jotai"; import { useMemo } from "react"; import ScrollToBottom from "react-scroll-to-bottom"; import ClosableModal from "./modal/ClosableModal"; +import SystemLogViewer from "./SystemLogViewer"; +import { Tab, Tabs, TabList, TabPanel, TabPanels } from "@carbon/react"; export const isEmpty = (obj) => { for (var i in obj) return false; @@ -38,16 +40,32 @@ export const PipelineLogs = ({ title, filter }) => { }); return ( - - -
- -
-
-
+ <> + + + + PipelineLogs + System Logs + + + + +
+ +
+
+
+ + + + +
+
+
+ ); }; diff --git a/frontend/src/components/ui/RunPipelineButton.jsx b/frontend/src/components/ui/RunPipelineButton.jsx index 4609702e..fafe0d4a 100644 --- a/frontend/src/components/ui/RunPipelineButton.jsx +++ b/frontend/src/components/ui/RunPipelineButton.jsx @@ -1,13 +1,13 @@ import { drawflowEditorAtom } from "@/atoms/drawflowAtom"; import { pipelineAtom } from "@/atoms/pipelineAtom"; -import { useQueryClient } from "@tanstack/react-query"; +import { useQueryClient, useQuery } from "@tanstack/react-query"; import { mixpanelAtom } from "@/atoms/mixpanelAtom"; import generateSchema from "@/utils/schemaValidation"; import { trpc } from "@/utils/trpc"; -import { Button } from "@carbon/react"; +import { Button, Tooltip } from "@carbon/react"; import { useAtom } from "jotai"; import { useImmerAtom } from "jotai-immer"; -import { useState } from "react"; +import { useState, useEffect } from "react"; import { uuidv7 } from "uuidv7"; import ClosableModal from "./modal/ClosableModal"; import { workspaceAtom } from "@/atoms/pipelineAtom"; @@ -147,6 +147,7 @@ export default function RunPipelineButton({ children, action }) { margin: "5px", }; + return ( <>