From bc2334dff8cbdf63e5a09d629e26df2927bbe774 Mon Sep 17 00:00:00 2001 From: Erich Gatejen Date: Tue, 26 Aug 2025 12:27:07 -0600 Subject: [PATCH 01/25] Add token management, REST API improvements, and enhanced notifications - Introduced token management with REST API endpoints for creating, viewing, and deleting tokens. - Refined `SnackbarNotification` component usage for consistent notifications. - Enhanced REST API logic to unify `authenticateTokenOrToken` usage. - Improved `design` and `runs` queries and adjusted PostgreSQL queries for clarity. - Updated container `entrypoint.sh` to securely generate and export JWT secrets. --- README.md | 2 + docs/ARCH.md | 52 ++ docs/RESTAPI.md | 108 +++ docs/notes/rest.txt | 12 + integration/build/all/files/entrypoint.sh | 7 + service/providers/psql/schema/full.sql | 13 + service/server/admin.js | 140 +++- service/server/admin_rest.js | 79 ++ service/server/common.js | 114 +++ service/server/proxy.js | 28 +- service/server/run.js | 20 +- service/server/run_rest.js | 18 +- .../src/components/SnackbarNotification.vue | 57 ++ service/src/routes/admin/AdminView.vue | 695 +++++++++++++----- service/src/routes/main/MainView.vue | 37 +- service/src/routes/run/RunView.vue | 22 +- service/src/routes/settings/SettingsView.vue | 67 +- service/src/utils/date.ts | 17 + service/test/working.json | 2 +- 19 files changed, 1220 insertions(+), 270 deletions(-) create mode 100644 docs/ARCH.md create mode 100644 docs/RESTAPI.md create mode 100644 docs/notes/rest.txt create mode 100644 service/src/components/SnackbarNotification.vue create mode 100644 service/src/utils/date.ts diff --git a/README.md b/README.md index d01c9c0..54bcd62 100644 --- a/README.md +++ b/README.md @@ -78,6 +78,8 @@ Now that you have s running container, you can run our demo to see how it works. 
- [Building and developing on your laptop/pc](docs/LOCALDEV.md)
- [Basic demo](docs/BASICDEMO.md)
- [AI Providers](docs/PROVIDERS.md)
+- [Architecture and design information](docs/ARCH.md)
+- [REST API](docs/RESTAPI.md)

## Links

diff --git a/docs/ARCH.md b/docs/ARCH.md
new file mode 100644
index 0000000..b32121a
--- /dev/null
+++ b/docs/ARCH.md
@@ -0,0 +1,52 @@
+# WWWHerd. Herding the internet.
+Copyright (c) 2025 Ginfra Project. All rights reserved.
+
+*You can use your wwwherds and we will take care of the code.*
+
+## COMPONENTS
+
+### API Server
+
+The api server is found in service/server/. This is the UI and REST api support for the application. It supports
+both JWT user authentication and configured access tokens.
+
+IMPORTANT: The environment variable WWWHERD_JWT_SECRET should be set to a JWT secret key for encrypting the JWT tokens.
+The following is the default value:
+
+```
+export WWWHERD_JWT_SECRET="7ZjeJaw72bM2akfbGUfevK2Y5I1jFMQdvY86tzmGPCRdfq6oMVdKzCr0QCxUr9t"
+```
+
+The default is fine for demos, but it is extremely dangerous to use in any sort of production environment. The all-in-one
+container available from the WWWHerd project (i.e. wwwherd/all:latest) will automatically generate a new key the first
+time the container is started.
+
+### Frontend
+
+The frontend is found in service/src/.
+
+### Service Node
+
+The service node is found in service/ and is a ginfra-based service. It handles separable scaling components for the
+following services:
+
+- Database management
+- Run job dispatch and monitoring
+- Periodic data processes
+
+### Magnitude service
+
+The magnitude service is found in magnitude/ and is a ginfra-based service. It handles actually running the AI processes.
+
+### Postgres node
+
+It is found in postgres/. Intended to build postgres node containers. It is not required and exists as a convenience.
+Scaled postgres instances must be separately architected using known practices that are beyond the scope of our +documentation. + +### Configuration node + +It is found in config/. Intended to build ginfra configuration node containers. It is not required and exists as a +convenience for those that want to use Ginfra for management. + + diff --git a/docs/RESTAPI.md b/docs/RESTAPI.md new file mode 100644 index 0000000..46adc41 --- /dev/null +++ b/docs/RESTAPI.md @@ -0,0 +1,108 @@ +# WWWHerd. Herding the internet. +Copyright (c) 2025 Ginfra Project. All rights reserved. + +*You can use your wwwherds and we will take care of the code.* + +## REST API + +All calls require the header 'auth-token' containing the token added in the UI. All examples assume the environment +variable WWWHERD_REST_TOKEN with the auth-token. + +### GET /api/design + +Gets all designs. + +Example: +```bash +curl -X GET \ + -H "Content-Type: application/json" \ + -H "auth-token: ${WWWHERD_REST_TOKEN}" \ + http://localhost:8000/api/design +``` + +### GET /api/design/:designId + +Get a design with the id as :designId. + +Example: +```bash +curl -X GET \ + -H "Content-Type: application/json" \ + -H "auth-token: ${WWWHERD_REST_TOKEN}" \ + http://localhost:8000/api/design/1 +``` + +### POST /api/runs + +The json body will have the following fields: +- name +- narrative +- values +- host +- (int) provider_id +- flowids +- tag_ids + +You can get these values as the output from GET /api/design or GET /api/design/:designId. A successful response will +have the run's id. + +Example. 
This assumes [the integration test](../integration/demo/wwwherd_test.json) has been imported: +```bash + curl -X POST \ + -H "Content-Type: application/json" \ + -H "auth-token: ${WWWHERD_REST_TOKEN}" \ + -d '{"name":"Run Jacks Pet Store ","narrative":"Verify Jacks Pet Store.","values":null,"host":"http://localhost:8888","flowids":[1,2,3],"provider_id":1,"tag_ids":[2,3] }' \ + http://localhost:8000/api/runs +``` + +### GET /api/runs/:runId + +Get information and status about the run with the id as :runId. + +The status values are as follows: +1= New and not yet seen by the system. +2= Waiting for a node. +3= Running +4= Cancelling +5= Canceled +6= Done without result +7= Done and errored +8= Done with a passing result +9= Done and failed by assertions +10= Skipped (unlikely at the run level) +11= Blocked (unlikely at the run level) +12= Disabled (unlikely at the run level) +13= Unknown. This will always be a bug. + +Example: +```bash +curl -X GET \ + -H "Content-Type: application/json" \ + -H "auth-token: ${WWWHERD_REST_TOKEN}" \ + http://localhost:8000/api/runs/1 +``` + +### GET /api/cancel/:runId + +Cancel the run with the id as :runId. + +Example: +```bash +curl -X GET \ + -H "Content-Type: application/json" \ + -H "auth-token: ${WWWHERD_REST_TOKEN}" \ + http://localhost:8000/api/cancel/1 +``` + +### GET /api/run-logs/:runId + +Get the logs for the run with the id as :runId. The logs won't be available until the run is done. The data returned +is Base64 encoded. 
+
+Example:
+```bash
+curl -X GET \
+  -H "Content-Type: application/json" \
+  -H "auth-token: ${WWWHERD_REST_TOKEN}" \
+  http://localhost:8000/api/run-logs/1 | base64 --decode
+```
diff --git a/docs/notes/rest.txt b/docs/notes/rest.txt
new file mode 100644
index 0000000..3be834a
--- /dev/null
+++ b/docs/notes/rest.txt
@@ -0,0 +1,12 @@
+curl -X GET \
+  -H "Content-Type: application/json" \
+  -H "auth-token: ${WWWHERD_REST_TOKEN}" \
+  http://localhost:8000/api/design
+
+  curl -X POST \
+  -H "Content-Type: application/json" \
+  -H "auth-token: ${WWWHERD_REST_TOKEN}" \
+  -d '{
+  "name":"Run Jacks Pet Store ","narrative":"Verify Jacks Pet Store.","values":null,"host":"http://localhost:8888","flowids":[1,2,3],"provider_id":1,"tag_ids":[2,3]
+  }' \
+  http://localhost:8000/api/design
diff --git a/integration/build/all/files/entrypoint.sh b/integration/build/all/files/entrypoint.sh
index 6bef226..bc35987 100644
--- a/integration/build/all/files/entrypoint.sh
+++ b/integration/build/all/files/entrypoint.sh
@@ -28,6 +28,13 @@ replace_config_placeholders() {
     echo "Config file $config_file updated successfully"
 }

+# JWT SECRET
+if [[ !
-f "/jwt_secret" ]]; then
+  # Generate 64 character random alphanumeric string (upper and lowercase letters + numbers)
+  JWT_SECRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1)
+  echo "$JWT_SECRET" > /jwt_secret
+fi
+export WWWHERD_JWT_SECRET=$(cat /jwt_secret)

 pg_ctlcluster 16 main start

diff --git a/service/providers/psql/schema/full.sql b/service/providers/psql/schema/full.sql
index bc48fff..53d17ab 100644
--- a/service/providers/psql/schema/full.sql
+++ b/service/providers/psql/schema/full.sql
@@ -57,6 +57,19 @@ CREATE TABLE accounts
 INSERT INTO accounts (username, hpassword, org_id, roll_id, state_id)
 VALUES ('admin', 'aaaaaaaa', 1, 1, 1);

+CREATE TABLE tokens
+(
+    id SERIAL PRIMARY KEY,
+    name TEXT NOT NULL,
+    token TEXT UNIQUE NOT NULL,
+    create_date DATE NOT NULL DEFAULT CURRENT_DATE,
+    expire_date DATE NOT NULL,
+    org_id INT NOT NULL,
+    CONSTRAINT fktok_org FOREIGN KEY (org_id)
+        REFERENCES organizations (id),
+    CONSTRAINT fktok_unique_name_org UNIQUE (name, org_id)
+);
+
 CREATE TABLE provider_types
 (
     id SERIAL PRIMARY KEY,
diff --git a/service/server/admin.js b/service/server/admin.js
index 78c0b0a..b339400 100644
--- a/service/server/admin.js
+++ b/service/server/admin.js
@@ -173,10 +173,148 @@ export async function changeUserPassword(orgId, username, oldPassword, newPasswo
 }


+// -- TOKEN ----------------------------------------------------------------------------------------------------------
+
+export async function addToken(orgId, name, token, expireDate) {
+    if (!orgId) {
+        throw new Error('Organization ID cannot be null or empty');
+    }
+    if (!name || name.trim() === '') {
+        throw new Error('Token name cannot be null or empty');
+    }
+    if (!token || token.trim() === '') {
+        throw new Error('Token cannot be null or empty');
+    }
+    if (!expireDate) {
+        throw new Error('Expire date cannot be null or empty');
+    }
+
+    let client = getClient();
+    try {
+        // Check if token name already exists for this org
+        const existingToken = await
client.query(
+            'SELECT id FROM tokens WHERE name = $1 AND org_id = $2',
+            [name, orgId]
+        );
+
+        if (existingToken.rowCount > 0) {
+            throw new Error('Token name already exists for this organization');
+        }
+
+        const result = await client.query(
+            'INSERT INTO tokens (name, token, expire_date, org_id) VALUES ($1, $2, $3, $4) RETURNING *',
+            [name, token, expireDate, orgId]
+        );
+
+        logger.info(`Token '${name}' added successfully for organization ${orgId}`);
+        return result.rows[0];
+    } catch (error) {
+        logger.error('Error adding token:', error);
+        throw error;
+    }
+}
+
+export async function getTokensForOrg(orgId) {
+    if (!orgId) {
+        throw new Error('Organization ID cannot be null or empty');
+    }
+
+    let client = getClient();
+    try {
+        const result = await client.query(
+            'SELECT id, name, create_date, expire_date FROM tokens WHERE org_id = $1 ORDER BY create_date DESC',
+            [orgId]
+        );
+
+        return result.rows;
+    } catch (error) {
+        logger.error('Error getting tokens for organization:', error);
+        throw error;
+    }
+}
+
+export async function getTokenInfo(orgId, token) {
+    if (!token || token.trim() === '') {
+        throw new Error('Token cannot be null or empty');
+    }
+
+    let client = getClient();
+    try {
+        const result = await client.query(
+            'SELECT name, expire_date FROM tokens WHERE token = $1 AND org_id = $2',
+            [token, orgId]
+        );
+
+        if (result.rowCount === 0) {
+            return null;
+        }
+
+        return result.rows[0];
+    } catch (error) {
+        logger.error('Error getting token info:', error);
+        throw error;
+    }
+}
+
+export async function getTokenInfoNoOrg(token) {
+    if (!token || token.trim() === '') {
+        throw new Error('Token cannot be null or empty');
+    }
+
+    let client = getClient();
+    try {
+        const result = await client.query(
+            'SELECT name, org_id, expire_date FROM tokens WHERE token = $1',
+            [token]
+        );
+
+        if (result.rowCount === 0) {
+            return null;
+        }
+
+        return result.rows[0];
+    } catch (error) {
+        logger.error('Error getting token info:', error);
+        throw error;
+    }
+}
+
+export async function deleteToken(tokenId, orgId) { + if (!tokenId) { + throw new Error('Token ID cannot be null or empty'); + } + if (!orgId) { + throw new Error('Organization ID cannot be null or empty'); + } + + let client = getClient(); + try { + const result = await client.query( + 'DELETE FROM tokens WHERE id = $1 AND org_id = $2 RETURNING name', + [tokenId, orgId] + ); + + if (result.rowCount === 0) { + throw new Error('Token not found or not authorized to delete'); + } + + logger.info(`Token '${result.rows[0].name}' deleted successfully for organization ${orgId}`); + return { success: true, deletedToken: result.rows[0].name }; + } catch (error) { + logger.error('Error deleting token:', error); + throw error; + } +} + export default { getUsers, addUser, changeUserState, changeUserPassword, - queryAccountById + queryAccountById, + addToken, + getTokensForOrg, + getTokenInfo, + deleteToken, + getTokenInfoNoOrg } \ No newline at end of file diff --git a/service/server/admin_rest.js b/service/server/admin_rest.js index e2d1bb7..8493a20 100644 --- a/service/server/admin_rest.js +++ b/service/server/admin_rest.js @@ -101,4 +101,83 @@ export async function addAdmin(app) { } }); + // -- Token management endpoints ------------------------------------------------------------------------------ + + app.post('/api/tokens', authenticateToken, async (req, res) => { + if (!isAdminRole(req.user.username)) { + return res.sendStatus(403); + } + try { + const { name, token, expireDate } = req.body; + + if (!name || !token || !expireDate) { + return res.status(400).json({ + error: 'Name, token, and expire date are required' + }); + } + + const result = await proxy.addToken(1, name, token, expireDate); + res.json(result); + } catch (error) { + if (error.message.includes('already exists')) { + return res.status(409).json({ error: error.message }); + } + res.status(500).json({ error: error.message }); + } + }); + + app.get('/api/tokens', authenticateToken, async (req, res) => { + 
if (!isAdminRole(req.user.username)) { + return res.sendStatus(403); + } + try { + const tokens = await proxy.getTokensForOrg(1); + res.json(tokens); + } catch (error) { + res.status(500).json({ error: error.message }); + } + }); + + app.get('/api/tokens/:token/info', authenticateToken, async (req, res) => { + if (!isAdminRole(req.user.username)) { + return res.sendStatus(403); + } + try { + const { token } = req.params; + const tokenInfo = await proxy.getTokenInfo(1, token); + + if (!tokenInfo) { + return res.status(404).json({ error: 'Token not found' }); + } + + res.json(tokenInfo); + } catch (error) { + res.status(500).json({ error: error.message }); + } + }); + + app.delete('/api/tokens/:tokenId', authenticateToken, async (req, res) => { + if (!isAdminRole(req.user.username)) { + return res.sendStatus(403); + } + try { + const tokenId = parseInt(req.params.tokenId); + + if (isNaN(tokenId)) { + return res.status(400).json({ + error: 'Invalid token ID: must be a valid number' + }); + } + + const result = await proxy.deleteToken(1, tokenId); + res.json(result); + } catch (error) { + if (error.message.includes('not found')) { + return res.status(404).json({ error: 'Token not found' }); + } + res.status(500).json({ error: error.message }); + } + }); + + } diff --git a/service/server/common.js b/service/server/common.js index 0d9c8d1..a1773d2 100644 --- a/service/server/common.js +++ b/service/server/common.js @@ -5,6 +5,8 @@ import {activeAccounts, logger} from './store.js'; import jwt from 'jsonwebtoken'; +import proxy from "./proxy.js"; +import {getTokenInfoNoOrg} from "./admin.js"; // Obviously, it is incredibly unsafe to not set in the environment, but for testing and demo it should be ok. 
export const JWT_SECRET = process.env.WWWHERD_JWT_SECRET || "7ZjeJaw72bM2akfbGUfevK2Y5I1jFMQdvY86tzmGPCRdfq6oMVdKzCr0QCxUr9t"; @@ -38,5 +40,117 @@ export const authenticateToken = (req, res, next) => { }); }; +// Token cache to store org_id numbers mapped by tokens +const tokenCache = new Map(); + +// JWT or rest token. +export const authenticateTokenOrToken = (req, res, next) => { + const authHeader = req.headers['authorization']; + const token = authHeader && authHeader.split(' ')[1]; + + if (token) { + jwt.verify(token, JWT_SECRET, (err, user) => { + if (err) { + logger.error('Bad login:', err); + return res.sendStatus(403); + } + req.user = user; + req.org_id = 0; + if (!activeAccounts.has(req.user.username)) { + logger.error('Bad user in login:', err); + return res.sendStatus(401); + } + next(); + }); + return + } + + // See if a token needs to be cached. + try { + const authtoken = req.headers['auth-token']; + + if (tokenCache.has(authtoken)) { + if (isTokenExpired(authtoken)) { + return res.sendStatus(406); + } else { + const ctoken = tokenCache.get(authtoken); + req.user = "" + req.org_id = ctoken.org_id; + next(); + return; + } + } + + const tokenInfo = proxy.getTokenInfoNoOrg(authtoken); + tokenInfo.then(data => { + if (!data || data === null) { + return res.sendStatus(403); + } + + setTokenCache(authtoken, data.org_id, data.expire_date); + if (isTokenExpired(authtoken)) { + return res.sendStatus(406); + } + req.user = "" + req.org_id = data.org_id; + next(); + + }).catch(error => { + logger.error('Error getting token info:', error); + return res.sendStatus(403); + }); + + } catch (error) { + logger.error('Error in token authentication:', error); + return res.sendStatus(500); + } +}; + + +// Helper functions for token cache management +export function setTokenCache(token, org_id, expiration_date) { + tokenCache.set(token, { + org_id: org_id, + expiration_date: expiration_date + }); +} + +export function getTokenCache(token) { + return 
tokenCache.get(token); +} + +export function hasTokenCache(token) { + return tokenCache.has(token); +} + +export function deleteTokenCache(token) { + return tokenCache.delete(token); +} + +export function clearTokenCache() { + tokenCache.clear(); +} + +// Helper function to check if token is expired +export function isTokenExpired(token) { + const cacheEntry = tokenCache.get(token); + if (!cacheEntry) return true; + + const now = new Date(); + const expiration = new Date(cacheEntry.expiration_date); + return now > expiration; +} + +// Helper function to clean up expired tokens +export function cleanupExpiredTokens() { + const now = new Date(); + for (const [token, data] of tokenCache.entries()) { + const expiration = new Date(data.expiration_date); + if (now > expiration) { + tokenCache.delete(token); + } + } +} + diff --git a/service/server/proxy.js b/service/server/proxy.js index 4ed211e..25319cc 100644 --- a/service/server/proxy.js +++ b/service/server/proxy.js @@ -35,6 +35,27 @@ async function changeUserPassword(orgId, username, oldPassword, newPassword) { return await admin.changeUserPassword(orgId, username, oldPassword, newPassword); } +export async function addToken(orgId, name, token, expireDate) { + return await admin.addToken(orgId, name, token, expireDate); +} + +export async function getTokensForOrg(orgId) { + return await admin.getTokensForOrg(orgId); +} + +async function getTokenInfo(orgId, token) { + return await admin.getTokenInfo(orgId, token); +} + +// Internal use. No API access. 
+async function getTokenInfoNoOrg(token) {
+    return await admin.getTokenInfoNoOrg(token);
+}
+
+export async function deleteToken(orgId, tokenId) {
+    return await admin.deleteToken(tokenId, orgId);
+}
+
 // -- CatTags ----------------------------------------------------------------------------------------------------------

 async function getAllTags(orgId) {
@@ -266,5 +287,10 @@ export default {
     queryAccountById,
     changeUserPassword,
     insertFlowStats,
-    insertCaseStats
+    insertCaseStats,
+    addToken,
+    getTokensForOrg,
+    getTokenInfo,
+    deleteToken,
+    getTokenInfoNoOrg
 };
\ No newline at end of file
diff --git a/service/server/run.js b/service/server/run.js
index bde0907..751a573 100644
--- a/service/server/run.js
+++ b/service/server/run.js
@@ -42,7 +42,7 @@ async function queryAllDesignsByOrgId(orgId) {
     let client = getClient()
     try {
         let query = `
-            SELECT cc.id, cc.name, cc.narrative, cc.values, cc.host, cc.org_id, cc.flowids, cc.provider_id,
+            SELECT cc.id, cc.name, cc.narrative, cc.values, cc.host, cc.flowids, cc.provider_id,
                    COALESCE(array_agg(ct.tag_id) FILTER (WHERE ct.tag_id IS NOT NULL), '{}') as tag_ids
             FROM design cc
                      LEFT JOIN design_tags ct ON cc.id = ct.design_id
@@ -63,12 +63,12 @@ async function queryDesignById(designId) {
     let client = getClient()
     try {
         let query = `
-            SELECT cc.id, cc.name, cc.narrative, cc.host, cc.design_status, cc.org_id, cc.flowids, cc.provider_id,
+            SELECT cc.id, cc.name, cc.narrative, cc.host, cc.design_status, cc.flowids, cc.provider_id,
                    COALESCE(array_agg(ct.tag_id) FILTER (WHERE ct.tag_id IS NOT NULL), '{}') as tag_ids
             FROM design cc
                      LEFT JOIN design_tags ct ON cc.id = ct.design_id
             WHERE cc.id = $1
-            GROUP BY cc.id, cc.name, cc.narrative, cc.org_id`;
+            GROUP BY cc.id, cc.name, cc.narrative`;

         const params = [designId];
         const result = await client.query(query, params);
@@ -89,7 +89,8 @@ async function updateDesign(orgId, designId, name, narrative, values, host, prov
         await client.query('BEGIN');
         const result = await client.query(
-            'UPDATE design SET name = $1, narrative = $2, values = $3, host = $4, provider_id = $5, flowids = $6 WHERE id = $7 AND org_id = $8 RETURNING *',
+            `UPDATE design SET name = $1, narrative = $2, values = $3, host = $4, provider_id = $5, flowids = $6 WHERE id = $7 AND org_id = $8 RETURNING
+             name, narrative, values, host, provider_id, flowids`,
             [name, narrative, values, host, normalizedProviderId, flowids, designId, orgId]
         );

@@ -121,7 +122,7 @@ async function deleteDesign(orgId, designId) {
         await client.query('DELETE FROM design_tags WHERE design_id = $1', [designId]);

         // Delete the design
-        const result = await client.query('DELETE FROM design WHERE id = $1 AND org_id = $2 RETURNING *', [designId, orgId]);
+        const result = await client.query('DELETE FROM design WHERE id = $1 AND org_id = $2 RETURNING id', [designId, orgId]);

         await client.query('COMMIT');
         return result.rows[0] || null;
@@ -141,7 +142,8 @@ async function addRun(orgId, name, narrative, values, host, provider_id, flowids
     try {
         await client.query('BEGIN');
         const result = await client.query(
-            'INSERT INTO runs (org_id, name, narrative, values, host, run_status, flowids, provider_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *',
+            `INSERT INTO runs (org_id, name, narrative, values, host, run_status, flowids, provider_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING
+             id, name, narrative, values, host, run_status, flowids, provider_id`,
             [orgId, name, narrative, values, host, 1, flowids, provider_id]
         );
         const runId = result.rows[0].id;
@@ -167,12 +169,12 @@ async function queryAllRunsByOrgId(orgId) {
     try {
         // TODO we don't need to get everything.
let query = ` - SELECT cc.id, cc.name, cc.narrative, cc.values, cc.host, cc.run_status, cc.org_id, cc.flowids, cc.provider_id, + SELECT cc.id, cc.name, cc.narrative, cc.values, cc.host, cc.run_status, cc.flowids, cc.provider_id, COALESCE(array_agg(ct.tag_id) FILTER (WHERE ct.tag_id IS NOT NULL), '{}') as tag_ids FROM runs cc LEFT JOIN run_tags ct ON cc.id = ct.run_id WHERE cc.org_id = $1 - GROUP BY cc.id, cc.name, cc.narrative, cc.org_id`; + GROUP BY cc.id, cc.name, cc.narrative`; const params = [orgId]; const result = await client.query(query, params); @@ -310,7 +312,7 @@ async function deleteRunHistory(orgId, runId) { await client.query('DELETE FROM run_logs WHERE run_id = $1', [runId]); const result = await client.query( - 'DELETE FROM runs_history WHERE id = $1 AND org_id = $2 RETURNING *', + 'DELETE FROM runs_history WHERE id = $1 AND org_id = $2 RETURNING id', [runId, orgId] ); diff --git a/service/server/run_rest.js b/service/server/run_rest.js index 078887f..4ca5a28 100644 --- a/service/server/run_rest.js +++ b/service/server/run_rest.js @@ -7,12 +7,12 @@ import {logger} from './store.js'; import { getClient } from './client.js'; import proxy from "./proxy.js"; -import { isAdminRole, authenticateToken } from "./common.js"; +import {isAdminRole, authenticateToken, authenticateTokenOrToken} from "./common.js"; export async function addRun(app) { // -- Design ------------------------------------------------------------------------------------------------------- - app.get('/api/design', authenticateToken, async (req, res) => { + app.get('/api/design', authenticateTokenOrToken, async (req, res) => { try { const runs = await proxy.getAllDesign(1); res.json(runs); @@ -21,9 +21,9 @@ export async function addRun(app) { } }); - app.get('/api/design/:designId', authenticateToken, async (req, res) => { + app.get('/api/design/:designId', authenticateTokenOrToken, async (req, res) => { try { - const run = await proxy.getRun(1, parseInt(req.params.designId)); + const run 
= await proxy.getDesign(1, parseInt(req.params.designId)); if (!run) { return res.status(404).json({error: 'DEsign not found'}); } @@ -89,7 +89,7 @@ export async function addRun(app) { } }); - app.get('/api/runs/:runId', authenticateToken, async (req, res) => { + app.get('/api/runs/:runId', authenticateTokenOrToken, async (req, res) => { try { const run = await proxy.getRun(1, parseInt(req.params.runId)); if (!run) { @@ -101,7 +101,7 @@ export async function addRun(app) { } }); - app.post('/api/runs', authenticateToken, async (req, res) => { + app.post('/api/runs', authenticateTokenOrToken, async (req, res) => { try { const result = await proxy.addRun(1, req.body.name, req.body.narrative, req.body.values, req.body.host, parseInt(req.body.provider_id), req.body.flowids, req.body.tag_ids); @@ -115,7 +115,7 @@ export async function addRun(app) { } }); - app.delete('/api/runs/:runId', authenticateToken, async (req, res) => { + app.delete('/api/runs/:runId', authenticateTokenOrToken, async (req, res) => { try { const result = await proxy.deleteRun(1, parseInt(req.params.runId)); if (!result) { @@ -139,7 +139,7 @@ export async function addRun(app) { } }); - app.get('/api/cancel/:runId', authenticateToken, async (req, res) => { + app.get('/api/cancel/:runId', authenticateTokenOrToken, async (req, res) => { try { const run = await proxy.cancelRun(1, parseInt(req.params.runId)); if (!run) { @@ -151,7 +151,7 @@ export async function addRun(app) { } }); - app.get('/api/run-logs/:runId', async (req, res) => { + app.get('/api/run-logs/:runId', authenticateTokenOrToken, async (req, res) => { try { const { runId } = req.params; diff --git a/service/src/components/SnackbarNotification.vue b/service/src/components/SnackbarNotification.vue new file mode 100644 index 0000000..3fa3e46 --- /dev/null +++ b/service/src/components/SnackbarNotification.vue @@ -0,0 +1,57 @@ + + + diff --git a/service/src/routes/admin/AdminView.vue b/service/src/routes/admin/AdminView.vue index 
0a7adf2..0e6e918 100644 --- a/service/src/routes/admin/AdminView.vue +++ b/service/src/routes/admin/AdminView.vue @@ -14,6 +14,7 @@ mdi-account-multipleUsers + mdi-keyTokens mdi-robot-outlineProviders mdi-desktop-classicSystem @@ -109,6 +110,77 @@ + + + + Tokens + + + + {{ tokensError }} + + + Close + + + Add Token + + + + + + + Name + Created + Expires + Status + Actions + + + + + + mdi-key + + {{ token.name }} + {{ formatDate(token.create_date) }} + {{ formatDate(token.expire_date) }} + + + {{ isExpired(token.expire_date) ? 'Expired' : 'Active' }} + + + + + + + + + + No tokens found. + + + @@ -405,6 +477,117 @@ + + + + Add New Token + +
+ + + + + +
+ +
+ + Important: Copy this token now! You will never be able to see it again. + + + + + + Click the copy icon to copy the token to your clipboard. + +
+
+ + + + mdi-close + Cancel + + + mdi-check + Create Token + + + mdi-check + I've Copied the Token + + +
+
+ + + + + Delete Token + + Are you sure you want to delete the token "{{ tokenToDelete?.name }}"? This action cannot be undone. + + + + + Cancel + + + Delete + + + + + - - {{ snackbar.text }} - - + \ No newline at end of file diff --git a/service/src/routes/run/RunView.vue b/service/src/routes/run/RunView.vue index 13288a2..e58c493 100644 --- a/service/src/routes/run/RunView.vue +++ b/service/src/routes/run/RunView.vue @@ -582,12 +582,7 @@ + @click="showExecInfoDialog = true" color="softbutton" variant="outlined" density="comfortable" class="mt-2"> mdi-console Exec Info @@ -595,31 +590,22 @@ - + mdi-clipboard-list Orders and Results - + + + mdi-chart-box-outline + Report + + - + mdi-text-box-outline Fetch Logs - @@ -648,6 +634,16 @@ + + + @@ -1097,7 +1093,6 @@ - import {computed, onMounted, ref, watch} from 'vue' -import {useAuthStore} from "@/stores/auth.ts"; import {canCancelRun, canDeleteRun, canRecycleRun, categories, deleteDesign, type Design, design, fetchCategories, fetchProviders, fetchTags, getDesignColor, getRunColor, getTagColor, providers, refreshDesign, type Run, runs, selectedCategory, tags, formatDate, getProviderName, newRun} from "@/stores/data.ts"; @@ -1184,8 +1178,7 @@ import LogsDialog from "@/components/LogsDialog.vue"; import HelpButton from "@/components/HelpButton.vue"; import Sidebar from "@/components/Sidebar.vue"; import SnackbarNotification from "@/components/SnackbarNotification.vue"; - -const authStore = useAuthStore() +import ReportDialog from "@/components/ReportDialog.vue"; // --------------------------------------------------------------------------------------------------------------------- // -- Design ----------------------------------------------------------------------------------------------------------- @@ -1198,7 +1191,6 @@ const runFormValid = ref(false) const selectedDesign = ref(null) const showTagSelectDialog = ref(false) const deleteDialog = ref(false) -//const selectedDesignId = ref(null) const startRunDialog = 
ref(false) const showRunTagsDialog = ref(false) const submittingRun = ref(false) @@ -1351,7 +1343,6 @@ const draggableRunFlows = computed({ } }) - // Update the onFlowOrderChange to be more robust const onFlowOrderChange = (event: any) => { // Let the computed property handle the update @@ -1653,6 +1644,66 @@ const showCaseDetails = (case_: any) => { showCaseDetailsDialog.value = true } +// -- REPORT -------------------------------------------------------- + +// Add new reactive variables for report functionality +const showReportDialog = ref(false) +const reportData = ref(null) +const loadingReport = ref(false) +const reportError = ref(null) +const currentReportRunId = ref(null) + +// Add the showReport method +const showReport = async (runId: number) => { + currentReportRunId.value = runId + showReportDialog.value = true + await fetchReport(runId) +} + +// Add the fetchReport method +const fetchReport = async (runId: number) => { + loadingReport.value = true + reportError.value = null + reportData.value = null + + try { + const response = await apiClient.get('/api/stats/report/' + runId) + if (!response.ok) { + throw new Error(`Failed to fetch report: ${response.status} ${response.statusText}`) + } + + const data = await response.json() + reportData.value = data + } catch (error) { + console.error('Error fetching report:', error) + reportError.value = error instanceof Error ? 
error.message : 'Failed to load report' + } finally { + loadingReport.value = false + } +} + +// Add the refreshReport method +const refreshReport = async () => { + if (currentReportRunId.value) { + await fetchReport(currentReportRunId.value) + } +} + +// Add the downloadReportJson method +const downloadReportJson = () => { + if (!reportData.value) return + const jsonString = JSON.stringify(reportData.value, null, 2) + const blob = new Blob([jsonString], { type: 'application/json' }) + const url = URL.createObjectURL(blob) + const link = document.createElement('a') + link.href = url + link.download = `run-report-${currentReportRunId.value}-${Date.now()}.json` + document.body.appendChild(link) + link.click() + document.body.removeChild(link) + URL.revokeObjectURL(url) +} + // --------------------------------------------------------------------------------------------------------------------- // -- Values ------------------------------------------------------------------------------------------------------------- @@ -1834,4 +1885,37 @@ onMounted(async () => { color: #f0f0f0; } +.report-content { + padding: 16px; +} + +.report-section h3 { + color: #1976d2; + border-bottom: 2px solid #e0e0e0; + padding-bottom: 8px; +} + +.report-section h4 { + color: #424242; + margin-top: 24px; +} + +.report-json { + background-color: #f5f5f5; + border: 1px solid #e0e0e0; + border-radius: 4px; + padding: 12px; + font-family: 'Courier New', monospace; + font-size: 12px; + line-height: 1.4; + overflow-x: auto; + white-space: pre-wrap; + word-wrap: break-word; +} + +.report-icon { + margin-right: 8px; +} + + -- GitLab From 6ec355cb4e3a4bb87b0f256c486fb38c9bd6b8d4 Mon Sep 17 00:00:00 2001 From: Erich Gatejen Date: Wed, 27 Aug 2025 11:02:50 -0600 Subject: [PATCH 03/25] cost info --- common/data/types.go | 1 + magnitude/service/common/utils.go | 42 ++++++- magnitude/service/flow/script.go | 10 +- magnitude/service/raw/script.go | 8 +- magnitude/src/index.ts | 30 ++++- 
service/providers/psql/schema/full.sql | 18 ++- service/providers/psql/wwwherd_stats.go | 12 +- service/server/main.js | 5 + service/server/stats.js | 96 ++++++++++++++++ service/server/stats_rest.js | 5 +- service/service/stats.go | 143 ++++++++++++++++++++++-- service/src/components/LogsDialog.vue | 47 +++++++- service/src/components/ReportDialog.vue | 40 ++++--- service/src/routes/admin/AdminView.vue | 15 ++- service/src/routes/run/RunView.vue | 76 ++++++++----- service/test/working.json | 2 +- 16 files changed, 467 insertions(+), 83 deletions(-) diff --git a/common/data/types.go b/common/data/types.go index f39b772..870f310 100644 --- a/common/data/types.go +++ b/common/data/types.go @@ -32,6 +32,7 @@ type OrderCaseItem struct { //Log string `json:"log,omitempty"` Error string `json:"error,omitempty"` Time int64 `json:"time,omitempty"` + Costs string `json:"costs,omitempty"` // Runtime discarded Compiled interface{} `json:"-"` diff --git a/magnitude/service/common/utils.go b/magnitude/service/common/utils.go index 75b39ba..bf33c18 100644 --- a/magnitude/service/common/utils.go +++ b/magnitude/service/common/utils.go @@ -16,11 +16,13 @@ import ( "gitlab.com/ginfra/wwwherd/magnitude/local" "go.uber.org/zap" "io" + "strconv" "strings" ) -func CleanLog(dirtext string, dirlog []interface{}) ([]interface{}, string, bool) { +func CleanLog(dirtext string, dirlog []interface{}) ([]interface{}, string, bool, string) { filtered := make([]interface{}, 0) + cost := "" errored := false errtext := "" for _, entry := range dirlog { @@ -29,6 +31,11 @@ func CleanLog(dirtext string, dirlog []interface{}) ([]interface{}, string, bool if !strings.HasPrefix(msg, dirtext) && !strings.Contains(msg, "[start] agent") { filtered = append(filtered, entry) } + + // Check if message starts with "COST:" and append to cost variable + if strings.HasPrefix(msg, "COST:") { + cost += msg[5:] + } } if t, ok := e["type"].(string); ok && t == "ERROR" { // Case has errored @@ -58,7 +65,7 @@ func 
CleanLog(dirtext string, dirlog []interface{}) ([]interface{}, string, bool } } } - return filtered, errtext, errored + return filtered, errtext, errored, cost } func RunDirective(directive sflow.Directive, stdin io.WriteCloser, scanner *bufio.Scanner) ([]interface{}, error) { @@ -103,3 +110,34 @@ func RunDirective(directive sflow.Directive, stdin io.WriteCloser, scanner *bufi return jsonObj, nil } + +func TabulateCosts(data string) string { + + costMap := make(map[string]float64) + + // Split by whitespace to get tokens + tokens := strings.Fields(data) + + for _, token := range tokens { + // Split each token by colon to separate name from value + parts := strings.SplitN(token, ":", 2) + if len(parts) == 2 { + name := parts[0] + valueStr := parts[1] + + // Parse numeric value + if value, err := strconv.ParseFloat(valueStr, 64); err == nil { + costMap[name] += value + } + } + } + + // Build accumulated results as space-separated name:summed_values + var costResults []string + for name, sum := range costMap { + result := fmt.Sprintf("%s:%f", name, sum) + costResults = append(costResults, result) + } + + return strings.Join(costResults, " ") +} diff --git a/magnitude/service/flow/script.go b/magnitude/service/flow/script.go index da3e4e3..40d9759 100644 --- a/magnitude/service/flow/script.go +++ b/magnitude/service/flow/script.go @@ -17,6 +17,7 @@ import ( "gitlab.com/ginfra/wwwherd/magnitude/local" scommon "gitlab.com/ginfra/wwwherd/magnitude/service/common" "go.uber.org/zap" + "strings" ) type Dispatcher struct { @@ -33,6 +34,7 @@ func (d *Dispatcher) RunCase(runner *script.Runner, thecase *data.OrderCaseItem, err error dirlog []interface{} log = make([]interface{}, 0, len(thecase.Compiled.(sflow.CompiledCase).Directives)+1) + sb strings.Builder ) // Get the CompiledCase from the run @@ -62,7 +64,8 @@ func (d *Dispatcher) RunCase(runner *script.Runner, thecase *data.OrderCaseItem, }, runner.Stdin, scanner); err != nil { local.Logger.Error("Error setting or clearing 
flow prompt. Terminating.", zap.Error(err)) } - wf, _, we := scommon.CleanLog(w.String(), dirlog) + wf, _, we, cost := scommon.CleanLog(w.String(), dirlog) + sb.WriteString(cost) log = append(log, map[string]interface{}{ "directive": w.String(), "log": wf, @@ -92,7 +95,8 @@ func (d *Dispatcher) RunCase(runner *script.Runner, thecase *data.OrderCaseItem, // Clean log and check for failure. // TODO mess code. Clean it up. if dirlog != nil { - filtered, etemp, errored := scommon.CleanLog(directive.Type.String(), dirlog) + filtered, etemp, errored, cost := scommon.CleanLog(directive.Type.String(), dirlog) + sb.WriteString(cost) if etemp != "" { thecase.Error = etemp } @@ -118,5 +122,7 @@ func (d *Dispatcher) RunCase(runner *script.Runner, thecase *data.OrderCaseItem, } } + thecase.Costs = scommon.TabulateCosts(sb.String()) + return log, err } diff --git a/magnitude/service/raw/script.go b/magnitude/service/raw/script.go index dd7e5c2..30a47a5 100644 --- a/magnitude/service/raw/script.go +++ b/magnitude/service/raw/script.go @@ -16,6 +16,7 @@ import ( "gitlab.com/ginfra/wwwherd/magnitude/local" scommon "gitlab.com/ginfra/wwwherd/magnitude/service/common" "go.uber.org/zap" + "strings" ) type Dispatcher struct { @@ -34,6 +35,8 @@ func (d *Dispatcher) RunCase(runner *script.Runner, thecase *data.OrderCaseItem, errored bool etemp string log = make([]interface{}, 0, len(thecase.Compiled.(sflow.CompiledCase).Directives)+1) + sb strings.Builder + cost string ) // Scanner to read stdout line by line @@ -54,7 +57,8 @@ func (d *Dispatcher) RunCase(runner *script.Runner, thecase *data.OrderCaseItem, } // Clean log and check for failure. 
- filtered, etemp, errored = scommon.CleanLog(sflow.DirectiveRaw.String(), dirlog) + filtered, etemp, errored, cost = scommon.CleanLog(sflow.DirectiveRaw.String(), dirlog) + sb.WriteString(cost) if etemp != "" { thecase.Error = etemp } @@ -76,5 +80,7 @@ func (d *Dispatcher) RunCase(runner *script.Runner, thecase *data.OrderCaseItem, thecase.Result = data.RunStatusPassed } + thecase.Costs = scommon.TabulateCosts(sb.String()) + return log, err } diff --git a/magnitude/src/index.ts b/magnitude/src/index.ts index f68b0c8..fd33e32 100644 --- a/magnitude/src/index.ts +++ b/magnitude/src/index.ts @@ -116,6 +116,32 @@ const agent = startBrowserAgent({ process.exit(9); }); +// Listen to token usage events +agent.then(resolvedAgent => { + resolvedAgent.events.on('tokensUsed', (usage: any) => { + if (usage.inputTokens !== undefined) { + console.log('COST: inputTokens:' + usage.inputTokens); + } + if (usage.inputCost !== undefined) { + console.log('COST: inputCost:' + usage.inputCost); + } + if (usage.outputTokens !== undefined) { + console.log('COST: outputTokens:' + usage.outputTokens); + } + if (usage.outputCost !== undefined) { + console.log('COST: outputCost:' + usage.outputCost); + } + if (usage.cacheWriteInputTokens !== undefined) { + console.log('COST: cacheWriteInputTokens:' + usage.cacheWriteInputTokens); + } + if (usage.cacheReadInputTokens !== undefined) { + console.log('COST: cacheReadInputTokens:' + usage.cacheReadInputTokens); + } + }); +}).catch(error => { + console.error('Failed to set up token usage listener:', error); +}); + let prompt = "" let promptset = "" @@ -144,10 +170,6 @@ Given the actions of an LLM agent executing a test case, and a screenshot taken Check to evaluate: `.trim(); -interface CheckEvents { - 'checkStarted': (check: string) => void; - 'checkDone': (check: string, passed: boolean) => void; -} // Check doesn't scroll, we we have to just use act with the correct prompt. 
async function check(description: string): Promise { diff --git a/service/providers/psql/schema/full.sql b/service/providers/psql/schema/full.sql index 53d17ab..3b2d91a 100644 --- a/service/providers/psql/schema/full.sql +++ b/service/providers/psql/schema/full.sql @@ -66,8 +66,8 @@ CREATE TABLE tokens expire_date DATE NOT NULL, org_id INT NOT NULL, CONSTRAINT fktok_org FOREIGN KEY (org_id) - REFERENCES organizations (id) - CONSTRAINT fktok_unique_name_org UNIQUE (name, org_id), + REFERENCES organizations (id), + CONSTRAINT fktok_unique_name_org UNIQUE (name, org_id) ); CREATE TABLE provider_types @@ -371,6 +371,13 @@ CREATE TABLE case_stats time_ms INT NOT NULL, provider_id INT NOT NULL, + cost_ic FLOAT DEFAULT 0, -- inputCost + cost_oc FLOAT DEFAULT 0, -- outputCost + cost_it INT DEFAULT 0, -- inputTokens + cost_ot INT DEFAULT 0, -- outputTokens + cost_cwit INT DEFAULT 0, -- cacheWriteInputTokens + cost_crit INT DEFAULT 0, -- cacheReadInputTokens + CONSTRAINT fkcase_stat_org FOREIGN KEY (org_id) REFERENCES organizations (id) ); @@ -387,6 +394,13 @@ CREATE TABLE flow_stats time_ms INT NOT NULL, provider_id INT NOT NULL, + cost_ic FLOAT DEFAULT 0, -- inputCost + cost_oc FLOAT DEFAULT 0, -- outputCost + cost_it INT DEFAULT 0, -- inputTokens + cost_ot INT DEFAULT 0, -- outputTokens + cost_cwit INT DEFAULT 0, -- cacheWriteInputTokens + cost_crit INT DEFAULT 0, -- cacheReadInputTokens + CONSTRAINT fkflow_stat_org FOREIGN KEY (org_id) REFERENCES organizations (id) ); diff --git a/service/providers/psql/wwwherd_stats.go b/service/providers/psql/wwwherd_stats.go index 3ec65f3..83585fd 100644 --- a/service/providers/psql/wwwherd_stats.go +++ b/service/providers/psql/wwwherd_stats.go @@ -44,8 +44,9 @@ func (p *providerDirect) PushStatsFlow(orgId int, data []interface{}) error { // Use json_populate_recordset for bulk insert query := ` - INSERT INTO flow_stats (org_id, flow_id, run_id, result, time_ms, provider_id) - SELECT batch.org_id, batch.flow_id, batch.run_id, 
batch.result, batch.time_ms, batch.provider_id + INSERT INTO flow_stats (org_id, flow_id, run_id, result, time_ms, provider_id, cost_ic, cost_oc, cost_it, cost_ot, cost_cwit, cost_crit) + SELECT batch.org_id, batch.flow_id, batch.run_id, batch.result, batch.time_ms, batch.provider_id, + batch.cost_ic, batch.cost_oc, batch.cost_it, batch.cost_ot, batch.cost_cwit, batch.cost_crit FROM json_populate_recordset(null::flow_stats, $1) AS batch ` @@ -88,11 +89,12 @@ func (p *providerDirect) PushStatsCase(orgId int, data []interface{}) error { // Use json_populate_recordset for bulk insert query := ` - INSERT INTO case_stats (org_id, case_id, flow_id, run_id, result, time_ms, provider_id) - SELECT batch.org_id, batch.case_id, batch.flow_id, batch.run_id, batch.result, batch.time_ms, batch.provider_id + INSERT INTO case_stats (org_id, case_id, flow_id, run_id, result, time_ms, provider_id, cost_ic, cost_oc, cost_it, cost_ot, cost_cwit, cost_crit) + SELECT batch.org_id, batch.case_id, batch.flow_id, batch.run_id, batch.result, batch.time_ms, batch.provider_id, + batch.cost_ic, batch.cost_oc, batch.cost_it, batch.cost_ot, batch.cost_cwit, batch.cost_crit FROM json_populate_recordset(null::case_stats, $1) AS batch ` - + _, err = cnx.Pool.Exec(context.Background(), query, string(jsonData)) if err != nil { return base.NewGinfraErrorChild("PushStatsCase failed to execute insert", err) diff --git a/service/server/main.js b/service/server/main.js index e192c67..4b0933f 100644 --- a/service/server/main.js +++ b/service/server/main.js @@ -384,6 +384,11 @@ app.delete('/api/providers/:providerId', authenticateToken, async (req, res) => res.json({message: 'Provider deleted successfully'}); } catch (error) { logger.error('Delete provider error:', error); + // A design is still using it. 
+ if (error.message && error.message.includes('fkdesign_prov_org_id')) { + return res.status(409).json({error: error.message}); + } + res.status(500).json({error: error.message}); } }); diff --git a/service/server/stats.js b/service/server/stats.js index 9956f37..e6f77f8 100644 --- a/service/server/stats.js +++ b/service/server/stats.js @@ -9,6 +9,77 @@ import { getClient } from './client.js'; // ------------------------------------------------------------------------------------------------------------ // - REPORT +function extractCostFields(d) { + let c_cost_ic = 0.0, c_cost_oc = 0.0; + let c_cost_it = 0, c_cost_ot = 0, c_cost_cwit = 0, c_cost_crit = 0; + + // Check if "costs" exists in the object + if (!d.hasOwnProperty("costs")) { + return [c_cost_ic, c_cost_oc, c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit]; + } + + // Extract costs as a string + const costsString = d["costs"]; + if (typeof costsString !== "string") { + return [c_cost_ic, c_cost_oc, c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit]; + } + + // Tokenize the string on whitespace + const tokens = costsString.split(/\s+/).filter(token => token.length > 0); + + // Match tokens to variables + for (const token of tokens) { + const parts = token.split(":"); + if (parts.length !== 2) { + continue; + } + + const key = parts[0]; + const value = parts[1]; + + switch (key) { + case "inputCost": + const inputCost = parseFloat(value); + if (!isNaN(inputCost)) { + c_cost_ic = inputCost; + } + break; + case "outputCost": + const outputCost = parseFloat(value); + if (!isNaN(outputCost)) { + c_cost_oc = outputCost; + } + break; + case "inputTokens": + const inputTokens = parseFloat(value); + if (!isNaN(inputTokens)) { + c_cost_it = Math.floor(inputTokens); + } + break; + case "outputTokens": + const outputTokens = parseFloat(value); + if (!isNaN(outputTokens)) { + c_cost_ot = Math.floor(outputTokens); + } + break; + case "cacheWriteInputTokens": + const cacheWriteInputTokens = parseFloat(value); + if 
(!isNaN(cacheWriteInputTokens)) { + c_cost_cwit = Math.floor(cacheWriteInputTokens); + } + break; + case "cacheReadInputTokens": + const cacheReadInputTokens = parseFloat(value); + if (!isNaN(cacheReadInputTokens)) { + c_cost_crit = Math.floor(cacheReadInputTokens); + } + break; + } + } + + return [c_cost_ic, c_cost_oc, c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit]; +} + async function runReport(orgId, runId) { const client = getClient(); @@ -78,6 +149,14 @@ async function runReport(orgId, runId) { ordersData = null; } + let f_cost_ic = 0.0; // float cost input cost + let f_cost_oc = 0.0; // float cost output cost + let f_cost_it = 0; // float cost input tokens + let f_cost_ot = 0; // float cost output tokens + let f_cost_cwit = 0; // float cost cache write input tokens + let f_cost_crit = 0; // float cost cache read input tokens + + if (ordersData && ordersData.flows && Array.isArray(ordersData.flows)) { // Process flow statistics const flowStatusCounts = {}; @@ -126,6 +205,14 @@ async function runReport(orgId, runId) { caseTimeTotals[caseStatus].total += caseTime; caseTimeTotals[caseStatus].count++; caseTimeTotals[caseStatus].times.push(caseTime); + + const [flow_ic, flow_oc, flow_it, flow_ot, flow_cwit, flow_crit] = extractCostFields(testCase); + f_cost_ic += flow_ic; + f_cost_oc += flow_oc; + f_cost_it += flow_it; + f_cost_ot += flow_ot; + f_cost_cwit += flow_cwit; + f_cost_crit += flow_crit; }); } }); @@ -159,6 +246,15 @@ async function runReport(orgId, runId) { maxTimeMs: Math.max(...times) }; }); + + report.costInfo = { + total_input_cost: f_cost_ic, + total_output_cost: f_cost_oc, + total_input_tokens: f_cost_it, + total_output_tokens: f_cost_ot, + total_cache_write_input_tokens: f_cost_cwit, + total_cache_read_input_tokens: f_cost_crit, + } } logger.info(`Report generated successfully for run ${runId}: ${report.caseStats.totalCases} cases, ${report.flowStats.totalFlows} flows`); diff --git a/service/server/stats_rest.js b/service/server/stats_rest.js 
index ecff4f7..0b00c0a 100644 --- a/service/server/stats_rest.js +++ b/service/server/stats_rest.js @@ -3,11 +3,8 @@ * * DB helpers. */ -import {logger} from './store.js'; -import { getClient } from './client.js'; - import proxy from "./proxy.js"; -import {isAdminRole, authenticateToken, authenticateTokenOrToken} from "./common.js"; +import {authenticateToken, authenticateTokenOrToken} from "./common.js"; export async function addStats(app) { diff --git a/service/service/stats.go b/service/service/stats.go index f630326..28dd717 100644 --- a/service/service/stats.go +++ b/service/service/stats.go @@ -11,9 +11,12 @@ import ( "encoding/json" "fmt" "gitlab.com/ginfra/ginfra/base" + "gitlab.com/ginfra/ginfra/common/service/gotel" "gitlab.com/ginfra/wwwherd/service/local" "go.uber.org/zap" "strconv" + "strings" + "time" ) // ##################################################################################################################### @@ -32,6 +35,66 @@ func convertToInt(i interface{}) (int, error) { } } +func extractCostFields(d map[string]interface{}) (float64, float64, int, int, int, int) { + var c_cost_ic, c_cost_oc float64 + var c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit int + + // Check if "costs" exists in the map + costsInterface, ok := d["costs"] + if !ok { + return c_cost_ic, c_cost_oc, c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit + } + + // Extract costs as a string + costsString, ok := costsInterface.(string) + if !ok { + return c_cost_ic, c_cost_oc, c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit + } + + // Tokenize the string on whitespace + tokens := strings.Fields(costsString) + + // Match tokens to variables + for _, token := range tokens { + parts := strings.Split(token, ":") + if len(parts) != 2 { + continue + } + + key := parts[0] + value := parts[1] + + switch key { + case "inputCost": + if val, err := strconv.ParseFloat(value, 64); err == nil { + c_cost_ic = val + } + case "outputCost": + if val, err := strconv.ParseFloat(value, 64); err 
== nil { + c_cost_oc = val + } + case "inputTokens": + if val, err := strconv.ParseFloat(value, 64); err == nil { + c_cost_it = int(val) + } + case "outputTokens": + if val, err := strconv.ParseFloat(value, 64); err == nil { + c_cost_ot = int(val) + } + case "cacheWriteInputTokens": + if val, err := strconv.ParseFloat(value, 64); err == nil { + c_cost_cwit = int(val) + } + case "cacheReadInputTokens": + if val, err := strconv.ParseFloat(value, 64); err == nil { + c_cost_crit = int(val) + } + } + } + + return c_cost_ic, c_cost_oc, c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit +} + func extractFlowFields(d map[string]interface{}) (int, int, int, error) { // Extract idInterface, ok := d["id"] @@ -98,6 +161,10 @@ func (d *Dispatcher) handleFlowCaseStats(RunId int, orders string) { // Loop through all flows in the data for _, flowInterface := range flows { + //-- Flow data ------------------------------------------------------------------------------------------------- + var f_cost_ic, f_cost_oc float64 + var f_cost_it, f_cost_ot, f_cost_cwit, f_cost_crit int + flow, ok := flowInterface.(map[string]interface{}) if !ok { local.Logger.Error("FlowCaseStats: Flow is not a valid object") @@ -113,16 +180,7 @@ func (d *Dispatcher) handleFlowCaseStats(RunId int, orders string) { local.Logger.Info("FlowCaseStats: Flow stats", zap.Int("flow.id", flowId), zap.Int("flow.result", flowResult), zap.Int("flow.time", flowTime), zap.Int("run.id", RunId), zap.Int("provider.id", run.ProvId)) - // Add to flowdata. The json must match the db schema since it will be streamed directly into it. 
- fd := map[string]interface{}{ - "flow_id": flowId, - "run_id": RunId, - "result": flowResult, - "time_ms": flowTime, - "provider_id": run.ProvId, - } - flowD = append(flowD, fd) - + //-- Case data and report -------------------------------------------------------------------------------------- // Get the cases array from the flow casesInterface, ok := flow["cases"] if !ok { @@ -153,6 +211,15 @@ func (d *Dispatcher) handleFlowCaseStats(RunId int, orders string) { local.Logger.Info("FlowCaseStats: Flow stats", zap.Int("case.id", caseId), zap.Int("case.result", caseResult), zap.Int("case.time", caseTime), zap.Int("run.id", RunId), zap.Int("provider.id", run.ProvId)) + // Extract costs + c_cost_ic, c_cost_oc, c_cost_it, c_cost_ot, c_cost_cwit, c_cost_crit := extractCostFields(caseObj) + f_cost_ic += c_cost_ic + f_cost_oc += c_cost_oc + f_cost_it += c_cost_it + f_cost_ot += c_cost_ot + f_cost_cwit += c_cost_cwit + f_cost_crit += c_cost_crit + // Add to casedata. The json must match the db schema since it will be streamed directly into it. cd := map[string]interface{}{ "case_id": caseId, @@ -161,13 +228,38 @@ func (d *Dispatcher) handleFlowCaseStats(RunId int, orders string) { "result": caseResult, "time_ms": caseTime, "provider_id": run.ProvId, + "cost_ic": c_cost_ic, + "cost_oc": c_cost_oc, + "cost_it": c_cost_it, + "cost_ot": c_cost_ot, + "cost_cwit": c_cost_cwit, + "cost_crit": c_cost_crit, } caseD = append(caseD, cd) } + + //-- Flow report ----------------------------------------------------------------------------------------------- + + // Add to flowdata. The json must match the db schema since it will be streamed directly into it. 
+ fd := map[string]interface{}{ + "flow_id": flowId, + "run_id": RunId, + "result": flowResult, + "time_ms": flowTime, + "provider_id": run.ProvId, + "cost_ic": f_cost_ic, + "cost_oc": f_cost_oc, + "cost_it": f_cost_it, + "cost_ot": f_cost_ot, + "cost_cwit": f_cost_cwit, + "cost_crit": f_cost_crit, + } + flowD = append(flowD, fd) } // Stuff them into the database. + // TODO make this configurable to turn off. err = d.gis.Providers.PsqlProvider.PushStatsCase(run.OrgID, caseD) if err != nil { local.Logger.Error("FlowCaseStats: Error pushing case stats to database.", zap.Error(err)) @@ -177,4 +269,35 @@ func (d *Dispatcher) handleFlowCaseStats(RunId int, orders string) { local.Logger.Error("FlowCaseStats: Error pushing flow stats to database.", zap.Error(err)) } + if d.gis.ServiceContext.Telemetry { + o := gotel.GetOtelConfig() + if o != nil { + // Loop through flowD and add type and timestamp + for _, flowItem := range flowD { + if flowMap, ok := flowItem.(map[string]interface{}); ok { + flowMap["type"] = "flow" + flowMap["timestamp"] = time.Now().Unix() + // TODO not sure this is how I wnt the data to be sent. Same with case. 
+ if jsonBytes, err := json.Marshal(flowMap); err == nil { + o.Logger.Info("flow_stats", "data", string(jsonBytes)) + } + } + } + + // Loop through caseD and add type and timestamp + for _, caseItem := range caseD { + if caseMap, ok := caseItem.(map[string]interface{}); ok { + caseMap["type"] = "case" + caseMap["timestamp"] = time.Now().Unix() + // Convert map to JSON for logging + if jsonBytes, err := json.Marshal(caseMap); err == nil { + o.Logger.Info("case_stats", "data", string(jsonBytes)) + } + + } + } + + } + } + } diff --git a/service/src/components/LogsDialog.vue b/service/src/components/LogsDialog.vue index c18946f..743999a 100644 --- a/service/src/components/LogsDialog.vue +++ b/service/src/components/LogsDialog.vue @@ -134,6 +134,15 @@ const parsedLogs = computed(() => { } }) +// Helper function to check if THIS specific object should be ignored (not recursive) +const shouldIgnoreThisObject = (obj: any): boolean => { + return typeof obj === 'object' && + obj !== null && + obj.message && + typeof obj.message === 'string' && + obj.message.startsWith('COST:') +} + // Flatten the parsed logs into the specific structure requested const flattenedLogs = computed((): FlattenedLogItem[] => { if (parsedLogs.value === null) return [] @@ -142,8 +151,12 @@ const flattenedLogs = computed((): FlattenedLogItem[] => { const processValue = (value: any, depth: number = 0) => { if (Array.isArray(value)) { - // Each array item gets "directive" label at depth 0 + // Process each array item, but skip ones that should be ignored value.forEach((item, index) => { + // Skip this specific item if it should be ignored + if (shouldIgnoreThisObject(item)) { + return + } // Process the directive's content if (typeof item === 'object' && item !== null) { @@ -160,7 +173,10 @@ const flattenedLogs = computed((): FlattenedLogItem[] => { } }) } else if (typeof value === 'object' && value !== null) { - processObject(value, depth) + // Only skip if THIS object should be ignored + if 
(!shouldIgnoreThisObject(value)) { + processObject(value, depth) + } } else { result.push({ displayText: formatPrimitiveValue(value), @@ -174,7 +190,17 @@ const flattenedLogs = computed((): FlattenedLogItem[] => { } const processObject = (obj: any, depth: number) => { + // Skip processing if THIS specific object should be ignored + if (shouldIgnoreThisObject(obj)) { + return + } + Object.entries(obj).forEach(([key, value]) => { + // Skip displaying the "type: LOG" line specifically + if (key === 'type' && value === 'LOG') { + return + } + if (Array.isArray(value)) { // Show the key name first, then process the array const isStartFlow = key === 'start.flow' @@ -190,7 +216,10 @@ const flattenedLogs = computed((): FlattenedLogItem[] => { }) processValue(value, depth) } else if (typeof value === 'object' && value !== null) { - processObject(value, depth + 1) + // Only skip if THIS specific object should be ignored + if (!shouldIgnoreThisObject(value)) { + processObject(value, depth + 1) + } } else { // For primitive values, show key: value const isStartFlow = key === 'start.flow' @@ -207,6 +236,7 @@ const flattenedLogs = computed((): FlattenedLogItem[] => { }) } else { + result.push({ displayText: `${key}: ${formatPrimitiveValue(value)}`, depth, @@ -221,10 +251,16 @@ const flattenedLogs = computed((): FlattenedLogItem[] => { }) } + // Start processing from the root if (Array.isArray(parsedLogs.value)) { // Process each top-level array item directly parsedLogs.value.forEach((item) => { + // Skip this specific item if it should be ignored + if (shouldIgnoreThisObject(item)) { + return + } + if (typeof item === 'object' && item !== null) { processObject(item, 0) } else { @@ -239,7 +275,10 @@ const flattenedLogs = computed((): FlattenedLogItem[] => { } }) } else if (typeof parsedLogs.value === 'object' && parsedLogs.value !== null) { - processObject(parsedLogs.value, 0) + // Only skip if the root object itself should be ignored + if 
(!shouldIgnoreThisObject(parsedLogs.value)) { + processObject(parsedLogs.value, 0) + } } else { result.push({ displayText: formatPrimitiveValue(parsedLogs.value), diff --git a/service/src/components/ReportDialog.vue b/service/src/components/ReportDialog.vue index 100d989..5071ade 100644 --- a/service/src/components/ReportDialog.vue +++ b/service/src/components/ReportDialog.vue @@ -130,23 +130,37 @@ - -
-

{{ formatKey(String(sectionKey)) }}

+ +
+

Cost Information

-
{{ JSON.stringify(section, null, 2) }}
-
-
- {{ index + 1 }}: -
-
{{ JSON.stringify(item, null, 2) }}
- {{ formatValue(item) }} -
-
+
+ Total input cost: + {{ reportData.costInfo.total_input_cost }} +
+
+ Total output cost: + {{ reportData.costInfo.total_output_cost }} +
+
+ Total input tokens: + {{ reportData.costInfo.total_input_tokens }} +
+
+ Total output tokens: + {{ reportData.costInfo.total_output_tokens }} +
+
+ Total cache write input tokens: + {{ reportData.costInfo.total_cache_write_input_tokens }} +
+
+ Total cache read input tokens: + {{ reportData.costInfo.total_cache_read_input_tokens }}
- {{ formatValue(section) }}
+
diff --git a/service/src/routes/admin/AdminView.vue b/service/src/routes/admin/AdminView.vue index 0e6e918..421e51f 100644 --- a/service/src/routes/admin/AdminView.vue +++ b/service/src/routes/admin/AdminView.vue @@ -838,10 +838,12 @@ const handleFileSelected = async (event: Event) => { const response = await apiClient.post('/api/import', importData) if (response.ok) { showSnackbar('Import completed successfully', 'success') + // TODO I don't know why this is here. // Refresh users list if we're on the users tab - if (tab.value === 'users') { - await fetchUsers() - } + //if (tab.value === 'users') { + // await fetchUsers() + //} + await fetchProviders() } else { const errorData = await response.json().catch(() => ({ message: 'Failed to import data' })) showSnackbar(errorData.message || 'Failed to import data', 'error') @@ -973,8 +975,11 @@ const deleteProvider = async (provider: Provider) => { providersLoading.value = false showSnackbar('Provider deleted successfully', 'success') } else { - const errorData = await response.json() - providersError.value = errorData.message || 'Failed to delete provider. Please try again.' + if (response.status === 409) { + showSnackbar("Can't delete provider because a design is still using it", 'error') + } else { + showSnackbar('System error occurred while deleting provider', 'error') + } } } catch (err) { console.error('Error deleting provider:', err) diff --git a/service/src/routes/run/RunView.vue b/service/src/routes/run/RunView.vue index e58c493..a2e1e4a 100644 --- a/service/src/routes/run/RunView.vue +++ b/service/src/routes/run/RunView.vue @@ -1160,14 +1160,53 @@ + + \ No newline at end of file diff --git a/service/src/routes/main/MainView.vue b/service/src/routes/main/MainView.vue index e3b62d9..1902ef7 100644 --- a/service/src/routes/main/MainView.vue +++ b/service/src/routes/main/MainView.vue @@ -549,51 +549,15 @@ @cancel="handleCaseSelectionCancel" /> - - - - - Filter by Tags - - -
- -

Loading tags...

-
- - - - - {{ tag.name }} - - - -
- - - - mdi-close - Close - - -
-
+ + ({ id: 0, - org_id: 0, name: '', text: '', decorations: '', @@ -747,7 +710,6 @@ const openCaseDialog = (case_: Case | null) => { if (case_ == null) { workingCase.value = { id: 0, - org_id: 0, name: "", text: "", decorations: "", @@ -761,7 +723,6 @@ const openCaseDialog = (case_: Case | null) => { const category = categories.value.find(cat => cat.id === case_.cat_id) workingCase.value = { id: case_.id, - org_id: case_.org_id, name: case_.name, text: case_.text, decorations: case_.decorations, @@ -781,7 +742,6 @@ const closeCaseDialog = () => { nextTick(() => { workingCase.value = { id: 0, - org_id: 0, name: '', text: '', decorations: '', @@ -960,7 +920,6 @@ const workingFlow = ref({ narrative: '', decorations: '', cids: [], - org_id: 0, cat_id: 0, tag_ids: [], retired: false @@ -1009,7 +968,6 @@ const openFlowDialog = (flow_: Flow) => { if (flow_ == null) { workingFlow.value = { id: 0, - org_id: 0, name: "", narrative: "", decorations: "", @@ -1023,7 +981,6 @@ const openFlowDialog = (flow_: Flow) => { const category = categories.value.find(cat => cat.id === flow_.cat_id) workingFlow.value = { id: flow_.id, - org_id: flow_.org_id, name: flow_.name, narrative: flow_.narrative, decorations: flow_.decorations, @@ -1042,7 +999,6 @@ const createFlowFromSelectedCases = () => { // Pre-populate the working flow with selected case IDs workingFlow.value = { id: 0, - org_id: 0, name: "", narrative: "", decorations: "", @@ -1070,7 +1026,6 @@ const openCaseSelectionDialog = async () => { loadingCasesForSelection.value = true try { const params = new URLSearchParams() - params.append('org_id', "1") params.append('cat_id', workingFlow.value.cat_id.toString()) const response = await apiClient.get(`/api/cases?${params}`) @@ -1211,13 +1166,11 @@ const doRetireFlow = async () => { } } - // -- NEW DESIGN ------------------------------------------------------------------------------------------------------ const createNewDesignFromSelectedFlows = () => { const d : Design = { id: 0, - 
org_id: 0, name: '', narrative: '', values: '', @@ -1246,6 +1199,10 @@ const doSnackbar = (text: string, color: 'success' | 'error' = 'error') => { // -- OTHER ---------------------------------------------------------------------------------------------------------- +const handleTagDialogClose = () => { + // NOP +} + // Watch for tab changes to load appropriate data // TODO not sure if we really want to fetch the cases and flows. watch(tab, (newTab) => { diff --git a/service/src/stores/common.ts b/service/src/stores/common.ts index c99e0b3..5f5998f 100644 --- a/service/src/stores/common.ts +++ b/service/src/stores/common.ts @@ -1,7 +1,7 @@ // WWWHerd (c) 2025 Ginfra Project. All rights Reserved. Source licensed under Apache License Version 2.0, January 2004 import {nextTick, ref} from "vue"; import {apiClient} from '@/utils/api' -import {type Design, selectedCategory, tags, categories} from "@/stores/data.ts"; +import {type Design, tags, categories} from "@/stores/data.ts"; // -------------------------------------------------------------------------------------------------------------------- // - Cases @@ -12,7 +12,6 @@ export interface Case { decorations: string; text: string; type_id: number; - org_id: number; cat_id: number; tag_ids: Array; retired: boolean; @@ -67,7 +66,6 @@ export const updateCase = async (tc: Case | null) => { text: tc.text, decorations: '', type_id: tc.type_id, - org_id: 0, cat_id: tc.cat_id, tag_ids: tc.tag_ids, retired: false, @@ -122,7 +120,6 @@ export interface Flow { name: string; narrative: string; decorations: string; - org_id: number; cat_id: number | null; cids: Array; tag_ids: Array; @@ -177,7 +174,6 @@ export const updateFlow = async (fl: Flow | null) => { name: fl.name, narrative: fl.narrative, decorations: fl.decorations, - org_id: 0, cat_id: fl.cat_id ?? 
null, tag_ids: fl.tag_ids, cids: fl.cids, diff --git a/service/src/stores/data.ts b/service/src/stores/data.ts index 6bb29ae..8fca424 100644 --- a/service/src/stores/data.ts +++ b/service/src/stores/data.ts @@ -13,7 +13,6 @@ export interface Design { narrative: string; values: string; host: string; - org_id: number; tag_ids: Array; provider_id: number | null; flowids?: Array; diff --git a/service/test/working.json b/service/test/working.json index 54045a8..53e355b 100644 --- a/service/test/working.json +++ b/service/test/working.json @@ -32,7 +32,7 @@ "port": 8900, "service_class": "wwwherd", "specifics": { - "postgres_url": "postgres://postgres:YjRjNzY2YjkzMmIx@192.168.1.41:5432", + "postgres_url": "postgres://postgres:OWU5ODA4NDJiNGZh@192.168.1.41:5432", "magnitude_auth": "AAAAAAAAAAAA" } }, -- GitLab From dcef14f2af2cf446b492a388e9a6285a1801ec1e Mon Sep 17 00:00:00 2001 From: Erich Gatejen Date: Thu, 4 Sep 2025 10:49:53 -0600 Subject: [PATCH 11/25] bugs --- magnitude/service/service_message.go | 19 + magnitude/service/service_store.go.OLD | 187 -------- magnitude/service/service_work.go.OLD | 443 ------------------ service/server/flowcase.js | 4 +- service/server/flowcase_rest.js | 6 +- service/server/proxy.js | 4 +- service/service/stats.go | 6 +- service/src/components/BugEditDialog.vue | 219 +++++++++ service/src/components/CaseBugDialog.vue | 339 ++++++++++++++ .../src/components/SnackbarNotification.vue | 1 + service/src/components/status.ts | 140 +++++- service/src/plugins/vuetify.js | 2 + service/src/routes/main/MainView.vue | 31 +- service/src/stores/common.ts | 56 +++ 14 files changed, 813 insertions(+), 644 deletions(-) delete mode 100644 magnitude/service/service_store.go.OLD delete mode 100644 magnitude/service/service_work.go.OLD create mode 100644 service/src/components/BugEditDialog.vue create mode 100644 service/src/components/CaseBugDialog.vue diff --git a/magnitude/service/service_message.go b/magnitude/service/service_message.go index 
c961f54..c0d33ec 100644 --- a/magnitude/service/service_message.go +++ b/magnitude/service/service_message.go @@ -21,6 +21,7 @@ import ( "runtime" "strconv" "strings" + "time" ) // ##################################################################################################################### @@ -33,6 +34,9 @@ var ( latestOutputTokens = 0 latestCacheWriteInputTokens = 0 latestCacheReadInputTokens = 0 + + startCaseTime time.Time + startFlowTime time.Time ) func clearCosts() { @@ -270,6 +274,15 @@ func wrapRawMessage(message stream.Message) *stream.RawMessage { } func doMsgDone(run *scommon.RunSpec) (*stream.RawMessage, error) { + // Catchall for flow. + if (run.CurrentFlowIdx < len(run.Order.Flows)) && (run.CurrentCaseIdx < len(run.Order.Flows[run.CurrentFlowIdx].Cases)) { + if run.Order.Flows[run.CurrentFlowIdx].Time == 0 { + run.Order.Flows[run.CurrentFlowIdx].Time = time.Since(startFlowTime).Milliseconds() + } + if run.Order.Flows[run.CurrentFlowIdx].Result <= data.RunStatusRunning { + run.Order.Flows[run.CurrentFlowIdx].Result = deriveFlowResult(run) + } + } run.CurrentState = scommon.RunStateDone run.Status = data.RunStatusDone sendStatus(run, data.RunStatusDone, "Done, pending results.") @@ -301,6 +314,7 @@ func doMsgFlow(run *scommon.RunSpec) (*stream.RawMessage, error) { run.CurrentCaseIdx = 0 run.CurrentState = scommon.RunStateOrderFlow sendCurrentFlow(run, run.Order.Flows[run.CurrentFlowIdx].Id) + startFlowTime = time.Now() return &stream.RawMessage{ MType: stream.MsgTypeOrderFlow, // (token, name, prompt string, flowId int) @@ -323,6 +337,7 @@ func doMsgFlow(run *scommon.RunSpec) (*stream.RawMessage, error) { } func nextFlow(run *scommon.RunSpec) (*stream.RawMessage, error) { + run.CurrentFlowIdx++ if run.CurrentFlowIdx >= len(run.Order.Flows) { run.CurrentFlowIdx = 0 @@ -337,6 +352,7 @@ func doMsgCase(run *scommon.RunSpec) (*stream.RawMessage, error) { run.CurrentDirectiveIdx = 0 run.CurrentState = scommon.RunStateOrderDirective sendCurrentCase(run, 
run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Id) + startCaseTime = time.Now() return &stream.RawMessage{ MType: stream.MsgTypeOrderCase, Data: stream.NewMsgOrderCase(getKey(run.Uid, run.Id), run.Order.Flows[run.CurrentFlowIdx].Name, @@ -350,6 +366,7 @@ func nextCase(run *scommon.RunSpec) (*stream.RawMessage, error) { run.CurrentCaseIdx++ if run.CurrentCaseIdx >= len(run.Order.Flows[run.CurrentFlowIdx].Cases) { run.Order.Flows[run.CurrentFlowIdx].Result = deriveFlowResult(run) + run.Order.Flows[run.CurrentFlowIdx].Time = time.Since(startFlowTime).Milliseconds() run.CurrentCaseIdx = 0 return nextFlow(run) } else { @@ -447,6 +464,7 @@ func msgResult(msg *stream.MsgResult) (*stream.RawMessage, error) { if run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Decorated.Soft { run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Result = msg.Result run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Error = msg.Info + run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Time = time.Since(startCaseTime).Milliseconds() return nextCase(run) } else { return doMsgDoneError(run, msg.Info, msg.Result) @@ -457,6 +475,7 @@ func msgResult(msg *stream.MsgResult) (*stream.RawMessage, error) { case scommon.RunStateOrderDirective: if run.CurrentDirectiveIdx == scommon.DirectiveFlagDone { run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Result = msg.Result // TODO do we need to derive this? + run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Time = time.Since(startCaseTime).Milliseconds() return nextCase(run) } return doMsgDirective(run) diff --git a/magnitude/service/service_store.go.OLD b/magnitude/service/service_store.go.OLD deleted file mode 100644 index 9833cc8..0000000 --- a/magnitude/service/service_store.go.OLD +++ /dev/null @@ -1,187 +0,0 @@ -/* -Package service -Copyright (c) 2025 Erich Gatejen -[LICENSE]: https://www.apache.org/licenses/LICENSE-2.0.txt, if unaltered. 
You are free to alter and apply any license -you wish to this file. - -Work maangement. -*/ -package service - -import ( - cmap "github.com/orcaman/concurrent-map/v2" - "gitlab.com/ginfra/ginfra/base" - "gitlab.com/ginfra/wwwherd/magnitude/service/common" -) - -//type RunSpecMap cmap.ConcurrentMap[string, *RunSpec] -- This just puts Goland in a tizzy, so remove it for now. - -var ( - runQueue = cmap.New[[]*common.RunSpec]() - running = cmap.New[cmap.ConcurrentMap[string, *common.RunSpec]]() - runHistory = cmap.New[cmap.ConcurrentMap[string, *common.RunSpec]]() -) - -func initStore() { - runQueue.Clear() - running.Clear() - runHistory.Clear() -} - -// -- RUN QUEUE --------------------------------------------------------------------------------------------------- - -func runQueuePut(uid string, run *common.RunSpec) { - if !runQueue.Has(uid) { - runQueue.Set(uid, []*common.RunSpec{}) - } - if queue, ok := runQueue.Get(uid); ok { - queue = append(queue, run) - runQueue.Set(uid, queue) - } -} - -func runQueueArray(uid string) []*common.RunSpec { - // Make a copy - specs := make([]*common.RunSpec, 0) - if m, ok := runQueue.Get(uid); ok { - for _, item := range m { - specs = append(specs, item) - } - } - return specs -} - -// -- RUNNING --------------------------------------------------------------------------------------------------- - -func runningNumberRunning(uid string) int { - if i, ok := runQueue.Get(uid); ok { - return len(i) - } - return 0 -} - -func runningGetUid(uid string) cmap.ConcurrentMap[string, *common.RunSpec] { - if !running.Has(uid) { - m := cmap.New[*common.RunSpec]() - running.Set(uid, m) - return m - } - m, _ := running.Get(uid) - return m -} - -func runningPut(uid, id string, run *common.RunSpec) { - m := runningGetUid(uid) - m.Set(id, run) -} - -func runningGetArray(uid string) []*common.RunSpec { - specs := make([]*common.RunSpec, 0) - if m, ok := running.Get(uid); ok { - for item := range m.IterBuffered() { - specs = append(specs, item.Val) - } 
- } - return specs -} - -// -- HISTORY -------------------------------------------------------------------------------------------------- - -func historyGetUid(uid string) cmap.ConcurrentMap[string, *common.RunSpec] { - if !runHistory.Has(uid) { - m := cmap.New[*common.RunSpec]() - runHistory.Set(uid, m) - return m - } - m, _ := runHistory.Get(uid) - return m -} - -func historyPut(uid, id string, run *common.RunSpec) { - m := historyGetUid(uid) - m.Set(id, run) -} - -func historyRemove(uid, id string) *common.RunSpec { - var r *common.RunSpec - if m, ok := runHistory.Get(uid); ok { - if r, ok = m.Get(id); ok { - m.Remove(id) - } - if m.Count() == 0 { - runHistory.Remove(uid) - } - } - return r -} - -func historyGetArray(uid string) []*common.RunSpec { - specs := make([]*common.RunSpec, 0) - if m, ok := runHistory.Get(uid); ok { - for item := range m.IterBuffered() { - specs = append(specs, item.Val) - } - } - return specs -} - -// -- ALL -------------------------------------------------------------------------------------- - -func moveFromRunningToHistory(run *common.RunSpec) { - historyPut(run.Uid, run.Id, run) - if runMap, ok := running.Get(run.Uid); ok { - runMap.Remove(run.Id) - if runMap.Count() == 0 { - running.Remove(run.Uid) - } - } -} - -func findRunSpec(uid, id string) (*common.RunSpec, error) { - - for l := range running.IterBuffered() { - lv := l.Val - for s := range lv.IterBuffered() { - spec := s.Val - if spec.Id == id { - if spec.Uid == uid { - return spec, nil - } else { - return nil, base.NewGinfraErrorAFlags("RUNNING: Run does not belong to user.", - base.ERRFLAG_BAD_ID, base.LM_ID, id, base.LM_UID, uid) - } - } - } - } - - for l := range runQueue.IterBuffered() { - lv := l.Val - for _, spec := range lv { - if spec.Id == id { - if spec.Uid == uid { - return spec, nil - } else { - return nil, base.NewGinfraErrorAFlags("QUEUE: Run does not belong to user.", - base.ERRFLAG_BAD_ID, base.LM_ID, id, base.LM_UID, uid) - } - } - } - } - - for l := range 
runHistory.IterBuffered() { - lv := l.Val - for s := range lv.IterBuffered() { - spec := s.Val - if spec.Id == id { - if spec.Uid == uid { - return spec, nil - } else { - return nil, base.NewGinfraErrorAFlags("HISTORY: Test does not belong to user.", - base.ERRFLAG_BAD_ID, base.LM_ID, id, base.LM_UID, uid) - } - } - } - } - - return nil, base.NewGinfraErrorAFlags("Id not found.", base.ERRFLAG_NOT_FOUND, base.LM_ID, id) -} diff --git a/magnitude/service/service_work.go.OLD b/magnitude/service/service_work.go.OLD deleted file mode 100644 index 82a9139..0000000 --- a/magnitude/service/service_work.go.OLD +++ /dev/null @@ -1,443 +0,0 @@ -/* -Package service -Copyright (c) 2025 Erich Gatejen -[LICENSE]: https://www.apache.org/licenses/LICENSE-2.0.txt, if unaltered. You are free to alter and apply any license -you wish to this file. - -Work management. -*/ -package service - -import ( - "encoding/json" - "gitlab.com/ginfra/ginfra/base" - "gitlab.com/ginfra/wwwherd/common/data" - "gitlab.com/ginfra/wwwherd/common/script" - fscript "gitlab.com/ginfra/wwwherd/common/script/flow" - rscript "gitlab.com/ginfra/wwwherd/common/script/raw" - "gitlab.com/ginfra/wwwherd/common/stream" - "gitlab.com/ginfra/wwwherd/magnitude/local" - scommon "gitlab.com/ginfra/wwwherd/magnitude/service/common" - "gitlab.com/ginfra/wwwherd/magnitude/service/flow" - "gitlab.com/ginfra/wwwherd/magnitude/service/raw" - "go.uber.org/zap" - "os" - "path/filepath" - "strconv" - "strings" - "time" -) - -// ##################################################################################################################### -// # VALUES AND DATA - -// Used during translation. 
-// ##################################################################################################################### -// # WORK - -// -- orders ---------------------------------------------------------------------------------------------------- - -func findPrompt(text string) string { - lines := strings.Split(text, "\n") - var result []string - found := false - - for _, line := range lines { - if strings.Contains(line, "##PROMPT") { - if found { - break - } - found = true - continue - } - if found { - result = append(result, line) - } - } - - if len(result) == 0 { - return "" - } - return strings.Join(result, "\n") -} - -func sendStatus(client stream.StreamClient, run *scommon.RunSpec, status data.RunStatus, data string, wid int) { - run.Status = status - if err := client.Send(stream.NewMsgRunStatus(run.IdInt, status, data)); err != nil { - local.Logger.Error("Failed to send status.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, wid)) - } -} - -// -- worker ------------------------------------------------------------------------------------------------------- - -func getRunner(target, dfile, provider, model, key, url string) (*script.Runner, error) { - return script.NewRunner(target, dfile, provider, model, key, url) -} - -func getCaseDispatch(thecase *data.OrderCaseItem) script.Dispatcher { - switch thecase.Type { - case data.CaseTypeFlowScript: - return flow.GetDispatcher() - case data.CaseTypeRaw: - return raw.GetDispatcher() - default: - panic("Unknown case type.") - } -} - -func getCaseCompiler(thecase *data.OrderCaseItem) script.Compiler { - switch thecase.Type { - case data.CaseTypeFlowScript: - return &fscript.Compiler{} - case data.CaseTypeRaw: - return &rscript.Compiler{} - default: - panic("Unknown case type.") - } -} - -func compileOrders(spec *scommon.RunSpec) int { - if err := json.Unmarshal([]byte(spec.Text), &spec.Order); err != nil { - spec.Err = base.NewGinfraErrorA("Failed to 
unmarshal orders.", err).Error() - return 1 - } - - errnum := 0 - for flIdx := range spec.Order.Flows { - fl := &spec.Order.Flows[flIdx] - - // Range through all cases in the current fl - for caseIdx := range fl.Cases { - case_ := &fl.Cases[caseIdx] - - // Compile the case using caseCompileDispatch - compiler := getCaseCompiler(case_) - cc, errors := compiler.CompileCase(case_.Order) - - if errors != nil && len(errors) > 0 { - // If there are errors, put them into the Error field and set Result to RunStatusError - case_.Error = base.NewGinfraErrorA("Compile failed.", errors).Error() - case_.Result = data.RunStatusError - errnum++ - } else { - // If no error, put the compiled result into the Compiled field - case_.Compiled = cc - } - } - } - return errnum -} - -func shipOrders(run *scommon.RunSpec, id int, client stream.StreamClient, final bool) { - orderText, err := json.Marshal(run.Order) - if err != nil { - local.Logger.Error("Failed to marshal orders.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } else if err = client.Send(stream.NewMsgRunOrders(run.IdInt, string(orderText), final)); err != nil { - local.Logger.Error("Failed to send orders.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } -} - -func managePanic(run *scommon.RunSpec, id int, client stream.StreamClient, r any) { - - dfile, err := os.ReadFile(filepath.Join(run.Path, local.DispositionFile)) - if err == nil { - if strings.Contains(string(dfile), "Agent start failed: page.goto") { - sendStatus(client, run, data.RunStatusError, "Could not connect to target host: "+run.Host, id) - return - } - local.Logger.Error("Unknown panic recovered in worker", zap.Any("panic", r), - zap.Int(base.LM_WORKER_ID, id), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) - - } else { - local.Logger.Error("Failed to read disposition file.", zap.Error(err), 
zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } - - sendStatus(client, run, data.RunStatusError, "serious internal server error", id) -} - -func runCases(run *scommon.RunSpec, logfile *Logger, client stream.StreamClient, fprompt string, id, flowidx int, - fdec *data.Decorations) data.ResultReport { - var ( - err error - ab bool // First failure that isn't Soft breaks the flow. - r data.ResultReport - ) - for caseidx, _ := range run.Order.Flows[flowidx].Cases { - cased := &run.Order.Flows[flowidx].Cases[caseidx] - _ = logfile.Log(" {\"start.case\":\"" + cased.Name + "\"}") - if run.Canceled { - cased.Result = data.RunStatusCancelled - r.Skipped++ - } else if ab { - cased.Result = data.RunStatusSkipped - r.Skipped++ - } else { - var ( - e error - log []interface{} - ) - cased.Result = data.RunStatusRunning - if err = client.Send(stream.NewMsgRunCurrentCase(run.IdInt, cased.Id)); err != nil { - local.Logger.Error("Failed to send current case message.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } - - d := getCaseDispatch(cased) - startTime := time.Now() - if log, e = d.RunCase(run.Runner, cased, fdec, run.Values, fprompt); e != nil { - // Failed to run properly. 
- cased.Error = base.NewGinfraErrorA("Case run failed.", e).Error() - ab = true - cased.Result = data.RunStatusError - r.Errored++ - } else if cased.IsFailed() { - if !cased.Decorated.Soft { - ab = true - } - cased.Result = data.RunStatusFailed - r.Failed++ - } else { - cased.Result = data.RunStatusPassed - r.Passed++ - } - cased.Time = time.Since(startTime).Milliseconds() - - err = logfile.Log(log) - if err != nil { - local.Logger.Warn("Failed to marshal log data to JSON.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } - - // Update the ui - shipOrders(run, id, client, false) - } - } - - return r -} - -func runFlows(run *scommon.RunSpec, logfile *Logger, id int, client stream.StreamClient) data.ResultReport { - var ( - err error - r data.ResultReport - ab bool - ) - - for flowidx, flowd := range run.Order.Flows { - - if ab { - run.Order.Flows[flowidx].Result = data.RunStatusSkipped - for caseidx := range run.Order.Flows[flowidx].Cases { - run.Order.Flows[flowidx].Cases[caseidx].Result = data.RunStatusSkipped - } - continue - } - - run.Order.Flows[flowidx].Decorated = data.DecorationsFromString(run.Order.Flows[flowidx].Decorations) - - _ = logfile.Log(" {\"start.flow\":\"" + flowd.Name + "\"}") - - run.Order.Flows[flowidx].Result = data.RunStatusRunning - if flowd.Host != "" { - run.Host = flowd.Host // TODO flowd level host is for future feature work. - } - - // if no host has been set yet, the run will fail. - if run.Host == "" { - run.Order.Flows[flowidx].Error = "No host test for run. Run will fail. Generally, you should set host when starting your run." 
- run.Order.Flows[flowidx].Result = data.RunStatusError - break - } - - if err = client.Send(stream.NewMsgRunCurrentFlow(run.IdInt, flowd.Id)); err != nil { - local.Logger.Error("Failed to send current flowd message.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } - - fprompt := findPrompt(flowd.Narrative) // Top level flow prompt - if run.Order.Flows[flowidx].Decorated.Strict { - fprompt = scommon.PromptStrict + fprompt - } - - startTime := time.Now() - report := runCases(run, logfile, client, fprompt, id, flowidx, &run.Order.Flows[flowidx].Decorated) - run.Order.Flows[flowidx].Time = time.Since(startTime).Milliseconds() - - // Flow disposition - if run.Canceled { - run.Order.Flows[flowidx].Result = data.RunStatusCancelled - r.Skipped++ - } else if report.Failed > 0 { - run.Order.Flows[flowidx].Result = data.RunStatusFailed - run.Order.Flows[flowidx].Error = "Failures while running flow." - if !run.Order.Flows[flowidx].Decorated.Soft { - ab = true - run.Order.Flows[flowidx].Error = "Failures while running flow. Aborting" - } - r.Failed++ - } else if report.Errored > 0 { - run.Order.Flows[flowidx].Result = data.RunStatusError - run.Order.Flows[flowidx].Error = "Errors while running flow. Aborting" - ab = true - r.Errored++ - } else if report.Passed > 0 { - run.Order.Flows[flowidx].Result = data.RunStatusPassed - r.Passed++ - } else { - run.Order.Flows[flowidx].Result = data.RunStatusDone - r.Passed++ - } - - local.Logger.Info("Magnitude flow complete.", zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid), - zap.Int(base.LM_WORKER_ID, id), zap.String(base.LM_NAME, run.Order.Flows[flowidx].Name)) - } - - return r -} - -func worker(id int, client stream.StreamClient) { - for run := range runChannel { - // TODO this is kind of touchy For now, just ignore everything that isn't starting. 
- if run.Status != data.RunStatusRunning { - continue - } - - func() { - - // -- COMPLETION -------------------------------------------------------------------------------------------- - // Everything that has to happen when we leave the function. - defer func() { - _ = client.Send(stream.NewMsgRunCurrentFlow(run.IdInt, 0)) // Ignore the errors because if there is problem it would have already been logged below. - _ = client.Send(stream.NewMsgRunCurrentCase(run.IdInt, 0)) - - shipOrders(run, id, client, true) - local.Logger.Info("Run orders sent.", zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid), - zap.Int(base.LM_WORKER_ID, id), zap.String(base.LM_STATUS, run.Status.String()), zap.String(base.LM_NAME, - run.Name)) - - if r := recover(); r != nil { - managePanic(run, id, client, r) - } - - moveFromRunningToHistory(run) - - if run.Runner != nil { - err := run.Runner.Kill() - if err != nil { - local.Logger.Error("Failed to kill runner. Process may be ghosted", zap.Error(err), - zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } - } - - local.Logger.Info("Run processing complete.", zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid), - zap.Int(base.LM_WORKER_ID, id), zap.String(base.LM_STATUS, run.Status.String()), zap.String(base.LM_NAME, - run.Name)) - }() - - // -- COMPILE ------------------------------------------------------------------------------------------------- - // TODO earlier version of this service locked the various run stores, but I'm thinking we dont really need to do that. - numerr := compileOrders(run) - if numerr > 0 { - run.Status = data.RunStatusError - run.Err = "Errors while compiling. Number of errors: " + strconv.Itoa(numerr) + "." 
- local.Logger.Error("Magnitude test error(s) in compile.", zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int("worker", id), zap.Int(base.LM_NUMBER, numerr)) - sendStatus(client, run, run.Status, run.Err, id) - moveFromRunningToHistory(run) - shipOrders(run, id, client, false) - return - } - - // -- EXECUTION --------------------------------------------------------------------------------------------- - var err error - if run.Runner, err = getRunner(run.Host, filepath.Join(run.Path, local.DispositionFile), run.Prov, run.ProvModel, - run.ProvKey, run.ProvUrl); err != nil { - panic(base.NewGinfraErrorChild("Could not get a runner", err)) - } - - logfile, err := NewLogger(getLogPath(run.Path, run.Uid, run.Id)) - if err != nil { - local.Logger.Error("Failed to open log file.", zap.Error(err), zap.String(base.LM_ID, run.Id)) - } - - sendStatus(client, run, data.RunStatusRunning, "Running", id) - local.Logger.Info("Magnitude running test.", zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int("worker", id)) - - report := runFlows(run, logfile, id, client) - - // -- DISPOSITION ---------------------------------------------------------------------------------------------- - // TODO Tiny window when a stop order might catch the runningCmd still live. 
- if run.Canceled { - run.Err = "Cancelled" - sendStatus(client, run, data.RunStatusCancelled, run.Err, id) - } else if report.Errored > 0 { - run.Err = base.NewGinfraError("Errors while running.").Error() - sendStatus(client, run, data.RunStatusError, run.Err, id) - } else if report.Failed > 0 { - run.Err = base.NewGinfraError("Failures while running.").Error() - sendStatus(client, run, data.RunStatusFailed, run.Err, id) - } else if report.Passed > 0 { - run.Err = base.NewGinfraError("Passed.").Error() - sendStatus(client, run, data.RunStatusPassed, run.Err, id) - } else { - run.Err = base.NewGinfraError("Complete, no errors").Error() - sendStatus(client, run, data.RunStatusDone, run.Err, id) - } - - // We are done. Send the log and log the done. - _ = logfile.Close() - content, err := logfile.GetContents() - if err == nil { - - jcontent := data.FormatAsJSON(content) - - // TODO Since it is going into postgres for now, it will be compressed there. These should never get too big, - // it would be ok to leave it in the db over the long run. But... then we can't properly index and scrap it - // for search an analytics. Eventually it needs to move somewhere else. - b64content := data.EncodeToBase64([]byte(jcontent)) - - uidInt, err2 := strconv.Atoi(run.Uid) - idInt, err3 := strconv.Atoi(run.Id) - if err2 != nil || err3 != nil { - local.Logger.Error("BUG: During log save, failed to convert run uid and id to int. Not sending the log", - zap.Error(err), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } else { - - if err = client.Send(stream.NewMsgRunLog(uidInt, idInt, b64content)); err != nil { - local.Logger.Error("Failed to send run log.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id)) - } - - // We will delete the log here if everything goes right. Not now we will leave the log if things go wrong, - // but eventually they need to be culled too. 
- if err = os.RemoveAll(run.Path); err != nil { - local.Logger.Error("Failed to delete run directory. It is ghosted.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id), zap.String(base.LM_PATH, run.Path)) - } - - } - } else { - local.Logger.Info("Could not get run log.", zap.Error(err), zap.String(base.LM_ID, run.Id), - zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_WORKER_ID, id), zap.String(base.LM_STATUS, - run.Status.String()), zap.String(base.LM_NAME, run.Name)) - } - local.Logger.Info("Run complete.", zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid), - zap.Int(base.LM_WORKER_ID, id), zap.String(base.LM_STATUS, run.Status.String()), zap.String(base.LM_NAME, - run.Name)) - }() - } - doneChannel <- true -} - -func initWorkerPool(client stream.StreamClient) { - runChannel = make(chan *scommon.RunSpec, runningPoolSize) - doneChannel = make(chan bool) - for i := 0; i < runningPoolSize; i++ { - go worker(i, client) - } -} diff --git a/service/server/flowcase.js b/service/server/flowcase.js index 9bf63b4..c21cdbd 100644 --- a/service/server/flowcase.js +++ b/service/server/flowcase.js @@ -170,7 +170,7 @@ async function updateCaseBug(orgId, id, status, description, info, type, priorit let client = getClient(); try { const result = await client.query( - `UPDATE case_bugs SET org_id = $1, status = $2, description = $3, info = $4, type = $5, priority = $6, link = $7 update_date = CURRENT_TIMESTAMP + `UPDATE case_bugs SET org_id = $1, status = $2, description = $3, info = $4, type = $5, priority = $6, link = $7, update_date = CURRENT_TIMESTAMP WHERE id = $8 AND org_id = $9 RETURNING id, case_id, status, description, info, type, priority, reporter, link, entry_date, update_date`, [orgId, status, description, info, type, priority, link, id, orgId] @@ -208,7 +208,7 @@ async function queryCaseBugByCaseId(orgId, caseId) { async function deleteCaseBug(orgId, id) { let client = getClient(); try 
{ - const result = await client.query('DELETE FROM case_bugs WHERE id = $1 AND ord_id = $2 RETURNING id', [id, orgId]); + const result = await client.query('DELETE FROM case_bugs WHERE id = $1 AND org_id = $2 RETURNING id', [id, orgId]); if (result.rows.length === 0) { throw new Error(`Case bug with id ${id} not found`); diff --git a/service/server/flowcase_rest.js b/service/server/flowcase_rest.js index 84f96fc..e99b2d3 100644 --- a/service/server/flowcase_rest.js +++ b/service/server/flowcase_rest.js @@ -151,9 +151,9 @@ export async function addFlowCase(app) { // -- Case Bugs --------------------------------------------------------------------------------------------------- - app.post('/api/casebug', authenticateToken, async (req, res) => { + app.post('/api/casesbug', authenticateToken, async (req, res) => { try { - const result = await proxy.addCaseBug(1, parseInt(req.body.caseId), parseInt(req.body.status), req.body.description, req.body.info, + const result = await proxy.addCaseBug(1, parseInt(req.body.case_id), parseInt(req.body.status), req.body.description, req.body.info, parseInt(req.body.type), parseInt(req.body.priority), req.user.username, req.body.link); res.json(result); } catch (error) { @@ -197,7 +197,7 @@ export async function addFlowCase(app) { } }); - app.delete('/api/cases/id/:id', authenticateToken, async (req, res) => { + app.delete('/api/casesbug/id/:id', authenticateToken, async (req, res) => { try { const bugId = parseInt(req.params.id); const result = await proxy.deleteCaseBug(1, bugId); diff --git a/service/server/proxy.js b/service/server/proxy.js index 75b8674..0e859f2 100644 --- a/service/server/proxy.js +++ b/service/server/proxy.js @@ -180,8 +180,8 @@ async function queryCaseBugByCaseId(orgId, caseId) { return await fc.queryCaseBugByCaseId(orgId, caseId); } -async function deleteCaseBug(ordId, id) { - return await fc.deleteCaseBug(ordId, id); +async function deleteCaseBug(orgId, id) { + return await fc.deleteCaseBug(orgId, id); } // 
-- Case History ----------------------------------------------------------------------------------------------------- diff --git a/service/service/stats.go b/service/service/stats.go index 28dd717..d36a7e0 100644 --- a/service/service/stats.go +++ b/service/service/stats.go @@ -107,7 +107,7 @@ func extractFlowFields(d map[string]interface{}) (int, int, int, error) { } timeInterface, ok := d["time"] if !ok { - return 0, 0, 0, fmt.Errorf("missing 'time' field") + timeInterface = "0" } // Convert to int @@ -119,11 +119,11 @@ func extractFlowFields(d map[string]interface{}) (int, int, int, error) { if err != nil { return 0, 0, 0, base.NewGinfraErrorChild("failed to convert 'result' to integer.", err) } - time, err := convertToInt(timeInterface) + ftime, err := convertToInt(timeInterface) if err != nil { return 0, 0, 0, base.NewGinfraErrorChild("failed to convert 'result' to integer.", err) } - return id, result, time, nil + return id, result, ftime, nil } func (d *Dispatcher) handleFlowCaseStats(RunId int, orders string) { diff --git a/service/src/components/BugEditDialog.vue b/service/src/components/BugEditDialog.vue new file mode 100644 index 0000000..31aa7bf --- /dev/null +++ b/service/src/components/BugEditDialog.vue @@ -0,0 +1,219 @@ + + + + diff --git a/service/src/components/CaseBugDialog.vue b/service/src/components/CaseBugDialog.vue new file mode 100644 index 0000000..4107917 --- /dev/null +++ b/service/src/components/CaseBugDialog.vue @@ -0,0 +1,339 @@ + + + + + + + diff --git a/service/src/components/SnackbarNotification.vue b/service/src/components/SnackbarNotification.vue index 3fa3e46..d337a96 100644 --- a/service/src/components/SnackbarNotification.vue +++ b/service/src/components/SnackbarNotification.vue @@ -54,4 +54,5 @@ watch(isVisible, (newValue) => { const close = () => { isVisible.value = false } + diff --git a/service/src/components/status.ts b/service/src/components/status.ts index 01756ef..c1eba78 100644 --- a/service/src/components/status.ts 
+++ b/service/src/components/status.ts @@ -1,6 +1,11 @@ // WWWHerd (c) 2025 Ginfra Project. All rights Reserved. Source licensed under Apache License Version 2.0, January 2004 // Get run status text +// ------------------------------------------------------------------------------------------------------- +// - RUN + +import {ref} from "vue"; + export const RUN_STATUS = { NEW: 1, WAITING: 2, @@ -81,4 +86,137 @@ export const getRunStatusColor = (status: number) => { case RUN_STATUS.SKIPPED: return 'grey' default: return 'grey' } -} \ No newline at end of file +} + +// ------------------------------------------------------------------------------------------------------- +// - BUG + +export const BUG_STATUS = { + NEW: 1, + OPEN: 2, + FIXED: 3, + DUPLICATE: 4, + WONT_FIX: 5 +} as const; + + +export const getBugStatusText = (status: number) => { + switch (status) { + case BUG_STATUS.NEW: return 'New' + case BUG_STATUS.OPEN: return 'Open' + case BUG_STATUS.FIXED: return 'Fixed' + case BUG_STATUS.DUPLICATE: return 'Duplicate' + case BUG_STATUS.WONT_FIX: return 'Wont Fix' + default: return 'Unknown' + } +} +export const statusOptions = [ + '', + 'New', + 'Open', + 'Fixed', + 'Duplicate', + 'Wont Fix' +] + +export const BUG_TYPE = { + BUG: 1, + FEATURE: 2, + PERFORMANCE: 3, + COST: 4, + OBSOLETE: 5 +} as const; + +export const getBugTypeText = (status: number) => { + switch (status) { + case BUG_TYPE.BUG: return 'Bug' + case BUG_TYPE.FEATURE: return 'Feature' + case BUG_TYPE.PERFORMANCE: return 'Performance' + case BUG_TYPE.COST: return 'Cost' + case BUG_TYPE.OBSOLETE: return 'Obsolete' + default: return 'Unknown' + } +} + +export const typeOptions = [ + '', + 'Bug', + 'Feature', + 'Performance', + 'Cost', + "Obsolete" +] + +export const BUG_PRIORITY = { + CRITICAL: 1, + HIGH: 2, + MEDIUM: 3, + LOW: 4, + INFORMATIONAL: 5 +} as const; + +export const getBugPriorityText = (priority: number) => { + switch (priority) { + case BUG_PRIORITY.CRITICAL: return 'Critical' + case 
BUG_PRIORITY.HIGH: return 'High' + case BUG_PRIORITY.MEDIUM: return 'Medium' + case BUG_PRIORITY.LOW: return 'Low' + case BUG_PRIORITY.INFORMATIONAL: return 'Informational' + default: return 'Unknown' + } +} + +export const priorityOptions = [ + '', + 'Critical', + 'High', + 'Medium', + 'Low', + "Informational" +] + +export const getPriorityColor = (priority: number) => { + switch (priority) { + case BUG_PRIORITY.CRITICAL: return 'red' + case BUG_PRIORITY.HIGH: return 'orange' + case BUG_PRIORITY.MEDIUM: return 'yellow' + case BUG_PRIORITY.LOW: return 'brown' + case BUG_PRIORITY.INFORMATIONAL: return 'blue' + default: return 'gray' + } +} + +export const bugFilters = ref({ + type: [] as number[], + status: [1, 2] as number[], + priority: [] as number[] +}) + +// TODO these should be merged with the options above. + +// Define filter options - you may need to adjust these based on your actual enum values +export const typeFilterOptions = [ + { title: 'Bug', value: 1 }, + { title: 'Feature', value: 2 }, + { title: 'Performance', value: 3 }, + { title: 'Cost', value: 4 }, + { title: 'Obsolete', value: 5 } +] + +export const statusFilterOptions = [ + { title: 'New', value: 1 }, + { title: 'Open', value: 2 }, + { title: 'Fixed', value: 3 }, + { title: 'Duplicate', value: 4 }, + { title: 'Wont Fix', value: 5 } +] + +export const priorityFilterOptions = [ + { title: 'Critical', value: 1 }, + { title: 'High', value: 2 }, + { title: 'Medium', value: 3 }, + { title: 'Low', value: 4 }, + { title: 'Informational', value: 5 } +] + + diff --git a/service/src/plugins/vuetify.js b/service/src/plugins/vuetify.js index 60688ae..2bd9cd2 100644 --- a/service/src/plugins/vuetify.js +++ b/service/src/plugins/vuetify.js @@ -20,10 +20,12 @@ const topTheme = { dialogBackground: '#a4f1e5', buttonLive: '#0259fb', buttonDisabled: '#6e6d6d', + bug: '#edb1a6', appbar: '#abc7e6', appbutton: '#e0e4fd', softbutton: '#5061ca', hibutton: '#1f9f5b', + clickable: '#111112', info: '#0a0a0a', error: 
'#ff0000', ok: '#00aa00', diff --git a/service/src/routes/main/MainView.vue b/service/src/routes/main/MainView.vue index 1902ef7..36e8737 100644 --- a/service/src/routes/main/MainView.vue +++ b/service/src/routes/main/MainView.vue @@ -110,9 +110,16 @@ + + + @@ -549,7 +556,7 @@ @cancel="handleCaseSelectionCancel" /> - + + + + @@ -639,6 +652,7 @@ import Sidebar from "@/components/Sidebar.vue"; import FlowTagsDialog from "@/components/FlowTagsDialog.vue"; import SnackbarNotification from "@/components/SnackbarNotification.vue"; import FilterTagsDialog from "@/components/FilterTagsDialog.vue"; +import CaseBugDialog from '@/components/CaseBugDialog.vue' const tab = ref('cases') @@ -868,7 +882,6 @@ const doRetireCase = async () => { } } -// -- Decorations --------------------------------------------------------------- const strictChecked = ref(false) const softChecked = ref(false) @@ -899,6 +912,18 @@ const clearDecorations = () => { softChecked.value = false } +// -- Case Bug --------------------------------------------------------------- + +// Add these to your data/refs +const caseBugDialog = ref(false) +const selectedCaseId = ref(null) + +// Add this method +const openCaseBugDialog = (case_: any) => { + selectedCaseId.value = case_.id + caseBugDialog.value = true +} + // --------------------------------------------------------------------------------------------------------------------- // -- FLOW ------------------------------------------------------------------------------------------------------------ diff --git a/service/src/stores/common.ts b/service/src/stores/common.ts index 5f5998f..c094ee9 100644 --- a/service/src/stores/common.ts +++ b/service/src/stores/common.ts @@ -112,6 +112,62 @@ export const getCaseColor = (case_: Case) => { return category?.color || 'primary' } +// -------------------------------------------------------------------------------------------------------------------- +// - Case Bug +// id, case_id, status, description, info, type, 
priority, reporter, link, entry_date, update_date +export interface Bug { + id: number + case_id: number + type: number + status: number + priority: number + description: string + info: string + link: string + reporter: string + entry_date: string + update_date: string +} + +export const fetchBugs = async (caseId: number): Promise => { + const response = await apiClient.get(`/api/casesbug/case/${caseId}`) + if (response.ok) { + return await response.json() as Bug[] + } else { + if (response.status === 404) { + throw Error(`Case not found`) + } else { + throw Error(`Server error`) + } + } +} + +export const saveBug = async (bug: Bug, newBug: boolean): Promise => { + const response = await (newBug + ? apiClient.post('/api/casesbug', bug) + : apiClient.put(`/api/casesbug`, bug)) + if (response.ok) { + return await response.json() as Bug + } else { + if (response.status === 404) { + throw Error(`Case not found`) + } else { + throw Error(`Server error`) + } + } +} + +export const deleteBug = async (bug: Bug) => { + const response = await apiClient.delete(`/api/casesbug/id/${bug.id}`) + if (!response.ok) { + if (response.status === 404) { + throw Error(`Case bug not found`) + } else { + throw Error(`Server error`) + } + } +} + // -------------------------------------------------------------------------------------------------------------------- // - Flows -- GitLab From 38e8d3814f3eb5fabc5e58b77c347708ff586f86 Mon Sep 17 00:00:00 2001 From: Erich Gatejen Date: Thu, 4 Sep 2025 13:08:34 -0600 Subject: [PATCH 12/25] fix logs --- magnitude/src/index.ts | 19 +- magnitude/src/index.ts.OLD | 376 ------------------------- service/providers/psql/schema/full.sql | 2 +- service/server/flowcase.js | 35 +-- service/server/flowcase_rest.js | 2 +- service/server/proxy.js | 4 +- service/src/components/LogsDialog.vue | 182 ++---------- service/test/working.json | 2 +- 8 files changed, 55 insertions(+), 567 deletions(-) delete mode 100644 magnitude/src/index.ts.OLD diff --git 
a/magnitude/src/index.ts b/magnitude/src/index.ts index b3cd7c4..21eca6d 100644 --- a/magnitude/src/index.ts +++ b/magnitude/src/index.ts @@ -145,27 +145,21 @@ agent.then(resolvedAgent => { resolvedAgent.events.on('tokensUsed', (usage: any) => { if (usage.inputTokens !== undefined) { latestInputTokens += usage.inputTokens; - console.log('COST: inputTokens:' + usage.inputTokens); } if (usage.inputCost !== undefined) { latestInputCost += usage.inputCost; - console.log('COST: inputCost:' + usage.inputCost); } if (usage.outputTokens !== undefined) { latestOutputTokens += usage.outputTokens; - console.log('COST: outputTokens:' + usage.outputTokens); } if (usage.outputCost !== undefined) { latestOutputCost += usage.outputCost; - console.log('COST: outputCost:' + usage.outputCost); } if (usage.cacheWriteInputTokens !== undefined) { latestCacheWriteInputTokens += usage.cacheWriteInputTokens; - console.log('COST: cacheWriteInputTokens:' + usage.cacheWriteInputTokens); } if (usage.cacheReadInputTokens !== undefined) { latestCacheReadInputTokens += usage.cacheReadInputTokens; - console.log('COST: cacheReadInputTokens:' + usage.cacheReadInputTokens); } }); }).catch(error => { @@ -339,7 +333,12 @@ process.on('SIGINT', async (): Promise => { }); function getLog() { - fs.writeSync(fd, '\n"ending", "ending"\n ]\n'); + const finalEntry: CapturedLogEntry = { + timestamp: new Date().toISOString(), + type: "LOG", + message: "Ending" + }; + fs.writeSync(fd, JSON.stringify(finalEntry, null, 2)+ "\n]\n"); return fs.readFileSync(logFile, 'utf8'); } @@ -463,14 +462,14 @@ async function sendResult() { // MAIN function startFlow(info: any) { - console.log('"start.flow": ' + info.name) + console.log('start flow: ' + info.name) state = StateFlow; promptglobal = info.prompt currentFlowId = info.flowId } function startCase(info: any) { - console.log('"start.case": ' + info.name) + console.log('start case: ' + info.name) state = StateCase; promptglobal = info.prompt promptset = "" @@ -605,7 
+604,7 @@ function delay(ms: number): Promise { (async () => { let waitLoop = true; - console.log("Starting wait..."); + console.log("Starting."); // Block the main thread until waitLoop becomes false while (waitLoop) { diff --git a/magnitude/src/index.ts.OLD b/magnitude/src/index.ts.OLD deleted file mode 100644 index 2c26bec..0000000 --- a/magnitude/src/index.ts.OLD +++ /dev/null @@ -1,376 +0,0 @@ -// WWWHerd (c) 2025 Ginfra Project. All rights Reserved. Source licensed under Apache License Version 2.0, January 2004 -import * as readline from 'readline'; -import {AgentOptions, BrowserConnectorOptions, startBrowserAgent} from 'magnitude-core'; -import * as fs from 'fs'; - -// =================================================================================================================== -// How to use: -// ARG1: UIID -// ARG2: controller URL -// ARG3: Target host url -// ARG4: Store directory -// ARG5: Provider name -// ARG6: apiKey -// ARG7: model -// ARG8: url to provider - -const uiid = process.argv[2]; -const controllerUrl = process.argv[3]; -const target = process.argv[4]; -const store = process.argv[5]; -const provider = process.argv[6]; -const apiKey = process.argv[7]; -const model = process.argv[8]; -const url = process.argv[9]; - -const logFile = `${store}/log.json`; -const dispFile = `${store}/disp.json`; - -const argData = { - target: target, - store: store, - provider: provider, - apiKey: apiKey, - model: model, - url: url -}; - -fs.writeFileSync(dispFile, JSON.stringify(argData, null, 2) + '\n'); - -let fd = fs.openSync(logFile, 'w'); -fs.writeSync(fd, '[\n'); - - -// =================================================================================================================== -// Console output capture - -interface CapturedLogEntry { - timestamp: string; - type: string; - message: string; -} - -const capturedOutput: CapturedLogEntry[] = []; - -type ConsoleMethod = typeof console.log; -const createConsoleCapture = ( - type: string -): 
ConsoleMethod => { - return (arg: any): void => { - let processedArg: any; - - try { - // Check if arg is already a valid JSON object/string - if (typeof arg === 'object' && arg !== null) { - processedArg = arg; - } else if (typeof arg === 'string') { - // Try to parse as JSON first - try { - processedArg = JSON.parse(arg); - } catch { - // If not JSON, clean terminal formatting and keep as string - processedArg = arg.replace(/\x1B\[[0-9;]*[a-zA-Z]/g, ''); - } - } else { - // For other types, convert to string and clean formatting - processedArg = String(arg).replace(/\x1B\[[0-9;]*[a-zA-Z]/g, ''); - } - } catch (error) { - // If anything fails, convert to string representation - processedArg = String(arg).replace(/\x1B\[[0-9;]*[a-zA-Z]/g, ''); - } - - const logEntry: CapturedLogEntry = { - timestamp: new Date().toISOString(), - type: type, - message: processedArg - }; - - capturedOutput.push(logEntry); - }; -}; - -// Override console methods to capture output -console.log = createConsoleCapture('LOG'); -console.error = createConsoleCapture('ERROR'); -console.warn = createConsoleCapture('WARN'); -console.info = createConsoleCapture('INFO'); - -// Function to clear captured output -function clearCapturedOutput(): void { - capturedOutput.length = 0; -} - -// Function to get all captured output as JSON string -function getCapturedOutput(): string { - return JSON.stringify(capturedOutput, null, 2); -} - -// ==================================================================================================================== -// AGENT - -const agent = startBrowserAgent({ - // Starting URL for agent - url: target || "http://localhost:8888", - // Show thoughts and actions - narrate: true, - // LLM configuration - llm: { - provider: provider, - options: { - model: model, - apiKey: apiKey, - baseUrl: url - } - }, -}).catch(async error => { - console.error(JSON.stringify({ - type: "AGENT", - status: "failed", - description: "starting agent", - reasoning: error.message || error, 
- })) - fs.writeSync(fd, getCapturedOutput() + "\n"); - fs.closeSync(fd); - await sleep(2000); - process.exit(9); -}); - -// Listen to token usage events -agent.then(resolvedAgent => { - resolvedAgent.events.on('tokensUsed', (usage: any) => { - if (usage.inputTokens !== undefined) { - console.log('COST: inputTokens:' + usage.inputTokens); - } - if (usage.inputCost !== undefined) { - console.log('COST: inputCost:' + usage.inputCost); - } - if (usage.outputTokens !== undefined) { - console.log('COST: outputTokens:' + usage.outputTokens); - } - if (usage.outputCost !== undefined) { - console.log('COST: outputCost:' + usage.outputCost); - } - if (usage.cacheWriteInputTokens !== undefined) { - console.log('COST: cacheWriteInputTokens:' + usage.cacheWriteInputTokens); - } - if (usage.cacheReadInputTokens !== undefined) { - console.log('COST: cacheReadInputTokens:' + usage.cacheReadInputTokens); - } - }); -}).catch(error => { - console.error('Failed to set up token usage listener:', error); -}); - - -let prompt = "" -let promptset = "" -let promptglobal = "" -let data: Record = {} - -// Function to perform agent action -async function act(order: string): Promise { - // TODO Not sure if magnitude will freak if either data or prompt it empty. We shall see. - - const resolvedAgent = await agent; - await resolvedAgent.act(order, {data: data, prompt: promptglobal + ` ` + promptset + ` ` + prompt}).catch(error => console.error( - JSON.stringify({ - type: "WWWHERD", - status: "failed", - description: order, - reasoning: error.message, - }) - )) - data = {}; - prompt = ""; -} - -const CHECK_INSTRUCTIONS=` -Given the actions of an LLM agent executing a test case, and a screenshot taken afterwards, evaluate whether the provided check "passes" i.e. holds true or not. - -Check to evaluate: -`.trim(); - -// Check doesn't scroll, we we have to just use act with the correct prompt. 
-async function check(description: string): Promise { - await act(CHECK_INSTRUCTIONS + description) -} - -// ==================================================================================================================== -// I/O - -const rl: readline.Interface = readline.createInterface({ - input: process.stdin, - output: process.stdout, - terminal: false -}); - -// Raw input state management -let isRawMode = false; -let rawBuffer: string[] = []; - -// Process each line from stdin -rl.on('line', async (line: string): Promise => { - - if (isRawMode) { - if (line.startsWith('##RAW')) { - isRawMode = false; - const raw = rawBuffer.join('\n'); - - try { - // Create an async function that has access to the agent variable - const executeRaw = new Function('agent', 'console', 'data', 'prompt', 'promptset', 'promptglobal', ` - return (async () => { - ${raw} - })(); - `); - - // Execute the raw code with access to the current context variables - await executeRaw(await agent, console, data, prompt, promptset, promptglobal); - - } catch (error) { - console.error(JSON.stringify({ - type: "WWWHERD", - status: "error", - description: "Raw code execution failed", - // reasoning: error.message || "", - //zstack: error.stack || "" - })); - } - - const capturedJson = getCapturedOutput(); - if (capturedJson !== "[]") { - process.stdout.write(capturedJson + '\n'); - //process.stderr.write(capturedJson + '\n'); - } - clearCapturedOutput() - console.log("##RAW DONE"); - } else { - rawBuffer.push(line); - } - - } else { - - // -- ACT ---------------------------------------------------------------------------------------- - if (line.startsWith('##ACT ')) { - console.log("##ACT"); - const data = line.substring(6).trim(); - const [command, name, value] = data.split('|').map(s => s.trim()); - await act(command); - console.log("##ACT DONE"); - - // -- PROMPT ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##PROMPT ')) 
{ - console.log("##PROMPT"); - prompt = line.substring(9).trim(); - console.log("GOOD"); - console.log("##PROMPT DONE"); - - // -- PROMPTSET ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##PROMPTSET ')) { - console.log("##PROMPTSET"); - promptset = line.substring(12).trim(); - console.log("GOOD"); - console.log("##PROMPTSET DONE"); - - // -- PROMPTCLEAR ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##PROMPTCLEAR')) { - console.log("##PROMPTCLEAR"); - promptset = "" - console.log("GOOD"); - console.log("##PROMPTCLEAR DONE"); - - // -- PROMPTGLOBAL ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##PROMPTGLOBAL ')) { - console.log("##PROMPTGLOBAL"); - promptglobal = "" - console.log("GOOD"); - console.log("##PROMPTGLOBAL DONE"); - - // -- PROMPTGLOBALCLEAR ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##PROMPTGLOBALCLEAR')) { - console.log("##PROMPTGLOBALCLEAR"); - promptglobal = line.substring(19).trim(); - console.log("GOOD"); - console.log("##PROMPTGLOBALCLEAR DONE"); - - // -- DATA ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##DATA ')) { - console.log("##DATA"); - try { - data = JSON.parse(line.substring(7).trim()); - console.log("GOOD"); - } catch (error) { - console.error('Error parsing JSON for ##DATA: ', error); - } - console.log("##DATA DONE"); - - // -- NAV ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##NAV ')) { - console.log("##NAV"); - const resolvedAgent = await agent; - try { - await resolvedAgent.nav(line.substring(6).trim()); - } catch (error) { - console.error('NAV ERROR:', error); - } - console.log("##NAV DONE"); - - 
// -- CHECK ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##CHECK ')) { - console.log("##CHECK"); - await check(line.substring(8).trim()); - console.log("##CHECK DONE"); - - // -- PING ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##PING')) { - console.log("##PONG"); - console.log("PONG") - console.log("##PONG DONE"); - - // -- RAW ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##RAW')) { - console.log("##RAW"); - isRawMode = true; - rawBuffer = []; - - // -- QUIT ---------------------------------------------------------------------------------------- - } else if (line.startsWith('##QUIT')) { - console.log("##QUIT"); - rl.close(); - fs.writeSync(fd, ' "order":"exit on ##QUIT"\n ]\n'); - fs.closeSync(fd); - process.exit(0); - } - if (line != "") { - // Output captured logs as JSON - const capturedJson = getCapturedOutput(); - if (capturedJson !== "[]") { - fs.writeSync(fd, capturedJson + ',\n'); - } - clearCapturedOutput() - } - } //end if raw -}); - -// Handle when stdin is closed -//rl.on('close', (): void => { -// fs.writeFileSync(logFile, ' "order":"exit on pipe close"\n ]\n'); -// process.exit(0); -//}); - -// Optional: Handle process termination signals -process.on('SIGINT', async (): Promise => { - fs.writeSync(fd, ' "order":"exit on terminate"\n ]\n'); - fs.closeSync(fd); - rl.close(); - await (await agent).stop() - process.exit(0); -}); - -// =================================================================================================================== -// Tool - -function sleep(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); -} \ No newline at end of file diff --git a/service/providers/psql/schema/full.sql b/service/providers/psql/schema/full.sql index 5605dfb..3be9679 100644 --- 
a/service/providers/psql/schema/full.sql +++ b/service/providers/psql/schema/full.sql @@ -181,7 +181,7 @@ CREATE TABLE case_history case_id INT NOT NULL, case_text TEXT NOT NULL, decorations TEXT, - retirement BOOLEAN NOT NULL DEFAULT FALSE, + retired BOOLEAN NOT NULL DEFAULT FALSE, change_date TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, org_id INT NOT NULL, CONSTRAINT fkcase_hist_org FOREIGN KEY (org_id) diff --git a/service/server/flowcase.js b/service/server/flowcase.js index c21cdbd..095cf13 100644 --- a/service/server/flowcase.js +++ b/service/server/flowcase.js @@ -8,17 +8,6 @@ import { getClient } from './client.js'; // --------------------------------------------------------------------------------------------------------------------- // - CASE -/* -( - id SERIAL PRIMARY KEY, - name TEXT NOT NULL, - text TEXT, - decorations TEXT, - type_id INT NOT NULL, - org_id INT NOT NULL, - cat_id INT NOT NULL, - retired BOOLEAN NOT NULL DEFAULT FALSE, - */ async function queryCasesByOrgId(orgId) { let client = getClient() @@ -55,7 +44,6 @@ async function addCase(orgId, catId, typeId, name, text, decorations, tags = []) let client = getClient() try { await client.query('BEGIN'); - const result = await client.query( `INSERT INTO cases (org_id, cat_id, type_id, name, text, decorations) VALUES ($1, $2, $3, $4, $5, $6) RETURNING id, cat_id, type_id, name, text, decorations, retired`, @@ -69,8 +57,11 @@ async function addCase(orgId, catId, typeId, name, text, decorations, tags = []) [caseId, tags] ); } - await client.query('COMMIT'); + + const retired = result.rows[0].retired; + await addCaseHistory(orgId, caseId, text, decorations, retired) + return result.rows[0]; } catch (error) { await client.query('ROLLBACK'); @@ -85,8 +76,8 @@ async function updateCase(orgId, catId, caseId, typeId, name, text, decorations, await client.query('BEGIN'); const result = await client.query( - 'UPDATE cases SET cat_id = $2, name = $3, text = $4, type_id = $5, decorations = $6 WHERE id = $7 
AND org_id = $1' + - 'RETURNING id, cat_id, name, text, type_id, decorations, retired', + `UPDATE cases SET cat_id = $2, name = $3, text = $4, type_id = $5, decorations = $6 WHERE id = $7 AND org_id = $1 + RETURNING id, cat_id, name, text, type_id, decorations, retired`, [orgId, catId, name, text, typeId, decorations, caseId ] ); @@ -99,6 +90,10 @@ async function updateCase(orgId, catId, caseId, typeId, name, text, decorations, } await client.query('COMMIT'); + + const retired = result.rows[0].retired; + await addCaseHistory(orgId, caseId, text, decorations, retired) + return result.rows[0]; } catch (error) { await client.query('ROLLBACK'); @@ -224,14 +219,14 @@ async function deleteCaseBug(orgId, id) { // --------------------------------------------------------------------------------------------------------------------- // - CASE HISTORY -async function addCaseHistory(orgId, caseId, caseText, decorations) { +async function addCaseHistory(orgId, caseId, caseText, decorations, retired) { let client = getClient() try { const result = await client.query( - `INSERT INTO case_history (case_id, case_text, decorations, retirement, org_id, change_date) - VALUES ($1, $2, $3, (SELECT retirement FROM cases WHERE id = $1), $4, CURRENT_TIMESTAMP) - RETURNING id, case_id, case_text, decorations, retirement, change_date`, - [caseId, caseText, decorations, orgId] + `INSERT INTO case_history (case_id, case_text, decorations, retired, org_id, change_date) + VALUES ($1, $2, $3, $4, $5, CURRENT_TIMESTAMP) + RETURNING id, case_id, case_text, decorations, retired, change_date`, + [caseId, caseText, decorations, retired, orgId] ); return result.rows[0]; } catch (error) { diff --git a/service/server/flowcase_rest.js b/service/server/flowcase_rest.js index e99b2d3..7c28034 100644 --- a/service/server/flowcase_rest.js +++ b/service/server/flowcase_rest.js @@ -215,7 +215,7 @@ export async function addFlowCase(app) { app.post('/api/cases/history/:caseId', authenticateToken, async (req, res) 
=> { try { const caseId = parseInt(req.params.caseId); - const result = await proxy.addCaseHistory(orgId, caseId, req.body.text, req.body.decorations) + const result = await proxy.addCaseHistory(orgId, caseId, req.body.text, req.body.retired, req.body.decorations) res.json(result); } catch (error) { res.status(500).json({error: error.message}); diff --git a/service/server/proxy.js b/service/server/proxy.js index 0e859f2..4e4e3a5 100644 --- a/service/server/proxy.js +++ b/service/server/proxy.js @@ -186,8 +186,8 @@ async function deleteCaseBug(orgId, id) { // -- Case History ----------------------------------------------------------------------------------------------------- -async function addCaseHistory(orgId, caseId, caseText, decorations) { - return await fc.addCaseHistory(orgId, caseId, caseText, decorations); +async function addCaseHistory(orgId, caseId, caseText, decorations, retired) { + return await fc.addCaseHistory(orgId, caseId, caseText, decorations, retired); } async function queryCaseHistoryByCaseId(orgId, caseId) { diff --git a/service/src/components/LogsDialog.vue b/service/src/components/LogsDialog.vue index 743999a..c851b3c 100644 --- a/service/src/components/LogsDialog.vue +++ b/service/src/components/LogsDialog.vue @@ -134,161 +134,43 @@ const parsedLogs = computed(() => { } }) -// Helper function to check if THIS specific object should be ignored (not recursive) -const shouldIgnoreThisObject = (obj: any): boolean => { - return typeof obj === 'object' && - obj !== null && - obj.message && - typeof obj.message === 'string' && - obj.message.startsWith('COST:') -} - // Flatten the parsed logs into the specific structure requested const flattenedLogs = computed((): FlattenedLogItem[] => { if (parsedLogs.value === null) return [] const result: FlattenedLogItem[] = [] - const processValue = (value: any, depth: number = 0) => { - if (Array.isArray(value)) { - // Process each array item, but skip ones that should be ignored - value.forEach((item, index) 
=> { - // Skip this specific item if it should be ignored - if (shouldIgnoreThisObject(item)) { - return - } - - // Process the directive's content - if (typeof item === 'object' && item !== null) { - processObject(item, depth + 1) - } else { - result.push({ - displayText: formatPrimitiveValue(item), - depth: depth + 1, - isDirective: false, - isStartFlow: false, - isStartCase: false, - isLogEntry: true - }) - } - }) - } else if (typeof value === 'object' && value !== null) { - // Only skip if THIS object should be ignored - if (!shouldIgnoreThisObject(value)) { - processObject(value, depth) - } + parsedLogs.value.forEach((item) => { + let depth = 0 + let id = false + let isf = false + let isc = false + let log = false + + if (item.message.startsWith("##")) { + depth = 2 + id = true + } else if (item.message.startsWith("start flow:")) { + depth = 0 + isf = true + } else if (item.message.startsWith("start case:")) { + depth = 1 + isc = true } else { - result.push({ - displayText: formatPrimitiveValue(value), - depth, - isDirective: false, - isStartFlow: false, - isStartCase: false, - isLogEntry: true - }) - } - } - - const processObject = (obj: any, depth: number) => { - // Skip processing if THIS specific object should be ignored - if (shouldIgnoreThisObject(obj)) { - return + log = true + depth = 3 + item.message = `${item.timestamp} ${item.type} ${item.message}` } - Object.entries(obj).forEach(([key, value]) => { - // Skip displaying the "type: LOG" line specifically - if (key === 'type' && value === 'LOG') { - return - } - - if (Array.isArray(value)) { - // Show the key name first, then process the array - const isStartFlow = key === 'start.flow' - const isStartCase = key === 'start.case' - - result.push({ - displayText: `${key}:`, - depth, - isDirective: false, - isStartFlow, - isStartCase, - isLogEntry: false - }) - processValue(value, depth) - } else if (typeof value === 'object' && value !== null) { - // Only skip if THIS specific object should be ignored - 
if (!shouldIgnoreThisObject(value)) { - processObject(value, depth + 1) - } - } else { - // For primitive values, show key: value - const isStartFlow = key === 'start.flow' - const isStartCase = key === 'start.case' - - if (key == 'directive') { - result.push({ - displayText: `${key}: ${formatPrimitiveValue(value)}`, - depth, - isDirective: true, - isStartFlow: false, - isStartCase: false, - isLogEntry: false - }) - - } else { - - result.push({ - displayText: `${key}: ${formatPrimitiveValue(value)}`, - depth, - isDirective: false, - isStartFlow, - isStartCase, - isLogEntry: false - }) - } - - } - }) - } - - - // Start processing from the root - if (Array.isArray(parsedLogs.value)) { - // Process each top-level array item directly - parsedLogs.value.forEach((item) => { - // Skip this specific item if it should be ignored - if (shouldIgnoreThisObject(item)) { - return - } - - if (typeof item === 'object' && item !== null) { - processObject(item, 0) - } else { - result.push({ - displayText: formatPrimitiveValue(item), - depth: 0, - isDirective: false, - isStartFlow: false, - isStartCase: false, - isLogEntry: true - }) - } - }) - } else if (typeof parsedLogs.value === 'object' && parsedLogs.value !== null) { - // Only skip if the root object itself should be ignored - if (!shouldIgnoreThisObject(parsedLogs.value)) { - processObject(parsedLogs.value, 0) - } - } else { result.push({ - displayText: formatPrimitiveValue(parsedLogs.value), - depth: 0, - isDirective: false, - isStartFlow: false, - isStartCase: false, - isLogEntry: true + displayText: item.message, + depth: depth, + isDirective: id, + isStartFlow: isf, + isStartCase: isc, + isLogEntry: log }) - } + }) return result }) @@ -299,18 +181,6 @@ const logLines = computed(() => { return props.logs.split('\n').filter(line => line.trim().length > 0) }) -const formatPrimitiveValue = (value: any): string => { - if (value === null || value === undefined) { - return 'null' - } - - if (typeof value === 'string') { - return 
value - } - - return String(value) -} - const closeDialog = () => { emit('update:modelValue', false) } diff --git a/service/test/working.json b/service/test/working.json index 53e355b..766bf99 100644 --- a/service/test/working.json +++ b/service/test/working.json @@ -32,7 +32,7 @@ "port": 8900, "service_class": "wwwherd", "specifics": { - "postgres_url": "postgres://postgres:OWU5ODA4NDJiNGZh@192.168.1.41:5432", + "postgres_url": "postgres://postgres:NTU0OTkzOTQwZDhj@192.168.1.41:5432", "magnitude_auth": "AAAAAAAAAAAA" } }, -- GitLab From 15f7ea150f21b432bfffe39cff761682aed95e3a Mon Sep 17 00:00:00 2001 From: Erich Gatejen Date: Sun, 7 Sep 2025 08:17:22 -0600 Subject: [PATCH 13/25] rename service --- common/data/strings.go | 45 +++ common/script/run_windows.go | 2 +- common/stream/msg_mag.go | 2 +- docs/ARCH.md | 4 +- docs/RESTAPI.md | 6 +- integration/build/all/Dockerfile | 14 +- .../build/all/config/ginfra_config.json | 4 +- ...gnitude_config.json => runner_config.json} | 2 +- ...config.json => runner_service_config.json} | 0 integration/build/all/files/configure.sh | 6 +- integration/build/all/files/entrypoint.sh | 6 +- integration/build/all/files/provision.sh | 8 +- {magnitude => runner}/.gitignore | 2 +- {magnitude => runner}/README.md | 2 +- {magnitude => runner}/api/README.md | 0 .../api/service/client/service_client.gen.go | 2 +- .../api/service/models/service_models.gen.go | 0 .../api/service/server/service_server.gen.go | 3 +- .../api/service/service.yaml | 0 {magnitude => runner}/build/Dockerfile | 0 .../build/container_entrypoint.sh | 0 .../build/custom_entrypoint.sh | 0 .../build/custom_provision.sh | 0 {magnitude => runner}/build/custom_verify.sh | 0 {magnitude => runner}/build/debugging.sh | 0 {magnitude => runner}/build/dockertask.yaml | 2 +- {magnitude => runner}/build/files/README.md | 0 {magnitude => runner}/build/provision.sh | 0 {magnitude => runner}/build/seed_repo.cmd | 0 {magnitude => runner}/build/seed_repo.sh | 0 
.../build/service_config.json.NEW | 2 +- {magnitude => runner}/build/test.yaml | 2 +- {magnitude => runner}/doc/FLOWSCRIPT.md | 0 {magnitude => runner}/doc/README.md | 0 {magnitude => runner}/doc/Specifics.md | 0 {magnitude => runner}/doc/TASK.md | 0 {magnitude => runner}/doc/UserDocs.md | 0 .../doc/modelfles/Modelfile_UI_TARS | 0 .../doc/modelfles/Modelfile_qwen | 0 {magnitude => runner}/doc/testing.txt | 0 {magnitude => runner}/go.mod | 2 +- {magnitude => runner}/go.sum | 0 {magnitude => runner}/local/README.md | 0 {magnitude => runner}/local/cmd.go | 0 {magnitude => runner}/local/config.go | 0 {magnitude => runner}/local/tools.go | 2 +- {magnitude => runner}/local/values.go | 4 +- {magnitude => runner}/log/README.md | 0 {magnitude => runner}/main.go | 4 +- {magnitude => runner}/package-lock.json | 4 +- {magnitude => runner}/package.json | 6 +- .../providers/message/direct_connect.go | 2 +- .../providers/message/provider.go | 0 .../providers/service_providers.go | 4 +- .../service/common/prompt.go | 0 {magnitude => runner}/service/common/types.go | 2 +- {magnitude => runner}/service/common/utils.go | 0 {magnitude => runner}/service/private.go | 12 +- .../service/service_control.go | 14 +- .../service/service_dispatch.go | 18 +- .../service/service_feature.go | 36 +- {magnitude => runner}/service/service_impl.go | 52 +-- .../service/service_message.go | 61 ++- .../service/service_setup.go | 10 +- .../service/service_store.go | 14 +- {magnitude => runner}/service/service_test.go | 0 .../service/service_values.go | 2 +- {magnitude => runner}/service/util.go | 6 +- {magnitude => runner}/specifics.json | 6 +- {magnitude => runner}/src/index.js | 0 {magnitude => runner}/src/index.ts | 0 {magnitude => runner}/src/run.sh | 0 {magnitude => runner}/start_local.cmd | 0 {magnitude => runner}/start_local.sh | 0 {magnitude => runner}/taskfile.yaml | 0 {magnitude => runner}/taskfilecustom.yaml | 2 +- {magnitude => runner}/test/README.md | 0 {magnitude => 
runner}/test/config.json | 8 +- {magnitude => runner}/test/test_config.json | 6 +- {magnitude => runner}/tsconfig.json | 0 service/local/values.go | 2 +- service/package-lock.json | 78 ++++ service/package.json | 2 + service/providers/psql/schema/full.sql | 4 + service/server/proxy.js | 5 + service/server/stats.js | 25 +- service/server/stats_rest.js | 15 +- service/service/dispatcher_runs.go | 14 +- service/src/components/ChartCosts.vue | 376 ++++++++++++++++++ service/src/components/nav.ts | 4 + service/src/router.ts | 6 + service/src/routes/casedata/CaseData.vue | 206 ++++++++++ service/src/routes/main/MainView.vue | 26 +- service/src/routes/run/RunView.vue | 11 +- service/src/stores/common.ts | 118 ++++-- service/test/config.json | 4 +- service/test/working.json | 6 +- taskfile.yaml | 10 +- 98 files changed, 1094 insertions(+), 209 deletions(-) create mode 100644 common/data/strings.go rename integration/build/all/config/{magnitude_config.json => runner_config.json} (95%) rename integration/build/all/config/{magnitude_service_config.json => runner_service_config.json} (100%) rename {magnitude => runner}/.gitignore (97%) rename {magnitude => runner}/README.md (98%) rename {magnitude => runner}/api/README.md (100%) rename {magnitude => runner}/api/service/client/service_client.gen.go (99%) rename {magnitude => runner}/api/service/models/service_models.gen.go (100%) rename {magnitude => runner}/api/service/server/service_server.gen.go (99%) rename {magnitude => runner}/api/service/service.yaml (100%) rename {magnitude => runner}/build/Dockerfile (100%) rename {magnitude => runner}/build/container_entrypoint.sh (100%) rename {magnitude => runner}/build/custom_entrypoint.sh (100%) rename {magnitude => runner}/build/custom_provision.sh (100%) rename {magnitude => runner}/build/custom_verify.sh (100%) rename {magnitude => runner}/build/debugging.sh (100%) rename {magnitude => runner}/build/dockertask.yaml (62%) rename {magnitude => runner}/build/files/README.md (100%) 
rename {magnitude => runner}/build/provision.sh (100%) rename {magnitude => runner}/build/seed_repo.cmd (100%) rename {magnitude => runner}/build/seed_repo.sh (100%) rename {magnitude => runner}/build/service_config.json.NEW (53%) rename {magnitude => runner}/build/test.yaml (90%) rename {magnitude => runner}/doc/FLOWSCRIPT.md (100%) rename {magnitude => runner}/doc/README.md (100%) rename {magnitude => runner}/doc/Specifics.md (100%) rename {magnitude => runner}/doc/TASK.md (100%) rename {magnitude => runner}/doc/UserDocs.md (100%) rename {magnitude => runner}/doc/modelfles/Modelfile_UI_TARS (100%) rename {magnitude => runner}/doc/modelfles/Modelfile_qwen (100%) rename {magnitude => runner}/doc/testing.txt (100%) rename {magnitude => runner}/go.mod (98%) rename {magnitude => runner}/go.sum (100%) rename {magnitude => runner}/local/README.md (100%) rename {magnitude => runner}/local/cmd.go (100%) rename {magnitude => runner}/local/config.go (100%) rename {magnitude => runner}/local/tools.go (96%) rename {magnitude => runner}/local/values.go (95%) rename {magnitude => runner}/log/README.md (100%) rename {magnitude => runner}/main.go (92%) rename {magnitude => runner}/package-lock.json (99%) rename {magnitude => runner}/package.json (84%) rename {magnitude => runner}/providers/message/direct_connect.go (98%) rename {magnitude => runner}/providers/message/provider.go (100%) rename {magnitude => runner}/providers/service_providers.go (95%) rename {magnitude => runner}/service/common/prompt.go (100%) rename {magnitude => runner}/service/common/types.go (96%) rename {magnitude => runner}/service/common/utils.go (100%) rename {magnitude => runner}/service/private.go (97%) rename {magnitude => runner}/service/service_control.go (87%) rename {magnitude => runner}/service/service_dispatch.go (81%) rename {magnitude => runner}/service/service_feature.go (85%) rename {magnitude => runner}/service/service_impl.go (82%) rename {magnitude => runner}/service/service_message.go 
(85%) rename {magnitude => runner}/service/service_setup.go (83%) rename {magnitude => runner}/service/service_store.go (94%) rename {magnitude => runner}/service/service_test.go (100%) rename {magnitude => runner}/service/service_values.go (99%) rename {magnitude => runner}/service/util.go (93%) rename {magnitude => runner}/specifics.json (79%) rename {magnitude => runner}/src/index.js (100%) rename {magnitude => runner}/src/index.ts (100%) rename {magnitude => runner}/src/run.sh (100%) rename {magnitude => runner}/start_local.cmd (100%) rename {magnitude => runner}/start_local.sh (100%) rename {magnitude => runner}/taskfile.yaml (100%) rename {magnitude => runner}/taskfilecustom.yaml (66%) rename {magnitude => runner}/test/README.md (100%) rename {magnitude => runner}/test/config.json (87%) rename {magnitude => runner}/test/test_config.json (97%) rename {magnitude => runner}/tsconfig.json (100%) create mode 100644 service/src/components/ChartCosts.vue create mode 100644 service/src/routes/casedata/CaseData.vue diff --git a/common/data/strings.go b/common/data/strings.go new file mode 100644 index 0000000..e3861b9 --- /dev/null +++ b/common/data/strings.go @@ -0,0 +1,45 @@ +/* +Package data +Copyright (c) 2025 Ginfra Project +[LICENSE]: https://www.apache.org/licenses/LICENSE-2.0.txt + +Unified message strings for messages (logs, metrics, etc). These are simple constants now but they can be abstracted later. +The values are consistent and can be used for data mining. 
+*/ +package data + +const ( + SMsgRunnerDone = "Done" + + // -- General ------------------------------------------------------------------------------------------------------ + + SMsgGeneralFailedSendStatus = "Failed to send status" + SMsgGeneralFailedSendOrders = "Failed to send orders" + + // -- Cost messages ------------------------------------------------------------------------------------------------ + + SMsgCostsInputCost = "latestInputCost" + SMsgCostsOutputCost = "latestOutputCost" + SMsgCostsInputTokens = "latestInputTokens" + SMsgCostsOutputTokens = "latestOutputTokens" + SMsgCostsCacheWriteInputTokens = "cacheWriteInputTokens" + SMsgCostsCacheReadInputTokens = "cacheReadInputTokens" + SMsgCostsCaseExecutionTime = "caseExecutionTime" + + // -- Runner messages ---------------------------------------------------------------------------------------------- + + SMsgRunnerRunning = "Running" + SMsgRunnerFailedSendRunLog = "Failed to send run log" + SMsgRunnerFailedSendCurrentFlow = "Failed to send current flow" + SMsgRunnerFailedSendCurrentCase = "Failed to send current case" + SMsgRunnerLostProcess = "Lost process has occurred" + SMsgRunnerTerminatingScriptError = "Terminating script due to error" + SMsgRunnerStartingFlow = "Starting flow" + SMsgRunnerStartingCase = "Starting case" + SMsgRunnerDonePendingResults = "Done, pending results" + SMsgRunnerBadDirectiveResult = "Bad directive result" + SMsgRunnerFault = "Runner fault" + SMsgRunnerMsgRx = "Runner message received" + SMsgRunnerMsgRxPanic = "Runner panic on message received" + SMsgRunnerMsgRxFatal = "Runner fatal error on message received" +) diff --git a/common/script/run_windows.go b/common/script/run_windows.go index c89172c..ac4daa6 100644 --- a/common/script/run_windows.go +++ b/common/script/run_windows.go @@ -12,7 +12,7 @@ package script import ( "gitlab.com/ginfra/ginfra/base" - "gitlab.com/ginfra/wwwherd/magnitude/local" + "gitlab.com/ginfra/wwwherd/runner/local" "go.uber.org/zap" "os/exec" 
"strconv" diff --git a/common/stream/msg_mag.go b/common/stream/msg_mag.go index b73a40c..6ea3435 100644 --- a/common/stream/msg_mag.go +++ b/common/stream/msg_mag.go @@ -4,7 +4,7 @@ Copyright (c) 2025 Ginfra Project [LICENSE]: https://www.apache.org/licenses/LICENSE-2.0.txt, if unaltered. You are free to alter and apply any license you wish to this file. -Messages for magnitude control. +Messages for runner control. */ package stream diff --git a/docs/ARCH.md b/docs/ARCH.md index b32121a..02066f0 100644 --- a/docs/ARCH.md +++ b/docs/ARCH.md @@ -34,9 +34,9 @@ following services: - Run job dispatch and monitoring - Periodic data processes -### Magnitude service +### Runner service -The service node is found in magnitude/ and is a ginfra-based service. It handles actually running the AI processes/ +The service node is found in runner/ and is a ginfra-based service. It handles actually running the AI processes. ### Postgres node diff --git a/docs/RESTAPI.md b/docs/RESTAPI.md index af3ee65..9e39ae6 100644 --- a/docs/RESTAPI.md +++ b/docs/RESTAPI.md @@ -45,9 +45,9 @@ The json body will have the following fields: - narrative - values - host -- (int) provider_id -- flowids -- tag_ids +- provider_id (int) +- flowids (json int array) +- tag_ids (json int array) You can get these values as the output from GET /api/design or GET /api/design/:designId. A successful response will have the run's id.
diff --git a/integration/build/all/Dockerfile b/integration/build/all/Dockerfile index 4f8b418..47bed1c 100644 --- a/integration/build/all/Dockerfile +++ b/integration/build/all/Dockerfile @@ -6,13 +6,13 @@ COPY integration/build/all/files/ /files/ RUN chmod 755 /files/*.sh RUN /files/provision.sh -# Magnitude -COPY magnitude/build/ginfra_service.exe /wwwherd/magnitude/ginfra_service.exe -COPY magnitude/src /wwwherd/magnitude/src -COPY magnitude/package.json /wwwherd/magnitude/package.json -COPY magnitude/specifics.json /wwwherd/magnitude/specifics.json -COPY integration/build/all/config/magnitude_config.json /wwwherd/magnitude/config.json -COPY integration/build/all/config/magnitude_service_config.json /wwwherd/magnitude/service_config.json +# Runner +COPY runner/build/ginfra_service.exe /wwwherd/runner/ginfra_service.exe +COPY runner/src /wwwherd/runner/src +COPY runner/package.json /wwwherd/runner/package.json +COPY runner/specifics.json /wwwherd/runner/specifics.json +COPY integration/build/all/config/runner_config.json /wwwherd/runner/config.json +COPY integration/build/all/config/runner_service_config.json /wwwherd/runner/service_config.json # GINFRA control and dispatch service. 
COPY service/build/ginfra_service.exe /wwwherd/service_ginfra/ginfra_service.exe diff --git a/integration/build/all/config/ginfra_config.json b/integration/build/all/config/ginfra_config.json index f5e4668..7c17d43 100644 --- a/integration/build/all/config/ginfra_config.json +++ b/integration/build/all/config/ginfra_config.json @@ -7,7 +7,7 @@ "*": "<<>>" }, "services": { - "magnitude/test": "http://<<>>:8900/", + "runner/test": "http://<<>>:8900/", "wwwherd": "http://<<>>:8901/" } }, @@ -33,7 +33,7 @@ "service_class": "wwwherd", "specifics": { "postgres_url": "postgres://postgres:<<>>@<<>>:5432", - "magnitude_auth": "AAAAAAAAAAAA" + "runner_auth": "AAAAAAAAAAAA" } }, "service_auth": "AAAAAAAAAAAA" diff --git a/integration/build/all/config/magnitude_config.json b/integration/build/all/config/runner_config.json similarity index 95% rename from integration/build/all/config/magnitude_config.json rename to integration/build/all/config/runner_config.json index 9043d86..43589c3 100644 --- a/integration/build/all/config/magnitude_config.json +++ b/integration/build/all/config/runner_config.json @@ -7,7 +7,7 @@ "*": "<<>>" }, "services": { - "magnitude/test": "http://<<>>:8900/", + "runner/test": "http://<<>>:8900/", "wwwherd": "http://<<>>:8901/" } }, diff --git a/integration/build/all/config/magnitude_service_config.json b/integration/build/all/config/runner_service_config.json similarity index 100% rename from integration/build/all/config/magnitude_service_config.json rename to integration/build/all/config/runner_service_config.json diff --git a/integration/build/all/files/configure.sh b/integration/build/all/files/configure.sh index 7d1bafc..bd48aeb 100644 --- a/integration/build/all/files/configure.sh +++ b/integration/build/all/files/configure.sh @@ -34,13 +34,13 @@ replace_config_placeholders() { # ################################################################################################## # CONFIG FILES -replace_config_placeholders 
"/wwwherd/magnitude/config.json" "localhost" +replace_config_placeholders "/wwwherd/runner/config.json" "localhost" replace_config_placeholders "/wwwherd/service_ginfra/config.json" "localhost" # ################################################################################################## -# MAGNITUDE -cd /wwwherd/magnitude +# RUNNER +cd /wwwherd/runner npm install npx playwright install diff --git a/integration/build/all/files/entrypoint.sh b/integration/build/all/files/entrypoint.sh index bc35987..ba90d0e 100644 --- a/integration/build/all/files/entrypoint.sh +++ b/integration/build/all/files/entrypoint.sh @@ -38,9 +38,9 @@ export WWWHERD_JWT_SECRET=$(cat /jwt_secret) pg_ctlcluster 16 main start -# MAGNITUDE -cd /wwwherd/magnitude -./ginfra_service.exe /wwwherd/magnitude >> /wwwherd/magnitude/log/service.log 2>&1 & +# RUNNER +cd /wwwherd/runner +./ginfra_service.exe /wwwherd/runner >> /wwwherd/runner/log/service.log 2>&1 & # API SERVICE cd /wwwherd/api diff --git a/integration/build/all/files/provision.sh b/integration/build/all/files/provision.sh index b20787a..81827b8 100644 --- a/integration/build/all/files/provision.sh +++ b/integration/build/all/files/provision.sh @@ -19,15 +19,15 @@ chmod 755 /entrypoint.sh mkdir /wwwherd mkdir /wwwherd/import -# MAGNITUDE SERVER -mkdir /wwwherd/magnitude -mkdir /wwwherd/magnitude/log +# RUNNER SERVER +mkdir /wwwherd/runner +mkdir /wwwherd/runner/log # GINFRA control and dispatch service. mkdir /wwwherd/service_ginfra mkdir /wwwherd/service_ginfra/schema mkdir /wwwherd/service_ginfra/log -mkdir /wwwherd/service_ginfra/magnitude +mkdir /wwwherd/service_ginfra/runner # UI server. 
mkdir /wwwherd/ui diff --git a/magnitude/.gitignore b/runner/.gitignore similarity index 97% rename from magnitude/.gitignore rename to runner/.gitignore index 59cc251..22d1a88 100644 --- a/magnitude/.gitignore +++ b/runner/.gitignore @@ -47,6 +47,6 @@ tests_output/ dist/* -magnitude/ +runner/ tmp/ diff --git a/magnitude/README.md b/runner/README.md similarity index 98% rename from magnitude/README.md rename to runner/README.md index bd545b4..61bbc32 100644 --- a/magnitude/README.md +++ b/runner/README.md @@ -48,7 +48,7 @@ See the [additional documentation](doc/Specifics) for more information. | doc/ | Additional documentation. | | local/ | Data, values and tools specific to your service. | | providers/ | Service providers that are unique to your service. | -| magnitude/ | Created and used by the service for working files. | +| runner/ | Created and used by the service for working files. | | service/ | Your service implementation. This is where you will implement the interfaces defined in your API specification and tests for it. | | service/private.go | Server overhead. Generally you can leave it alone. | | test/ | Data and configuration for local testing. | diff --git a/magnitude/api/README.md b/runner/api/README.md similarity index 100% rename from magnitude/api/README.md rename to runner/api/README.md diff --git a/magnitude/api/service/client/service_client.gen.go b/runner/api/service/client/service_client.gen.go similarity index 99% rename from magnitude/api/service/client/service_client.gen.go rename to runner/api/service/client/service_client.gen.go index 3a892c0..045a077 100644 --- a/magnitude/api/service/client/service_client.gen.go +++ b/runner/api/service/client/service_client.gen.go @@ -18,7 +18,7 @@ import ( "github.com/getkin/kin-openapi/openapi3" "github.com/oapi-codegen/runtime" - . "gitlab.com/ginfra/wwwherd/magnitude/api/service/models" + . 
"gitlab.com/ginfra/wwwherd/runner/api/service/models" ) // RequestEditorFn is the function signature for the RequestEditor callback function diff --git a/magnitude/api/service/models/service_models.gen.go b/runner/api/service/models/service_models.gen.go similarity index 100% rename from magnitude/api/service/models/service_models.gen.go rename to runner/api/service/models/service_models.gen.go diff --git a/magnitude/api/service/server/service_server.gen.go b/runner/api/service/server/service_server.gen.go similarity index 99% rename from magnitude/api/service/server/service_server.gen.go rename to runner/api/service/server/service_server.gen.go index 49e4a45..cdcce23 100644 --- a/magnitude/api/service/server/service_server.gen.go +++ b/runner/api/service/server/service_server.gen.go @@ -20,8 +20,8 @@ import ( "github.com/labstack/echo/v4" "github.com/oapi-codegen/runtime" strictecho "github.com/oapi-codegen/runtime/strictmiddleware/echo" - . "gitlab.com/ginfra/wwwherd/magnitude/api/service/models" + . "gitlab.com/ginfra/wwwherd/runner/api/service/models" ) // ServerInterface represents all server handlers.
type ServerInterface interface { diff --git a/magnitude/api/service/service.yaml b/runner/api/service/service.yaml similarity index 100% rename from magnitude/api/service/service.yaml rename to runner/api/service/service.yaml diff --git a/magnitude/build/Dockerfile b/runner/build/Dockerfile similarity index 100% rename from magnitude/build/Dockerfile rename to runner/build/Dockerfile diff --git a/magnitude/build/container_entrypoint.sh b/runner/build/container_entrypoint.sh similarity index 100% rename from magnitude/build/container_entrypoint.sh rename to runner/build/container_entrypoint.sh diff --git a/magnitude/build/custom_entrypoint.sh b/runner/build/custom_entrypoint.sh similarity index 100% rename from magnitude/build/custom_entrypoint.sh rename to runner/build/custom_entrypoint.sh diff --git a/magnitude/build/custom_provision.sh b/runner/build/custom_provision.sh similarity index 100% rename from magnitude/build/custom_provision.sh rename to runner/build/custom_provision.sh diff --git a/magnitude/build/custom_verify.sh b/runner/build/custom_verify.sh similarity index 100% rename from magnitude/build/custom_verify.sh rename to runner/build/custom_verify.sh diff --git a/magnitude/build/debugging.sh b/runner/build/debugging.sh similarity index 100% rename from magnitude/build/debugging.sh rename to runner/build/debugging.sh diff --git a/magnitude/build/dockertask.yaml b/runner/build/dockertask.yaml similarity index 62% rename from magnitude/build/dockertask.yaml rename to runner/build/dockertask.yaml index 600cdb9..6b55858 100644 --- a/magnitude/build/dockertask.yaml +++ b/runner/build/dockertask.yaml @@ -2,5 +2,5 @@ version: '3' vars: GINFRA_DOCKER_REPO: "" - GINFRA_DOCKER_PATH: "wwwherd/magnitude" + GINFRA_DOCKER_PATH: "wwwherd/runner" GINFRA_DOCKER_TAG: "" diff --git a/magnitude/build/files/README.md b/runner/build/files/README.md similarity index 100% rename from magnitude/build/files/README.md rename to runner/build/files/README.md diff --git 
a/magnitude/build/provision.sh b/runner/build/provision.sh similarity index 100% rename from magnitude/build/provision.sh rename to runner/build/provision.sh diff --git a/magnitude/build/seed_repo.cmd b/runner/build/seed_repo.cmd similarity index 100% rename from magnitude/build/seed_repo.cmd rename to runner/build/seed_repo.cmd diff --git a/magnitude/build/seed_repo.sh b/runner/build/seed_repo.sh similarity index 100% rename from magnitude/build/seed_repo.sh rename to runner/build/seed_repo.sh diff --git a/magnitude/build/service_config.json.NEW b/runner/build/service_config.json.NEW similarity index 53% rename from magnitude/build/service_config.json.NEW rename to runner/build/service_config.json.NEW index cfda302..e4da2c5 100644 --- a/magnitude/build/service_config.json.NEW +++ b/runner/build/service_config.json.NEW @@ -1 +1 @@ -{"id":"local","auth":"AAAAAAAAAAAA","config_access_token":"AAAAAAAAAAAA","config_host_uri":"file://test/config.json","config_uri":"file://test/config.json","deployment":"default","name":"magnitude","port":8900,"specifics":[],"type":"local"} +{"id":"local","auth":"AAAAAAAAAAAA","config_access_token":"AAAAAAAAAAAA","config_host_uri":"file://test/config.json","config_uri":"file://test/config.json","deployment":"default","name":"runner","port":8900,"specifics":[],"type":"local"} diff --git a/magnitude/build/test.yaml b/runner/build/test.yaml similarity index 90% rename from magnitude/build/test.yaml rename to runner/build/test.yaml index 0e43b30..3e9d532 100644 --- a/magnitude/build/test.yaml +++ b/runner/build/test.yaml @@ -8,7 +8,7 @@ Workflow: Source: Hosting.json Success: Configuration provider started. 
- Ginfra: - Cmd: manage host start magnitude + Cmd: manage host start runner Ping: Delay: 50 Until: 6000 diff --git a/magnitude/doc/FLOWSCRIPT.md b/runner/doc/FLOWSCRIPT.md similarity index 100% rename from magnitude/doc/FLOWSCRIPT.md rename to runner/doc/FLOWSCRIPT.md diff --git a/magnitude/doc/README.md b/runner/doc/README.md similarity index 100% rename from magnitude/doc/README.md rename to runner/doc/README.md diff --git a/magnitude/doc/Specifics.md b/runner/doc/Specifics.md similarity index 100% rename from magnitude/doc/Specifics.md rename to runner/doc/Specifics.md diff --git a/magnitude/doc/TASK.md b/runner/doc/TASK.md similarity index 100% rename from magnitude/doc/TASK.md rename to runner/doc/TASK.md diff --git a/magnitude/doc/UserDocs.md b/runner/doc/UserDocs.md similarity index 100% rename from magnitude/doc/UserDocs.md rename to runner/doc/UserDocs.md diff --git a/magnitude/doc/modelfles/Modelfile_UI_TARS b/runner/doc/modelfles/Modelfile_UI_TARS similarity index 100% rename from magnitude/doc/modelfles/Modelfile_UI_TARS rename to runner/doc/modelfles/Modelfile_UI_TARS diff --git a/magnitude/doc/modelfles/Modelfile_qwen b/runner/doc/modelfles/Modelfile_qwen similarity index 100% rename from magnitude/doc/modelfles/Modelfile_qwen rename to runner/doc/modelfles/Modelfile_qwen diff --git a/magnitude/doc/testing.txt b/runner/doc/testing.txt similarity index 100% rename from magnitude/doc/testing.txt rename to runner/doc/testing.txt diff --git a/magnitude/go.mod b/runner/go.mod similarity index 98% rename from magnitude/go.mod rename to runner/go.mod index ede1e75..6290483 100644 --- a/magnitude/go.mod +++ b/runner/go.mod @@ -1,4 +1,4 @@ -module gitlab.com/ginfra/wwwherd/magnitude +module gitlab.com/ginfra/wwwherd/runner go 1.24.0 diff --git a/magnitude/go.sum b/runner/go.sum similarity index 100% rename from magnitude/go.sum rename to runner/go.sum diff --git a/magnitude/local/README.md b/runner/local/README.md similarity index 100% rename from 
magnitude/local/README.md rename to runner/local/README.md diff --git a/magnitude/local/cmd.go b/runner/local/cmd.go similarity index 100% rename from magnitude/local/cmd.go rename to runner/local/cmd.go diff --git a/magnitude/local/config.go b/runner/local/config.go similarity index 100% rename from magnitude/local/config.go rename to runner/local/config.go diff --git a/magnitude/local/tools.go b/runner/local/tools.go similarity index 96% rename from magnitude/local/tools.go rename to runner/local/tools.go index 21c93d3..40f3496 100644 --- a/magnitude/local/tools.go +++ b/runner/local/tools.go @@ -14,7 +14,7 @@ import ( "github.com/labstack/echo/v4" "gitlab.com/ginfra/ginfra/base" "gitlab.com/ginfra/ginfra/common/config" - "gitlab.com/ginfra/wwwherd/magnitude/api/service/models" // If you use multiple version, this will have to be changed. + "gitlab.com/ginfra/wwwherd/runner/api/service/models" // If you use multiple version, this will have to be changed. "go.uber.org/zap" "net/http" ) diff --git a/magnitude/local/values.go b/runner/local/values.go similarity index 95% rename from magnitude/local/values.go rename to runner/local/values.go index 9ef3894..056b758 100644 --- a/magnitude/local/values.go +++ b/runner/local/values.go @@ -20,7 +20,7 @@ import ( // ##################################################################################################################### // # STATIC -const ServiceName = "magnitude" +const ServiceName = "runner" const TestAuth = "XXXXXXXXXXXX" // ##################################################################################################################### @@ -35,7 +35,7 @@ var ( const LLMKey = "LLM_API_KEY" const MoondreamKey = "MOONDREAM_API_KEY" -const WorkingSubdir = "magnitude" +const WorkingSubdir = "runner" const TestsSubdir = "tests" const StreamClientTargetConfig = "stream_client_target" diff --git a/magnitude/log/README.md b/runner/log/README.md similarity index 100% rename from magnitude/log/README.md rename to 
runner/log/README.md diff --git a/magnitude/main.go b/runner/main.go similarity index 92% rename from magnitude/main.go rename to runner/main.go index f3b69a3..9a61443 100644 --- a/magnitude/main.go +++ b/runner/main.go @@ -11,8 +11,8 @@ package main import ( "fmt" - "gitlab.com/ginfra/wwwherd/magnitude/local" - "gitlab.com/ginfra/wwwherd/magnitude/service" + "gitlab.com/ginfra/wwwherd/runner/local" + "gitlab.com/ginfra/wwwherd/runner/service" "go.uber.org/zap" "net/http" "os" diff --git a/magnitude/package-lock.json b/runner/package-lock.json similarity index 99% rename from magnitude/package-lock.json rename to runner/package-lock.json index 2d1a313..0127ead 100644 --- a/magnitude/package-lock.json +++ b/runner/package-lock.json @@ -1,11 +1,11 @@ { - "name": "service_magnitude", + "name": "runner", "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "service_magnitude", + "name": "runner", "version": "1.0.0", "license": "Apache-2.0", "dependencies": { diff --git a/magnitude/package.json b/runner/package.json similarity index 84% rename from magnitude/package.json rename to runner/package.json index 7eb962e..e894827 100644 --- a/magnitude/package.json +++ b/runner/package.json @@ -15,7 +15,7 @@ "pkgroll": "^2.10.0", "tsx": "^4.20.3" }, - "name": "service_magnitude", + "name": "runner", "version": "1.0.0", "main": "src/index.ts", "scripts": { @@ -25,8 +25,8 @@ "test": "magnitude-test", "test:watch": "magnitude-test --watch" }, - "keywords": ["magnitude", "agent", "wwwherd"], + "keywords": ["runner","magnitude", "agent", "wwwherd"], "author": "Ginfra Project", "license": "Apache-2.0", - "description": "WWWHerd Magnitude Agent" + "description": "WWWHerd Runner Agent" } diff --git a/magnitude/providers/message/direct_connect.go b/runner/providers/message/direct_connect.go similarity index 98% rename from magnitude/providers/message/direct_connect.go rename to runner/providers/message/direct_connect.go index 85014dd..295cd1a 100644 
--- a/magnitude/providers/message/direct_connect.go +++ b/runner/providers/message/direct_connect.go @@ -11,7 +11,7 @@ package message import ( "github.com/labstack/echo/v4" "gitlab.com/ginfra/wwwherd/common/stream" - "gitlab.com/ginfra/wwwherd/magnitude/local" + "gitlab.com/ginfra/wwwherd/runner/local" "go.uber.org/zap" "io" "net/http" diff --git a/magnitude/providers/message/provider.go b/runner/providers/message/provider.go similarity index 100% rename from magnitude/providers/message/provider.go rename to runner/providers/message/provider.go diff --git a/magnitude/providers/service_providers.go b/runner/providers/service_providers.go similarity index 95% rename from magnitude/providers/service_providers.go rename to runner/providers/service_providers.go index 8e1c5ae..02c0c56 100644 --- a/magnitude/providers/service_providers.go +++ b/runner/providers/service_providers.go @@ -11,8 +11,8 @@ import ( "github.com/labstack/echo/v4" "gitlab.com/ginfra/ginfra/base" "gitlab.com/ginfra/wwwherd/common/stream" - "gitlab.com/ginfra/wwwherd/magnitude/local" - "gitlab.com/ginfra/wwwherd/magnitude/providers/message" + "gitlab.com/ginfra/wwwherd/runner/local" + "gitlab.com/ginfra/wwwherd/runner/providers/message" "go.uber.org/zap" // # YOUR IMPORTS START HERE // >A############################################################################################################### diff --git a/magnitude/service/common/prompt.go b/runner/service/common/prompt.go similarity index 100% rename from magnitude/service/common/prompt.go rename to runner/service/common/prompt.go diff --git a/magnitude/service/common/types.go b/runner/service/common/types.go similarity index 96% rename from magnitude/service/common/types.go rename to runner/service/common/types.go index 8223394..147d471 100644 --- a/magnitude/service/common/types.go +++ b/runner/service/common/types.go @@ -10,7 +10,7 @@ package common import ( "gitlab.com/ginfra/wwwherd/common/data" "gitlab.com/ginfra/wwwherd/common/script" - 
"gitlab.com/ginfra/wwwherd/magnitude/local" + "gitlab.com/ginfra/wwwherd/runner/local" "go.uber.org/zap" ) diff --git a/magnitude/service/common/utils.go b/runner/service/common/utils.go similarity index 100% rename from magnitude/service/common/utils.go rename to runner/service/common/utils.go diff --git a/magnitude/service/private.go b/runner/service/private.go similarity index 97% rename from magnitude/service/private.go rename to runner/service/private.go index bcaea46..c69e60d 100644 --- a/magnitude/service/private.go +++ b/runner/service/private.go @@ -26,8 +26,8 @@ import ( gecho "gitlab.com/ginfra/ginfra/common/service/echo" "gitlab.com/ginfra/ginfra/common/service/gotel" "gitlab.com/ginfra/ginfra/common/service/shared" - "gitlab.com/ginfra/wwwherd/magnitude/local" - "gitlab.com/ginfra/wwwherd/magnitude/providers" + "gitlab.com/ginfra/wwwherd/runner/local" + "gitlab.com/ginfra/wwwherd/runner/providers" "go.uber.org/zap" "net" "net/http" @@ -101,7 +101,7 @@ func ginfraHTTPErrorHandler(err error, c echo.Context) { } } -func Setup(c *local.GContext) *Servicemagnitude { +func Setup(c *local.GContext) *Servicerunner { var ( pl *providers.ProvidersLoaded @@ -146,7 +146,7 @@ func Setup(c *local.GContext) *Servicemagnitude { } // Create the service context - service, errr := NewServicemagnitude(c, pl) + service, errr := NewServicerunner(c, pl) if errr != nil { local.Logger.Panic(errr.Error()) } @@ -210,7 +210,7 @@ func Setup(c *local.GContext) *Servicemagnitude { const StRetries = 60 const StInterval = 200 -func getServiceInstance(service *Servicemagnitude) *config.Config { +func getServiceInstance(service *Servicerunner) *config.Config { var ( c *config.Config @@ -236,7 +236,7 @@ func getServiceInstance(service *Servicemagnitude) *config.Config { return c } -func RunService(service *Servicemagnitude) error { +func RunService(service *Servicerunner) error { var ( err error = nil sw []*openapi3.T diff --git a/magnitude/service/service_control.go 
b/runner/service/service_control.go similarity index 87% rename from magnitude/service/service_control.go rename to runner/service/service_control.go index 2c5e2ae..e3ec9c8 100644 --- a/magnitude/service/service_control.go +++ b/runner/service/service_control.go @@ -12,8 +12,8 @@ import ( "context" "github.com/labstack/echo/v4" "gitlab.com/ginfra/ginfra/common/service/gotel" - "gitlab.com/ginfra/wwwherd/magnitude/api/service/models" - "gitlab.com/ginfra/wwwherd/magnitude/local" + "gitlab.com/ginfra/wwwherd/runner/api/service/models" + "gitlab.com/ginfra/wwwherd/runner/local" "go.opentelemetry.io/otel/trace" "go.uber.org/zap" "net/http" @@ -23,7 +23,7 @@ import ( // # YOUR IMPORTS END ) -func (gis *Servicemagnitude) GiControlDatasourceInit(ctx echo.Context, params models.GiControlDatasourceInitParams) error { +func (gis *Servicerunner) GiControlDatasourceInit(ctx echo.Context, params models.GiControlDatasourceInitParams) error { var span trace.Span o := gotel.GetOtelConfig() @@ -41,7 +41,7 @@ func (gis *Servicemagnitude) GiControlDatasourceInit(ctx echo.Context, params mo // # END DATA SOURCE INIT IMPLEMENTATION } -func (gis *Servicemagnitude) GiControlManageReset(ctx echo.Context, params models.GiControlManageResetParams) error { +func (gis *Servicerunner) GiControlManageReset(ctx echo.Context, params models.GiControlManageResetParams) error { var ( span trace.Span err error @@ -65,7 +65,7 @@ func (gis *Servicemagnitude) GiControlManageReset(ctx echo.Context, params model // >E############################################################################################################### msg := "Reset OK" - err = MagnitudeSetup(gis.Providers.Mc, gis.Providers.Ms) + err = RunnerSetup(gis.Providers.Mc, gis.Providers.Ms) if err != nil { msg = "Reset error. " + err.Error() local.Logger.Error("Reset failed. 
Service is likely unavailable until system restart (including container restart).", @@ -83,7 +83,7 @@ func (gis *Servicemagnitude) GiControlManageReset(ctx echo.Context, params model return err } -func (gis *Servicemagnitude) GiControlManageStop(ctx echo.Context, params models.GiControlManageStopParams) error { +func (gis *Servicerunner) GiControlManageStop(ctx echo.Context, params models.GiControlManageStopParams) error { var ( span trace.Span err error @@ -133,7 +133,7 @@ func (gis *Servicemagnitude) GiControlManageStop(ctx echo.Context, params models // GiGetSpecifics Get specifics for this service // (GET /specifics) -func (gis *Servicemagnitude) GiGetSpecifics(ctx echo.Context) error { +func (gis *Servicerunner) GiGetSpecifics(ctx echo.Context) error { var span trace.Span o := gotel.GetOtelConfig() if o != nil { diff --git a/magnitude/service/service_dispatch.go b/runner/service/service_dispatch.go similarity index 81% rename from magnitude/service/service_dispatch.go rename to runner/service/service_dispatch.go index a477515..c2402b9 100644 --- a/magnitude/service/service_dispatch.go +++ b/runner/service/service_dispatch.go @@ -14,8 +14,8 @@ import ( "gitlab.com/ginfra/wwwherd/common/data" "gitlab.com/ginfra/wwwherd/common/script" "gitlab.com/ginfra/wwwherd/common/stream" - "gitlab.com/ginfra/wwwherd/magnitude/local" - scommon "gitlab.com/ginfra/wwwherd/magnitude/service/common" + "gitlab.com/ginfra/wwwherd/runner/local" + scommon "gitlab.com/ginfra/wwwherd/runner/service/common" "go.uber.org/zap" "strconv" ) @@ -23,10 +23,10 @@ import ( // ##################################################################################################################### // # FUNCTIONS -func sendStatus(run *scommon.RunSpec, status data.RunStatus, data string) { +func sendStatus(run *scommon.RunSpec, status data.RunStatus, d string) { run.Status = status - if err := sclient.Send(stream.NewMsgRunStatus(run.IdInt, status, data)); err != nil { - local.Logger.Error("Failed to 
send status.", zap.Error(err), zap.String(base.LM_ID, run.Id), + if err := sclient.Send(stream.NewMsgRunStatus(run.IdInt, status, d)); err != nil { + local.Logger.Error(data.SMsgGeneralFailedSendStatus, zap.Error(err), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) } } @@ -37,7 +37,7 @@ func shipOrders(run *scommon.RunSpec, final bool) { local.Logger.Error("Failed to marshal orders.", zap.Error(err), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) } else if err = sclient.Send(stream.NewMsgRunOrders(run.IdInt, string(orderText), final)); err != nil { - local.Logger.Error("Failed to send orders.", zap.Error(err), zap.String(base.LM_ID, run.Id), + local.Logger.Error(data.SMsgGeneralFailedSendOrders, zap.Error(err), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) } } @@ -69,7 +69,7 @@ func Dispatch(run *scommon.RunSpec) error { if numerr > 0 { run.Status = data.RunStatusError run.Err = "Errors while compiling. Number of errors: " + strconv.Itoa(numerr) + "." - local.Logger.Error("Magnitude test error(s) in compile.", zap.String(base.LM_ID, run.Id), + local.Logger.Error("Compile errors", zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid), zap.Int(base.LM_NUMBER, numerr)) sendStatus(run, run.Status, run.Err) moveFromRunningToHistory(run) @@ -79,8 +79,8 @@ func Dispatch(run *scommon.RunSpec) error { // -- EXECUTION ------------------------------------------------------------------------------------------------- run.Status = data.RunStatusRunning - sendStatus(run, data.RunStatusRunning, "Running") - local.Logger.Info("Magnitude running test.", zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) + sendStatus(run, data.RunStatusRunning, data.SMsgRunnerRunning) + local.Logger.Info(data.SMsgRunnerRunning, zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) // Yeah, I know, but this is a reminder. 
run.CurrentFlowIdx = 0 diff --git a/magnitude/service/service_feature.go b/runner/service/service_feature.go similarity index 85% rename from magnitude/service/service_feature.go rename to runner/service/service_feature.go index 8c4726d..1b92e7f 100644 --- a/magnitude/service/service_feature.go +++ b/runner/service/service_feature.go @@ -43,8 +43,8 @@ import ( "gitlab.com/ginfra/ginfra/base" "gitlab.com/ginfra/wwwherd/common/data" "gitlab.com/ginfra/wwwherd/common/stream" - "gitlab.com/ginfra/wwwherd/magnitude/local" - "gitlab.com/ginfra/wwwherd/magnitude/service/common" + "gitlab.com/ginfra/wwwherd/runner/local" + "gitlab.com/ginfra/wwwherd/runner/service/common" "go.uber.org/zap" "os" "runtime" @@ -63,7 +63,7 @@ var ( // ##################################################################################################################### // # FEATURES -func dataStatusMagnitude(rs *common.RunSpec) (string, string, string, string, data.RunStatus, error) { +func dataStatusRunner(rs *common.RunSpec) (string, string, string, string, data.RunStatus, error) { if rs == nil { return "", "", "", "", data.RunStatusUnknown, base.NewGinfraError("RunSpec is nil.") } @@ -73,7 +73,7 @@ func dataStatusMagnitude(rs *common.RunSpec) (string, string, string, string, da return rs.Id, rs.Uid, rs.Name, "OK", rs.Status, nil } -func MagnitudeRun(id int, values map[string]interface{}, uid, name, target, provId, provUrl, provKey, provModel, d string) error { +func RunnerRun(id int, values map[string]interface{}, uid, name, target, provId, provUrl, provKey, provModel, d string) error { // TODO the locks might not be needed any more. 
if !MagLock.Lock(time.Minute) { return base.NewGinfraError("Run failed due to timeout.") @@ -108,14 +108,14 @@ func MagnitudeRun(id int, values map[string]interface{}, uid, name, target, prov } runQueuePut(&spec) - local.Logger.Info("Magnitude test queued.", zap.String(base.LM_ID, spec.Id), zap.String(base.LM_UID, spec.Uid)) + local.Logger.Info("Runner workflow queued.", zap.String(base.LM_ID, spec.Id), zap.String(base.LM_UID, spec.Uid)) return nil } -// MagnitudeStop Stops the test for the given id. +// RunnerStop Stops the test for the given id. // If the id doesn't exist, it will just return an error. -func MagnitudeStop(gis *Servicemagnitude, uid, id string) error { +func RunnerStop(gis *Servicerunner, uid, id string) error { if !MagLock.Lock(time.Minute) { return base.NewGinfraError("Status failed due to timeout.") } @@ -123,13 +123,13 @@ func MagnitudeStop(gis *Servicemagnitude, uid, id string) error { uidInt, err := strconv.Atoi(uid) if err != nil { - local.Logger.Error("BUG: Invalid uid format passed to MagnitudeStop", zap.String("uid", uid), zap.Error(err)) + local.Logger.Error("BUG: Invalid uid format passed to RunnerStop", zap.String("uid", uid), zap.Error(err)) return base.NewGinfraError("Invalid id format") } idInt, err := strconv.Atoi(id) if err != nil { - local.Logger.Error("Invalid id format passed to MagnitudeStop", zap.String("id", id), zap.Error(err)) + local.Logger.Error("Invalid id format passed to RunnerStop", zap.String("id", id), zap.Error(err)) return base.NewGinfraError("Invalid id format") } @@ -179,9 +179,9 @@ func MagnitudeStop(gis *Servicemagnitude, uid, id string) error { return err } -// MagnitudeStatus returns the Id, Uid, Name, Status (text narrative or error) and Status token for the run with the given id. +// RunnerStatus returns the Id, Uid, Name, Status (text narrative or error) and Status token for the run with the given id. // If the id doesn't exist, it will just return an error. 
-func MagnitudeStatus(uid, id string) (string, string, string, string, data.RunStatus, error) { +func RunnerStatus(uid, id string) (string, string, string, string, data.RunStatus, error) { if !MagLock.Lock(time.Minute) { return "", "", "", "", data.RunStatusUnknown, base.NewGinfraError("Status failed due to timeout.") } @@ -190,11 +190,11 @@ func MagnitudeStatus(uid, id string) (string, string, string, string, data.RunSt if err != nil { return "", "", "", "", data.RunStatusUnknown, err } - return dataStatusMagnitude(spec) + return dataStatusRunner(spec) } -// MagnitudeLog get the log -func MagnitudeLog(uid string, id string) (string, error) { +// RunnerLog get the log +func RunnerLog(uid string, id string) (string, error) { if !MagLock.Lock(time.Minute) { return "", base.NewGinfraError("Status failed due to timeout.") } @@ -222,8 +222,8 @@ func MagnitudeLog(uid string, id string) (string, error) { return string(fd), nil } -// MagnitudeRemove remove completed tests. -func MagnitudeRemove(uid string, ids *[]string) error { +// RunnerRemove remove completed tests. 
+func RunnerRemove(uid string, ids *[]string) error { if ids == nil || len(*ids) < 1 { return base.NewGinfraError("No ids provided.") } @@ -293,10 +293,10 @@ func runloop(client stream.StreamClient) { runningPut(rs) err = Dispatch(rs) if err == nil { - local.Logger.Info("Magnitude test dispatched.", zap.String(base.LM_ID, rs.Id), + local.Logger.Info("Runner workflow dispatched.", zap.String(base.LM_ID, rs.Id), zap.String(base.LM_UID, rs.Uid)) } else { - local.Logger.Info("Magnitude test dispatch failed.", zap.String(base.LM_ID, rs.Id), + local.Logger.Info("Runner workflow dispatch failed.", zap.String(base.LM_ID, rs.Id), zap.String(base.LM_UID, rs.Uid), zap.Error(err)) } diff --git a/magnitude/service/service_impl.go b/runner/service/service_impl.go similarity index 82% rename from magnitude/service/service_impl.go rename to runner/service/service_impl.go index f09610e..9248730 100644 --- a/magnitude/service/service_impl.go +++ b/runner/service/service_impl.go @@ -14,9 +14,9 @@ import ( "github.com/labstack/echo/v4" "gitlab.com/ginfra/ginfra/common/config" "gitlab.com/ginfra/ginfra/common/service/shared" - sv "gitlab.com/ginfra/wwwherd/magnitude/api/service/server" - "gitlab.com/ginfra/wwwherd/magnitude/local" - "gitlab.com/ginfra/wwwherd/magnitude/providers" + sv "gitlab.com/ginfra/wwwherd/runner/api/service/server" + "gitlab.com/ginfra/wwwherd/runner/local" + "gitlab.com/ginfra/wwwherd/runner/providers" "go.uber.org/zap" "os" "path/filepath" @@ -27,8 +27,8 @@ import ( "bytes" "gitlab.com/ginfra/ginfra/base" - "gitlab.com/ginfra/wwwherd/magnitude/api/service/models" - scommon "gitlab.com/ginfra/wwwherd/magnitude/service/common" + "gitlab.com/ginfra/wwwherd/runner/api/service/models" + scommon "gitlab.com/ginfra/wwwherd/runner/service/common" "go.opentelemetry.io/otel/attribute" "io" "net/http" @@ -38,7 +38,7 @@ import ( // # YOUR IMPORTS END ) -type Servicemagnitude struct { +type Servicerunner struct { Providers *providers.ProvidersLoaded ServiceContext 
*local.GContext ServiceConfig *config.Config @@ -50,8 +50,8 @@ type Servicemagnitude struct { // # ADD ADDITIONAL SERVICE DATA END HERE } -func NewServicemagnitude(gcontext *local.GContext, providers *providers.ProvidersLoaded) (*Servicemagnitude, error) { - return &Servicemagnitude{ +func NewServicerunner(gcontext *local.GContext, providers *providers.ProvidersLoaded) (*Servicerunner, error) { + return &Servicerunner{ Providers: providers, ServiceContext: gcontext, }, nil @@ -69,7 +69,7 @@ func doBundleFile(path string, cfg *config.Config, item string) { } } -func ServiceSetup(service *Servicemagnitude) error { +func ServiceSetup(service *Servicerunner) error { var err error @@ -82,7 +82,7 @@ func ServiceSetup(service *Servicemagnitude) error { local.MessageServer = "http://localhost:" + service.ServiceConfig.GetValue("port").GetValueStringNoerr() + "/message" - serr := MagnitudeSetup(service.Providers.Mc, service.Providers.Ms) + serr := RunnerSetup(service.Providers.Mc, service.Providers.Ms) if serr != nil { local.Logger.Error("Setup failed. Service unavailable until corrected and reset", zap.Error(serr)) } @@ -95,14 +95,14 @@ func ServiceSetup(service *Servicemagnitude) error { var specifics string -func getServiceSpecifics(gis *Servicemagnitude) string { +func getServiceSpecifics(gis *Servicerunner) string { if specifics == "" { specifics = shared.LoadSpecifics(gis.ServiceContext.Scfg.ServiceHome) } return specifics } -func RegisterHandlers(echoServer *echo.Echo, service *Servicemagnitude) ([]*openapi3.T, error) { +func RegisterHandlers(echoServer *echo.Echo, service *Servicerunner) ([]*openapi3.T, error) { // Always register the main service. sw, err := sv.GetSwagger() @@ -150,7 +150,7 @@ func RegisterHandlers(echoServer *echo.Echo, service *Servicemagnitude) ([]*open // # ADDITIONAL CODE GOES BELOW HERE // # Stubs can go here or in other files. 
-func (gis *Servicemagnitude) triageEntry(ctx echo.Context, auth string) (bool, error) { +func (gis *Servicerunner) triageEntry(ctx echo.Context, auth string) (bool, error) { if !setupComplete { local.Logger.Error("Service not setup. Correct and reset.") return true, local.PostErrorResponse(ctx, http.StatusInternalServerError, base.NewGinfraError("Service not setup. Correct and reset.")) @@ -165,7 +165,7 @@ func (gis *Servicemagnitude) triageEntry(ctx echo.Context, auth string) (bool, e // GiTestRun Run a test. // (POST /gitest/run) -func (gis *Servicemagnitude) GiTestRun(ctx echo.Context, params models.GiTestRunParams) (err error) { +func (gis *Servicerunner) GiTestRun(ctx echo.Context, params models.GiTestRunParams) (err error) { span := SpanStart("GiTestRun", []base.NV{{"target", params.Target}, {"uid", params.Uid}, {"name", params.Name}}) @@ -199,7 +199,7 @@ func (gis *Servicemagnitude) GiTestRun(ctx echo.Context, params models.GiTestRun } } - err = MagnitudeRun(params.Id, values, params.Uid, params.Name, params.Target, params.Provider, base.Ptr2StringOrEmpty(params.ProviderUrl), + err = RunnerRun(params.Id, values, params.Uid, params.Name, params.Target, params.Provider, base.Ptr2StringOrEmpty(params.ProviderUrl), base.Ptr2StringOrEmpty(params.ProviderToken), base.Ptr2StringOrEmpty(params.ProviderModel), pstring) if err != nil { return TriageError(ctx, err) @@ -213,7 +213,7 @@ func (gis *Servicemagnitude) GiTestRun(ctx echo.Context, params models.GiTestRun } // GiTestStop Stop a test. 
// (GET /gitest/stop) -func (gis *Servicemagnitude) GiTestStop(ctx echo.Context, params models.GiTestStopParams) (err error) { +func (gis *Servicerunner) GiTestStop(ctx echo.Context, params models.GiTestStopParams) (err error) { span := SpanStart("GiTestStop", []base.NV{{"id", params.Id}, {"uid", params.Uid}}) defer func() { SpanEnd(span, err) @@ -223,7 +223,7 @@ func (gis *Servicemagnitude) GiTestStop(ctx echo.Context, params models.GiTestSt return err } - err = MagnitudeStop(gis, params.Uid, params.Id) + err = RunnerStop(gis, params.Uid, params.Id) if err != nil { return TriageError(ctx, err) } @@ -233,7 +233,7 @@ func (gis *Servicemagnitude) GiTestStop(ctx echo.Context, params models.GiTestSt // GiTestStatusTest Get status for a test. // (GET /test/gitest/status/test) -func (gis *Servicemagnitude) GiTestStatusTest(ctx echo.Context, params models.GiTestStatusTestParams) (err error) { +func (gis *Servicerunner) GiTestStatusTest(ctx echo.Context, params models.GiTestStatusTestParams) (err error) { span := SpanStart("GiTestStatusTest", []base.NV{{"id", params.Id}, {"uid", params.Uid}}) defer func() { @@ -244,7 +244,7 @@ func (gis *Servicemagnitude) GiTestStatusTest(ctx echo.Context, params models.Gi return err } - id, _, name, status, state, err := MagnitudeStatus(params.Uid, params.Id) + id, _, name, status, state, err := RunnerStatus(params.Uid, params.Id) if err != nil { return TriageError(ctx, err) } @@ -266,7 +266,7 @@ func (gis *Servicemagnitude) GiTestStatusTest(ctx echo.Context, params models.Gi func gatherStatusSub(runs []*scommon.RunSpec) *[]models.TestStatusItem { rd := make([]models.TestStatusItem, 0) for _, item := range runs { - id, _, name, text, state, _ := dataStatusMagnitude(item) + id, _, name, text, state, _ := dataStatusRunner(item) ri := models.TestStatusItem{ Id: &id, Name: &name, @@ -283,7 +283,7 @@ func gatherStatusSub(runs []*scommon.RunSpec) *[]models.TestStatusItem { return &rd } -func (gis *Servicemagnitude) gatherGiTestStatusAll(uid 
string) (models.TestStatusInfo, error) { +func (gis *Servicerunner) gatherGiTestStatusAll(uid string) (models.TestStatusInfo, error) { var r models.TestStatusInfo if !MagLock.Lock(time.Minute) { return r, base.NewGinfraError("Status failed due to timeout.") @@ -299,7 +299,7 @@ func (gis *Servicemagnitude) gatherGiTestStatusAll(uid string) (models.TestStatu // GiTestStatusAll Get status for all tests. // (GET /test/gitest/status/all) -func (gis *Servicemagnitude) GiTestStatusAll(ctx echo.Context, params models.GiTestStatusAllParams) (err error) { +func (gis *Servicerunner) GiTestStatusAll(ctx echo.Context, params models.GiTestStatusAllParams) (err error) { span := SpanStart("GiTestStatusAll", []base.NV{{"uid", params.Uid}}) defer func() { @@ -320,7 +320,7 @@ func (gis *Servicemagnitude) GiTestStatusAll(ctx echo.Context, params models.GiT // GiTestLog Get log for a test. // (GET /test/gitest/log) -func (gis *Servicemagnitude) GiTestLog(ctx echo.Context, params models.GiTestLogParams) (err error) { +func (gis *Servicerunner) GiTestLog(ctx echo.Context, params models.GiTestLogParams) (err error) { span := SpanStart("GiTestStatusAll", []base.NV{{"id", params.Id}, {"uid", params.Uid}}) defer func() { @@ -332,7 +332,7 @@ func (gis *Servicemagnitude) GiTestLog(ctx echo.Context, params models.GiTestLog } var log string - log, err = MagnitudeLog(params.Uid, params.Id) + log, err = RunnerLog(params.Uid, params.Id) if err != nil { return TriageError(ctx, err) } @@ -342,7 +342,7 @@ func (gis *Servicemagnitude) GiTestLog(ctx echo.Context, params models.GiTestLog // GiTestRemove Remove a list of completed tests. 
// (POST /gitest/remove) -func (gis *Servicemagnitude) GiTestRemove(ctx echo.Context, params models.GiTestRemoveParams) (err error) { +func (gis *Servicerunner) GiTestRemove(ctx echo.Context, params models.GiTestRemoveParams) (err error) { span := SpanStart("GiTestRemove", []base.NV{{"uid", params.Uid}}) defer func() { @@ -360,7 +360,7 @@ func (gis *Servicemagnitude) GiTestRemove(ctx echo.Context, params models.GiTest return local.PostErrorResponse(ctx, http.StatusInternalServerError, err) } - err = MagnitudeRemove(params.Uid, req.Remove) + err = RunnerRemove(params.Uid, req.Remove) if err != nil { return TriageError(ctx, err) } diff --git a/magnitude/service/service_message.go b/runner/service/service_message.go similarity index 85% rename from magnitude/service/service_message.go rename to runner/service/service_message.go index c0d33ec..090024a 100644 --- a/magnitude/service/service_message.go +++ b/runner/service/service_message.go @@ -15,8 +15,8 @@ import ( fscript "gitlab.com/ginfra/wwwherd/common/script/flow" rscript "gitlab.com/ginfra/wwwherd/common/script/raw" "gitlab.com/ginfra/wwwherd/common/stream" - "gitlab.com/ginfra/wwwherd/magnitude/local" - scommon "gitlab.com/ginfra/wwwherd/magnitude/service/common" + "gitlab.com/ginfra/wwwherd/runner/local" + scommon "gitlab.com/ginfra/wwwherd/runner/service/common" "go.uber.org/zap" "runtime" "strconv" @@ -185,7 +185,7 @@ func processLog(run *scommon.RunSpec, log string) { b64content := data.EncodeToBase64([]byte(jcontent)) if err := sclient.Send(stream.NewMsgRunLog(run.UidInt, run.IdInt, b64content)); err != nil { - local.Logger.Error("Failed to send run log.", zap.Error(err), zap.String(base.LM_ID, run.Id), + local.Logger.Error(data.SMsgRunnerFailedSendRunLog, zap.Error(err), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) } } @@ -194,7 +194,7 @@ func processLog(run *scommon.RunSpec, log string) { // # FEATURES func lostProcess(uiid string) *stream.RawMessage { - local.Logger.Error("Lost 
process has occurred.", zap.String(base.LM_UID, uiid)) + local.Logger.Error(data.SMsgRunnerLostProcess, zap.String(base.LM_UID, uiid)) return wrapRawMessage(stream.NewMsgFault("Ghosted")) } @@ -253,14 +253,14 @@ func findPrompt(flow data.OrderFlowItem) string { func sendCurrentFlow(run *scommon.RunSpec, id int) { if err := sclient.Send(stream.NewMsgRunCurrentFlow(run.IdInt, id)); err != nil { - local.Logger.Error("Failed to send current flow message.", zap.Error(err), zap.String(base.LM_ID, run.Id), + local.Logger.Error(data.SMsgRunnerFailedSendCurrentFlow, zap.Error(err), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) } } func sendCurrentCase(run *scommon.RunSpec, id int) { if err := sclient.Send(stream.NewMsgRunCurrentCase(run.IdInt, id)); err != nil { - local.Logger.Error("Failed to send current case message.", zap.Error(err), zap.String(base.LM_ID, run.Id), + local.Logger.Error(data.SMsgRunnerFailedSendCurrentCase, zap.Error(err), zap.String(base.LM_ID, run.Id), zap.String(base.LM_UID, run.Uid)) } } @@ -285,7 +285,7 @@ func doMsgDone(run *scommon.RunSpec) (*stream.RawMessage, error) { } run.CurrentState = scommon.RunStateDone run.Status = data.RunStatusDone - sendStatus(run, data.RunStatusDone, "Done, pending results.") + sendStatus(run, data.RunStatusDone, data.SMsgRunnerDonePendingResults) return &stream.RawMessage{ MType: stream.MsgTypeDone, Data: stream.NewMsgDone(getKey(run.Uid, run.Id)), @@ -297,9 +297,10 @@ func doMsgDoneError(run *scommon.RunSpec, msg string, status data.RunStatus) (*s run.Status = status run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Costs = formatCosts() // TODO I think this is ok here, but keep an eye out for odd costs when errors happen. 
run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Error = msg - sendStatus(run, status, "Failing due to processing error.") + logCase(run) + sendStatus(run, status, "Failing due to error.") skipRemaining(run, status) - local.Logger.Error("Done on error in script. Terminating.", zap.String(base.LM_CAUSE, msg)) + local.Logger.Error(data.SMsgRunnerTerminatingScriptError, zap.String(base.LM_CAUSE, msg)) return &stream.RawMessage{ MType: stream.MsgTypeDone, Data: stream.NewMsgDone(getKey(run.Uid, run.Id)), @@ -315,9 +316,9 @@ func doMsgFlow(run *scommon.RunSpec) (*stream.RawMessage, error) { run.CurrentState = scommon.RunStateOrderFlow sendCurrentFlow(run, run.Order.Flows[run.CurrentFlowIdx].Id) startFlowTime = time.Now() + local.Logger.Info(data.SMsgRunnerStartingFlow, zap.String(base.LM_UID, run.Uid), zap.String(base.LM_ID, run.Id), zap.Int("flow", run.CurrentFlowIdx)) return &stream.RawMessage{ MType: stream.MsgTypeOrderFlow, - // (token, name, prompt string, flowId int) Data: stream.NewMsgOrderFlow(getKey(run.Uid, run.Id), run.Order.Flows[run.CurrentFlowIdx].Name, findPrompt(run.Order.Flows[run.CurrentFlowIdx]), run.Order.Flows[run.CurrentFlowIdx].Id), }, nil @@ -327,7 +328,8 @@ func doMsgFlow(run *scommon.RunSpec) (*stream.RawMessage, error) { } run.CurrentState = scommon.RunStateDone - sendStatus(run, data.RunStatusDone, "Done, pending results.") + sendStatus(run, data.RunStatusDone, data.SMsgRunnerDonePendingResults) + local.Logger.Info(data.SMsgRunnerDonePendingResults, zap.String(base.LM_UID, run.Uid), zap.String(base.LM_ID, run.Id), zap.Int("flow", run.CurrentFlowIdx), zap.Int("case", run.CurrentCaseIdx)) sendCurrentFlow(run, 0) sendCurrentCase(run, 0) return &stream.RawMessage{ @@ -337,7 +339,12 @@ func doMsgFlow(run *scommon.RunSpec) (*stream.RawMessage, error) { } func nextFlow(run *scommon.RunSpec) (*stream.RawMessage, error) { - + local.Logger.Info("Flow complete", + zap.Int(base.LM_RESULT, int(run.Order.Flows[run.CurrentFlowIdx].Result)), + 
zap.Int(base.LM_ID, run.Order.Flows[run.CurrentFlowIdx].Id), + zap.Int(base.LM_ID_RUN, run.IdInt), + zap.String(base.LM_UID, run.Uid), + ) run.CurrentFlowIdx++ if run.CurrentFlowIdx >= len(run.Order.Flows) { run.CurrentFlowIdx = 0 @@ -353,6 +360,7 @@ func doMsgCase(run *scommon.RunSpec) (*stream.RawMessage, error) { run.CurrentState = scommon.RunStateOrderDirective sendCurrentCase(run, run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Id) startCaseTime = time.Now() + local.Logger.Info(data.SMsgRunnerStartingCase, zap.String(base.LM_UID, run.Uid), zap.String(base.LM_ID, run.Id), zap.Int("flow", run.CurrentFlowIdx), zap.Int("case", run.CurrentCaseIdx)) return &stream.RawMessage{ MType: stream.MsgTypeOrderCase, Data: stream.NewMsgOrderCase(getKey(run.Uid, run.Id), run.Order.Flows[run.CurrentFlowIdx].Name, @@ -360,7 +368,25 @@ func doMsgCase(run *scommon.RunSpec) (*stream.RawMessage, error) { }, nil } +func logCase(run *scommon.RunSpec) { + local.Logger.Info("Case complete", + zap.Int(base.LM_RESULT, int(run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Result)), + zap.Int(base.LM_ID, run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Id), + zap.Int(base.LM_ID_RUN, run.IdInt), + zap.Int("id.flow", run.Order.Flows[run.CurrentFlowIdx].Id), + zap.String(base.LM_UID, run.Uid), + zap.Float64(data.SMsgCostsInputCost, latestInputCost), + zap.Float64(data.SMsgCostsOutputCost, latestOutputCost), + zap.Int(data.SMsgCostsInputTokens, latestInputTokens), + zap.Int(data.SMsgCostsOutputTokens, latestOutputTokens), + zap.Int(data.SMsgCostsCacheWriteInputTokens, latestCacheWriteInputTokens), + zap.Int(data.SMsgCostsCacheReadInputTokens, latestCacheReadInputTokens), + zap.Duration(data.SMsgCostsCaseExecutionTime, time.Since(startCaseTime)), + ) +} + func nextCase(run *scommon.RunSpec) (*stream.RawMessage, error) { + logCase(run) run.Order.Flows[run.CurrentFlowIdx].Cases[run.CurrentCaseIdx].Costs = formatCosts() shipOrders(run, false) 
run.CurrentCaseIdx++ @@ -422,6 +448,7 @@ func msgReady(msg *stream.MsgReady) (*stream.RawMessage, error) { } else { run.Status = data.RunStatusRunning sendStatus(run, data.RunStatusRunning, "Running.") + local.Logger.Info("Starting run.", zap.String(base.LM_UID, run.Uid), zap.String(base.LM_ID, run.Id)) run.CurrentState = scommon.RunStateOrderFlow return doMsgFlow(run) } @@ -456,7 +483,7 @@ func msgResult(msg *stream.MsgResult) (*stream.RawMessage, error) { } if err = parseCosts(msg.Costs); err != nil { - local.Logger.Error("Failed to parse costs from directive result.", zap.Error(err)) + local.Logger.Error(data.SMsgRunnerBadDirectiveResult, zap.Error(err)) } // inputCost:0.00183925 outputCost:0.0001905 inputTokens:7357 outputTokens:254 cacheWriteInputTokens:0 cacheReadInputTokens:0 @@ -519,12 +546,12 @@ func msgFault(msg *stream.MsgFault) (*stream.RawMessage, error) { if err != nil { return lostProcess(msg.Token), nil } - local.Logger.Error("Runner fault.", zap.String(base.LM_UID, run.Uid), zap.String(base.LM_ID, run.Id), zap.Error(err)) + local.Logger.Error(data.SMsgRunnerFault, zap.String(base.LM_UID, run.Uid), zap.String(base.LM_ID, run.Id), zap.Error(err)) return doMsgDoneError(run, "Done. Failed due to runner fault.", data.RunStatusError) } func MsgRecipient(message *stream.RawMessage) (*stream.RawMessage, error) { - local.Logger.Info("MsgRecipient") + local.Logger.Info(data.SMsgRunnerMsgRx, zap.Int(base.LM_TYPE, int(message.MType))) var ( r *stream.RawMessage @@ -539,7 +566,7 @@ func MsgRecipient(message *stream.RawMessage) (*stream.RawMessage, error) { // TODO do we need to fault the client? 
// Log the panic - local.Logger.Error("Panic in MsgRecipient", + local.Logger.Error(data.SMsgRunnerMsgRxPanic, zap.Any(base.LM_CAUSE, r), zap.String("stack_trace", stackTrace), ) } @@ -565,7 +592,7 @@ func MsgRecipient(message *stream.RawMessage) (*stream.RawMessage, error) { if err != nil { r = wrapRawMessage(stream.NewMsgFault("")) - local.Logger.Error("Fatal error in MsgRecipient. Returning fault.", zap.Error(err)) + local.Logger.Error(data.SMsgRunnerMsgRxFatal, zap.Error(err)) } return r, err diff --git a/magnitude/service/service_setup.go b/runner/service/service_setup.go similarity index 83% rename from magnitude/service/service_setup.go rename to runner/service/service_setup.go index b9ecd61..f09bee4 100644 --- a/magnitude/service/service_setup.go +++ b/runner/service/service_setup.go @@ -11,8 +11,8 @@ package service import ( "gitlab.com/ginfra/ginfra/base" "gitlab.com/ginfra/wwwherd/common/stream" - "gitlab.com/ginfra/wwwherd/magnitude/local" - "gitlab.com/ginfra/wwwherd/magnitude/providers/message" + "gitlab.com/ginfra/wwwherd/runner/local" + "gitlab.com/ginfra/wwwherd/runner/providers/message" "os" "path/filepath" "time" @@ -21,7 +21,7 @@ import ( // ##################################################################################################################### // # SETUP -func MagnitudeSetup(client stream.StreamClient, server message.MessageProvider) error { +func RunnerSetup(client stream.StreamClient, server message.MessageProvider) error { if !MagLock.Lock(time.Minute * 2) { return base.NewGinfraError("Setup failed due to deadlock.") } @@ -59,9 +59,9 @@ func MagnitudeSetup(client stream.StreamClient, server message.MessageProvider) // -- Setup complete. Run it. 
--------------------------------------------------------------------- setupComplete = true - local.Logger.Info("Magnitude setup complete.") + local.Logger.Info("Runner setup complete.") if !loopRunning { - local.Logger.Info("Magnitude setup starting run loop.") + local.Logger.Info("Runner setup starting run loop.") loopRunning = true go runloop(client) } diff --git a/magnitude/service/service_store.go b/runner/service/service_store.go similarity index 94% rename from magnitude/service/service_store.go rename to runner/service/service_store.go index e69d6fc..7eeb51b 100644 --- a/magnitude/service/service_store.go +++ b/runner/service/service_store.go @@ -10,7 +10,10 @@ package service import ( "gitlab.com/ginfra/ginfra/base" - scommon "gitlab.com/ginfra/wwwherd/magnitude/service/common" + "gitlab.com/ginfra/wwwherd/runner/local" + scommon "gitlab.com/ginfra/wwwherd/runner/service/common" + "go.uber.org/zap" + "os" "strings" "sync" ) @@ -176,6 +179,15 @@ func moveFromRunningToHistory(run *scommon.RunSpec) { // Remove from running delete(running, getKey(run.Uid, run.Id)) + + // Handle any file store. Right now we just delete it all. 
+ if run.Path != "" { + err := os.RemoveAll(run.Path) + if err != nil { + local.Logger.Error("failed to delete directory: %s, error: %v", zap.Error(err)) + } + } + } func findRunSpecUiid(token string) (*scommon.RunSpec, error) { diff --git a/magnitude/service/service_test.go b/runner/service/service_test.go similarity index 100% rename from magnitude/service/service_test.go rename to runner/service/service_test.go diff --git a/magnitude/service/service_values.go b/runner/service/service_values.go similarity index 99% rename from magnitude/service/service_values.go rename to runner/service/service_values.go index cfab766..04e3b59 100644 --- a/magnitude/service/service_values.go +++ b/runner/service/service_values.go @@ -17,7 +17,7 @@ import ( "encoding/json" "fmt" "gitlab.com/ginfra/ginfra/base" - "gitlab.com/ginfra/wwwherd/magnitude/service/common" + "gitlab.com/ginfra/wwwherd/runner/service/common" "io" "os" "path/filepath" diff --git a/magnitude/service/util.go b/runner/service/util.go similarity index 93% rename from magnitude/service/util.go rename to runner/service/util.go index dbf9662..d4d219c 100644 --- a/magnitude/service/util.go +++ b/runner/service/util.go @@ -13,7 +13,7 @@ import ( "gitlab.com/ginfra/ginfra/base" "gitlab.com/ginfra/ginfra/common/config" "gitlab.com/ginfra/ginfra/common/service/gotel" - "gitlab.com/ginfra/wwwherd/magnitude/local" + "gitlab.com/ginfra/wwwherd/runner/local" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" "go.uber.org/zap" @@ -24,8 +24,8 @@ import ( // InitService goes through the service initialization process. It will call initfn if it is the first service to do so. // It will call loadfn is not the first service. The idea is the first call can set everything up and the second can // just load the configuration information. 
-func InitService(service *Servicemagnitude, initfn func(service *Servicemagnitude) error, - loadfn func(service *Servicemagnitude) error) error { +func InitService(service *Servicerunner, initfn func(service *Servicerunner) error, + loadfn func(service *Servicerunner) error) error { const ( POLL_DELAY = 350 diff --git a/magnitude/specifics.json b/runner/specifics.json similarity index 79% rename from magnitude/specifics.json rename to runner/specifics.json index 50105e3..e6cd363 100644 --- a/magnitude/specifics.json +++ b/runner/specifics.json @@ -1,10 +1,10 @@ { "meta": { "version": 1, - "name": "magnitude", + "name": "runner", "type": "nodejs", - "module_name": "gitlab.com/ginfra/wwwherd/magnitude", - "serviceclass": "magnitude", + "module_name": "gitlab.com/ginfra/wwwherd/runner", + "serviceclass": "runner", "invocationcmd": "" }, "exposed": { diff --git a/magnitude/src/index.js b/runner/src/index.js similarity index 100% rename from magnitude/src/index.js rename to runner/src/index.js diff --git a/magnitude/src/index.ts b/runner/src/index.ts similarity index 100% rename from magnitude/src/index.ts rename to runner/src/index.ts diff --git a/magnitude/src/run.sh b/runner/src/run.sh similarity index 100% rename from magnitude/src/run.sh rename to runner/src/run.sh diff --git a/magnitude/start_local.cmd b/runner/start_local.cmd similarity index 100% rename from magnitude/start_local.cmd rename to runner/start_local.cmd diff --git a/magnitude/start_local.sh b/runner/start_local.sh similarity index 100% rename from magnitude/start_local.sh rename to runner/start_local.sh diff --git a/magnitude/taskfile.yaml b/runner/taskfile.yaml similarity index 100% rename from magnitude/taskfile.yaml rename to runner/taskfile.yaml diff --git a/magnitude/taskfilecustom.yaml b/runner/taskfilecustom.yaml similarity index 66% rename from magnitude/taskfilecustom.yaml rename to runner/taskfilecustom.yaml index cdeccf8..717b3bc 100644 --- a/magnitude/taskfilecustom.yaml +++ 
b/runner/taskfilecustom.yaml @@ -4,7 +4,7 @@ tasks: stubs: cmds: - - ginfra ginterface stubs openapi magnitude service service_test.yaml + - ginfra ginterface stubs openapi runner service service_test.yaml prime_local: run: once diff --git a/magnitude/test/README.md b/runner/test/README.md similarity index 100% rename from magnitude/test/README.md rename to runner/test/README.md diff --git a/magnitude/test/config.json b/runner/test/config.json similarity index 87% rename from magnitude/test/config.json rename to runner/test/config.json index 35439d8..405e38f 100644 --- a/magnitude/test/config.json +++ b/runner/test/config.json @@ -7,11 +7,11 @@ "*": "localhost" }, "services": { - "magnitude/test": "http://localhost:8901/" + "runner/test": "http://localhost:8901/" } }, "services": { - "magnitude": { + "runner": { "management": { "auth": "XAWJYFFDIDRY", "instance": { @@ -27,9 +27,9 @@ "auth": "XAWJYFFDIDRY", "config": { "config_provider": "file://test/config.json", - "image": "magnitude", + "image": "runner", "port": 8901, - "service_class": "magnitude", + "service_class": "runner", "specifics": { "stream_client_target": "http://localhost:8900" } diff --git a/magnitude/test/test_config.json b/runner/test/test_config.json similarity index 97% rename from magnitude/test/test_config.json rename to runner/test/test_config.json index 7358883..23d2b47 100644 --- a/magnitude/test/test_config.json +++ b/runner/test/test_config.json @@ -146,7 +146,7 @@ }, "type": "host" }, - "magnitude": { + "runner": { "management": { "auth": "XKQNFQRBTFJG", "instance": { @@ -162,9 +162,9 @@ "auth": "XKQNFQRBTFJG", "config": { "config_provider": "http://deskgoat:5111", - "image": "ginfra/magnitude", + "image": "wwwherd/runner", "port": 8905, - "service_class": "magnitude", + "service_class": "runner", "specifics": { } }, diff --git a/magnitude/tsconfig.json b/runner/tsconfig.json similarity index 100% rename from magnitude/tsconfig.json rename to runner/tsconfig.json diff --git 
a/service/local/values.go b/service/local/values.go index d203db5..6cdbf36 100644 --- a/service/local/values.go +++ b/service/local/values.go @@ -34,7 +34,7 @@ var ( const ( ConfigPostgressAdminUrl = "postgres_url" ConfigPostgressUserUrl = "postgres_user_url" - ConfigManitudeAuth = "magnitude_auth" + ConfigRunnerAuth = "runner_auth" UserTokenLength = 12 diff --git a/service/package-lock.json b/service/package-lock.json index 9444e5c..ce94b89 100644 --- a/service/package-lock.json +++ b/service/package-lock.json @@ -10,6 +10,7 @@ "dependencies": { "bcrypt": "^6.0.0", "cors": "^2.8.5", + "echarts": "^5.6.0", "express": "^5.1.0", "jsonwebtoken": "^9.0.2", "node-fetch": "^3.3.2", @@ -17,6 +18,7 @@ "pg": "^8.16.3", "pinia": "^3.0.3", "vue": "^3.5.17", + "vue-echarts": "^7.0.3", "vue-router": "4.5.1", "vuedraggable": "^4.1.0", "vuetify": "^3.9.0", @@ -752,6 +754,22 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/echarts": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/echarts/-/echarts-5.6.0.tgz", + "integrity": "sha512-oTbVTsXfKuEhxftHqL5xprgLoc0k7uScAwtryCgWF6hPYFLRwOUHiFmHGCBKP5NPFNkDVopOieyUqYGH8Fa3kA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "2.3.0", + "zrender": "5.6.1" + } + }, + "node_modules/echarts/node_modules/tslib": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", + "integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==", + "license": "0BSD" + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -2540,6 +2558,51 @@ } } }, + "node_modules/vue-demi": { + "version": "0.13.11", + "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.13.11.tgz", + "integrity": "sha512-IR8HoEEGM65YY3ZJYAjMlKygDQn25D5ajNFNoKh9RSDMQtlzCxtfQjdQgv9jjK+m3377SsJXY8ysq8kLCZL25A==", + "hasInstallScript": true, + "license": "MIT", + "bin": { + "vue-demi-fix": 
"bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/vue-echarts": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/vue-echarts/-/vue-echarts-7.0.3.tgz", + "integrity": "sha512-/jSxNwOsw5+dYAUcwSfkLwKPuzTQ0Cepz1LxCOpj2QcHrrmUa/Ql0eQqMmc1rTPQVrh2JQ29n2dhq75ZcHvRDw==", + "license": "MIT", + "dependencies": { + "vue-demi": "^0.13.11" + }, + "peerDependencies": { + "@vue/runtime-core": "^3.0.0", + "echarts": "^5.5.1", + "vue": "^2.7.0 || ^3.1.1" + }, + "peerDependenciesMeta": { + "@vue/runtime-core": { + "optional": true + } + } + }, "node_modules/vue-router": { "version": "4.5.1", "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.5.1.tgz", @@ -2673,6 +2736,21 @@ "engines": { "node": ">=0.4" } + }, + "node_modules/zrender": { + "version": "5.6.1", + "resolved": "https://registry.npmjs.org/zrender/-/zrender-5.6.1.tgz", + "integrity": "sha512-OFXkDJKcrlx5su2XbzJvj/34Q3m6PvyCZkVPHGYpcCJ52ek4U/ymZyfuV1nKE23AyBJ51E/6Yr0mhZ7xGTO4ag==", + "license": "BSD-3-Clause", + "dependencies": { + "tslib": "2.3.0" + } + }, + "node_modules/zrender/node_modules/tslib": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", + "integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==", + "license": "0BSD" } } } diff --git a/service/package.json b/service/package.json index 6fc41c8..c6acc32 100644 --- a/service/package.json +++ b/service/package.json @@ -12,6 +12,7 @@ "dependencies": { "bcrypt": "^6.0.0", "cors": "^2.8.5", + "echarts": "^5.6.0", "express": "^5.1.0", "jsonwebtoken": "^9.0.2", "node-fetch": "^3.3.2", @@ -19,6 +20,7 @@ "pg": "^8.16.3", 
"pinia": "^3.0.3", "vue": "^3.5.17", + "vue-echarts": "^7.0.3", "vue-router": "4.5.1", "vuedraggable": "^4.1.0", "vuetify": "^3.9.0", diff --git a/service/providers/psql/schema/full.sql b/service/providers/psql/schema/full.sql index 3be9679..7f8843b 100644 --- a/service/providers/psql/schema/full.sql +++ b/service/providers/psql/schema/full.sql @@ -463,6 +463,8 @@ CREATE TABLE case_stats cost_cwit INT DEFAULT 0, -- cacheWriteInputTokens cost_crit INT DEFAULT 0, -- cacheReadInputTokens + entry_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT fkcase_stat_org FOREIGN KEY (org_id) REFERENCES organizations (id) ); @@ -486,6 +488,8 @@ CREATE TABLE flow_stats cost_cwit INT DEFAULT 0, -- cacheWriteInputTokens cost_crit INT DEFAULT 0, -- cacheReadInputTokens + entry_timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT fkflow_stat_org FOREIGN KEY (org_id) REFERENCES organizations (id) ); diff --git a/service/server/proxy.js b/service/server/proxy.js index 4e4e3a5..10094a3 100644 --- a/service/server/proxy.js +++ b/service/server/proxy.js @@ -270,6 +270,10 @@ async function insertCaseStats(orgId, data) { return await stats.insertCaseStats(orgId, data); } +async function getCaseStats(caseId, orgId) { + return await stats.getCaseStats(caseId, orgId); +} + // -- Other ----------------------------------------------------------------------------------------------------------- async function closeConnection() { @@ -331,6 +335,7 @@ export default { changeUserPassword, insertFlowStats, insertCaseStats, + getCaseStats, runReport, addToken, getTokensForOrg, diff --git a/service/server/stats.js b/service/server/stats.js index e6f77f8..46d1fe6 100644 --- a/service/server/stats.js +++ b/service/server/stats.js @@ -337,7 +337,6 @@ async function insertFlowStats(orgId, data) { `; const result = await client.query(query, [JSON.stringify(dataWithOrgId)]); - logger.info(`Successfully inserted ${result.rowCount} flow stats records`); return { success: true, 
@@ -350,9 +349,31 @@ async function insertFlowStats(orgId, data) { } } +async function getCaseStats(caseId, orgId) { + const client = getClient(); + + try { + + const query = ` + SELECT * FROM case_stats + WHERE case_id = $1 AND org_id = $2 + ORDER BY entry_timestamp ASC + `; + + const result = await client.query(query, [caseId, orgId]); + return result.rows; + + } catch (error) { + logger.error(`Error getting case stats for case ${caseId}, org ${orgId}:`, error); + throw error; + } +} + + export default { insertCaseStats, insertFlowStats, - runReport + runReport, + getCaseStats }; \ No newline at end of file diff --git a/service/server/stats_rest.js b/service/server/stats_rest.js index 0b00c0a..b4d52cf 100644 --- a/service/server/stats_rest.js +++ b/service/server/stats_rest.js @@ -44,5 +44,18 @@ export async function addStats(app) { res.status(500).json({error: error.message}); } }); - + + app.get('/api/stats/case/:id', authenticateToken, async (req, res) => { + try { + const caseId = parseInt(req.params.id) + const result = await proxy.getCaseStats(caseId, 1); + if (!result) { + return res.status(404).json({error: 'Case not found'}); + } + res.json(result); + } catch (error) { + res.status(500).json({error: error.message}); + } + }); + } \ No newline at end of file diff --git a/service/service/dispatcher_runs.go b/service/service/dispatcher_runs.go index 2211800..b886fac 100644 --- a/service/service/dispatcher_runs.go +++ b/service/service/dispatcher_runs.go @@ -57,20 +57,20 @@ func (d *Dispatcher) discoverAndGetAuth() (string, string, error) { // TODO Do we want to cache these? // We need the service surl, err := d.gis.ServiceContext.ConfigProvider.DiscoverService(d.gis.ServiceContext.Scfg.DeploymentName, - "magnitude/test") + "runner/test") if err != nil || surl == "" { - local.Logger.Warn("Could not discover magnitude service. Stalling.", zap.Error(err)) + local.Logger.Warn("Could not discover runner service. 
Stalling.", zap.Error(err)) time.Sleep(local.DispatchStallTime * time.Millisecond) - return "", "", base.NewGinfraError("Could not discover magnitude service.") + return "", "", base.NewGinfraError("Could not discover runner service.") } // And the service auth var mauth string scfg := local.GetConfiguredServiceSpecifics(d.gis.ServiceContext) if scfg.Err == nil { - if mauth, err = scfg.GetValue(local.ConfigManitudeAuth).GetValueString(); err != nil { - local.Logger.Warn("Could not get magnitude service authorization token. Stalling.", zap.Error(err)) + if mauth, err = scfg.GetValue(local.ConfigRunnerAuth).GetValueString(); err != nil { + local.Logger.Warn("Could not get runner service authorization token. Stalling.", zap.Error(err)) time.Sleep(local.DispatchStallTime * time.Millisecond) - return "", "", base.NewGinfraError("Could not get magnitude service authorization token") + return "", "", base.NewGinfraError("Could not get runner service authorization token") } } return surl, mauth, nil @@ -187,7 +187,7 @@ func (d *Dispatcher) dispatchNew(runs []local.DbRun) { func (d *Dispatcher) dispatchCancel(runs []local.DbRun) { surl, mauth, err := d.discoverAndGetAuth() if err != nil { - local.Logger.Warn("Could not discover magnitude service. Ignoring cancellations.", zap.Error(err)) + local.Logger.Warn("Could not discover runner service. 
Ignoring cancellations.", zap.Error(err)) return } diff --git a/service/src/components/ChartCosts.vue b/service/src/components/ChartCosts.vue new file mode 100644 index 0000000..e0ef492 --- /dev/null +++ b/service/src/components/ChartCosts.vue @@ -0,0 +1,376 @@ + + + + + diff --git a/service/src/components/nav.ts b/service/src/components/nav.ts index d03a903..646f197 100644 --- a/service/src/components/nav.ts +++ b/service/src/components/nav.ts @@ -38,3 +38,7 @@ export const navigateToRun = () => { export const navigateToSettings = () => { router.push('/settings') } + +export const navigateToCaseData = () => { + router.push('/casedata') +} diff --git a/service/src/router.ts b/service/src/router.ts index d95e245..821d5a1 100644 --- a/service/src/router.ts +++ b/service/src/router.ts @@ -37,6 +37,12 @@ const router = createRouter({ name: 'admin', component: () => import('./routes/admin/AdminView.vue'), meta: { requiresAuth: true } + }, + { + path: '/casedata', + name: 'casecada', + component: () => import('./routes/casedata/CaseData.vue'), + meta: { requiresAuth: true } } ] diff --git a/service/src/routes/casedata/CaseData.vue b/service/src/routes/casedata/CaseData.vue new file mode 100644 index 0000000..a7e9538 --- /dev/null +++ b/service/src/routes/casedata/CaseData.vue @@ -0,0 +1,206 @@ + + + + + + diff --git a/service/src/routes/main/MainView.vue b/service/src/routes/main/MainView.vue index 36e8737..3a80a4f 100644 --- a/service/src/routes/main/MainView.vue +++ b/service/src/routes/main/MainView.vue @@ -54,11 +54,9 @@ -
-
@@ -110,6 +108,13 @@ + + +