This commit is contained in:
Morten Olsen
2024-01-11 09:03:14 +01:00
commit 36f63662ad
82 changed files with 7071 additions and 0 deletions

5
.dockerignore Normal file
View File

@@ -0,0 +1,5 @@
/node_modules/
/packages/*/node_modules/
/packages/*/dist/
/out/
.turbo/

16
.eslintrc Normal file
View File

@@ -0,0 +1,16 @@
{
"extends": "@react-native-community",
"rules": {
"react/react-in-jsx-scope": 0,
"prettier/prettier": [
"error",
{
"singleQuote": true
}
]
},
"ignorePatterns": [
"node_modules/",
"dist/"
]
}

53
.github/release-drafter-config.yml vendored Normal file
View File

@@ -0,0 +1,53 @@
name-template: '$RESOLVED_VERSION 🚀'
tag-template: '$RESOLVED_VERSION'
categories:
- title: '🚀 Features'
labels:
- 'feature'
- 'enhancement'
- title: '🐛 Bug Fixes'
labels:
- 'fix'
- 'bugfix'
- 'bug'
- title: '🧰 Maintenance'
label: 'chore'
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
version-resolver:
major:
labels:
- 'major'
minor:
labels:
- 'enhancement'
- 'feature'
patch:
labels:
- 'bug'
- 'chore'
- 'fix'
- 'bugfix'
default: patch
autolabeler:
- label: 'chore'
files:
- '*.md'
branch:
- '/docs{0,1}\/.+/'
- label: 'bug'
title:
- '/fix/i'
body:
- '/fix/i'
- label: 'enhancement'
title:
- '/feature/i'
- '/feat/i'
body:
- '/feature/i'
- '/feat/i'
template: |
## Changes
$CHANGES

21
.github/workflows/auto-label.yml vendored Normal file
View File

@@ -0,0 +1,21 @@
name: Auto Labeler
on:
pull_request:
types: [opened, reopened, synchronize]
permissions:
contents: read
jobs:
auto-labeler:
permissions:
contents: write
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: release-drafter/release-drafter@v5
with:
config-name: release-drafter-config.yml
disable-releaser: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

123
.github/workflows/release.yml vendored Normal file
View File

@@ -0,0 +1,123 @@
name: Node.js Package
on:
push:
pull_request:
types: [opened]
# release:
# types: [created]
env:
  NODE_CACHE: 'pnpm'
  NODE_VERSION: '20.x'
  # Define the registry URL consumed by setup-node below; it was previously
  # referenced but never defined, leaving registry-url empty.
  NODE_REGISTRY_URL: 'https://registry.npmjs.org'
  # Scope matches the actual package names in this repo (@morten-olsen/...),
  # not '@bob-the-algorithm'.
  NODE_SCOPE: '@morten-olsen'
  NPM_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
permissions:
contents: read
packages: read
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: corepack enable
- uses: actions/setup-node@v3
with:
cache: '${{ env.NODE_CACHE }}'
node-version: '${{ env.NODE_VERSION }}'
registry-url: '${{ env.NODE_REGISTRY_URL }}'
scope: '${{ env.NODE_SCOPE }}'
- run: pnpm install
- run: pnpm run test
- run: pnpm run build
- uses: actions/upload-artifact@v3
with:
name: lib
retention-days: 5
path: |
packages/*/dist
packages/*/package.json
package.json
README.md
update-release-draft:
if: github.ref == 'refs/heads/main'
needs: build
permissions:
contents: write
pull-requests: write
packages: write
runs-on: ubuntu-latest
steps:
- uses: release-drafter/release-drafter@v5
with:
config-name: release-drafter-config.yml
publish: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
release-docker:
if: github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
needs: [build, update-release-draft]
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Log in to the Container registry
uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push Docker image
uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
# release-npm:
# if: github.ref == 'refs/heads/main'
# runs-on: ubuntu-latest
# needs: [build, update-release-draft]
# permissions:
# contents: read
# packages: write
# steps:
# - uses: actions/checkout@v3
# with:
# fetch-depth: 0
# - run: corepack enable
# - uses: actions/setup-node@v3
# with:
# cache: '${{ env.NODE_CACHE }}'
# node-version: '${{ env.NODE_VERSION }}'
# scope: '${{ env.NODE_SCOPE }}'
# - uses: actions/download-artifact@v3
# with:
# name: lib
# path: ./
# - run: |
# pnpm config set //registry.npmjs.org/:_authToken ${NPM_TOKEN}
# pnpm install
# git config user.name "Github Actions Bot"
# git config user.email "<>"
# node scripts/set-version.ts $(git describe --tag --abbrev=0)
# pnpm publish -r --publish-branch main --access public --no-git-checks
# env:
# NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

4
.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
/node_modules/
.turbo/
/.pnpm-store/
/out/

1
.npmrc Normal file
View File

@@ -0,0 +1 @@
store-dir=.pnpm-store

18
.prettierrc Normal file
View File

@@ -0,0 +1,18 @@
{
"arrowParens": "always",
"bracketSpacing": true,
"htmlWhitespaceSensitivity": "css",
"insertPragma": false,
"bracketSameLine": false,
"jsxSingleQuote": false,
"printWidth": 120,
"proseWrap": "preserve",
"quoteProps": "as-needed",
"requirePragma": false,
"semi": true,
"singleQuote": true,
"tabWidth": 2,
"trailingComma": "all",
"useTabs": false,
"singleAttributePerLine": false
}

9
docker-compose.yaml Normal file
View File

@@ -0,0 +1,9 @@
version: '3'
services:
server:
image: 'ghcr.io/morten-olsen/mini-loader:latest'
build:
context: .
dockerfile: ./docker/Dockerfile
ports:
- 4500:4500

42
docker/Dockerfile Normal file
View File

@@ -0,0 +1,42 @@
# Multi-stage build: prune the monorepo down to the server package, install
# and build only that subset, then assemble a slim runtime image.
FROM node:18-alpine AS base
# Enable pnpm via corepack (version pinned by "packageManager" in package.json).
RUN corepack enable

FROM base AS builder
RUN apk add --no-cache libc6-compat
RUN apk update
RUN npm install -g turbo
# Set working directory
WORKDIR /app
COPY . .
# Produces /app/out containing only the files the server package needs.
RUN turbo prune @morten-olsen/mini-loader-server --docker

# Add lockfile and package.json's of isolated subworkspace
FROM base AS installer
RUN apk add --no-cache libc6-compat
RUN apk update
WORKDIR /app

# First install the dependencies (as they change less often)
COPY .gitignore .gitignore
COPY --from=builder /app/out/json/ .
RUN pnpm install

# Build the project
COPY --from=builder /app/out/full/ .
RUN pnpm turbo run build --filter=@morten-olsen/mini-loader-server

FROM base AS runner
WORKDIR /app

# Don't run production as root
# Expose the server entry script on PATH for the default CMD below.
RUN \
 ln -s /app/packages/server/bin/index.mjs /usr/local/bin/mini-loader-server
COPY ./docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
COPY --from=installer /app .

EXPOSE 4500
# entrypoint.sh creates an unprivileged user at runtime and re-runs CMD as it.
ENTRYPOINT ["/entrypoint.sh"]
CMD ["mini-loader-server"]

12
docker/entrypoint.sh Normal file
View File

@@ -0,0 +1,12 @@
#!/bin/sh
# Container entrypoint: creates an unprivileged user at runtime (so UID/GID
# can be chosen via environment variables) and runs the requested command as it.

# NOTE(review): "$@" is flattened into a single string here, so arguments that
# contain spaces will be re-split by the `su ... -c` invocation below — confirm
# commands never need quoted arguments.
CMD=$@

# Target uid/gid for the runtime user; overridable from the environment.
UID=${UID:-1001}
GID=${GID:-1001}

# Create group, user and the data directory.
# NOTE(review): if the user already exists (e.g. container restart on a
# persisted layer), the && chain stops and mkdir is skipped — verify intended.
addgroup --system --gid ${GID} nodejs && \
adduser --system --uid ${UID} -G nodejs miniloader && \
mkdir -p /app/data
chown -R miniloader:nodejs /app/data

# Drop privileges and execute the original command.
su miniloader -s /bin/sh -c "$CMD"

27
package.json Normal file
View File

@@ -0,0 +1,27 @@
{
"name": "@morten-olsen/mini-loader-repo",
  "private": true,
"packageManager": "pnpm@8.10.4",
"version": "1.0.0",
"scripts": {
"build": "turbo build",
"build:dev": "tsc --build --watch",
"test:lint": "eslint ./packages/*/src",
"test": "pnpm run test:lint"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@react-native-community/eslint-config": "^3.2.0",
"eslint": "^8.53.0",
"prettier": "^2.8.8",
"turbo": "^1.10.16",
"typescript": "^5.3.3"
},
"dependencies": {
"@pnpm/find-workspace-packages": "^6.0.9",
"@types/node": "^20.10.8",
"ts-node": "^10.9.2"
}
}

3
packages/cli/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/dist/
/node_modules/
/coverage/

4
packages/cli/bin/index.mjs Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env node
import 'source-map-support/register.js';
import '../dist/esm/index.js';

41
packages/cli/package.json Normal file
View File

@@ -0,0 +1,41 @@
{
"name": "@morten-olsen/mini-loader-cli",
"version": "1.0.0",
"main": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"bin": {
"mini-loader": "./bin/index.mjs"
},
"scripts": {
"build": "tsc --build"
},
"type": "module",
"files": [
"./dist"
],
"exports": {
".": {
"import": "./dist/esm/index.js"
}
},
"dependencies": {
"@rollup/plugin-auto-install": "^3.0.5",
"@rollup/plugin-commonjs": "^25.0.7",
"@rollup/plugin-json": "^6.1.0",
"@rollup/plugin-node-resolve": "^15.2.3",
"@rollup/plugin-replace": "^5.0.5",
"@rollup/plugin-sucrase": "^5.0.2",
"@trpc/client": "^10.45.0",
"commander": "^11.1.0",
"rollup": "^4.9.4",
"rollup-plugin-node-polyfills": "^0.2.1",
"source-map-support": "^0.5.21",
"superjson": "^2.2.1"
},
"devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^",
"@morten-olsen/mini-loader-runner": "workspace:^",
"@morten-olsen/mini-loader-server": "workspace:^",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,42 @@
import { rollup } from 'rollup';
import sucrase from '@rollup/plugin-sucrase';
import { nodeResolve } from '@rollup/plugin-node-resolve';
import commonjs from '@rollup/plugin-commonjs';
import json from '@rollup/plugin-json';
import auto from '@rollup/plugin-auto-install';
import { resolve } from 'path';

// Several rollup plugins ship as CJS default exports; this cast lets us call
// them directly under NodeNext ESM interop.
const fix = <T extends { default: any }>(f: T) => f as T['default'];

type BundleOptions = {
  /** Path to the entry file; relative paths are resolved against cwd. */
  entry: string;
  /** When true, missing imports are installed on the fly. */
  autoInstall?: boolean;
};

/**
 * Bundles a TypeScript/JSX entry file into a single CommonJS source string.
 *
 * @throws when rollup produces more than one output chunk.
 */
const bundle = async ({ entry, autoInstall }: BundleOptions) => {
  const entryFile = resolve(entry);
  const codeBundler = await rollup({
    plugins: [
      fix(sucrase)({
        transforms: ['typescript', 'jsx'],
      }),
      // BUG FIX: the plugin factory must be invoked — the original spread
      // `...[autoInstall ? fix(auto) : []]` inserted the *uncalled* factory
      // function into the plugin list, which rollup rejects as an invalid
      // plugin whenever autoInstall was enabled.
      ...(autoInstall ? [fix(auto)()] : []),
      nodeResolve({ extensions: ['.js', '.jsx', '.ts', '.tsx'] }),
      fix(json)(),
      fix(commonjs)({ include: /node_modules/ }),
    ],
    input: entryFile,
  });
  const { output: codeOutput } = await codeBundler.generate({
    format: 'cjs',
  });
  if (codeOutput.length > 1) {
    throw new Error('Multiple outputs are not supported');
  }
  const [codeResult] = codeOutput;
  const { code } = codeResult;
  return code;
};

export { bundle };

View File

@@ -0,0 +1,20 @@
import { createTRPCProxyClient, httpBatchLink } from '@trpc/client';
import superjson from 'superjson';
import type { Runtime } from '@morten-olsen/mini-loader-server';
import type { RootRouter } from '@morten-olsen/mini-loader-server';

/**
 * Creates a typed tRPC client for the mini-loader server.
 *
 * @param url - Base URL of the server. Defaults to the local dev server
 *   address; made a parameter so callers can target remote servers without
 *   editing this module.
 */
const createClient = (url: string = 'http://localhost:4500') => {
  return createTRPCProxyClient<RootRouter>({
    transformer: superjson,
    links: [
      httpBatchLink({
        url,
      }),
    ],
  });
};

type Client = ReturnType<typeof createClient>;

export type { Client, Runtime };
export { createClient };

View File

@@ -0,0 +1,32 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';

/** `artifacts list` (alias `ls`): queries and prints stored artifacts. */
const list = new Command('list');

/** Parses an optional numeric CLI option, returning undefined when absent. */
const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

list
  .alias('ls')
  // BUG FIX: this command lists artifacts, not logs (copy/paste slip from
  // logs.list — the wrong text showed up in `--help`).
  .description('List artifacts')
  .option('-r, --run-id <runId>', 'Run ID')
  .option('-l, --load-id <loadId>', 'Load ID')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { runId, loadId, offset, limit } = list.opts();
    const client = await createClient();
    const artifacts = await client.artifacts.find.query({
      runId,
      loadId,
      offset: toInt(offset),
      limit: toInt(limit),
    });
    console.table(artifacts);
  });

export { list };

View File

@@ -0,0 +1,7 @@
import { Command } from 'commander';
import { list } from './artifacts.list.js';

/** Parent command grouping all artifact-related subcommands. */
const artifacts = new Command('artifacts').addCommand(list);

export { artifacts };

View File

@@ -0,0 +1,15 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';

/** `loads list` (alias `ls`): prints every stored load as a table. */
const list = new Command('list');

list
  .alias('ls')
  .description('List loads')
  .action(async () => {
    const api = await createClient();
    const rows = await api.loads.find.query({});
    console.table(rows);
  });

export { list };

View File

@@ -0,0 +1,27 @@
import { Command } from 'commander';
import { resolve } from 'path';
import { run } from '@morten-olsen/mini-loader-runner';
import { bundle } from '../../bundler/bundler.js';

/** `loads local <script>`: bundles a script and executes it in-process. */
const local = new Command('local');

local.argument('script').action(async (script) => {
  const entry = resolve(script);
  const code = await bundle({ entry });
  const { promise, emitter } = await run({ script: code });
  // Mirror worker events to the console while the script runs.
  emitter.addListener('message', (message) => {
    if (message.type === 'log') {
      console.log(message.payload);
    } else if (message.type === 'artifact:create') {
      console.log('artifact:create', message.payload.name);
    }
  });
  await promise;
});

export { local };

View File

@@ -0,0 +1,30 @@
import { Command } from 'commander';
import { resolve } from 'path';
import { createClient } from '../../client/client.js';
import { bundle } from '../../bundler/bundler.js';

/** `loads push <script>`: bundles a local script and uploads it as a load. */
const push = new Command('push');

push
  .argument('script')
  .option('-i, --id <id>', 'Load id')
  .option('-n, --name <name>')
  .option('-r, --run', 'Run the load')
  .option('-ai, --auto-install', 'Auto install dependencies', false)
  .action(async (script) => {
    const opts = push.opts();
    const location = resolve(script);
    const client = await createClient();
    const code = await bundle({ entry: location, autoInstall: opts.autoInstall });
    const id = await client.loads.set.mutate({
      id: opts.id,
      name: opts.name,
      script: code,
    });
    console.log('created load with id', id);
    if (opts.run) {
      // BUG FIX: await the mutation. The original fired it without awaiting,
      // so the CLI process could exit (and the batch link be torn down)
      // before the run request was actually sent, and failures were silent.
      await client.runs.create.mutate({ loadId: id });
    }
  });

export { push };

View File

@@ -0,0 +1,12 @@
import { Command } from 'commander';
import { push } from './loads.push.js';
import { list } from './loads.list.js';
import { local } from './loads.local.js';

/** Parent command grouping all load-related subcommands. */
const loads = new Command('loads').addCommand(push).addCommand(list).addCommand(local);

export { loads };

View File

@@ -0,0 +1,36 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';

/** `logs list` (alias `ls`): queries and prints log entries. */
const list = new Command('list');

/** Parses an optional numeric CLI option, returning undefined when absent. */
const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

list
  .alias('ls')
  .description('List logs')
  .option('-r, --run-id <runId>', 'Run ID')
  .option('-l, --load-id <loadId>', 'Load ID')
  .option('--severities <severities...>', 'Severities')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .option('-s, --sort <order>', 'Sort', 'desc')
  .action(async () => {
    // BUG FIX: commander stores `--sort` under the camelCased long-flag name
    // `sort`, not `order`. The original destructured `order`, which was always
    // undefined, so the sort option (and its 'desc' default) never reached
    // the query.
    const { runId, loadId, severities, offset, limit, sort } = list.opts();
    const client = await createClient();
    const logs = await client.logs.find.query({
      runId,
      loadId,
      severities,
      offset: toInt(offset),
      limit: toInt(limit),
      order: sort,
    });
    // Reverse for display so the last fetched entries print at the bottom.
    // NOTE(review): presumes the server returns newest-first — confirm.
    console.table(logs.reverse());
  });

export { list };

View File

@@ -0,0 +1,7 @@
import { Command } from 'commander';
import { list } from './logs.list.js';

/** Parent command grouping all log-related subcommands. */
const logs = new Command('logs').addCommand(list);

export { logs };

View File

@@ -0,0 +1,14 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';

/** `runs create <load-id>`: schedules a new run for the given load. */
const create = new Command('create');

create
  .description('Create a new run')
  .argument('load-id', 'Load ID')
  .action(async (loadId) => {
    const api = await createClient();
    await api.runs.create.mutate({ loadId });
  });

export { create };

View File

@@ -0,0 +1,16 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';

/** `runs list` (alias `ls`): lists runs, optionally filtered by load. */
// BUG FIX: this command was registered as `new Command('create')`, which
// clashes with the real `runs create` command and made `runs list`
// unreachable (only the `ls` alias differed). It is the list command.
const list = new Command('list');

list
  .alias('ls')
  .description('Find a run')
  .argument('[load-id]', 'Load ID')
  .action(async (loadId) => {
    const client = await createClient();
    const runs = await client.runs.find.query({ loadId });
    console.table(runs);
  });

export { list };

View File

@@ -0,0 +1,8 @@
import { Command } from 'commander';
import { create } from './runs.create.js';
import { list } from './runs.list.js';

/** Parent command grouping all run-related subcommands. */
const runs = new Command('runs');
runs.description('Manage runs');
runs.addCommand(create);
runs.addCommand(list);

export { runs };

12
packages/cli/src/index.ts Normal file
View File

@@ -0,0 +1,12 @@
import { program } from 'commander';
import { loads } from './commands/loads/loads.js';
import { runs } from './commands/runs/runs.js';
import { logs } from './commands/logs/logs.js';
import { artifacts } from './commands/artifacts/artifacts.js';

// Register every top-level command group, then hand control to commander.
for (const command of [loads, runs, logs, artifacts]) {
  program.addCommand(command);
}

await program.parseAsync();

View File

@@ -0,0 +1,9 @@
{
"extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
"compilerOptions": {
"outDir": "dist/esm",
},
"include": [
"./src/**/*.ts"
],
}

View File

@@ -0,0 +1,12 @@
{
"name": "@morten-olsen/mini-loader-configs",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC"
}

View File

@@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"esModuleInterop": true,
"strict": true,
"allowSyntheticDefaultImports": true,
"jsx": "react"
},
"ts-node": {
"esm": true,
}
}

3
packages/examples/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/dist/
/node_modules/
/coverage/

View File

@@ -0,0 +1,25 @@
{
"name": "@morten-olsen/mini-loader-examples",
"version": "1.0.0",
"main": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"scripts": {
"build": "tsc --build"
},
"type": "module",
"files": [
"./dist"
],
"exports": {
".": {
"import": "./dist/esm/index.js"
}
},
"devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^",
"@morten-olsen/mini-loader-cli": "workspace:^",
"@morten-olsen/mini-loader": "workspace:^",
"@types/node": "^20.10.8",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,5 @@
// Minimal example load: emits one log line and one artifact.
import { artifacts, logger } from '@morten-olsen/mini-loader';

logger.info('Hello world');
artifacts.create('foo', 'bar');

7
packages/examples/test.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
# Smoke test: exercises the CLI end-to-end.
# Assumes `mini-loader` is on PATH and a server is reachable.

# Upload the example script under the fixed load id "foo".
mini-loader loads push src/simple.ts -i foo
# List all loads (should include "foo").
mini-loader loads ls
# Schedule a run of the uploaded load.
mini-loader runs create foo
# List runs.
mini-loader runs ls
# Show up to 1000 log lines.
mini-loader logs ls -a 1000

View File

@@ -0,0 +1,9 @@
{
"extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
"compilerOptions": {
"outDir": "dist/esm",
},
"include": [
"src"
],
}

3
packages/mini-loader/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/dist/
/node_modules/
/coverage/

View File

@@ -0,0 +1,23 @@
{
"name": "@morten-olsen/mini-loader",
"version": "1.0.0",
"main": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"scripts": {
"build": "tsc --build"
},
"type": "module",
"files": [
"./dist"
],
"exports": {
".": {
"import": "./dist/esm/index.js"
}
},
"devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^",
"@types/node": "^20.10.8",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,26 @@
import { send } from '../utils.js';

/** Message emitted towards the host when a script produces an artifact. */
type ArtifactCreateEvent = {
  type: 'artifact:create';
  payload: {
    name: string;
    /** Artifact contents, base64-encoded. */
    data: string;
  };
};

/**
 * Publishes a named artifact to the host process.
 *
 * @param name - Identifier for the artifact.
 * @param data - Raw contents; strings and Buffers are both accepted.
 */
const create = (name: string, data: Buffer | string) => {
  // BUG FIX: `data.toString('base64')` only base64-encodes Buffers — on a
  // plain string, String.prototype.toString ignores the argument and the raw
  // string was sent while the payload is documented/consumed as base64.
  // Normalize both input types through a Buffer first.
  const buffer = typeof data === 'string' ? Buffer.from(data, 'utf8') : data;
  send({
    type: 'artifact:create',
    payload: {
      name,
      data: buffer.toString('base64'),
    },
  });
};

const artifacts = {
  create,
};

export type { ArtifactCreateEvent };
export { artifacts };

View File

@@ -0,0 +1,9 @@
import type { LoggerEvent } from './logger/logger.js';
import type { ArtifactCreateEvent } from './artifacts/artifacts.js';

// Union of every message a loaded script can post to the host process.
type Event = LoggerEvent | ArtifactCreateEvent;

export type { Event, LoggerEvent, ArtifactCreateEvent };
export { logger } from './logger/logger.js';
export { artifacts } from './artifacts/artifacts.js';
export { input } from './input/input.js';

View File

@@ -0,0 +1,11 @@
import { workerData } from 'worker_threads';

/**
 * Returns the input payload handed to this worker via `workerData`,
 * cast to the caller-specified type. No runtime validation is performed.
 */
const get = <T>(): T => workerData as T;

const input = { get };

export { input };

View File

@@ -0,0 +1,50 @@
import { send } from '../utils.js';
type LoggerEvent = {
type: 'log';
payload: {
severity: 'info' | 'warning' | 'error';
message: string;
data?: unknown;
};
};
const sendLog = (event: LoggerEvent['payload']) => {
send({
type: 'log',
payload: event,
});
};
const info = (message: string, data?: unknown) => {
sendLog({
severity: 'info',
message,
data,
});
};
const warn = (message: string, data?: unknown) => {
sendLog({
severity: 'warning',
message,
data,
});
};
const error = (message: string, data?: unknown) => {
sendLog({
severity: 'error',
message,
data,
});
};
const logger = {
info,
warn,
error,
};
export type { LoggerEvent };
export { logger };

View File

@@ -0,0 +1,8 @@
import { parentPort } from 'worker_threads';

/**
 * Posts a message to the parent thread, if one exists (no-op on the main
 * thread). The payload is round-tripped through JSON, which strips
 * functions, undefined values and prototypes so it is safely clonable.
 */
const send = (data: any) => {
  const serializable = JSON.parse(JSON.stringify(data));
  parentPort?.postMessage(serializable);
};

export { send };

View File

@@ -0,0 +1,9 @@
{
"extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
"compilerOptions": {
"outDir": "dist/esm",
},
"include": [
"src"
],
}

3
packages/runner/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/dist/
/node_modules/
/coverage/

View File

@@ -0,0 +1,27 @@
{
"name": "@morten-olsen/mini-loader-runner",
"version": "1.0.0",
"main": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"scripts": {
"build": "tsc --build"
},
"type": "module",
"files": [
"./dist"
],
"exports": {
".": {
"import": "./dist/esm/index.js"
}
},
"devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^",
"@morten-olsen/mini-loader": "workspace:^",
"@types/node": "^20.10.8",
"typescript": "^5.3.3"
},
"dependencies": {
"eventemitter3": "^5.0.1"
}
}

View File

@@ -0,0 +1,43 @@
import { Worker } from 'worker_threads';
import { EventEmitter } from 'eventemitter3';
import { Event } from '@morten-olsen/mini-loader';

/** Events surfaced while a script executes. */
type RunEvents = {
  message: (event: Event) => void;
  error: (error: Error) => void;
  exit: () => void;
};

type RunOptions = {
  /** Source code to execute (evaluated directly, not a file path). */
  script: string;
  /** Optional payload exposed to the script through workerData. */
  input?: unknown;
};

/**
 * Runs a bundled script inside a worker thread.
 *
 * @returns an emitter relaying worker messages plus a promise that resolves
 *   when the worker exits and rejects on worker errors.
 */
const run = async ({ script, input }: RunOptions) => {
  const emitter = new EventEmitter<RunEvents>();
  const worker = new Worker(script, {
    eval: true,
    workerData: { input },
  });
  const promise = new Promise<void>((resolve, reject) => {
    worker.on('message', (message: Event) => emitter.emit('message', message));
    worker.on('error', reject);
    worker.on('exit', () => resolve());
  });
  return { emitter, promise };
};

export { run };

View File

@@ -0,0 +1,9 @@
{
"extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
"compilerOptions": {
"outDir": "dist/esm",
},
"include": [
"src"
],
}

3
packages/server/.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/dist/
/node_modules/
/coverage/

4
packages/server/bin/index.mjs Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env node
import 'source-map-support/register.js';
import '../dist/esm/index.js';

Binary file not shown.

View File

@@ -0,0 +1,38 @@
{
"name": "@morten-olsen/mini-loader-server",
"version": "1.0.0",
"main": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"bin": {
"mini-loader-server": "./bin/index.mjs"
},
"scripts": {
"build": "tsc --build"
},
"type": "module",
"files": [
"./dist"
],
"exports": {
".": {
"import": "./dist/esm/index.js"
}
},
"devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^",
"@morten-olsen/mini-loader-runner": "workspace:^",
"@types/node": "^20.10.8",
"typescript": "^5.3.3"
},
"dependencies": {
"@trpc/client": "^10.45.0",
"@trpc/server": "^10.45.0",
"eventemitter3": "^5.0.1",
"knex": "^3.1.0",
"nanoid": "^5.0.4",
"source-map-support": "^0.5.21",
"sqlite3": "^5.1.7",
"superjson": "^2.2.1",
"zod": "^3.22.4"
}
}

View File

@@ -0,0 +1,33 @@
import knex, { Knex } from 'knex';
import { source } from './migrations/migrations.source.js';

// Central place for physical table names used by the repos.
const tableNames = {
  loads: 'loads',
};

/**
 * Thin wrapper around a knex connection that runs the bundled migrations
 * before the instance is handed out. Consumers `await database.instance`
 * to obtain a ready-to-use, fully migrated connection.
 */
class Database {
  // Resolves once the connection is created and migrations have completed.
  #instance: Promise<Knex>;

  constructor(config: Knex.Config) {
    this.#instance = this.#setup({
      ...config,
      // Migrations ship in-memory with the package (see migrations.source).
      migrations: {
        migrationSource: source,
      },
    });
  }

  // Creates the connection, migrates to latest, and installs a SIGINT hook so
  // the pool is torn down on Ctrl-C. NOTE(review): the hook is added per
  // Database instance — constructing several adds several listeners.
  #setup = async (config: Knex.Config) => {
    const db = knex(config);
    await db.migrate.latest();
    process.on('SIGINT', () => db.destroy());
    return db;
  };

  public get instance() {
    return this.#instance;
  }
}

export { Database, tableNames };

View File

@@ -0,0 +1,56 @@
import { Knex } from 'knex';

// Initial schema: loads (uploaded scripts), runs (executions), logs, artifacts.
const name = 'init';

const up = async (knex: Knex) => {
  // A load is an uploaded, bundled script.
  await knex.schema.createTable('loads', (table) => {
    table.string('id').primary();
    table.string('name').nullable();
    table.string('script').notNullable();
  });
  // A run is one execution of a load; it keeps its own copy of the script.
  await knex.schema.createTable('runs', (table) => {
    table.string('id').primary();
    table.string('loadId').notNullable();
    table.string('status').notNullable();
    table.string('script').notNullable();
    table.string('input').nullable();
    table.string('error').nullable();
    table.timestamp('startedAt').nullable();
    table.timestamp('endedAt').nullable();
    table.index('loadId');
    table.index('status');
  });
  // Log lines emitted by running scripts.
  await knex.schema.createTable('logs', (table) => {
    table.string('id').primary();
    table.string('runId').notNullable();
    table.string('loadId').notNullable();
    table.string('severity').notNullable();
    table.string('message').notNullable();
    table.jsonb('data').nullable();
    table.timestamp('timestamp').notNullable();
    table.index('runId');
  });
  // Artifacts produced by runs; `data` holds base64-encoded contents.
  await knex.schema.createTable('artifacts', (table) => {
    table.string('id').primary();
    table.string('name').notNullable();
    table.string('runId').notNullable();
    table.string('loadId').notNullable();
    table.text('data').notNullable();
    table.index('runId');
  });
};

// Reverts the initial schema.
const down = async (knex: Knex) => {
  await knex.schema.dropTable('loads');
  await knex.schema.dropTable('runs');
  await knex.schema.dropTable('logs');
  await knex.schema.dropTable('artifacts');
};

export { name, up, down };

View File

@@ -0,0 +1,19 @@
import { Knex } from 'knex';
import * as init from './migration.init.js';

// Shape every migration module in this directory must export.
type Migration = {
  name: string;
  up: (knex: Knex) => Promise<void>;
  down: (knex: Knex) => Promise<void>;
};

// Ordered list of migrations; append new modules here.
const migrations = [init] satisfies Migration[];

// In-memory migration source so migrations ship inside the built package
// instead of being discovered as files on disk at runtime.
const source: Knex.MigrationSource<Migration> = {
  getMigrations: async () => migrations,
  getMigration: async (migration) => migration,
  getMigrationName: (migration) => migration.name,
};

export { source };

View File

@@ -0,0 +1,14 @@
import { createHTTPServer } from '@trpc/server/adapters/standalone';
import { rootRouter } from './router/router.js';
import { createContext } from './router/router.utils.js';

// Entry point: boot a standalone tRPC HTTP server on port 4500.
// NOTE(review): `createContext` is invoked and awaited once here, so whatever
// it resolves to is shared by all requests — confirm router.utils.ts returns
// a per-request context factory rather than a single context object.
const server = createHTTPServer({
  router: rootRouter,
  createContext: await createContext(),
});

server.listen(4500);
console.log('Started');

export type { Runtime } from './runtime/runtime.js';
export type { RootRouter } from './router/router.js';

37
packages/server/src/knex.d.ts vendored Normal file
View File

@@ -0,0 +1,37 @@
import 'knex';

// Table row typings for knex so `db('loads')` etc. return typed records.
// Keep in sync with the schema created in migration.init.ts.
declare module 'knex/types/tables.js' {
  interface Tables {
    loads: {
      id: string;
      name?: string;
      script: string;
    };
    runs: {
      id: string;
      loadId: string;
      script: string;
      input?: string;
      error?: string;
      startedAt?: Date;
      endedAt?: Date;
      status: 'created' | 'running' | 'succeeded' | 'failed';
    };
    logs: {
      id: string;
      runId: string;
      loadId: string;
      severity: 'info' | 'warning' | 'error';
      message: string;
      // Arbitrary structured payload attached to a log line.
      data?: any;
      timestamp: Date;
    };
    artifacts: {
      id: string;
      name: string;
      runId: string;
      loadId: string;
      // Base64-encoded artifact contents.
      data: string;
    };
  }
}
View File

@@ -0,0 +1,22 @@
import { z } from 'zod';

// Payload for storing a new artifact (`data` is the raw contents).
const addArtifactSchema = z.object({
  name: z.string(),
  runId: z.string(),
  loadId: z.string(),
  data: z.string(),
});

// Filter/pagination options shared by find and prepareRemove.
const findArtifactsSchema = z.object({
  ids: z.array(z.string()).optional(),
  runId: z.string().optional(),
  loadId: z.string().optional(),
  offset: z.number().optional(),
  limit: z.number().optional(),
});

type AddArtifactOptions = z.infer<typeof addArtifactSchema>;
type FindArtifactsOptions = z.infer<typeof findArtifactsSchema>;

export type { AddArtifactOptions, FindArtifactsOptions };
export { addArtifactSchema, findArtifactsSchema };

View File

@@ -0,0 +1,115 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { nanoid } from 'nanoid';
import { AddArtifactOptions, FindArtifactsOptions } from './artifacts.schemas.js';
import { createHash } from 'crypto';
type ArtifactRepoEvents = {};
type ArtifactRepoOptions = {
database: Database;
};
class ArtifactRepo extends EventEmitter<ArtifactRepoEvents> {
#options: ArtifactRepoOptions;
constructor(options: ArtifactRepoOptions) {
super();
this.#options = options;
}
public add = async (options: AddArtifactOptions) => {
const { database } = this.#options;
const db = await database.instance;
const id = nanoid();
await db('artifacts').insert({
id,
name: options.name,
runId: options.runId,
loadId: options.loadId,
data: Buffer.from(options.data).toString('base64'),
});
};
public prepareRemove = async (options: FindArtifactsOptions) => {
const { database } = this.#options;
const db = await database.instance;
const query = db('artifacts').select('id');
if (options.ids) {
query.whereIn('id', options.ids);
}
if (options.runId) {
query.where({ runId: options.runId });
}
if (options.loadId) {
query.where({ loadId: options.loadId });
}
if (options.offset) {
query.offset(options.offset);
}
if (options.limit) {
query.limit(options.limit);
}
const ids = await query;
const token = ids.map((id) => Buffer.from(id.id).toString('base64')).join('|');
const hash = createHash('sha256').update(token).digest('hex');
return {
ids,
hash,
};
};
public remove = async (hash: string, ids: string[]) => {
const { database } = this.#options;
const db = await database.instance;
const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
const actualHash = createHash('sha256').update(token).digest('hex');
if (hash !== actualHash) {
throw new Error('Invalid hash');
}
await db('artifacts').whereIn('id', ids).delete();
};
public find = async (options: FindArtifactsOptions) => {
const { database } = this.#options;
const db = await database.instance;
const query = db('artifacts').select(['id', 'name', 'runId', 'loadId']);
if (options.ids) {
query.whereIn('id', options.ids);
}
if (options.runId) {
query.where({ runId: options.runId });
}
if (options.loadId) {
query.where({ loadId: options.loadId });
}
if (options.offset) {
query.offset(options.offset);
}
if (options.limit) {
query.limit(options.limit);
}
const results = await query;
return results;
};
}
export { addArtifactSchema, findArtifactsSchema } from './artifacts.schemas.js';
export { ArtifactRepo };

View File

@@ -0,0 +1,18 @@
import { z } from 'zod';

// Payload for creating or updating a load.
// BUG FIX: `id` is optional — the CLI `push` command omits it when no -i flag
// is given, and LoadRepo.set generates one (`options.id || nanoid()`); a
// required string rejected those valid requests at validation time.
const setLoadSchema = z.object({
  id: z.string().optional(),
  name: z.string().optional(),
  script: z.string(),
});

// Pagination options for listing loads.
const findLoadsSchema = z.object({
  limit: z.number().optional(),
  offset: z.number().optional(),
});

type SetLoadOptions = z.infer<typeof setLoadSchema>;
type FindLoadsOptions = z.infer<typeof findLoadsSchema>;

export type { SetLoadOptions, FindLoadsOptions };
export { setLoadSchema, findLoadsSchema };

View File

@@ -0,0 +1,78 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { FindLoadsOptions, SetLoadOptions } from './loads.schemas.js';
import { nanoid } from 'nanoid';

type LoadRepoEvents = {
  created: (id: string) => void;
  updated: (id: string) => void;
  deleted: (id: string) => void;
};

type LoadRepoOptions = {
  database: Database;
};

/** Repository for uploaded loads (bundled scripts). */
class LoadRepo extends EventEmitter<LoadRepoEvents> {
  #options: LoadRepoOptions;

  constructor(options: LoadRepoOptions) {
    super();
    this.#options = options;
  }

  /** Fetches a single load by id, or undefined when it does not exist. */
  public getById = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const loads = await db('loads').where({ id }).first();
    return loads;
  };

  /** Returns just the stored script for a load, if it exists. */
  public getScript = async (id: string) => {
    const load = await this.getById(id);
    return load?.script;
  };

  /** Lists load metadata (id + name) with optional pagination. */
  public find = async (options: FindLoadsOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('loads').select(['id', 'name']);
    if (options.offset) {
      query.offset(options.offset);
    }
    if (options.limit) {
      query.limit(options.limit);
    }
    const loads = await query;
    return loads;
  };

  /**
   * Creates or updates a load. A missing id means "create", in which case a
   * fresh nanoid is generated.
   *
   * @returns the id of the created/updated load.
   */
  public set = async (options: SetLoadOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = options.id || nanoid();
    const current = await this.getById(id);
    if (current) {
      await db('loads').where({ id }).update(options);
      this.emit('updated', id);
      return id;
    }
    await db('loads').insert({
      ...options,
      id,
    });
    // BUG FIX: a fresh insert is a creation — the original emitted 'updated'
    // here, so the declared 'created' event was never fired by anything.
    this.emit('created', id);
    return id;
  };
}

export { setLoadSchema, findLoadsSchema } from './loads.schemas.js';
export { LoadRepo };

View File

@@ -0,0 +1,25 @@
import { z } from 'zod';
// Severity levels stored in the `logs` table. Shared between the insert and
// query schemas so severity filters can actually match stored rows.
// Fix: `find` previously accepted 'warn'/'debug' while `add` wrote 'warning',
// so filtering on 'warn' could never match anything.
const logSeveritySchema = z.enum(['debug', 'info', 'warning', 'error']);

const addLogSchema = z.object({
  runId: z.string(),
  loadId: z.string(),
  severity: logSeveritySchema,
  message: z.string(),
  data: z.any().optional(),
});

const findLogsSchema = z.object({
  ids: z.array(z.string()).optional(),
  runId: z.string().optional(),
  loadId: z.string().optional(),
  severities: z.array(logSeveritySchema).optional(),
  offset: z.number().optional(),
  limit: z.number().optional(),
  order: z.enum(['asc', 'desc']).optional(),
});

type AddLogOptions = z.infer<typeof addLogSchema>;
type FindLogsOptions = z.infer<typeof findLogsSchema>;

export type { AddLogOptions, FindLogsOptions };
export { addLogSchema, findLogsSchema };

View File

@@ -0,0 +1,122 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { AddLogOptions, FindLogsOptions } from './logs.schemas.js';
import { nanoid } from 'nanoid';
import { createHash } from 'crypto';
type LogRepoEvents = {};

type LogRepoOptions = {
  database: Database;
};

/**
 * Repository for run/load log entries, backed by the `logs` table.
 *
 * Deletion is a two-step handshake: `prepareRemove` returns the matching ids
 * plus a hash over them; `remove` recomputes the hash and refuses to delete
 * when the id set changed in between (optimistic confirmation).
 */
class LogRepo extends EventEmitter<LogRepoEvents> {
  #options: LogRepoOptions;

  constructor(options: LogRepoOptions) {
    super();
    this.#options = options;
  }

  /** Compute the confirmation hash for a set of log id strings. */
  #hashIds = (ids: string[]) => {
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    return createHash('sha256').update(token).digest('hex');
  };

  /** Insert a single log row; the id and timestamp are generated here. */
  public add = async (options: AddLogOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = nanoid();
    await db('logs').insert({
      id,
      runId: options.runId,
      loadId: options.loadId,
      severity: options.severity,
      message: options.message,
      data: options.data,
      timestamp: new Date(),
    });
  };

  /** Resolve a filter to the ids it matches plus a confirmation hash. */
  public prepareRemove = async (options: FindLogsOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('logs').select('id');
    if (options.ids) {
      query.whereIn('id', options.ids);
    }
    if (options.runId) {
      query.where({ runId: options.runId });
    }
    if (options.loadId) {
      query.where({ loadId: options.loadId });
    }
    if (options.severities) {
      query.whereIn('severity', options.severities);
    }
    const rows = await query;
    // Fix: return plain id strings (not row objects) so the result can be
    // passed straight back to `remove`, whose hash is computed over strings.
    const ids = rows.map((row: { id: string }) => row.id);
    return {
      ids,
      hash: this.#hashIds(ids),
    };
  };

  /** Delete the given ids, but only if `hash` matches the current id set. */
  public remove = async (hash: string, ids: string[]) => {
    const { database } = this.#options;
    const db = await database.instance;
    if (hash !== this.#hashIds(ids)) {
      throw new Error('Invalid hash');
    }
    await db('logs').whereIn('id', ids).delete();
  };

  /** Query logs with optional filters, paging and timestamp ordering. */
  public find = async (options: FindLogsOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('logs');
    if (options.ids) {
      query.whereIn('id', options.ids);
    }
    if (options.runId) {
      query.where({ runId: options.runId });
    }
    if (options.loadId) {
      query.where({ loadId: options.loadId });
    }
    if (options.severities) {
      query.whereIn('severity', options.severities);
    }
    if (options.offset) {
      query.offset(options.offset);
    }
    if (options.limit) {
      query.limit(options.limit);
    }
    if (options.order) {
      query.orderBy('timestamp', options.order);
    }
    const logs = await query;
    return logs;
  };
}

export { addLogSchema, findLogsSchema } from './logs.schemas.js';
export { LogRepo };

View File

@@ -0,0 +1,54 @@
import { Database } from '../database/database.js';
import { ArtifactRepo } from './artifacts/artifacts.js';
import { LoadRepo } from './loads/loads.js';
import { LogRepo } from './logs/logs.js';
import { RunRepo } from './runs/runs.js';
type ReposOptions = {
  database: Database;
};

/**
 * Aggregates all repositories and wires their dependencies together.
 * Each repo is exposed through a read-only getter.
 */
class Repos {
  #loads: LoadRepo;
  #runs: RunRepo;
  #logs: LogRepo;
  #artifacts: ArtifactRepo;

  constructor(options: ReposOptions) {
    const { database } = options;
    const loads = new LoadRepo({ database });
    // Runs need the loads repo to look up a script when a run is created.
    const runs = new RunRepo({ database, loads });
    const logs = new LogRepo({ database });
    const artifacts = new ArtifactRepo({ database });
    this.#loads = loads;
    this.#runs = runs;
    this.#logs = logs;
    this.#artifacts = artifacts;
  }

  public get loads() {
    return this.#loads;
  }

  public get runs() {
    return this.#runs;
  }

  public get logs() {
    return this.#logs;
  }

  public get artifacts() {
    return this.#artifacts;
  }
}

export { findLogsSchema, addLogSchema } from './logs/logs.js';
export { setLoadSchema, findLoadsSchema } from './loads/loads.js';
export { createRunSchema, findRunsSchema } from './runs/runs.js';
export { addArtifactSchema, findArtifactsSchema } from './artifacts/artifacts.js';
export { Repos };

View File

@@ -0,0 +1,28 @@
import { z } from 'zod';
// All states a run can be in.
// Fix: 'created' was missing although RunRepo.create inserts rows with
// status 'created' before the runner picks them up.
const runStatusSchema = z.enum(['created', 'running', 'succeeded', 'failed']);

// Input for creating a run; config/data are passed through to the script.
const createRunSchema = z.object({
  loadId: z.string(),
  config: z.any().optional(),
  data: z.any().optional(),
});

// Terminal-state update for a run; `error` accompanies failed runs.
const updateRunSchema = z.object({
  status: runStatusSchema,
  error: z.string().optional(),
});

// Filter/paging options for listing or removing runs.
const findRunsSchema = z.object({
  loadId: z.string().optional(),
  offset: z.number().optional(),
  limit: z.number().optional(),
});

type RunStatus = z.infer<typeof runStatusSchema>;
type CreateRunOptions = z.infer<typeof createRunSchema>;
type UpdateRunOptions = z.infer<typeof updateRunSchema>;
type FindRunsOptions = z.infer<typeof findRunsSchema>;

export type { RunStatus, CreateRunOptions, UpdateRunOptions, FindRunsOptions };
export { runStatusSchema, createRunSchema, updateRunSchema, findRunsSchema };

View File

@@ -0,0 +1,140 @@
import { nanoid } from 'nanoid';
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { CreateRunOptions, FindRunsOptions, UpdateRunOptions } from './runs.schemas.js';
import { LoadRepo } from '../loads/loads.js';
/** Events emitted by {@link RunRepo}; all carry the run and load ids. */
type RunRepoEvents = {
  created: (args: { id: string; loadId: string }) => void;
  updated: (args: { id: string; loadId: string }) => void;
  failed: (args: { id: string; loadId: string }) => void;
  succeeded: (args: { id: string; loadId: string }) => void;
};

type RunRepoOptions = {
  database: Database;
  loads: LoadRepo;
};

/**
 * Repository for script runs, backed by the `runs` table.
 * A run snapshots the load's script at creation time (see `create`), so
 * later edits to the load do not affect an in-flight run.
 */
class RunRepo extends EventEmitter<RunRepoEvents> {
  #options: RunRepoOptions;

  constructor(options: RunRepoOptions) {
    super();
    this.#options = options;
  }

  /** Fetch a run by id; throws when it does not exist. */
  public getById = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const run = await db('runs').where({ id }).first();
    if (!run) {
      throw new Error('Run not found');
    }
    return run;
  };

  /** All runs belonging to a given load. */
  public getByLoadId = async (loadId: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const runs = await db('runs').where({ loadId });
    return runs;
  };

  /** List run summaries with optional load filter and paging. */
  public find = async (options: FindRunsOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    // Fix: 'status' was selected twice in the column list.
    const query = db('runs').select(['id', 'status', 'startedAt', 'error', 'endedAt']);
    if (options.loadId) {
      query.where({ loadId: options.loadId });
    }
    if (options.offset) {
      query.offset(options.offset);
    }
    if (options.limit) {
      query.limit(options.limit);
    }
    const runs = await query;
    return runs;
  };

  /** Delete runs, optionally restricted to a single load. */
  public remove = async (options: FindRunsOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('runs');
    if (options.loadId) {
      query.where({ loadId: options.loadId });
    }
    await query.del();
  };

  /** Mark a run as running and stamp its start time; emits `updated`. */
  public started = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    // getById throws when the run does not exist, so no extra null check
    // is needed (the previous `if (!current)` branch was unreachable).
    const { loadId } = await this.getById(id);
    const runs = await db('runs').where({ id }).update({
      status: 'running',
      startedAt: new Date(),
    });
    this.emit('updated', { id, loadId });
    return runs;
  };

  /**
   * Record a run's terminal state; emits `updated` plus `failed` or
   * `succeeded` depending on the outcome.
   */
  public finished = async (id: string, options: UpdateRunOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const { loadId } = await this.getById(id);
    const runs = await db('runs').where({ id }).update({
      status: options.status,
      error: options.error,
      endedAt: new Date(),
    });
    this.emit('updated', { id, loadId });
    switch (options.status) {
      case 'failed':
        this.emit('failed', { id, loadId });
        break;
      case 'succeeded':
        this.emit('succeeded', { id, loadId });
        break;
    }
    return runs;
  };

  /**
   * Create a run for a load, snapshotting the load's current script so the
   * run is immune to later edits. Emits `created`, which the Runner uses to
   * start execution.
   */
  public create = async (options: CreateRunOptions) => {
    const { database, loads } = this.#options;
    const id = nanoid();
    const db = await database.instance;
    const script = await loads.getScript(options.loadId);
    await db('runs').insert({
      id,
      script,
      loadId: options.loadId,
      status: 'created',
      startedAt: new Date(),
    });
    this.emit('created', {
      id,
      loadId: options.loadId,
    });
    return id;
  };
}

export { createRunSchema, findRunsSchema } from './runs.schemas.js';
export { RunRepo };

View File

@@ -0,0 +1,43 @@
import { z } from 'zod';
import { findArtifactsSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';
/** List artifacts matching the given filter. */
const find = publicProcedure.input(findArtifactsSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { artifacts } = repos;
  const result = await artifacts.find(input);
  return result;
});

/** First half of the two-step delete: resolve ids + confirmation hash. */
const prepareRemove = publicProcedure.input(findArtifactsSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { artifacts } = repos;
  // Fix: the prepared ids/hash must be returned to the client — previously
  // the result was discarded, leaving the confirmation flow with no data.
  return artifacts.prepareRemove(input);
});

/** Second half of the delete: only succeeds when the hash still matches. */
const remove = publicProcedure
  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    const { artifacts } = repos;
    await artifacts.remove(input.hash, input.ids);
  });

const artifactsRouter = router({
  find,
  remove,
  prepareRemove,
});

export { artifactsRouter };

View File

@@ -0,0 +1,27 @@
import { findLoadsSchema, setLoadSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';
/** Create or update a load; resolves to the load's id. */
const set = publicProcedure.input(setLoadSchema).mutation(async ({ input, ctx }) => {
  const { loads } = ctx.runtime.repos;
  return loads.set(input);
});

/** List loads with optional paging. */
const find = publicProcedure.input(findLoadsSchema).query(async ({ input, ctx }) => {
  const { loads } = ctx.runtime.repos;
  return loads.find(input);
});

const loadsRouter = router({
  set,
  find,
});

export { loadsRouter };

View File

@@ -0,0 +1,43 @@
import { z } from 'zod';
import { findLogsSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';
/** List log entries matching the given filter. */
const find = publicProcedure.input(findLogsSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { logs } = repos;
  const result = await logs.find(input);
  return result;
});

/** First half of the two-step delete: resolve ids + confirmation hash. */
const prepareRemove = publicProcedure.input(findLogsSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { logs } = repos;
  // Fix: the prepared ids/hash must be returned to the client — previously
  // the result was discarded, leaving the confirmation flow with no data.
  return logs.prepareRemove(input);
});

/** Second half of the delete: only succeeds when the hash still matches. */
const remove = publicProcedure
  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    const { logs } = repos;
    await logs.remove(input.hash, input.ids);
  });

const logsRouter = router({
  find,
  remove,
  prepareRemove,
});

export { logsRouter };

View File

@@ -0,0 +1,33 @@
import { createRunSchema, findRunsSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';
/** Create a run for a load; resolves to the new run's id. */
const create = publicProcedure.input(createRunSchema).mutation(async ({ input, ctx }) => {
  const { runs } = ctx.runtime.repos;
  return runs.create(input);
});

/** List run summaries with optional filters and paging. */
const find = publicProcedure.input(findRunsSchema).query(async ({ input, ctx }) => {
  const { runs } = ctx.runtime.repos;
  return runs.find(input);
});

/** Delete runs matching the given filter. */
const remove = publicProcedure.input(findRunsSchema).mutation(async ({ input, ctx }) => {
  const { runs } = ctx.runtime.repos;
  await runs.remove(input);
});

const runsRouter = router({
  create,
  find,
  remove,
});

export { runsRouter };

View File

@@ -0,0 +1,17 @@
import { artifactsRouter } from './router.artifacts.js';
import { loadsRouter } from './router.loads.js';
import { logsRouter } from './router.logs.js';
import { runsRouter } from './router.runs.js';
import { router } from './router.utils.js';
// Top-level tRPC router combining every feature sub-router under its own
// namespace (loads, runs, logs, artifacts).
const rootRouter = router({
  loads: loadsRouter,
  runs: runsRouter,
  logs: logsRouter,
  artifacts: artifactsRouter,
});
// Exported so clients can infer the full API surface from the server types.
type RootRouter = typeof rootRouter;
export type { RootRouter };
export { rootRouter };

View File

@@ -0,0 +1,32 @@
import { initTRPC } from '@trpc/server';
import { resolve } from 'path';
import { mkdir } from 'fs/promises';
import superjson from 'superjson';
import { Runtime } from '../runtime/runtime.js';
/**
 * Build the per-server context factory. Ensures the data directory exists
 * and constructs a single shared Runtime (sqlite-backed); the returned
 * async function is the actual per-request tRPC context factory and hands
 * that runtime to every request.
 */
const createContext = async () => {
  await mkdir(resolve(process.cwd(), 'data'), { recursive: true });
  const runtime = new Runtime({
    database: {
      client: 'sqlite3',
      connection: {
        filename: resolve(process.cwd(), 'data', 'database.sqlite'),
      },
      useNullAsDefault: true,
    },
  });
  return async () => {
    return {
      runtime,
    };
  };
};

// Fix: createContext resolves to a *factory*; the request context is what
// that factory resolves to. A single Awaited<ReturnType<…>> yielded the
// factory function type itself, so `ctx.runtime` would not type-check in
// the routers — unwrap twice to get `{ runtime: Runtime }`.
type Context = Awaited<ReturnType<Awaited<ReturnType<typeof createContext>>>>;

const { router, procedure: publicProcedure } = initTRPC.context<Context>().create({
  transformer: superjson,
});

export { createContext, router, publicProcedure };

View File

@@ -0,0 +1,82 @@
import { EventEmitter } from 'eventemitter3';
import { run } from '@morten-olsen/mini-loader-runner';
import { Repos } from '../repos/repos.js';
import { LoggerEvent } from '../../../mini-loader/dist/esm/logger/logger.js';
import { ArtifactCreateEvent } from '../../../mini-loader/dist/esm/artifacts/artifacts.js';
type RunnerInstanceEvents = {
  completed: (args: { id: string }) => void;
};

type RunnerInstanceOptions = {
  id: string;
  loadId: string;
  repos: Repos;
};

/**
 * Executes a single run: starts the sandboxed script, persists incoming log
 * and artifact messages, and records the final status. Emits `completed`
 * when done, whether the run succeeded or failed.
 */
class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
  #options: RunnerInstanceOptions;

  constructor(options: RunnerInstanceOptions) {
    super();
    this.#options = options;
  }

  /** Persist a log message emitted by the running script. */
  #addLog = async (event: LoggerEvent['payload']) => {
    const { repos, id, loadId } = this.#options;
    const { logs } = repos;
    await logs.add({
      runId: id,
      loadId,
      severity: event.severity,
      message: event.message,
      data: event.data,
    });
  };

  /** Persist an artifact produced by the running script. */
  #addArtifact = async (event: ArtifactCreateEvent['payload']) => {
    const { repos, id, loadId } = this.#options;
    const { artifacts } = repos;
    // Fix: removed leftover `console.log('evt', event)` debug statement.
    await artifacts.add({
      name: event.name,
      runId: id,
      loadId,
      data: event.data,
    });
  };

  /** Run the script to completion and record its final status. */
  public start = async () => {
    const { repos, id } = this.#options;
    const { runs } = repos;
    const { script } = await runs.getById(id);
    try {
      await runs.started(id);
      const { promise, emitter } = await run({ script });
      emitter.on('message', (message) => {
        // Persistence is fire-and-forget; attach a rejection handler so a
        // failing insert surfaces in the server log instead of becoming an
        // unhandled promise rejection.
        switch (message.type) {
          case 'log': {
            this.#addLog(message.payload).catch(console.error);
            break;
          }
          case 'artifact:create': {
            this.#addArtifact(message.payload).catch(console.error);
            break;
          }
        }
      });
      await promise;
      await runs.finished(id, { status: 'succeeded' });
    } catch (error) {
      // Normalize unknown throwables into a message for the runs table.
      let errorMessage = 'Unknown error';
      if (error instanceof Error) {
        errorMessage = error.message;
      }
      await runs.finished(id, { status: 'failed', error: errorMessage });
    } finally {
      this.emit('completed', { id });
    }
  };
}

export { RunnerInstance };

View File

@@ -0,0 +1,38 @@
import { Repos } from '../repos/repos.js';
import { RunnerInstance } from './runner.instance.js';
type RunnerOptions = {
  repos: Repos;
};

/**
 * Listens for newly created runs and executes each in its own
 * RunnerInstance, tracking in-flight instances to avoid duplicates.
 */
class Runner {
  #options: RunnerOptions;
  #instances: Map<string, RunnerInstance> = new Map();

  constructor(options: RunnerOptions) {
    this.#options = options;
    options.repos.runs.on('created', this.#start);
  }

  #start = async ({ id, loadId }: { id: string; loadId: string }) => {
    // A duplicate 'created' event for an in-flight run is ignored.
    if (this.#instances.has(id)) {
      return;
    }
    const instance = new RunnerInstance({
      id,
      loadId,
      repos: this.#options.repos,
    });
    instance.on('completed', () => {
      this.#instances.delete(id);
    });
    this.#instances.set(id, instance);
    await instance.start();
  };
}

export { Runner };

View File

@@ -0,0 +1,29 @@
import { Knex } from 'knex';
import { Database } from '../database/database.js';
import { Repos } from '../repos/repos.js';
import { Runner } from '../runner/runner.js';
type RuntimeOptions = {
database: Knex.Config;
};
class Runtime {
#repos: Repos;
#runner: Runner;
constructor(options: RuntimeOptions) {
const database = new Database(options.database);
this.#repos = new Repos({ database });
this.#runner = new Runner({ repos: this.#repos });
}
public get repos() {
return this.#repos;
}
public get runner() {
return this.#runner;
}
}
export { Runtime };

View File

@@ -0,0 +1,9 @@
{
"extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
"compilerOptions": {
"outDir": "dist/esm",
},
"include": [
"src"
],
}

4889
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

2
pnpm-workspace.yaml Normal file
View File

@@ -0,0 +1,2 @@
packages:
- packages/*

16
scripts/set-version.mjs Normal file
View File

@@ -0,0 +1,16 @@
import { findWorkspacePackages } from '@pnpm/find-workspace-packages';
import { writeFile } from 'fs/promises';
import { join } from 'path';
// Usage: node scripts/set-version.mjs <version>
// Rewrites the `version` field of every workspace package's package.json.
const version = process.argv[2];
if (!version) {
  throw new Error('Version is required');
}

const packages = await findWorkspacePackages(process.cwd());

for (const { manifest, dir } of packages) {
  console.log(dir, version);
  manifest.version = version;
  // Trailing newline keeps the files POSIX-friendly and avoids noisy diffs
  // against editors/formatters that add one automatically.
  await writeFile(join(dir, 'package.json'), JSON.stringify(manifest, null, 2) + '\n');
}

17
tsconfig.json Normal file
View File

@@ -0,0 +1,17 @@
{
"include": [],
"references": [
{
"path": "./packages/server/tsconfig.json"
},
{
"path": "./packages/mini-loader/tsconfig.json"
},
{
"path": "./packages/cli/tsconfig.json"
},
{
"path": "./packages/runner/tsconfig.json"
},
],
}

23
turbo.json Normal file
View File

@@ -0,0 +1,23 @@
{
"$schema": "https://turbo.build/schema.json",
"pipeline": {
"build": {
"dependsOn": ["^build"],
"outputs": ["dist/**"],
"inputs": ["src/**/*.tsx", "src/**/*.ts", "./tsconfig.*"]
},
"test": {
"cache": false
},
"clean": {},
"dev": {
"dependsOn": ["^build"],
"cache": false
},
"start": {
"dependsOn": ["^build", "build"],
"outputs": ["dist/**"],
"inputs": ["src/**/*.tsx", "src/**/*.ts", "./tsconfig.*"]
}
}
}