9 Commits
0.2.2 ... 0.5.0

Author SHA1 Message Date
Morten Olsen
7436b3439c feat: improved configuration (#31) 2024-01-14 12:49:54 +01:00
Morten Olsen
2109bc3af9 feat: add scheduler (#30) 2024-01-14 12:30:39 +01:00
Morten Olsen
eeaad68f6e feat: initial policy system and version (#29)
Resolves #22 #24 and #26
2024-01-14 09:45:42 +01:00
Morten Olsen
c7ca97f041 feat: local secrets (#28) 2024-01-14 09:45:10 +01:00
Morten Olsen
c8e02d8da4 ci: fix docker tag (#17) 2024-01-13 14:07:33 +01:00
Morten Olsen
9a5b27f1be ci: publish with latest tag (#16) 2024-01-13 14:01:46 +01:00
Morten Olsen
0760328854 fix: add health checks and volumes to docker file (#2)
Co-authored-by: Morten Olsen <morten-olsen@users.noreply.github.com>
2024-01-12 22:48:38 +01:00
Morten Olsen
fa23b325b3 chore: add issue templates (#15) 2024-01-12 22:26:29 +01:00
Morten Olsen
4f183310a6 fix: add runner as a dependency of cli (#14) 2024-01-12 22:20:41 +01:00
39 changed files with 653 additions and 39 deletions

27
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,27 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Additional context**
Add any other context about the problem here.

View File

@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@@ -82,6 +82,8 @@ jobs:
uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
latest
- name: Build and push Docker image
uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
with:

View File

@@ -22,9 +22,11 @@ Also see [anti-features and limitations](./docs/anti-features.md)
Get up and running with mini loader in just a few steps:
1. **Install the CLI**: `npm install -g @morten-olsen/mini-loader-cli`
2. **Deploy the Server**: `docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main`.
3. **Push Your First Load**: `mini-loader loads push script.mjs -r -i first`
3. **See the logs**: `mini-loader logs ls -l first`
2. **Deploy the Server**: `docker run -p 4500:4500 --name mini-loader ghcr.io/morten-olsen/mini-loader`.
3. **Get your access token**: `docker exec mini-loader mini-loader-server create-token`
4. **Login**: `mini-loader auth login http://localhost:4500`
5. **Push Your First Load**: `mini-loader loads push script.mjs -r -i first`
6. **See the logs**: `mini-loader logs ls -l first`
For a detailed guide on getting started, please refer to the [Getting Started Tutorial](./docs/getting-started.md).

View File

@@ -5,5 +5,9 @@ services:
build:
context: .
dockerfile: ./docker/Dockerfile
volumes:
- data:/app/data
ports:
- 4500:4500
volumes:
data:

View File

@@ -27,6 +27,11 @@ COPY --from=builder /app/out/full/ .
RUN pnpm turbo run build --filter=@morten-olsen/mini-loader-server
FROM base AS runner
ENV \
NODE_ENV=production \
DATA_DIR=/data \
CACHE_DIR=/cache
RUN apk add --no-cache jq curl
WORKDIR /app
# Don't run production as root
@@ -38,5 +43,12 @@ RUN chmod +x /entrypoint.sh
COPY --from=installer /app .
EXPOSE 4500
VOLUME /data
HEALTHCHECK \
--interval=10s \
--start-period=10s \
CMD curl -f http://localhost:4500/health || exit 1
ENTRYPOINT ["/entrypoint.sh"]
CMD ["mini-loader-server", "start"]

View File

@@ -7,6 +7,8 @@ GID=${GID:-1001}
addgroup --system --gid ${GID} nodejs && \
adduser --system --uid ${UID} -G nodejs miniloader && \
mkdir -p /app/data
chown -R miniloader:nodejs /app/data
mkdir -p ${DATA_DIR}
mkdir -p ${CACHE_DIR}
chown -R miniloader:nodejs ${DATA_DIR}
chown -R miniloader:nodejs ${CACHE_DIR}
su miniloader -s /bin/sh -c "$CMD"

View File

@@ -8,7 +8,7 @@ This guide will help you quickly set up and run a mini loader server using Docke
To begin, let's deploy the mini loader container. Run the following command in your terminal:
```bash
docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main
docker run -p 4500:4500 --name mini-loader ghcr.io/morten-olsen/mini-loader:latest
```
This command downloads the latest mini loader image and runs it, exposing port 4500.

View File

@@ -5,6 +5,7 @@
"packageManager": "pnpm@8.10.4",
"version": "1.0.0",
"scripts": {
"prepare": "pnpm run build",
"build": "turbo build",
"build:dev": "tsc --build --watch",
"test:lint": "eslint ./packages/*/src",

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env node
import 'source-map-support/register.js';
import '../dist/esm/index.js';
import '../dist/esm/src/index.js';

View File

@@ -1,8 +1,8 @@
{
"name": "@morten-olsen/mini-loader-cli",
"version": "1.0.0",
"main": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"main": "./dist/esm/src/index.js",
"types": "./dist/esm/src/index.d.ts",
"license": "GPL-3.0",
"bin": {
"mini-loader": "./bin/index.mjs"
@@ -16,10 +16,12 @@
],
"exports": {
".": {
"import": "./dist/esm/index.js"
"import": "./dist/esm/src/index.js"
}
},
"dependencies": {
"@morten-olsen/mini-loader-runner": "workspace:^",
"@morten-olsen/mini-loader-server": "workspace:^",
"@rollup/plugin-auto-install": "^3.0.5",
"@rollup/plugin-commonjs": "^25.0.7",
"@rollup/plugin-json": "^6.1.0",
@@ -28,6 +30,7 @@
"@rollup/plugin-sucrase": "^5.0.2",
"@trpc/client": "^10.45.0",
"commander": "^11.1.0",
"dotenv": "^16.3.1",
"env-paths": "^3.0.0",
"inquirer": "^9.2.12",
"ora": "^8.0.1",
@@ -38,8 +41,6 @@
},
"devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^",
"@morten-olsen/mini-loader-runner": "workspace:^",
"@morten-olsen/mini-loader-server": "workspace:^",
"@types/inquirer": "^9.0.7",
"typescript": "^5.3.3"
},

View File

@@ -2,6 +2,7 @@ import { createTRPCProxyClient, httpBatchLink } from '@trpc/client';
import superjson from 'superjson';
import type { Runtime } from '@morten-olsen/mini-loader-server';
import type { RootRouter } from '@morten-olsen/mini-loader-server';
import pkg from '../../package.json';
import { Context } from '../context/context.js';
const createClient = (context: Context) => {
@@ -14,6 +15,7 @@ const createClient = (context: Context) => {
httpBatchLink({
url: `${context.host}/trpc`,
headers: {
'x-version': pkg.version,
authorization: `Bearer ${context.token}`,
},
}),

View File

@@ -3,6 +3,7 @@ import { resolve } from 'path';
import { run as runLoad } from '@morten-olsen/mini-loader-runner';
import { bundle } from '../../bundler/bundler.js';
import { step } from '../../utils/step.js';
import { readSecrets } from './local.utils.js';
const run = new Command('run');
@@ -12,12 +13,14 @@ run
.action(async (script) => {
const location = resolve(script);
const { autoInstall } = run.opts();
const secrets = await readSecrets();
const code = await step('Bundling', async () => {
return await bundle({ entry: location, autoInstall });
});
const { promise, emitter } = await runLoad({
script: code,
secrets,
});
emitter.addListener('message', (message) => {
switch (message.type) {

View File

@@ -0,0 +1,25 @@
import dotenv from 'dotenv';
import { existsSync } from 'fs';
import { readFile } from 'fs/promises';
import { join } from 'path';

// Environment variables carrying this prefix are exposed as secrets
// (with the prefix stripped) to locally-run loads.
const ENV_PREFIX = 'ML_S_';

/**
 * Collect secrets for a local run.
 *
 * Sources, in override order:
 *  1. a dotenv-formatted `.secret` file in the current working directory
 *  2. `ML_S_*` environment variables (these win over the file)
 */
const readSecrets = async () => {
  const secrets: Record<string, string> = {};
  const secretFile = join(process.cwd(), '.secret');
  if (existsSync(secretFile)) {
    const raw = await readFile(secretFile, 'utf-8');
    Object.assign(secrets, dotenv.parse(raw));
  }
  for (const key of Object.keys(process.env)) {
    if (key.startsWith(ENV_PREFIX)) {
      secrets[key.replace(ENV_PREFIX, '')] = process.env[key]!;
    }
  }
  return secrets;
};

export { readSecrets };

View File

@@ -0,0 +1,32 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

// `schedules add <load-id> <cron>` — register a cron schedule for a load.
const add = new Command('add');

add
  .description('Add schedule')
  .argument('<load-id>', 'Load ID')
  .argument('<cron>', 'Cron')
  .option('-n, --name <name>', 'Name')
  .action(async (loadId, cron) => {
    const { name } = add.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => createClient(context));
    // The server returns the generated schedule id.
    const id = await step('Adding schedule', async () =>
      client.schedules.add.mutate({
        name,
        load: loadId,
        cron,
      }),
    );
    console.log(`Schedule added with ID ${id}`);
  });

export { add };

View File

@@ -0,0 +1,39 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

// `schedules list` (alias `ls`) — print matching schedules as a table.
const list = new Command('list');

// Convert an optional CLI string option to a number; absent values pass
// through as undefined so the server applies its own defaults.
const toInt = (value?: string) => (value ? parseInt(value, 10) : undefined);

list
  .alias('ls')
  .description('List schedules')
  .option('-l, --load-ids <loadIds...>', 'Load ID')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { loadIds, offset, limit } = list.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => createClient(context));
    const schedules = await step('Getting schedules', async () =>
      client.schedules.find.query({
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      }),
    );
    console.table(schedules);
  });

export { list };

View File

@@ -0,0 +1,61 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';

// `schedules remove` (alias `rm`) — delete schedules after confirmation.
const remove = new Command('remove');

// Convert an optional CLI string option to a number; absent values pass
// through as undefined so the server applies its own defaults.
const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

remove
  // 'rm', not 'ls' — the original aliased 'ls', colliding with the list
  // command in the same parent command.
  .alias('rm')
  // Fixed description typo ("LRemove schedules").
  .description('Remove schedules')
  .option('-i, --ids <ids...>', 'Schedule IDs')
  .option('-l, --load-ids <loadIds...>', 'Load IDs')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { ids, loadIds, offset, limit } = remove.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    // Resolve the concrete id set (plus an integrity hash) before asking
    // the user to confirm the deletion.
    const response = await step('Preparing to delete', async () => {
      return await client.schedules.prepareRemove.query({
        ids,
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });
    if (!response.ids.length) {
      console.log('No schedules to delete');
      return;
    }
    const { confirm } = await inquirer.prompt([
      {
        type: 'confirm',
        name: 'confirm',
        message: `Are you sure you want to delete ${response.ids.length} schedules?`,
      },
    ]);
    if (!confirm) {
      return;
    }
    // Delete through the schedules router — the original mistakenly called
    // client.artifacts.remove, which deletes artifacts, not schedules.
    await step('Deleting schedules', async () => {
      await client.schedules.remove.mutate(response);
    });
  });

export { remove };

View File

@@ -0,0 +1,11 @@
import { Command } from 'commander';
import { list } from './schedules.list.js';
import { remove } from './schedules.remove.js';
import { add } from './schedules.add.js';

// Parent `schedules` command grouping the list/remove/add subcommands.
const schedules = new Command('schedules');
for (const command of [list, remove, add]) {
  schedules.addCommand(command);
}

export { schedules };

View File

@@ -1,4 +1,5 @@
import { program } from 'commander';
import { Command, program } from 'commander';
import pkg from '../package.json';
import { loads } from './commands/loads/loads.js';
import { runs } from './commands/runs/runs.js';
import { logs } from './commands/logs/logs.js';
@@ -7,6 +8,7 @@ import { secrets } from './commands/secrets/secrets.js';
import { local } from './commands/local/local.js';
import { auth } from './commands/auth/auth.js';
import { contexts } from './commands/contexts/contexts.js';
import { schedules } from './commands/schedules/schedules.js';
program.addCommand(loads);
program.addCommand(runs);
@@ -16,5 +18,14 @@ program.addCommand(secrets);
program.addCommand(local);
program.addCommand(auth);
program.addCommand(contexts);
program.addCommand(schedules);
program.version(pkg.version);
const version = new Command('version');
version.action(() => {
console.log(pkg.version);
});
program.addCommand(version);
await program.parseAsync();

View File

@@ -8,6 +8,7 @@
"sourceMap": true,
"esModuleInterop": true,
"strict": true,
"resolveJsonModule": true,
"allowSyntheticDefaultImports": true,
"jsx": "react"
},

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env node
import 'source-map-support/register.js';
import '../dist/esm/index.js';
import '../dist/esm/src/index.js';

View File

@@ -2,8 +2,8 @@
"name": "@morten-olsen/mini-loader-server",
"version": "1.0.0",
"license": "GPL-3.0",
"main": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"main": "./dist/esm/src/index.js",
"types": "./dist/esm/src/index.d.ts",
"bin": {
"mini-loader-server": "./bin/index.mjs"
},
@@ -16,7 +16,7 @@
],
"exports": {
".": {
"import": "./dist/esm/index.js"
"import": "./dist/esm/src/index.js"
}
},
"devDependencies": {
@@ -31,6 +31,8 @@
"@trpc/client": "^10.45.0",
"@trpc/server": "^10.45.0",
"commander": "^11.1.0",
"cron": "^3.1.6",
"env-paths": "^3.0.0",
"eventemitter3": "^5.0.1",
"fastify": "^4.25.2",
"jsonwebtoken": "^9.0.2",

View File

@@ -20,10 +20,10 @@ class Auth {
#setup = async () => {
const { config } = this.#options;
const secretLocation = resolve(config.files.location, 'secret');
const secretLocation = resolve(config.files.data, 'secret');
let secret = '';
await mkdir(config.files.data, { recursive: true });
if (!existsSync(secretLocation)) {
await mkdir(config.files.location, { recursive: true });
secret = nanoid();
await writeFile(secretLocation, secret);
} else {

View File

@@ -3,7 +3,8 @@ import { Knex } from 'knex';
type Config = {
database: Omit<Knex.Config, 'migrations'>;
files: {
location: string;
data: string;
cache: string;
};
auth?: {
oidc?: {

View File

@@ -0,0 +1,22 @@
import { Knex } from 'knex';

const name = 'schedule-support';

// Creates the table backing cron schedules: each row ties a load id to a
// cron expression (and optional input payload) the scheduler will run.
const up = async (knex: Knex) => {
  await knex.schema.createTable('schedules', (table) => {
    table.string('id').primary();
    table.string('name').nullable();
    table.string('description').nullable();
    table.string('load').notNullable();
    table.string('cron').notNullable();
    table.string('input').nullable();
    table.timestamp('createdAt').notNullable();
    table.timestamp('updatedAt').notNullable();
  });
};

// Must drop the same table `up` created — the original dropped 'schedule'
// (singular), which would make every rollback fail.
const down = async (knex: Knex) => {
  await knex.schema.dropTable('schedules');
};

export { name, up, down };

View File

@@ -1,6 +1,7 @@
import { Knex } from 'knex';
import * as init from './migration.init.js';
import * as scheduleSupport from './migration.schedule.js';
type Migration = {
name: string;
@@ -8,7 +9,7 @@ type Migration = {
down: (knex: Knex) => Promise<void>;
};
const migrations = [init] satisfies Migration[];
const migrations = [init, scheduleSupport] satisfies Migration[];
const source: Knex.MigrationSource<Migration> = {
getMigrations: async () => migrations,

View File

@@ -6,6 +6,7 @@ const start = new Command('start');
start.action(async () => {
const port = 4500;
const runtime = await Runtime.create();
await runtime.scheduler.start();
const server = await createServer(runtime);
await server.listen({
port,
@@ -18,7 +19,11 @@ start.action(async () => {
const createToken = new Command('create-token');
createToken.action(async () => {
const runtime = await Runtime.create();
const token = await runtime.auth.createToken({});
const token = await runtime.auth.createToken({
policy: {
'*:*': ['*'],
},
});
console.log(token);
});

View File

@@ -43,5 +43,15 @@ declare module 'knex/types/tables.js' {
createdAt: Date;
updatedAt: Date;
};
schedules: {
id: string;
name?: string;
description?: string;
load: string;
cron: string;
input?: string;
createdAt: Date;
updatedAt: Date;
};
}
}

View File

@@ -62,7 +62,7 @@ class LoadRepo extends EventEmitter<LoadRepoEvents> {
const db = await database.instance;
const id = options.id || nanoid();
const script = createHash('sha256').update(options.script).digest('hex');
const scriptDir = resolve(this.#options.config.files.location, 'scripts');
const scriptDir = resolve(this.#options.config.files.data, 'scripts');
await mkdir(scriptDir, { recursive: true });
await writeFile(resolve(scriptDir, `${script}.js`), options.script);

View File

@@ -4,6 +4,7 @@ import { ArtifactRepo } from './artifacts/artifacts.js';
import { LoadRepo } from './loads/loads.js';
import { LogRepo } from './logs/logs.js';
import { RunRepo } from './runs/runs.js';
import { ScheduleRepo } from './schedules/schedules.js';
import { SecretRepo } from './secrets/secrets.js';
type ReposOptions = {
@@ -17,6 +18,7 @@ class Repos {
#logs: LogRepo;
#artifacts: ArtifactRepo;
#secrets: SecretRepo;
#schedule: ScheduleRepo;
constructor({ database, config }: ReposOptions) {
this.#loads = new LoadRepo({
@@ -36,6 +38,9 @@ class Repos {
this.#secrets = new SecretRepo({
database,
});
this.#schedule = new ScheduleRepo({
database,
});
}
public get loads() {
@@ -57,8 +62,13 @@ class Repos {
public get secrets() {
return this.#secrets;
}
public get schedules() {
return this.#schedule;
}
}
export { findSchedulesSchema, addScheduleSchema } from './schedules/schedules.js';
export { findLogsSchema, addLogSchema } from './logs/logs.js';
export { setLoadSchema, findLoadsSchema } from './loads/loads.js';
export { createRunSchema, findRunsSchema } from './runs/runs.js';

View File

@@ -0,0 +1,22 @@
import { z } from 'zod';

// Payload for creating a schedule; `load` (the load id to run) and `cron`
// (the cron expression) are required, everything else is optional metadata.
const addScheduleSchema = z.object({
name: z.string().optional(),
description: z.string().optional(),
load: z.string(),
cron: z.string(),
input: z.string().optional(),
});

// Filter and paging options for querying schedules; all fields optional so
// an empty object matches everything.
const findSchedulesSchema = z.object({
ids: z.array(z.string()).optional(),
loadIds: z.array(z.string()).optional(),
offset: z.number().optional(),
limit: z.number().optional(),
});

type AddScheduleOptions = z.infer<typeof addScheduleSchema>;
type FindSchedulesOptions = z.infer<typeof findSchedulesSchema>;

export type { AddScheduleOptions, FindSchedulesOptions };
export { addScheduleSchema, findSchedulesSchema };

View File

@@ -0,0 +1,118 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { nanoid } from 'nanoid';
import { AddScheduleOptions, FindSchedulesOptions } from './schedules.schemas.js';
import { createHash } from 'crypto';

type ScheduleRepoEvents = {
  added: (id: string) => void;
  removed: (id: string) => void;
};

type ScheduleRepoOptions = {
  database: Database;
};

/**
 * Persistence layer for cron schedules. Emits 'added'/'removed' events so
 * the scheduler can keep its running cron jobs in sync with the table.
 */
class ScheduleRepo extends EventEmitter<ScheduleRepoEvents> {
  #options: ScheduleRepoOptions;

  constructor(options: ScheduleRepoOptions) {
    super();
    this.#options = options;
  }

  // Fetch a single schedule row by id; undefined when it does not exist.
  public get = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const result = await db('schedules').where('id', id).first();
    return result;
  };

  // Insert a new schedule, emit 'added', and return the generated id.
  public add = async (options: AddScheduleOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = nanoid();
    await db('schedules').insert({
      id,
      name: options.name,
      description: options.description,
      // `load` is NOT NULL in the schema and `input` is the run payload —
      // the original insert silently dropped both, so inserts failed (or
      // produced schedules the scheduler could not run).
      load: options.load,
      input: options.input,
      cron: options.cron,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    this.emit('added', id);
    return id;
  };

  // Resolve the ids a remove() would delete, plus a hash binding that set,
  // so a caller can confirm before the actual deletion.
  public prepareRemove = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('schedules').select('id');
    if (options.ids) {
      query.whereIn('id', options.ids);
    }
    if (options.loadIds) {
      // The column is named `load` (see the schedule migration and the knex
      // table types) — the original filtered on a nonexistent `loadId`.
      query.whereIn('load', options.loadIds);
    }
    const result = await query;
    const ids = result.map((row) => row.id);
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const hash = createHash('sha256').update(token).digest('hex');
    return {
      ids,
      hash,
    };
  };

  // Delete the given schedules, re-deriving and checking the hash from
  // prepareRemove so the set cannot have changed since confirmation.
  public remove = async (hash: string, ids: string[]) => {
    const { database } = this.#options;
    const db = await database.instance;
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const actualHash = createHash('sha256').update(token).digest('hex');
    if (hash !== actualHash) {
      throw new Error('Invalid hash');
    }
    await db('schedules').whereIn('id', ids).delete();
    ids.forEach((id) => {
      this.emit('removed', id);
    });
  };

  // List schedules with optional id/load filtering and paging.
  public find = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('schedules');
    if (options.ids) {
      query.whereIn('id', options.ids);
    }
    if (options.loadIds) {
      // Same column-name fix as prepareRemove: `load`, not `loadId`.
      query.whereIn('load', options.loadIds);
    }
    if (options.offset) {
      query.offset(options.offset);
    }
    if (options.limit) {
      query.limit(options.limit);
    }
    const results = await query;
    return results;
  };
}

export { addScheduleSchema, findSchedulesSchema } from './schedules.schemas.js';
export { ScheduleRepo };

View File

@@ -0,0 +1,53 @@
import { z } from 'zod';
import { addScheduleSchema, findSchedulesSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';

// Create a schedule; returns its generated id.
const add = publicProcedure.input(addScheduleSchema).mutation(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;
  const result = await schedules.add(input);
  return result;
});

// List schedules matching the filter.
const find = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;
  const result = await schedules.find(input);
  return result;
});

// Resolve which ids a remove would delete, plus a confirmation hash.
const prepareRemove = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;
  return await schedules.prepareRemove(input);
});

// Delete schedules previously resolved via prepareRemove.
const remove = publicProcedure
  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    // The original destructured `artifacts` here and deleted artifacts
    // instead of schedules — a copy/paste bug from the artifacts router.
    const { schedules } = repos;
    await schedules.remove(input.hash, input.ids);
  });

const schedulesRouter = router({
  add,
  find,
  remove,
  prepareRemove,
});

export { schedulesRouter };

View File

@@ -2,6 +2,7 @@ import { artifactsRouter } from './router.artifacts.js';
import { loadsRouter } from './router.loads.js';
import { logsRouter } from './router.logs.js';
import { runsRouter } from './router.runs.js';
import { schedulesRouter } from './router.schedules.js';
import { secretsRouter } from './router.secrets.js';
import { router } from './router.utils.js';
@@ -11,6 +12,7 @@ const rootRouter = router({
logs: logsRouter,
artifacts: artifactsRouter,
secrets: secretsRouter,
schedules: schedulesRouter,
});
type RootRouter = typeof rootRouter;

View File

@@ -59,7 +59,7 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
const { runs, secrets } = repos;
try {
const { script: scriptHash, input } = await runs.getById(id);
const scriptLocation = resolve(config.files.location, 'scripts', `${scriptHash}.js`);
const scriptLocation = resolve(config.files.data, 'scripts', `${scriptHash}.js`);
const script = await readFile(scriptLocation, 'utf-8');
const allSecrets = await secrets.getAll();
await runs.started(id);

View File

@@ -1,20 +1,26 @@
import { resolve } from 'path';
import envPaths from 'env-paths';
import { Database } from '../database/database.js';
import { Repos } from '../repos/repos.js';
import { Runner } from '../runner/runner.js';
import { Config } from '../config/config.js';
import { Auth } from '../auth/auth.js';
import { resolve } from 'path';
import { Scheduler } from '../scheduler/scheduler.js';
const paths = envPaths('mini-loader-server');
class Runtime {
#repos: Repos;
#runner: Runner;
#auth: Auth;
#scheduler: Scheduler;
constructor(options: Config) {
const database = new Database(options.database);
this.#repos = new Repos({ database, config: options });
this.#runner = new Runner({ repos: this.#repos, config: options });
this.#auth = new Auth({ config: options });
this.#scheduler = new Scheduler({ runs: this.#repos.runs, schedules: this.#repos.schedules });
}
public get repos() {
@@ -29,17 +35,22 @@ class Runtime {
return this.#auth;
}
public get scheduler() {
return this.#scheduler;
}
public static create = async () => {
const runtime = new Runtime({
database: {
client: 'sqlite3',
connection: {
filename: resolve(process.cwd(), 'data', 'database.sqlite'),
filename: resolve(paths.data, 'database.sqlite'),
},
useNullAsDefault: true,
},
files: {
location: resolve(process.cwd(), 'data', 'files'),
data: process.env.DATA_DIR || resolve(paths.data, 'data', 'files'),
cache: process.env.CACHE_DIR || resolve(paths.cache, 'data', 'cache'),
},
});

View File

@@ -0,0 +1,73 @@
import { CronJob } from 'cron';
import { ScheduleRepo } from '../repos/schedules/schedules.js';
import { RunRepo } from '../repos/runs/runs.js';

type SchedulerOptions = {
  runs: RunRepo;
  schedules: ScheduleRepo;
};

type RunningSchedule = {
  id: string;
  job: CronJob;
  stop: () => Promise<void>;
};

/**
 * Keeps one running cron job per schedule row, creating a run for the
 * schedule's load whenever its cron expression fires. Subscribes to the
 * repo's 'added'/'removed' events so the job set tracks the database.
 */
class Scheduler {
  #running: RunningSchedule[] = [];
  #options: SchedulerOptions;

  constructor(options: SchedulerOptions) {
    this.#options = options;
    const { schedules } = this.#options;
    schedules.on('added', this.#add);
    schedules.on('removed', this.#remove);
  }

  // Stop and forget every job registered for the given schedule id.
  #remove = async (id: string) => {
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    this.#running = this.#running.filter((r) => r.id !== id);
  };

  // (Re)create the job for a schedule id; replaces any existing job.
  #add = async (id: string) => {
    const { schedules, runs } = this.#options;
    // Drop any previous job for this id so re-adding acts as a replace —
    // the original stopped old jobs but left their entries in #running.
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    this.#running = this.#running.filter((r) => r.id !== id);
    const schedule = await schedules.get(id);
    if (!schedule) {
      return;
    }
    const job = new CronJob(schedule.cron, async () => {
      await runs.create({
        loadId: schedule.load,
      });
    });
    // cron v3 does not auto-start jobs (the constructor's `start` flag
    // defaults to false) — without this call no schedule would ever fire.
    job.start();
    const stop = async () => {
      job.stop();
    };
    this.#running.push({
      id: schedule.id,
      job,
      stop,
    });
  };

  // Stop all jobs and clear the registry.
  public stop = async () => {
    await Promise.all(this.#running.map((r) => r.stop()));
    this.#running = [];
  };

  // (Re)start: stop everything, then create a job for every stored schedule.
  public start = async () => {
    const { schedules } = this.#options;
    await this.stop();
    const all = await schedules.find({});
    for (const schedule of all) {
      await this.#add(schedule.id);
    }
  };
}

export { Scheduler };

View File

@@ -1,3 +1,4 @@
import pkg from '../../package.json';
import { fastifyTRPCPlugin, FastifyTRPCPluginOptions } from '@trpc/server/adapters/fastify';
import fastify from 'fastify';
import { RootRouter, rootRouter } from '../router/router.js';
@@ -13,9 +14,6 @@ const createServer = async (runtime: Runtime) => {
level: 'warn',
},
});
server.get('/', async () => {
return { hello: 'world' };
});
server.get('/health', async (req) => {
let authorized = false;
@@ -27,7 +25,7 @@ const createServer = async (runtime: Runtime) => {
authorized = true;
}
} catch (error) {}
return { authorized, status: 'ok' };
return { authorized, status: 'ok', version: pkg.version };
});
server.register(fastifyTRPCPlugin, {

42
pnpm-lock.yaml generated
View File

@@ -36,6 +36,12 @@ importers:
packages/cli:
dependencies:
'@morten-olsen/mini-loader-runner':
specifier: workspace:^
version: link:../runner
'@morten-olsen/mini-loader-server':
specifier: workspace:^
version: link:../server
'@rollup/plugin-auto-install':
specifier: ^3.0.5
version: 3.0.5(rollup@4.9.4)
@@ -60,6 +66,9 @@ importers:
commander:
specifier: ^11.1.0
version: 11.1.0
dotenv:
specifier: ^16.3.1
version: 16.3.1
env-paths:
specifier: ^3.0.0
version: 3.0.0
@@ -85,12 +94,6 @@ importers:
'@morten-olsen/mini-loader-configs':
specifier: workspace:^
version: link:../configs
'@morten-olsen/mini-loader-runner':
specifier: workspace:^
version: link:../runner
'@morten-olsen/mini-loader-server':
specifier: workspace:^
version: link:../server
'@types/inquirer':
specifier: ^9.0.7
version: 9.0.7
@@ -170,6 +173,12 @@ importers:
commander:
specifier: ^11.1.0
version: 11.1.0
cron:
specifier: ^3.1.6
version: 3.1.6
env-paths:
specifier: ^3.0.0
version: 3.0.0
eventemitter3:
specifier: ^5.0.1
version: 5.0.1
@@ -1285,6 +1294,10 @@ packages:
'@types/node': 20.10.8
dev: true
/@types/luxon@3.3.8:
resolution: {integrity: sha512-jYvz8UMLDgy3a5SkGJne8H7VA7zPV2Lwohjx0V8V31+SqAjNmurWMkk9cQhfvlcnXWudBpK9xPM1n4rljOcHYQ==}
dev: false
/@types/node@20.10.8:
resolution: {integrity: sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==}
dependencies:
@@ -2080,6 +2093,13 @@ packages:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
dev: false
/cron@3.1.6:
resolution: {integrity: sha512-cvFiQCeVzsA+QPM6fhjBtlKGij7tLLISnTSvFxVdnFGLdz+ZdXN37kNe0i2gefmdD17XuZA6n2uPVwzl4FxW/w==}
dependencies:
'@types/luxon': 3.3.8
luxon: 3.4.4
dev: false
/cross-spawn@7.0.3:
resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
engines: {node: '>= 8'}
@@ -2189,6 +2209,11 @@ packages:
esutils: 2.0.3
dev: true
/dotenv@16.3.1:
resolution: {integrity: sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==}
engines: {node: '>=12'}
dev: false
/eastasianwidth@0.2.0:
resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
dev: false
@@ -3744,6 +3769,11 @@ packages:
dependencies:
yallist: 4.0.0
/luxon@3.4.4:
resolution: {integrity: sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==}
engines: {node: '>=12'}
dev: false
/magic-string@0.25.9:
resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==}
dependencies: