mirror of https://github.com/morten-olsen/mini-loader.git
synced 2026-02-08 01:36:26 +01:00
Compare commits
5 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | a08f9e1c91 |  |
|  | e0c41d9220 |  |
|  | 028b65587e |  |
|  | 7436b3439c |  |
|  | 2109bc3af9 |  |
17 .github/workflows/release.yml (vendored)

@@ -71,12 +71,24 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4

       - name: Log in to the Container registry
         uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Retrieve version
+        run: |
+          echo "TAG_NAME=$(git describe --tag --abbrev=0) >> $GITHUB_OUTPUT
+        id: version
+
       - name: Extract metadata (tags, labels) for Docker
         id: meta
         uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7

@@ -84,11 +96,16 @@ jobs:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
           tags: |
             latest
+            ${{ steps.version.outputs.TAG_NAME }}

       - name: Build and push Docker image
         uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
         with:
           context: .
           file: ./docker/Dockerfile
+          platforms: linux/amd64,linux/arm64
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
           push: true
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
@@ -27,6 +27,10 @@ COPY --from=builder /app/out/full/ .
RUN pnpm turbo run build --filter=@morten-olsen/mini-loader-server

FROM base AS runner
+ENV \
+  NODE_ENV=production \
+  DATA_DIR=/data \
+  CACHE_DIR=/cache
RUN apk add --no-cache jq curl
WORKDIR /app

@@ -39,7 +43,7 @@ RUN chmod +x /entrypoint.sh

COPY --from=installer /app .
EXPOSE 4500
-VOLUME /app/data
+VOLUME /data

HEALTHCHECK \
  --interval=10s \
@@ -7,6 +7,8 @@ GID=${GID:-1001}
addgroup --system --gid ${GID} nodejs && \
adduser --system --uid ${UID} -G nodejs miniloader && \

-mkdir -p /app/data
-chown -R miniloader:nodejs /app/data
+mkdir -p ${DATA_DIR}
+mkdir -p ${CACHE_DIR}
+chown -R miniloader:nodejs ${DATA_DIR}
+chown -R miniloader:nodejs ${CACHE_DIR}
su miniloader -s /bin/sh -c "$CMD"
@@ -1,4 +1,4 @@
#!/usr/bin/env node

import 'source-map-support/register.js';
-import '../dist/esm/index.js';
+import '../dist/esm/src/index.js';
@@ -1,8 +1,8 @@
{
  "name": "@morten-olsen/mini-loader-cli",
  "version": "1.0.0",
-  "main": "./dist/esm/index.js",
-  "types": "./dist/esm/index.d.ts",
+  "main": "./dist/esm/src/index.js",
+  "types": "./dist/esm/src/index.d.ts",
  "license": "GPL-3.0",
  "bin": {
    "mini-loader": "./bin/index.mjs"

@@ -16,11 +16,12 @@
  ],
  "exports": {
    ".": {
-      "import": "./dist/esm/index.js"
+      "import": "./dist/esm/src/index.js"
    }
  },
  "dependencies": {
    "@morten-olsen/mini-loader-runner": "workspace:^",
+    "@morten-olsen/mini-loader-server": "workspace:^",
    "@rollup/plugin-auto-install": "^3.0.5",
    "@rollup/plugin-commonjs": "^25.0.7",
    "@rollup/plugin-json": "^6.1.0",

@@ -40,7 +41,6 @@
  },
  "devDependencies": {
    "@morten-olsen/mini-loader-configs": "workspace:^",
-    "@morten-olsen/mini-loader-server": "workspace:^",
    "@types/inquirer": "^9.0.7",
    "typescript": "^5.3.3"
  },
@@ -4,6 +4,7 @@ import { run as runLoad } from '@morten-olsen/mini-loader-runner';
import { bundle } from '../../bundler/bundler.js';
import { step } from '../../utils/step.js';
import { readSecrets } from './local.utils.js';
+import { Config } from '../../config/config.js';

const run = new Command('run');

@@ -12,6 +13,7 @@ run
  .argument('script')
  .action(async (script) => {
    const location = resolve(script);
+    const config = new Config();
    const { autoInstall } = run.opts();
    const secrets = await readSecrets();

@@ -21,6 +23,7 @@ run
    const { promise, emitter } = await runLoad({
      script: code,
      secrets,
+      cacheLocation: config.cacheLocation,
    });
    emitter.addListener('message', (message) => {
      switch (message.type) {
32 packages/cli/src/commands/schedules/schedules.add.ts (new file)

@@ -0,0 +1,32 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const add = new Command('add');

add
  .description('Add schedule')
  .argument('<load-id>', 'Load ID')
  .argument('<cron>', 'Cron')
  .option('-n, --name <name>', 'Name')
  .action(async (loadId, cron) => {
    const config = new Config();
    const context = new Context(config.context);
    const { name } = add.opts();
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const id = await step('Adding schedule', async () => {
      return await client.schedules.add.mutate({
        name,
        load: loadId,
        cron,
      });
    });

    console.log(`Schedule added with ID ${id}`);
  });

export { add };
39 packages/cli/src/commands/schedules/schedules.list.ts (new file)

@@ -0,0 +1,39 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const list = new Command('list');

const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

list
  .alias('ls')
  .description('List schedules')
  .option('-l, --load-ids <loadIds...>', 'Load ID')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { loadIds, offset, limit } = list.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const schedules = await step('Getting schedules', async () => {
      return await client.schedules.find.query({
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });
    console.table(schedules);
  });

export { list };
61 packages/cli/src/commands/schedules/schedules.remove.ts (new file)

@@ -0,0 +1,61 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';

const remove = new Command('remove');

const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

remove
  .alias('ls')
  .description('LRemove schedules')
  .option('-i, --ids <ids...>', 'Load IDs')
  .option('-l, --load-ids <loadIds...>', 'Load IDs')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { ids, loadIds, offset, limit } = remove.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const response = await step('Preparing to delete', async () => {
      return await client.schedules.prepareRemove.query({
        ids,
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });

    if (!response.ids.length) {
      console.log('No logs to delete');
      return;
    }
    const { confirm } = await inquirer.prompt([
      {
        type: 'confirm',
        name: 'confirm',
        message: `Are you sure you want to delete ${response.ids.length} schedules?`,
      },
    ]);

    if (!confirm) {
      return;
    }

    await step('Deleting artifacts', async () => {
      await client.artifacts.remove.mutate(response);
    });
  });

export { remove };
11 packages/cli/src/commands/schedules/schedules.ts (new file)

@@ -0,0 +1,11 @@
import { Command } from 'commander';
import { list } from './schedules.list.js';
import { remove } from './schedules.remove.js';
import { add } from './schedules.add.js';

const schedules = new Command('schedules');
schedules.addCommand(list);
schedules.addCommand(remove);
schedules.addCommand(add);

export { schedules };
@@ -7,12 +7,13 @@ type ConfigValues = {
  context?: string;
};

+const paths = envPaths('mini-loader');
+
class Config {
  #location: string;
  #config?: ConfigValues;

  constructor() {
-    const paths = envPaths('mini-loader');
    this.#location = join(paths.config, 'config.json');
    if (existsSync(this.#location)) {
      this.#config = JSON.parse(readFileSync(this.#location, 'utf-8'));

@@ -23,6 +24,10 @@ class Config {
    return this.#config?.context || 'default';
  }

+  public get cacheLocation() {
+    return join(paths.cache, this.context);
+  }
+
  public setContext = (context: string) => {
    this.#config = {
      ...(this.#config || {}),
@@ -8,6 +8,7 @@ import { secrets } from './commands/secrets/secrets.js';
import { local } from './commands/local/local.js';
import { auth } from './commands/auth/auth.js';
import { contexts } from './commands/contexts/contexts.js';
+import { schedules } from './commands/schedules/schedules.js';

program.addCommand(loads);
program.addCommand(runs);

@@ -17,6 +18,7 @@ program.addCommand(secrets);
program.addCommand(local);
program.addCommand(auth);
program.addCommand(contexts);
+program.addCommand(schedules);

program.version(pkg.version);
@@ -5,10 +5,11 @@ type RunOptions = {
  script: string;
  input?: Buffer | string;
  secrets?: Record<string, string>;
+  cacheLocation: string;
};

-const run = async ({ script, input, secrets }: RunOptions) => {
-  const info = await setup({ script, input, secrets });
+const run = async ({ script, input, secrets, cacheLocation }: RunOptions) => {
+  const info = await setup({ script, input, secrets, cacheLocation });

  const worker = new Worker(info.scriptLocation, {
    stdin: false,
@@ -1,5 +1,4 @@
import { join } from 'path';
-import os from 'os';
import { nanoid } from 'nanoid';
import { chmod, mkdir, rm, writeFile } from 'fs/promises';
import { createServer } from 'net';

@@ -9,6 +8,7 @@ type SetupOptions = {
  input?: Buffer | string;
  script: string;
  secrets?: Record<string, string>;
+  cacheLocation: string;
};

type RunEvents = {

@@ -20,7 +20,7 @@ type RunEvents = {
const setup = async (options: SetupOptions) => {
  const { input, script, secrets } = options;
  const emitter = new EventEmitter<RunEvents>();
-  const dataDir = join(os.tmpdir(), 'mini-loader', nanoid());
+  const dataDir = join(options.cacheLocation, nanoid());

  await mkdir(dataDir, { recursive: true });
  await chmod(dataDir, 0o700);
@@ -1,4 +1,4 @@
#!/usr/bin/env node

import 'source-map-support/register.js';
-import '../dist/esm/index.js';
+import '../dist/esm/src/index.js';
@@ -2,8 +2,8 @@
  "name": "@morten-olsen/mini-loader-server",
  "version": "1.0.0",
  "license": "GPL-3.0",
-  "main": "./dist/esm/index.js",
-  "types": "./dist/esm/index.d.ts",
+  "main": "./dist/esm/src/index.js",
+  "types": "./dist/esm/src/index.d.ts",
  "bin": {
    "mini-loader-server": "./bin/index.mjs"
  },

@@ -16,7 +16,7 @@
  ],
  "exports": {
    ".": {
-      "import": "./dist/esm/index.js"
+      "import": "./dist/esm/src/index.js"
    }
  },
  "devDependencies": {

@@ -31,6 +31,8 @@
    "@trpc/client": "^10.45.0",
    "@trpc/server": "^10.45.0",
    "commander": "^11.1.0",
+    "cron": "^3.1.6",
+    "env-paths": "^3.0.0",
    "eventemitter3": "^5.0.1",
    "fastify": "^4.25.2",
    "jsonwebtoken": "^9.0.2",
@@ -20,10 +20,10 @@ class Auth {

  #setup = async () => {
    const { config } = this.#options;
-    const secretLocation = resolve(config.files.location, 'secret');
+    const secretLocation = resolve(config.files.data, 'secret');
    let secret = '';
+    await mkdir(config.files.data, { recursive: true });
    if (!existsSync(secretLocation)) {
-      await mkdir(config.files.location, { recursive: true });
      secret = nanoid();
      await writeFile(secretLocation, secret);
    } else {
@@ -3,7 +3,8 @@ import { Knex } from 'knex';
type Config = {
  database: Omit<Knex.Config, 'migrations'>;
  files: {
-    location: string;
+    data: string;
+    cache: string;
  };
  auth?: {
    oidc?: {
@@ -0,0 +1,22 @@
import { Knex } from 'knex';

const name = 'schedule-support';

const up = async (knex: Knex) => {
  await knex.schema.createTable('schedules', (table) => {
    table.string('id').primary();
    table.string('name').nullable();
    table.string('description').nullable();
    table.string('load').notNullable();
    table.string('cron').notNullable();
    table.string('input').nullable();
    table.timestamp('createdAt').notNullable();
    table.timestamp('updatedAt').notNullable();
  });
};

const down = async (knex: Knex) => {
  await knex.schema.dropTable('schedule');
};

export { name, up, down };
@@ -1,6 +1,7 @@
import { Knex } from 'knex';

import * as init from './migration.init.js';
+import * as scheduleSupport from './migration.schedule.js';

type Migration = {
  name: string;

@@ -8,7 +9,7 @@ type Migration = {
  down: (knex: Knex) => Promise<void>;
};

-const migrations = [init] satisfies Migration[];
+const migrations = [init, scheduleSupport] satisfies Migration[];

const source: Knex.MigrationSource<Migration> = {
  getMigrations: async () => migrations,
|
|||||||
start.action(async () => {
|
start.action(async () => {
|
||||||
const port = 4500;
|
const port = 4500;
|
||||||
const runtime = await Runtime.create();
|
const runtime = await Runtime.create();
|
||||||
|
await runtime.scheduler.start();
|
||||||
const server = await createServer(runtime);
|
const server = await createServer(runtime);
|
||||||
await server.listen({
|
await server.listen({
|
||||||
port,
|
port,
|
||||||
|
|||||||
10 packages/server/src/knex.d.ts (vendored)

@@ -43,5 +43,15 @@ declare module 'knex/types/tables.js' {
      createdAt: Date;
      updatedAt: Date;
    };
+    schedules: {
+      id: string;
+      name?: string;
+      description?: string;
+      load: string;
+      cron: string;
+      input?: string;
+      createdAt: Date;
+      updatedAt: Date;
+    };
  }
}
@@ -62,7 +62,7 @@ class LoadRepo extends EventEmitter<LoadRepoEvents> {
    const db = await database.instance;
    const id = options.id || nanoid();
    const script = createHash('sha256').update(options.script).digest('hex');
-    const scriptDir = resolve(this.#options.config.files.location, 'scripts');
+    const scriptDir = resolve(this.#options.config.files.data, 'scripts');
    await mkdir(scriptDir, { recursive: true });
    await writeFile(resolve(scriptDir, `${script}.js`), options.script);
@@ -4,6 +4,7 @@ import { ArtifactRepo } from './artifacts/artifacts.js';
import { LoadRepo } from './loads/loads.js';
import { LogRepo } from './logs/logs.js';
import { RunRepo } from './runs/runs.js';
+import { ScheduleRepo } from './schedules/schedules.js';
import { SecretRepo } from './secrets/secrets.js';

type ReposOptions = {

@@ -17,6 +18,7 @@ class Repos {
  #logs: LogRepo;
  #artifacts: ArtifactRepo;
  #secrets: SecretRepo;
+  #schedule: ScheduleRepo;

  constructor({ database, config }: ReposOptions) {
    this.#loads = new LoadRepo({

@@ -36,6 +38,9 @@ class Repos {
    this.#secrets = new SecretRepo({
      database,
    });
+    this.#schedule = new ScheduleRepo({
+      database,
+    });
  }

  public get loads() {

@@ -57,8 +62,13 @@ class Repos {
  public get secrets() {
    return this.#secrets;
  }
+
+  public get schedules() {
+    return this.#schedule;
+  }
}

+export { findSchedulesSchema, addScheduleSchema } from './schedules/schedules.js';
export { findLogsSchema, addLogSchema } from './logs/logs.js';
export { setLoadSchema, findLoadsSchema } from './loads/loads.js';
export { createRunSchema, findRunsSchema } from './runs/runs.js';
22 packages/server/src/repos/schedules/schedules.schemas.ts (new file)

@@ -0,0 +1,22 @@
import { z } from 'zod';

const addScheduleSchema = z.object({
  name: z.string().optional(),
  description: z.string().optional(),
  load: z.string(),
  cron: z.string(),
  input: z.string().optional(),
});

const findSchedulesSchema = z.object({
  ids: z.array(z.string()).optional(),
  loadIds: z.array(z.string()).optional(),
  offset: z.number().optional(),
  limit: z.number().optional(),
});

type AddScheduleOptions = z.infer<typeof addScheduleSchema>;
type FindSchedulesOptions = z.infer<typeof findSchedulesSchema>;

export type { AddScheduleOptions, FindSchedulesOptions };
export { addScheduleSchema, findSchedulesSchema };
118 packages/server/src/repos/schedules/schedules.ts (new file)

@@ -0,0 +1,118 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { nanoid } from 'nanoid';
import { AddScheduleOptions, FindSchedulesOptions } from './schedules.schemas.js';
import { createHash } from 'crypto';

type ScheduleRepoEvents = {
  added: (id: string) => void;
  removed: (id: string) => void;
};

type ScheduleRepoOptions = {
  database: Database;
};

class ScheduleRepo extends EventEmitter<ScheduleRepoEvents> {
  #options: ScheduleRepoOptions;

  constructor(options: ScheduleRepoOptions) {
    super();
    this.#options = options;
  }

  public get = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const result = await db('schedules').where('id', id).first();
    return result;
  };

  public add = async (options: AddScheduleOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = nanoid();

    await db('schedules').insert({
      id,
      name: options.name,
      description: options.description,
      cron: options.cron,
      createdAt: new Date(),
      updatedAt: new Date(),
    });

    this.emit('added', id);

    return id;
  };

  public prepareRemove = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;

    const query = db('schedules').select('id');

    if (options.ids) {
      query.whereIn('id', options.ids);
    }

    if (options.loadIds) {
      query.whereIn('loadId', options.loadIds);
    }

    const result = await query;
    const ids = result.map((row) => row.id);
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const hash = createHash('sha256').update(token).digest('hex');
    return {
      ids,
      hash,
    };
  };

  public remove = async (hash: string, ids: string[]) => {
    const { database } = this.#options;
    const db = await database.instance;
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const actualHash = createHash('sha256').update(token).digest('hex');

    if (hash !== actualHash) {
      throw new Error('Invalid hash');
    }

    await db('schedules').whereIn('id', ids).delete();
    ids.forEach((id) => {
      this.emit('removed', id);
    });
  };

  public find = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;

    const query = db('schedules');

    if (options.ids) {
      query.whereIn('id', options.ids);
    }

    if (options.loadIds) {
      query.whereIn('loadId', options.loadIds);
    }

    if (options.offset) {
      query.offset(options.offset);
    }

    if (options.limit) {
      query.limit(options.limit);
    }

    const results = await query;
    return results;
  };
}

export { addScheduleSchema, findSchedulesSchema } from './schedules.schemas.js';
export { ScheduleRepo };
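The prepareRemove/remove pair above forms a small confirm-before-delete handshake: prepareRemove returns the matching ids together with a hash over that id set, and remove only deletes when the caller echoes back a hash that still matches the ids it sends. A minimal client-side sketch of that flow, assuming a tRPC client shaped like the CLI's createClient(...) and an arbitrary confirm(...) prompt (both hypothetical here, not part of this diff):

```ts
// Hedged sketch: `client` stands in for a router-typed tRPC client and
// `confirm` for any yes/no prompt; neither name comes from the repository.
const deleteSchedulesForLoad = async (
  client: any,
  confirm: (message: string) => Promise<boolean>,
  loadId: string,
) => {
  // Step 1: ask the server which schedules would be removed, plus a hash over that id set.
  const { ids, hash } = await client.schedules.prepareRemove.query({ loadIds: [loadId] });
  if (!ids.length) {
    return;
  }

  // Step 2: confirm the exact set that was prepared.
  if (!(await confirm(`Delete ${ids.length} schedules?`))) {
    return;
  }

  // Step 3: echo ids and hash back; the repo recomputes the hash and throws
  // "Invalid hash" if the set no longer matches what was prepared.
  await client.schedules.remove.mutate({ hash, ids });
};
```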
53 packages/server/src/router/router.schedules.ts (new file)

@@ -0,0 +1,53 @@
import { z } from 'zod';
import { addScheduleSchema, findSchedulesSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';

const add = publicProcedure.input(addScheduleSchema).mutation(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  const result = await schedules.add(input);
  return result;
});

const find = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  const result = await schedules.find(input);
  return result;
});

const prepareRemove = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  return await schedules.prepareRemove(input);
});

const remove = publicProcedure
  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    const { artifacts } = repos;

    await artifacts.remove(input.hash, input.ids);
  });

const schedulesRouter = router({
  add,
  find,
  remove,
  prepareRemove,
});

export { schedulesRouter };
@@ -2,6 +2,7 @@ import { artifactsRouter } from './router.artifacts.js';
import { loadsRouter } from './router.loads.js';
import { logsRouter } from './router.logs.js';
import { runsRouter } from './router.runs.js';
+import { schedulesRouter } from './router.schedules.js';
import { secretsRouter } from './router.secrets.js';
import { router } from './router.utils.js';

@@ -11,6 +12,7 @@ const rootRouter = router({
  logs: logsRouter,
  artifacts: artifactsRouter,
  secrets: secretsRouter,
+  schedules: schedulesRouter,
});

type RootRouter = typeof rootRouter;
@@ -59,7 +59,7 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
    const { runs, secrets } = repos;
    try {
      const { script: scriptHash, input } = await runs.getById(id);
-      const scriptLocation = resolve(config.files.location, 'scripts', `${scriptHash}.js`);
+      const scriptLocation = resolve(config.files.data, 'scripts', `${scriptHash}.js`);
      const script = await readFile(scriptLocation, 'utf-8');
      const allSecrets = await secrets.getAll();
      await runs.started(id);

@@ -67,6 +67,7 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
        script,
        secrets: allSecrets,
        input,
+        cacheLocation: config.files.cache,
      });
      this.#run = current;
      const { promise, emitter } = current;
@@ -1,20 +1,26 @@
+import { resolve } from 'path';
+import envPaths from 'env-paths';
import { Database } from '../database/database.js';
import { Repos } from '../repos/repos.js';
import { Runner } from '../runner/runner.js';
import { Config } from '../config/config.js';
import { Auth } from '../auth/auth.js';
-import { resolve } from 'path';
+import { Scheduler } from '../scheduler/scheduler.js';
+
+const paths = envPaths('mini-loader-server');

class Runtime {
  #repos: Repos;
  #runner: Runner;
  #auth: Auth;
+  #scheduler: Scheduler;

  constructor(options: Config) {
    const database = new Database(options.database);
    this.#repos = new Repos({ database, config: options });
    this.#runner = new Runner({ repos: this.#repos, config: options });
    this.#auth = new Auth({ config: options });
+    this.#scheduler = new Scheduler({ runs: this.#repos.runs, schedules: this.#repos.schedules });
  }

  public get repos() {

@@ -29,17 +35,22 @@ class Runtime {
    return this.#auth;
  }

+  public get scheduler() {
+    return this.#scheduler;
+  }
+
  public static create = async () => {
    const runtime = new Runtime({
      database: {
        client: 'sqlite3',
        connection: {
-          filename: resolve(process.cwd(), 'data', 'database.sqlite'),
+          filename: resolve(paths.data, 'database.sqlite'),
        },
        useNullAsDefault: true,
      },
      files: {
-        location: resolve(process.cwd(), 'data', 'files'),
+        data: process.env.DATA_DIR || resolve(paths.data, 'data', 'files'),
+        cache: process.env.CACHE_DIR || resolve(paths.cache, 'data', 'cache'),
      },
    });
73 packages/server/src/scheduler/scheduler.ts (new file)

@@ -0,0 +1,73 @@
import { CronJob } from 'cron';
import { ScheduleRepo } from '../repos/schedules/schedules.js';
import { RunRepo } from '../repos/runs/runs.js';

type SchedulerOptions = {
  runs: RunRepo;
  schedules: ScheduleRepo;
};

type RunningSchedule = {
  id: string;
  job: CronJob;
  stop: () => Promise<void>;
};

class Scheduler {
  #running: RunningSchedule[] = [];
  #options: SchedulerOptions;

  constructor(options: SchedulerOptions) {
    this.#options = options;
    const { schedules } = this.#options;
    schedules.on('added', this.#add);
    schedules.on('removed', this.#remove);
  }

  #remove = async (id: string) => {
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    this.#running = this.#running.filter((r) => r.id !== id);
  };

  #add = async (id: string) => {
    const { schedules, runs } = this.#options;
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    const schedule = await schedules.get(id);
    if (!schedule) {
      return;
    }
    const job = new CronJob(schedule.cron, async () => {
      await runs.create({
        loadId: schedule.load,
      });
    });
    const stop = async () => {
      job.stop();
    };
    this.#running.push({
      id: schedule.id,
      job,
      stop,
    });
  };

  public stop = async () => {
    for (const running of this.#running) {
      await running.stop();
      this.#running = this.#running.filter((r) => r !== running);
    }
  };

  public start = async () => {
    const { schedules } = this.#options;
    await this.stop();
    const all = await schedules.find({});
    for (const schedule of all) {
      await this.#add(schedule.id);
    }
  };
}

export { Scheduler };
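For context on the CronJob calls used above: with cron@3 a job constructed without the start argument stays idle until start() is called, and stop() halts further ticks. A small standalone sketch (the tick handler and the cron expression are placeholders, not taken from the repository):

```ts
import { CronJob } from 'cron';

// Placeholder tick handler; the real Scheduler enqueues a run for the schedule's load.
const onTick = async () => {
  console.log('tick', new Date().toISOString());
};

// A six-field expression such as "*/5 * * * * *" fires every five seconds.
// Constructing the job does not start it; start() must be called explicitly.
const job = new CronJob('*/5 * * * * *', onTick);
job.start();

// Later, for example when the schedule is removed, stop further ticks.
setTimeout(() => job.stop(), 30_000);
```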
@@ -1,3 +1,4 @@
+import pkg from '../../package.json';
import { fastifyTRPCPlugin, FastifyTRPCPluginOptions } from '@trpc/server/adapters/fastify';
import fastify from 'fastify';
import { RootRouter, rootRouter } from '../router/router.js';

@@ -13,9 +14,6 @@ const createServer = async (runtime: Runtime) => {
      level: 'warn',
    },
  });
-  server.get('/', async () => {
-    return { hello: 'world' };
-  });

  server.get('/health', async (req) => {
    let authorized = false;

@@ -27,7 +25,7 @@ const createServer = async (runtime: Runtime) => {
        authorized = true;
      }
    } catch (error) {}
-    return { authorized, status: 'ok' };
+    return { authorized, status: 'ok', version: pkg.version };
  });

  server.register(fastifyTRPCPlugin, {
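After this change the /health endpoint reports the package version alongside the authorization flag. A quick probe might look like the sketch below (hedged: the base URL and the bearer-token header format are assumptions, though 4500 is the port used elsewhere in this diff):

```ts
// Node 18+ ships a global fetch; the token is optional and only affects `authorized`.
const probeHealth = async (baseUrl = 'http://localhost:4500', token?: string) => {
  const response = await fetch(`${baseUrl}/health`, {
    headers: token ? { authorization: `Bearer ${token}` } : {},
  });
  const body = (await response.json()) as { status: string; authorized: boolean; version: string };
  console.log(`status=${body.status} authorized=${body.authorized} version=${body.version}`);
  return body;
};

await probeHealth();
```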
28 pnpm-lock.yaml (generated)

@@ -39,6 +39,9 @@ importers:
      '@morten-olsen/mini-loader-runner':
        specifier: workspace:^
        version: link:../runner
+     '@morten-olsen/mini-loader-server':
+       specifier: workspace:^
+       version: link:../server
      '@rollup/plugin-auto-install':
        specifier: ^3.0.5
        version: 3.0.5(rollup@4.9.4)

@@ -91,9 +94,6 @@ importers:
      '@morten-olsen/mini-loader-configs':
        specifier: workspace:^
        version: link:../configs
-     '@morten-olsen/mini-loader-server':
-       specifier: workspace:^
-       version: link:../server
      '@types/inquirer':
        specifier: ^9.0.7
        version: 9.0.7

@@ -173,6 +173,12 @@ importers:
      commander:
        specifier: ^11.1.0
        version: 11.1.0
+     cron:
+       specifier: ^3.1.6
+       version: 3.1.6
+     env-paths:
+       specifier: ^3.0.0
+       version: 3.0.0
      eventemitter3:
        specifier: ^5.0.1
        version: 5.0.1

@@ -1288,6 +1294,10 @@ packages:
      '@types/node': 20.10.8
    dev: true

+  /@types/luxon@3.3.8:
+    resolution: {integrity: sha512-jYvz8UMLDgy3a5SkGJne8H7VA7zPV2Lwohjx0V8V31+SqAjNmurWMkk9cQhfvlcnXWudBpK9xPM1n4rljOcHYQ==}
+    dev: false
+
  /@types/node@20.10.8:
    resolution: {integrity: sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==}
    dependencies:

@@ -2083,6 +2093,13 @@ packages:
    resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
    dev: false

+  /cron@3.1.6:
+    resolution: {integrity: sha512-cvFiQCeVzsA+QPM6fhjBtlKGij7tLLISnTSvFxVdnFGLdz+ZdXN37kNe0i2gefmdD17XuZA6n2uPVwzl4FxW/w==}
+    dependencies:
+      '@types/luxon': 3.3.8
+      luxon: 3.4.4
+    dev: false
+
  /cross-spawn@7.0.3:
    resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
    engines: {node: '>= 8'}

@@ -3752,6 +3769,11 @@ packages:
    dependencies:
      yallist: 4.0.0

+  /luxon@3.4.4:
+    resolution: {integrity: sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==}
+    engines: {node: '>=12'}
+    dev: false
+
  /magic-string@0.25.9:
    resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==}
    dependencies: