12 Commits
0.2.5 ... 0.5.6

Author SHA1 Message Date
Morten Olsen
23ffd8bd73 fix: include runner in dependencies and await context create (#43) 2024-01-15 21:41:27 +01:00
Morten Olsen
1c3b993ab2 docs: completed v1 docs (#36) 2024-01-15 15:10:19 +01:00
Morten Olsen
161a098c9f ci: docker fix (#37) 2024-01-15 15:09:51 +01:00
Morten Olsen
a08f9e1c91 chore: docker multi arch (#35) 2024-01-15 13:50:06 +01:00
Morten Olsen
e0c41d9220 fix: docker build (#34) 2024-01-14 13:04:11 +01:00
Morten Olsen
028b65587e fix: insecure tmp path (#33)
Fixes #13
2024-01-14 13:00:17 +01:00
Morten Olsen
7436b3439c feat: improved configuration (#31) 2024-01-14 12:49:54 +01:00
Morten Olsen
2109bc3af9 feat: add scheduler (#30) 2024-01-14 12:30:39 +01:00
Morten Olsen
eeaad68f6e feat: initial policy system and version (#29)
Resolves #22 #24 and #26
2024-01-14 09:45:42 +01:00
Morten Olsen
c7ca97f041 feat: local secrets (#28) 2024-01-14 09:45:10 +01:00
Morten Olsen
c8e02d8da4 ci: fix docker tag (#17) 2024-01-13 14:07:33 +01:00
Morten Olsen
9a5b27f1be ci: publish with latest tag (#16) 2024-01-13 14:01:46 +01:00
55 changed files with 830 additions and 64 deletions

View File

@@ -71,22 +71,41 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Log in to the Container registry - name: Log in to the Container registry
uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
with: with:
registry: ${{ env.REGISTRY }} registry: ${{ env.REGISTRY }}
username: ${{ github.actor }} username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Retrieve version
run: |
echo "TAG_NAME=$(git describe --tag --abbrev=0)" >> $GITHUB_OUTPUT
id: version
- name: Extract metadata (tags, labels) for Docker - name: Extract metadata (tags, labels) for Docker
id: meta id: meta
uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
with: with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
latest
${{ steps.version.outputs.TAG_NAME }}
- name: Build and push Docker image - name: Build and push Docker image
uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4 uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
with: with:
context: . context: .
file: ./docker/Dockerfile file: ./docker/Dockerfile
platforms: linux/amd64,linux/arm64
cache-from: type=gha
cache-to: type=gha,mode=max
push: true push: true
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}

View File

@@ -1,10 +1,8 @@
![banner](./assets/banner.png) ![banner](./assets/banner.png)
# Welcome to Mini Loader! 🌐 # Welcome to Mini Loader! 🌐
Welcome to mini loader, a lightweight, Docker-based server solution for managing and executing workloads with ease. Designed for developers, small teams, and anyone in need of a simple yet powerful tool for running tasks, hosting API servers, or scheduling routine jobs. Welcome to mini loader, a lightweight server solution for managing and executing workloads with ease. Designed for developers, small teams, and anyone in need of a simple yet powerful tool for running tasks, hosting API servers, or scheduling routine jobs.
## Features ## Features
@@ -15,18 +13,37 @@ Welcome to mini loader, a lightweight, Docker-based server solution for managing
- **Task Scheduling**: Built-in support for cron-like job scheduling. - **Task Scheduling**: Built-in support for cron-like job scheduling.
- **HTTP Gateway**: Expose a HTTP server from your workloads - **HTTP Gateway**: Expose a HTTP server from your workloads
Also see [anti-features and limitations](./docs/anti-features.md) Also see [anti-features and limitations](./docs/02-anti-features.md)
:construction: This project is under active development and has not reached v1.0 yet. Expect some bugs and potential breaking changes in APIs. We appreciate your patience and welcome your feedback as we work towards a stable release!
For an overview of what's coming next, check out our roadmap at [GitHub Milestones](https://github.com/morten-olsen/mini-loader/milestones).
## Quick Start ## Quick Start
Get up and running with mini loader in just a few steps: Get up and running with mini loader in just a few steps:
1. **Install the CLI**: `npm install -g @morten-olsen/mini-loader-cli` ```bash
2. **Deploy the Server**: `docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main`. # Install the CLI and the server
3. **Push Your First Load**: `mini-loader loads push script.mjs -r -i first` npm i -g @morten-olsen/mini-loader-cli @morten-olsen/mini-loader-server
3. **See the logs**: `mini-loader logs ls -l first`
For a detailed guide on getting started, please refer to the [Getting Started Tutorial](./docs/getting-started.md). # Start the server
mini-loader-server start &
# Get your access token
mini-loader-server create-token
# Authenticate the CLI
mini-loader auth login
# Push your first workload
mini-loader loads push -r -ai my-script.js -i first
# See the output logs
mini-loader logs ls -l first
```
For a detailed guide on getting started, please refer to the [Getting Started Tutorial](./docs/01-getting-started.md).
## Support and Contributions ## Support and Contributions
@@ -41,4 +58,4 @@ mini loader is open-source software licensed under the [GPL-3 License](./LICENSE
## Let's Get Started! ## Let's Get Started!
Dive into the world of simplified workload management with mini loader. Start with our [Getting Started Tutorial](./docs/getting-started.md) and unleash the full potential of your tasks and applications! Dive into the world of simplified workload management with mini loader. Start with our [Getting Started Tutorial](./docs/01-getting-started.md) and unleash the full potential of your tasks and applications!

View File

@@ -27,6 +27,10 @@ COPY --from=builder /app/out/full/ .
RUN pnpm turbo run build --filter=@morten-olsen/mini-loader-server RUN pnpm turbo run build --filter=@morten-olsen/mini-loader-server
FROM base AS runner FROM base AS runner
ENV \
NODE_ENV=production \
DATA_DIR=/data \
CACHE_DIR=/cache
RUN apk add --no-cache jq curl RUN apk add --no-cache jq curl
WORKDIR /app WORKDIR /app
@@ -39,7 +43,7 @@ RUN chmod +x /entrypoint.sh
COPY --from=installer /app . COPY --from=installer /app .
EXPOSE 4500 EXPOSE 4500
VOLUME /app/data VOLUME /data
HEALTHCHECK \ HEALTHCHECK \
--interval=10s \ --interval=10s \

View File

@@ -7,6 +7,8 @@ GID=${GID:-1001}
addgroup --system --gid ${GID} nodejs && \ addgroup --system --gid ${GID} nodejs && \
adduser --system --uid ${UID} -G nodejs miniloader && \ adduser --system --uid ${UID} -G nodejs miniloader && \
mkdir -p /app/data mkdir -p ${DATA_DIR}
chown -R miniloader:nodejs /app/data mkdir -p ${CACHE_DIR}
chown -R miniloader:nodejs ${DATA_DIR}
chown -R miniloader:nodejs ${CACHE_DIR}
su miniloader -s /bin/sh -c "$CMD" su miniloader -s /bin/sh -c "$CMD"

View File

@@ -15,20 +15,18 @@ Before diving into mini loader, ensure you have the following:
## Contents ## Contents
- [Creating your first workload](./installation.md): Learn how to write workloads and execute them locally with the mini loader CLI - [Creating your first workload](./03-tutorial/01-first-workload.md): Learn how to write workloads and execute them locally with the mini loader CLI
- [Running the server](./pushing-managing-loads.md): Instructions on how to run the server locally using docker. - [Running the server](./03-tutorial/02-setup-server.md): Instructions on how to run the server locally using docker.
- [Interacting with the server](./interacting-with-server.md): Learn the basic commands used to manage workloads. - [Interacting with the server](./03-tutorial/03-interacting-with-server.md): Learn the basic commands used to manage workloads.
- [Managing secrets](./managing-secrets.md): Upload secrets to the server that can be used inside your scripts. - [Managing secrets](./03-tutorial/04-managing-secrets.md): Upload secrets to the server that can be used inside your scripts.
- [Authorization](./setting-up-oidc.md): Extend the authorization using OIDC - [Create an API](./03-tutorial/05-creating-an-api.md): Create a workload which exposes a HTTP api
- [Create an API](./creating-an-api.md): Create a workload which exposes a HTTP api
## Getting Help ## Getting Help
If you encounter any issues or have questions, please refer to the [FAQs](./faqs.md) If you encounter any issues or have questions, please refer to the [FAQs](./04-faqs.md)
## Let's Get Started! ## Let's Get Started!
Ready to streamline your workload management? Let's jump right into [creating your first workload](./first-workload.md) and set up the mini loader CLI! Ready to streamline your workload management? Let's jump right into [creating your first workload](./03-tutorial/01-first-workload.md) and set up the mini loader CLI!
[Next: create a workload](./03-tutorial/01-first-workload.md)
[Next: create a workload](./first-workload.md)

View File

@@ -46,4 +46,4 @@ After running the command, you should see an output confirming that a new artifa
Congratulations on setting up and running your first script with mini loader! You're now ready to take the next step. Congratulations on setting up and running your first script with mini loader! You're now ready to take the next step.
[Next: Setting Up the Server](./setup-server.md) [Next: Setting Up the Server](./02-setup-server.md)

View File

@@ -1,14 +1,15 @@
Certainly! Here's a revised version of your documentation page to make it Certainly! Here's a revised version of your documentation page to make it
## Quick Start with mini loader using Docker ## Quick Start with mini loader using Docker
This guide will help you quickly set up and run a mini loader server using Docker. Follow these simple steps to deploy your server and start interacting with it using the [mini-loader CLI](./first-workload.md). This guide will help you quickly set up and run a mini loader server using Docker. Follow these simple steps to deploy your server and start interacting with it using the [mini-loader CLI](./01-first-workload.md).
### Step 1: Deploy the mini loader Container ### Step 1: Deploy the mini loader Container
To begin, let's deploy the mini loader container. Run the following command in your terminal: To begin, let's deploy the mini loader container. Run the following command in your terminal:
```bash ```bash
docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:latest
``` ```
This command downloads the latest mini loader image and runs it, exposing port 4500. This command downloads the latest mini loader image and runs it, exposing port 4500.
@@ -57,4 +58,4 @@ This command lists all the loads currently on your server, confirming that the C
You've successfully deployed and configured your mini loader server using Docker! You're now ready to start interacting with the server. You've successfully deployed and configured your mini loader server using Docker! You're now ready to start interacting with the server.
[Next: Interacting with the Server](./interacting-with-server.md) [Next: Interacting with the Server](./03-interacting-with-server.md)

View File

@@ -67,4 +67,4 @@ Replace `<id>` with the identifier of the artifact you wish to download.
You're now equipped to manage loads, runs, logs, and artifacts using the mini loader CLI. For advanced usage, such as managing secrets, proceed to the next section. You're now equipped to manage loads, runs, logs, and artifacts using the mini loader CLI. For advanced usage, such as managing secrets, proceed to the next section.
[Next: Managing Secrets](./managing-secrets.md) [Next: Managing Secrets](./04-managing-secrets.md)

View File

@@ -0,0 +1,81 @@
## Managing Secrets
### Introduction
In many workflows, accessing sensitive data such as API tokens or credentials is essential. To handle this securely, you can use secrets management. This tutorial demonstrates how to manage secrets using the CLI and implement them in a simple Node.js workload.
### Creating Secrets with the CLI
To create a new secret, use the `mini-loader` CLI as follows:
```bash
mini-loader secrets set <id>
```
For example, to store a GitHub personal access token, you would use:
```bash
mini-loader secrets set githubtoken
```
Upon execution, you'll be prompted to enter your access token.
### Implementing Secrets in Your Workload
Next, let's create a Node.js script (`github.js`) that uses this token to fetch your GitHub username and saves it as an artifact.
1. **Create `github.js` File:**
```javascript
import { secrets, artifacts } from '@morten-olsen/mini-loader';
import { Octokit } from '@octokit/rest';
// Retrieve the secret
const accessToken = secrets.get('githubtoken');
// Main async function to fetch and save GitHub username
async function run() {
const octokit = new Octokit({ auth: accessToken });
const user = await octokit.users.getAuthenticated();
await artifacts.create('user', JSON.stringify(user.data.login));
}
// Execute the function
run().catch(console.error);
```
This script initializes the Octokit client with the access token, fetches the authenticated user's data, and then saves the username as an artifact.
2. **Run the Script:**
Execute your script with `mini-loader`:
```bash
mini-loader loads push github.js -r -ai
```
### Managing Local Secrets
If you're running the script locally, you can manage secrets either by using a `.secrets` file or setting an environment variable.
1. **Using a `.secrets` File:**
Create a file named `.secrets` and add your token:
```
githubtoken=<your-token>
```
2. **Using Environment Variables:**
Prefix your environment variable with `ML_S_` and run the script:
```bash
ML_S_githubtoken=<your-token> mini-loader local run github.js -ai
```
### Conclusion
By following these steps, you can securely manage and use secrets within your workloads, enhancing the security and integrity of your applications.
[Next: Creating an API](./05-creating-an-api.md)

View File

@@ -0,0 +1,52 @@
## Creating an API Inside Your Workload
Workloads in mini loader can set up simple HTTP servers by connecting to a socket file, a feature supported by many JavaScript server libraries.
### Binding Your Workload to an HTTP Endpoint
To expose your workload as an HTTP server, specify the path parameter using the `getPath()` method provided by the `@morten-olsen/mini-loader` package. This method dynamically assigns a path for your API.
### Important Note
Please be aware that the gateway provided by mini loader isn't fully featured. As such, certain functionalities like streaming and WebSockets may not be supported.
### Example: Setting Up a Server with Fastify
Here's how you can create a simple API server using Fastify in TypeScript:
```typescript
import { http } from '@morten-olsen/mini-loader';
import fastify from 'fastify';
const server = fastify();
// Handling all requests and returning the requested URL
server.all('*', async (req) => {
return req.url;
});
// Listening on the path provided by mini loader
server.listen({
path: http.getPath(),
});
```
With this setup, your server will respond to all incoming requests by returning the requested URL.
### Deploying Your Workload
Now, you can push and run your workload just like any other script:
```bash
mini-loader loads push -r my-script.ts
```
### Accessing Your Server
After pushing your workload, mini loader will display the run ID. You can use this ID to access your server. For example, to make a request to your server, you can use `curl`:
```bash
curl http://localhost:4500/gateway/{your-run-id}
```
Replace `{your-run-id}` with the actual run ID provided by mini loader.

View File

@@ -29,4 +29,4 @@
"type": "git", "type": "git",
"url": "https://github.com/morten-olsen/mini-loader" "url": "https://github.com/morten-olsen/mini-loader"
} }
} }

View File

@@ -14,6 +14,9 @@
"files": [ "files": [
"./dist" "./dist"
], ],
"imports": {
"#pkg": "./package.json"
},
"exports": { "exports": {
".": { ".": {
"import": "./dist/esm/index.js" "import": "./dist/esm/index.js"
@@ -21,6 +24,7 @@
}, },
"dependencies": { "dependencies": {
"@morten-olsen/mini-loader-runner": "workspace:^", "@morten-olsen/mini-loader-runner": "workspace:^",
"@morten-olsen/mini-loader-server": "workspace:^",
"@rollup/plugin-auto-install": "^3.0.5", "@rollup/plugin-auto-install": "^3.0.5",
"@rollup/plugin-commonjs": "^25.0.7", "@rollup/plugin-commonjs": "^25.0.7",
"@rollup/plugin-json": "^6.1.0", "@rollup/plugin-json": "^6.1.0",
@@ -29,6 +33,7 @@
"@rollup/plugin-sucrase": "^5.0.2", "@rollup/plugin-sucrase": "^5.0.2",
"@trpc/client": "^10.45.0", "@trpc/client": "^10.45.0",
"commander": "^11.1.0", "commander": "^11.1.0",
"dotenv": "^16.3.1",
"env-paths": "^3.0.0", "env-paths": "^3.0.0",
"inquirer": "^9.2.12", "inquirer": "^9.2.12",
"ora": "^8.0.1", "ora": "^8.0.1",
@@ -39,7 +44,6 @@
}, },
"devDependencies": { "devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^", "@morten-olsen/mini-loader-configs": "workspace:^",
"@morten-olsen/mini-loader-server": "workspace:^",
"@types/inquirer": "^9.0.7", "@types/inquirer": "^9.0.7",
"typescript": "^5.3.3" "typescript": "^5.3.3"
}, },

View File

@@ -1,9 +1,14 @@
import { createTRPCProxyClient, httpBatchLink } from '@trpc/client'; import { createTRPCProxyClient, httpBatchLink } from '@trpc/client';
import superjson from 'superjson'; import superjson from 'superjson';
import { createRequire } from 'module';
import type { Runtime } from '@morten-olsen/mini-loader-server'; import type { Runtime } from '@morten-olsen/mini-loader-server';
import type { RootRouter } from '@morten-olsen/mini-loader-server'; import type { RootRouter } from '@morten-olsen/mini-loader-server';
import { Context } from '../context/context.js'; import { Context } from '../context/context.js';
import { readFile } from 'fs/promises';
const require = createRequire(import.meta.url);
const pkg = JSON.parse(await readFile(require.resolve('#pkg'), 'utf-8'));
const createClient = (context: Context) => { const createClient = (context: Context) => {
if (!context.host || !context.token) { if (!context.host || !context.token) {
throw new Error('Not signed in'); throw new Error('Not signed in');
@@ -14,6 +19,7 @@ const createClient = (context: Context) => {
httpBatchLink({ httpBatchLink({
url: `${context.host}/trpc`, url: `${context.host}/trpc`,
headers: { headers: {
'x-version': pkg.version,
authorization: `Bearer ${context.token}`, authorization: `Bearer ${context.token}`,
}, },
}), }),

View File

@@ -3,6 +3,8 @@ import { resolve } from 'path';
import { run as runLoad } from '@morten-olsen/mini-loader-runner'; import { run as runLoad } from '@morten-olsen/mini-loader-runner';
import { bundle } from '../../bundler/bundler.js'; import { bundle } from '../../bundler/bundler.js';
import { step } from '../../utils/step.js'; import { step } from '../../utils/step.js';
import { readSecrets } from './local.utils.js';
import { Config } from '../../config/config.js';
const run = new Command('run'); const run = new Command('run');
@@ -11,13 +13,17 @@ run
.argument('script') .argument('script')
.action(async (script) => { .action(async (script) => {
const location = resolve(script); const location = resolve(script);
const config = new Config();
const { autoInstall } = run.opts(); const { autoInstall } = run.opts();
const secrets = await readSecrets();
const code = await step('Bundling', async () => { const code = await step('Bundling', async () => {
return await bundle({ entry: location, autoInstall }); return await bundle({ entry: location, autoInstall });
}); });
const { promise, emitter } = await runLoad({ const { promise, emitter } = await runLoad({
script: code, script: code,
secrets,
cacheLocation: config.cacheLocation,
}); });
emitter.addListener('message', (message) => { emitter.addListener('message', (message) => {
switch (message.type) { switch (message.type) {

View File

@@ -0,0 +1,25 @@
import dotenv from 'dotenv';
import { existsSync } from 'fs';
import { readFile } from 'fs/promises';
import { join } from 'path';
const ENV_PREFIX = 'ML_S_';

/**
 * Collect secrets for a local (non-server) run.
 *
 * Sources, with later sources winning on name collisions:
 *  1. a dotenv-style `.secrets` file in the current working directory
 *  2. environment variables prefixed with `ML_S_` (prefix stripped)
 *
 * @returns map of secret name to secret value
 */
const readSecrets = async (): Promise<Record<string, string>> => {
  // never reassigned — const, not let
  const secretLocation = join(process.cwd(), '.secrets');
  let secrets: Record<string, string> = {};
  if (existsSync(secretLocation)) {
    const content = await readFile(secretLocation, 'utf-8');
    secrets = dotenv.parse(content);
  }
  // Environment variables override same-named entries from the file.
  for (const key in process.env) {
    if (key.startsWith(ENV_PREFIX)) {
      // slice off the leading prefix explicitly; `replace` would target the
      // first occurrence anywhere in the string rather than the prefix
      secrets[key.slice(ENV_PREFIX.length)] = process.env[key]!;
    }
  }
  return secrets;
};

export { readSecrets };

View File

@@ -0,0 +1,32 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';
// CLI command: register a cron schedule for an existing load on the server.
const add = new Command('add');

/**
 * Handler for `schedules add <load-id> <cron>`.
 * Connects using the active context and creates the schedule on the server,
 * printing the new schedule's ID on success.
 */
const handleAdd = async (loadId: string, cron: string) => {
  const config = new Config();
  const context = new Context(config.context);
  const { name } = add.opts();
  const client = await step('Connecting to server', async () => createClient(context));
  const id = await step('Adding schedule', async () =>
    client.schedules.add.mutate({
      name,
      load: loadId,
      cron,
    }),
  );
  console.log(`Schedule added with ID ${id}`);
};

add
  .description('Add schedule')
  .argument('<load-id>', 'Load ID')
  .argument('<cron>', 'Cron')
  .option('-n, --name <name>', 'Name')
  .action(handleAdd);

export { add };

View File

@@ -0,0 +1,39 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';
// CLI command: list schedules, optionally filtered by load IDs, with paging.
const list = new Command('list');

/** Parse an optional CLI string into a base-10 integer; absent values pass through as undefined. */
const toInt = (value?: string) => (value ? parseInt(value, 10) : undefined);

/**
 * Handler for `schedules list` / `schedules ls`.
 * Queries the server with the supplied filters and prints the result as a table.
 */
const handleList = async () => {
  const { loadIds, offset, limit } = list.opts();
  const config = new Config();
  const context = new Context(config.context);
  const client = await step('Connecting to server', async () => createClient(context));
  const schedules = await step('Getting schedules', async () =>
    client.schedules.find.query({
      loadIds,
      offset: toInt(offset),
      limit: toInt(limit),
    }),
  );
  console.table(schedules);
};

list
  .alias('ls')
  .description('List schedules')
  .option('-l, --load-ids <loadIds...>', 'Load ID')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(handleList);

export { list };

View File

@@ -0,0 +1,61 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';
// CLI command: remove schedules by ID and/or by owning load, with confirmation.
const remove = new Command('remove');

/** Parse an optional CLI string into a base-10 integer; absent values pass through as undefined. */
const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

remove
  // fix: was aliased 'ls' (copy-paste from the list command), which collides
  // with `schedules ls`; 'rm' is the conventional alias for remove
  .alias('rm')
  // fix: description typo "LRemove schedules"
  .description('Remove schedules')
  // fix: -i takes schedule IDs, not load IDs (that is what -l is for)
  .option('-i, --ids <ids...>', 'Schedule IDs')
  .option('-l, --load-ids <loadIds...>', 'Load IDs')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { ids, loadIds, offset, limit } = remove.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    // Dry-run on the server first so the user confirms the exact count.
    const response = await step('Preparing to delete', async () => {
      return await client.schedules.prepareRemove.query({
        ids,
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });
    if (!response.ids.length) {
      // fix: message said "No logs to delete" (copy-paste from the logs command)
      console.log('No schedules to delete');
      return;
    }
    const { confirm } = await inquirer.prompt([
      {
        type: 'confirm',
        name: 'confirm',
        message: `Are you sure you want to delete ${response.ids.length} schedules?`,
      },
    ]);
    if (!confirm) {
      return;
    }
    await step('Deleting schedules', async () => {
      // fix: was client.artifacts.remove — it deleted ARTIFACTS with the IDs
      // prepared by schedules.prepareRemove, leaving the schedules in place
      await client.schedules.remove.mutate(response);
    });
  });

export { remove };

View File

@@ -0,0 +1,11 @@
import { Command } from 'commander';
import { list } from './schedules.list.js';
import { remove } from './schedules.remove.js';
import { add } from './schedules.add.js';
// Parent `schedules` command grouping the list, remove and add subcommands
// under `mini-loader schedules <subcommand>`.
const schedules = new Command('schedules');
schedules.addCommand(list);
schedules.addCommand(remove);
schedules.addCommand(add);
export { schedules };

View File

@@ -7,12 +7,13 @@ type ConfigValues = {
context?: string; context?: string;
}; };
const paths = envPaths('mini-loader');
class Config { class Config {
#location: string; #location: string;
#config?: ConfigValues; #config?: ConfigValues;
constructor() { constructor() {
const paths = envPaths('mini-loader');
this.#location = join(paths.config, 'config.json'); this.#location = join(paths.config, 'config.json');
if (existsSync(this.#location)) { if (existsSync(this.#location)) {
this.#config = JSON.parse(readFileSync(this.#location, 'utf-8')); this.#config = JSON.parse(readFileSync(this.#location, 'utf-8'));
@@ -23,6 +24,10 @@ class Config {
return this.#config?.context || 'default'; return this.#config?.context || 'default';
} }
public get cacheLocation() {
return join(paths.cache, this.context);
}
public setContext = (context: string) => { public setContext = (context: string) => {
this.#config = { this.#config = {
...(this.#config || {}), ...(this.#config || {}),

View File

@@ -1,6 +1,6 @@
import envPaths from 'env-paths'; import envPaths from 'env-paths';
import { existsSync, readFileSync, writeFileSync } from 'fs'; import { existsSync, readFileSync } from 'fs';
import { mkdir, readdir } from 'fs/promises'; import { mkdir, readdir, writeFile } from 'fs/promises';
import { dirname, join } from 'path'; import { dirname, join } from 'path';
type ContextValues = { type ContextValues = {
@@ -42,8 +42,8 @@ class Context {
return; return;
} }
const json = JSON.stringify(this.#config); const json = JSON.stringify(this.#config);
mkdir(dirname(this.#location), { recursive: true }); await mkdir(dirname(this.#location), { recursive: true });
writeFileSync(this.#location, json); await writeFile(this.#location, json);
}; };
public static list = async () => { public static list = async () => {

View File

@@ -1,4 +1,5 @@
import { program } from 'commander'; import { Command, program } from 'commander';
import { createRequire } from 'module';
import { loads } from './commands/loads/loads.js'; import { loads } from './commands/loads/loads.js';
import { runs } from './commands/runs/runs.js'; import { runs } from './commands/runs/runs.js';
import { logs } from './commands/logs/logs.js'; import { logs } from './commands/logs/logs.js';
@@ -7,6 +8,12 @@ import { secrets } from './commands/secrets/secrets.js';
import { local } from './commands/local/local.js'; import { local } from './commands/local/local.js';
import { auth } from './commands/auth/auth.js'; import { auth } from './commands/auth/auth.js';
import { contexts } from './commands/contexts/contexts.js'; import { contexts } from './commands/contexts/contexts.js';
import { schedules } from './commands/schedules/schedules.js';
import { readFile } from 'fs/promises';
const require = createRequire(import.meta.url);
const pkg = JSON.parse(await readFile(require.resolve('#pkg'), 'utf-8'));
program.addCommand(loads); program.addCommand(loads);
program.addCommand(runs); program.addCommand(runs);
@@ -16,5 +23,14 @@ program.addCommand(secrets);
program.addCommand(local); program.addCommand(local);
program.addCommand(auth); program.addCommand(auth);
program.addCommand(contexts); program.addCommand(contexts);
program.addCommand(schedules);
program.version(pkg.version);
const version = new Command('version');
version.action(() => {
console.log(pkg.version);
});
program.addCommand(version);
await program.parseAsync(); await program.parseAsync();

View File

@@ -2,6 +2,7 @@
"extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json", "extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
"compilerOptions": { "compilerOptions": {
"outDir": "dist/esm", "outDir": "dist/esm",
"rootDir": "src"
}, },
"include": [ "include": [
"./src/**/*.ts" "./src/**/*.ts"

View File

@@ -8,6 +8,7 @@
"sourceMap": true, "sourceMap": true,
"esModuleInterop": true, "esModuleInterop": true,
"strict": true, "strict": true,
"resolveJsonModule": true,
"allowSyntheticDefaultImports": true, "allowSyntheticDefaultImports": true,
"jsx": "react" "jsx": "react"
}, },

View File

@@ -0,0 +1 @@
demo=foobar

View File

@@ -0,0 +1,3 @@
// Example workload: read the `demo` secret (provided by the adjacent
// `.secrets` file or an `ML_S_demo` environment variable) and print it.
import { secrets } from '@morten-olsen/mini-loader';
console.log(secrets.get('demo'));

View File

@@ -3,7 +3,6 @@ import { artifacts, logger } from '@morten-olsen/mini-loader';
const run = async () => { const run = async () => {
await logger.info('Hello world'); await logger.info('Hello world');
await artifacts.create('foo', 'bar'); await artifacts.create('foo', 'bar');
process.exit(0);
}; };
run(); run();

View File

@@ -1,4 +1,5 @@
import { Socket, createConnection } from 'net'; import { Socket, createConnection } from 'net';
import { Event } from './index.js';
const connect = () => const connect = () =>
new Promise<Socket>((resolve, reject) => { new Promise<Socket>((resolve, reject) => {
@@ -12,7 +13,7 @@ const connect = () =>
}); });
}); });
const send = async (data: any) => const send = async (data: Event) =>
new Promise<void>(async (resolve, reject) => { new Promise<void>(async (resolve, reject) => {
const connection = await connect(); const connection = await connect();
const cleaned = JSON.parse(JSON.stringify(data)); const cleaned = JSON.parse(JSON.stringify(data));

View File

@@ -5,10 +5,11 @@ type RunOptions = {
script: string; script: string;
input?: Buffer | string; input?: Buffer | string;
secrets?: Record<string, string>; secrets?: Record<string, string>;
cacheLocation: string;
}; };
const run = async ({ script, input, secrets }: RunOptions) => { const run = async ({ script, input, secrets, cacheLocation }: RunOptions) => {
const info = await setup({ script, input, secrets }); const info = await setup({ script, input, secrets, cacheLocation });
const worker = new Worker(info.scriptLocation, { const worker = new Worker(info.scriptLocation, {
stdin: false, stdin: false,

View File

@@ -1,5 +1,4 @@
import { join } from 'path'; import { join } from 'path';
import os from 'os';
import { nanoid } from 'nanoid'; import { nanoid } from 'nanoid';
import { chmod, mkdir, rm, writeFile } from 'fs/promises'; import { chmod, mkdir, rm, writeFile } from 'fs/promises';
import { createServer } from 'net'; import { createServer } from 'net';
@@ -9,6 +8,7 @@ type SetupOptions = {
input?: Buffer | string; input?: Buffer | string;
script: string; script: string;
secrets?: Record<string, string>; secrets?: Record<string, string>;
cacheLocation: string;
}; };
type RunEvents = { type RunEvents = {
@@ -20,7 +20,7 @@ type RunEvents = {
const setup = async (options: SetupOptions) => { const setup = async (options: SetupOptions) => {
const { input, script, secrets } = options; const { input, script, secrets } = options;
const emitter = new EventEmitter<RunEvents>(); const emitter = new EventEmitter<RunEvents>();
const dataDir = join(os.tmpdir(), 'mini-loader', nanoid()); const dataDir = join(options.cacheLocation, nanoid());
await mkdir(dataDir, { recursive: true }); await mkdir(dataDir, { recursive: true });
await chmod(dataDir, 0o700); await chmod(dataDir, 0o700);

View File

@@ -14,6 +14,9 @@
"files": [ "files": [
"./dist" "./dist"
], ],
"imports": {
"#pkg": "./package.json"
},
"exports": { "exports": {
".": { ".": {
"import": "./dist/esm/index.js" "import": "./dist/esm/index.js"
@@ -21,16 +24,18 @@
}, },
"devDependencies": { "devDependencies": {
"@morten-olsen/mini-loader-configs": "workspace:^", "@morten-olsen/mini-loader-configs": "workspace:^",
"@morten-olsen/mini-loader-runner": "workspace:^",
"@types/jsonwebtoken": "^9.0.5", "@types/jsonwebtoken": "^9.0.5",
"@types/node": "^20.10.8", "@types/node": "^20.10.8",
"typescript": "^5.3.3" "typescript": "^5.3.3"
}, },
"dependencies": { "dependencies": {
"@fastify/reply-from": "^9.7.0", "@fastify/reply-from": "^9.7.0",
"@morten-olsen/mini-loader-runner": "workspace:^",
"@trpc/client": "^10.45.0", "@trpc/client": "^10.45.0",
"@trpc/server": "^10.45.0", "@trpc/server": "^10.45.0",
"commander": "^11.1.0", "commander": "^11.1.0",
"cron": "^3.1.6",
"env-paths": "^3.0.0",
"eventemitter3": "^5.0.1", "eventemitter3": "^5.0.1",
"fastify": "^4.25.2", "fastify": "^4.25.2",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",

View File

@@ -20,10 +20,10 @@ class Auth {
#setup = async () => { #setup = async () => {
const { config } = this.#options; const { config } = this.#options;
const secretLocation = resolve(config.files.location, 'secret'); const secretLocation = resolve(config.files.data, 'secret');
let secret = ''; let secret = '';
await mkdir(config.files.data, { recursive: true });
if (!existsSync(secretLocation)) { if (!existsSync(secretLocation)) {
await mkdir(config.files.location, { recursive: true });
secret = nanoid(); secret = nanoid();
await writeFile(secretLocation, secret); await writeFile(secretLocation, secret);
} else { } else {

View File

@@ -3,7 +3,8 @@ import { Knex } from 'knex';
type Config = { type Config = {
database: Omit<Knex.Config, 'migrations'>; database: Omit<Knex.Config, 'migrations'>;
files: { files: {
location: string; data: string;
cache: string;
}; };
auth?: { auth?: {
oidc?: { oidc?: {

View File

@@ -1,6 +1,8 @@
import knex, { Knex } from 'knex'; import knex, { Knex } from 'knex';
import { source } from './migrations/migrations.source.js'; import { source } from './migrations/migrations.source.js';
import { mkdir } from 'fs/promises';
import { dirname } from 'path';
const tableNames = { const tableNames = {
loads: 'loads', loads: 'loads',
@@ -20,6 +22,15 @@ class Database {
} }
#setup = async (config: Knex.Config) => { #setup = async (config: Knex.Config) => {
if (
config.connection &&
typeof config.connection !== 'string' &&
'filename' in config.connection &&
typeof config.connection.filename === 'string' &&
config.connection.filename !== ':memory:'
) {
await mkdir(dirname(config.connection.filename), { recursive: true });
}
const db = knex(config); const db = knex(config);
await db.migrate.latest(); await db.migrate.latest();
return db; return db;

View File

@@ -0,0 +1,22 @@
import { Knex } from 'knex';
// Migration: adds the `schedules` table used by the cron scheduler.
const name = 'schedule-support';

/**
 * Creates the `schedules` table. Columns mirror AddScheduleOptions:
 * `load` is the id of the load to run, `cron` the cron expression,
 * `input` an optional serialized payload.
 */
const up = async (knex: Knex) => {
  await knex.schema.createTable('schedules', (table) => {
    table.string('id').primary();
    table.string('name').nullable();
    table.string('description').nullable();
    table.string('load').notNullable();
    table.string('cron').notNullable();
    table.string('input').nullable();
    table.timestamp('createdAt').notNullable();
    table.timestamp('updatedAt').notNullable();
  });
};

/**
 * Reverts the migration. Must drop the same table `up` created:
 * 'schedules' — the original dropped 'schedule' (singular), which would
 * fail at rollback time because that table never existed.
 */
const down = async (knex: Knex) => {
  await knex.schema.dropTable('schedules');
};

export { name, up, down };

View File

@@ -1,6 +1,7 @@
import { Knex } from 'knex'; import { Knex } from 'knex';
import * as init from './migration.init.js'; import * as init from './migration.init.js';
import * as scheduleSupport from './migration.schedule.js';
type Migration = { type Migration = {
name: string; name: string;
@@ -8,7 +9,7 @@ type Migration = {
down: (knex: Knex) => Promise<void>; down: (knex: Knex) => Promise<void>;
}; };
const migrations = [init] satisfies Migration[]; const migrations = [init, scheduleSupport] satisfies Migration[];
const source: Knex.MigrationSource<Migration> = { const source: Knex.MigrationSource<Migration> = {
getMigrations: async () => migrations, getMigrations: async () => migrations,

View File

@@ -6,6 +6,7 @@ const start = new Command('start');
start.action(async () => { start.action(async () => {
const port = 4500; const port = 4500;
const runtime = await Runtime.create(); const runtime = await Runtime.create();
await runtime.scheduler.start();
const server = await createServer(runtime); const server = await createServer(runtime);
await server.listen({ await server.listen({
port, port,
@@ -18,7 +19,11 @@ start.action(async () => {
const createToken = new Command('create-token'); const createToken = new Command('create-token');
createToken.action(async () => { createToken.action(async () => {
const runtime = await Runtime.create(); const runtime = await Runtime.create();
const token = await runtime.auth.createToken({}); const token = await runtime.auth.createToken({
policy: {
'*:*': ['*'],
},
});
console.log(token); console.log(token);
}); });

View File

@@ -43,5 +43,15 @@ declare module 'knex/types/tables.js' {
createdAt: Date; createdAt: Date;
updatedAt: Date; updatedAt: Date;
}; };
schedules: {
id: string;
name?: string;
description?: string;
load: string;
cron: string;
input?: string;
createdAt: Date;
updatedAt: Date;
};
} }
} }

View File

@@ -62,7 +62,7 @@ class LoadRepo extends EventEmitter<LoadRepoEvents> {
const db = await database.instance; const db = await database.instance;
const id = options.id || nanoid(); const id = options.id || nanoid();
const script = createHash('sha256').update(options.script).digest('hex'); const script = createHash('sha256').update(options.script).digest('hex');
const scriptDir = resolve(this.#options.config.files.location, 'scripts'); const scriptDir = resolve(this.#options.config.files.data, 'scripts');
await mkdir(scriptDir, { recursive: true }); await mkdir(scriptDir, { recursive: true });
await writeFile(resolve(scriptDir, `${script}.js`), options.script); await writeFile(resolve(scriptDir, `${script}.js`), options.script);

View File

@@ -4,6 +4,7 @@ import { ArtifactRepo } from './artifacts/artifacts.js';
import { LoadRepo } from './loads/loads.js'; import { LoadRepo } from './loads/loads.js';
import { LogRepo } from './logs/logs.js'; import { LogRepo } from './logs/logs.js';
import { RunRepo } from './runs/runs.js'; import { RunRepo } from './runs/runs.js';
import { ScheduleRepo } from './schedules/schedules.js';
import { SecretRepo } from './secrets/secrets.js'; import { SecretRepo } from './secrets/secrets.js';
type ReposOptions = { type ReposOptions = {
@@ -17,6 +18,7 @@ class Repos {
#logs: LogRepo; #logs: LogRepo;
#artifacts: ArtifactRepo; #artifacts: ArtifactRepo;
#secrets: SecretRepo; #secrets: SecretRepo;
#schedule: ScheduleRepo;
constructor({ database, config }: ReposOptions) { constructor({ database, config }: ReposOptions) {
this.#loads = new LoadRepo({ this.#loads = new LoadRepo({
@@ -36,6 +38,9 @@ class Repos {
this.#secrets = new SecretRepo({ this.#secrets = new SecretRepo({
database, database,
}); });
this.#schedule = new ScheduleRepo({
database,
});
} }
public get loads() { public get loads() {
@@ -57,8 +62,13 @@ class Repos {
public get secrets() { public get secrets() {
return this.#secrets; return this.#secrets;
} }
public get schedules() {
return this.#schedule;
}
} }
export { findSchedulesSchema, addScheduleSchema } from './schedules/schedules.js';
export { findLogsSchema, addLogSchema } from './logs/logs.js'; export { findLogsSchema, addLogSchema } from './logs/logs.js';
export { setLoadSchema, findLoadsSchema } from './loads/loads.js'; export { setLoadSchema, findLoadsSchema } from './loads/loads.js';
export { createRunSchema, findRunsSchema } from './runs/runs.js'; export { createRunSchema, findRunsSchema } from './runs/runs.js';

View File

@@ -0,0 +1,22 @@
import { z } from 'zod';
// Input validation for creating a schedule: `load` is the id of the load
// to run, `cron` the cron expression, `input` an optional serialized payload.
const addScheduleSchema = z.object({
  name: z.string().optional(),
  description: z.string().optional(),
  load: z.string(),
  cron: z.string(),
  input: z.string().optional(),
});

// Filter/paging options for listing schedules; also used by the
// prepareRemove/remove flow to select which schedules to delete.
const findSchedulesSchema = z.object({
  ids: z.array(z.string()).optional(),
  loadIds: z.array(z.string()).optional(),
  offset: z.number().optional(),
  limit: z.number().optional(),
});

type AddScheduleOptions = z.infer<typeof addScheduleSchema>;
type FindSchedulesOptions = z.infer<typeof findSchedulesSchema>;

export type { AddScheduleOptions, FindSchedulesOptions };
export { addScheduleSchema, findSchedulesSchema };

View File

@@ -0,0 +1,118 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { nanoid } from 'nanoid';
import { AddScheduleOptions, FindSchedulesOptions } from './schedules.schemas.js';
import { createHash } from 'crypto';
type ScheduleRepoEvents = {
  added: (id: string) => void;
  removed: (id: string) => void;
};

type ScheduleRepoOptions = {
  database: Database;
};

/**
 * Repository for cron schedules. Emits 'added'/'removed' so the Scheduler
 * can keep its running jobs in sync with the database.
 */
class ScheduleRepo extends EventEmitter<ScheduleRepoEvents> {
  #options: ScheduleRepoOptions;

  constructor(options: ScheduleRepoOptions) {
    super();
    this.#options = options;
  }

  /** Fetches a single schedule row by id, or undefined when missing. */
  public get = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const result = await db('schedules').where('id', id).first();
    return result;
  };

  /**
   * Inserts a new schedule and emits 'added'.
   * Fix: the insert now includes `load` (NOT NULL in the schema — omitting
   * it made every insert fail) and the optional `input` payload, both of
   * which the original dropped.
   */
  public add = async (options: AddScheduleOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = nanoid();
    await db('schedules').insert({
      id,
      name: options.name,
      description: options.description,
      load: options.load,
      cron: options.cron,
      input: options.input,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    this.emit('added', id);
    return id;
  };

  /**
   * Resolves the ids matching the filter and returns them with a hash over
   * the id list; `remove` requires the same hash so the caller confirms
   * exactly the set it previewed.
   */
  public prepareRemove = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('schedules').select('id');
    if (options.ids) {
      query.whereIn('id', options.ids);
    }
    if (options.loadIds) {
      // Fix: the column is named `load` (see migration / table types);
      // filtering on `loadId` raised "no such column".
      query.whereIn('load', options.loadIds);
    }
    const result = await query;
    const ids = result.map((row) => row.id);
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const hash = createHash('sha256').update(token).digest('hex');
    return {
      ids,
      hash,
    };
  };

  /**
   * Deletes the given schedules after verifying the hash matches the id set
   * produced by `prepareRemove`; emits 'removed' for each deleted id.
   * @throws Error('Invalid hash') when the hash does not match `ids`.
   */
  public remove = async (hash: string, ids: string[]) => {
    const { database } = this.#options;
    const db = await database.instance;
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const actualHash = createHash('sha256').update(token).digest('hex');
    if (hash !== actualHash) {
      throw new Error('Invalid hash');
    }
    await db('schedules').whereIn('id', ids).delete();
    ids.forEach((id) => {
      this.emit('removed', id);
    });
  };

  /** Lists schedules, optionally filtered by ids/loadIds, with paging. */
  public find = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('schedules');
    if (options.ids) {
      query.whereIn('id', options.ids);
    }
    if (options.loadIds) {
      // Fix: column is `load`, not `loadId` (same as prepareRemove).
      query.whereIn('load', options.loadIds);
    }
    if (options.offset) {
      query.offset(options.offset);
    }
    if (options.limit) {
      query.limit(options.limit);
    }
    const results = await query;
    return results;
  };
}

export { addScheduleSchema, findSchedulesSchema } from './schedules.schemas.js';
export { ScheduleRepo };

View File

@@ -0,0 +1,53 @@
import { z } from 'zod';
import { addScheduleSchema, findSchedulesSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';
// Creates a new schedule from validated input; returns the new schedule id.
const add = publicProcedure.input(addScheduleSchema).mutation(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;
  const result = await schedules.add(input);
  return result;
});

// Lists schedules matching the given filter/paging options.
const find = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;
  const result = await schedules.find(input);
  return result;
});

// Previews a removal: resolves the matching schedule ids plus a
// confirmation hash that must be passed back to `remove`.
const prepareRemove = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;
  return await schedules.prepareRemove(input);
});

// Removes the previewed schedules.
// Fix: must go through the schedules repo — the original called
// `artifacts.remove`, a copy-paste error that deleted artifacts instead.
const remove = publicProcedure
  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    const { schedules } = repos;
    await schedules.remove(input.hash, input.ids);
  });

const schedulesRouter = router({
  add,
  find,
  remove,
  prepareRemove,
});

export { schedulesRouter };

View File

@@ -2,6 +2,7 @@ import { artifactsRouter } from './router.artifacts.js';
import { loadsRouter } from './router.loads.js'; import { loadsRouter } from './router.loads.js';
import { logsRouter } from './router.logs.js'; import { logsRouter } from './router.logs.js';
import { runsRouter } from './router.runs.js'; import { runsRouter } from './router.runs.js';
import { schedulesRouter } from './router.schedules.js';
import { secretsRouter } from './router.secrets.js'; import { secretsRouter } from './router.secrets.js';
import { router } from './router.utils.js'; import { router } from './router.utils.js';
@@ -11,6 +12,7 @@ const rootRouter = router({
logs: logsRouter, logs: logsRouter,
artifacts: artifactsRouter, artifacts: artifactsRouter,
secrets: secretsRouter, secrets: secretsRouter,
schedules: schedulesRouter,
}); });
type RootRouter = typeof rootRouter; type RootRouter = typeof rootRouter;

View File

@@ -59,7 +59,7 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
const { runs, secrets } = repos; const { runs, secrets } = repos;
try { try {
const { script: scriptHash, input } = await runs.getById(id); const { script: scriptHash, input } = await runs.getById(id);
const scriptLocation = resolve(config.files.location, 'scripts', `${scriptHash}.js`); const scriptLocation = resolve(config.files.data, 'scripts', `${scriptHash}.js`);
const script = await readFile(scriptLocation, 'utf-8'); const script = await readFile(scriptLocation, 'utf-8');
const allSecrets = await secrets.getAll(); const allSecrets = await secrets.getAll();
await runs.started(id); await runs.started(id);
@@ -67,6 +67,7 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
script, script,
secrets: allSecrets, secrets: allSecrets,
input, input,
cacheLocation: config.files.cache,
}); });
this.#run = current; this.#run = current;
const { promise, emitter } = current; const { promise, emitter } = current;

View File

@@ -1,20 +1,26 @@
import { resolve } from 'path';
import envPaths from 'env-paths';
import { Database } from '../database/database.js'; import { Database } from '../database/database.js';
import { Repos } from '../repos/repos.js'; import { Repos } from '../repos/repos.js';
import { Runner } from '../runner/runner.js'; import { Runner } from '../runner/runner.js';
import { Config } from '../config/config.js'; import { Config } from '../config/config.js';
import { Auth } from '../auth/auth.js'; import { Auth } from '../auth/auth.js';
import { resolve } from 'path'; import { Scheduler } from '../scheduler/scheduler.js';
const paths = envPaths('mini-loader-server');
class Runtime { class Runtime {
#repos: Repos; #repos: Repos;
#runner: Runner; #runner: Runner;
#auth: Auth; #auth: Auth;
#scheduler: Scheduler;
constructor(options: Config) { constructor(options: Config) {
const database = new Database(options.database); const database = new Database(options.database);
this.#repos = new Repos({ database, config: options }); this.#repos = new Repos({ database, config: options });
this.#runner = new Runner({ repos: this.#repos, config: options }); this.#runner = new Runner({ repos: this.#repos, config: options });
this.#auth = new Auth({ config: options }); this.#auth = new Auth({ config: options });
this.#scheduler = new Scheduler({ runs: this.#repos.runs, schedules: this.#repos.schedules });
} }
public get repos() { public get repos() {
@@ -29,17 +35,22 @@ class Runtime {
return this.#auth; return this.#auth;
} }
public get scheduler() {
return this.#scheduler;
}
public static create = async () => { public static create = async () => {
const runtime = new Runtime({ const runtime = new Runtime({
database: { database: {
client: 'sqlite3', client: 'sqlite3',
connection: { connection: {
filename: resolve(process.cwd(), 'data', 'database.sqlite'), filename: resolve(paths.data, 'database.sqlite'),
}, },
useNullAsDefault: true, useNullAsDefault: true,
}, },
files: { files: {
location: resolve(process.cwd(), 'data', 'files'), data: process.env.DATA_DIR || resolve(paths.data, 'data', 'files'),
cache: process.env.CACHE_DIR || resolve(paths.cache, 'data', 'cache'),
}, },
}); });

View File

@@ -0,0 +1,73 @@
import { CronJob } from 'cron';
import { ScheduleRepo } from '../repos/schedules/schedules.js';
import { RunRepo } from '../repos/runs/runs.js';
type SchedulerOptions = {
  runs: RunRepo;
  schedules: ScheduleRepo;
};

type RunningSchedule = {
  id: string;
  job: CronJob;
  stop: () => Promise<void>;
};

/**
 * Keeps one CronJob per schedule row, creating a run for the schedule's
 * load each time its cron expression fires. Subscribes to the ScheduleRepo
 * 'added'/'removed' events so the running jobs track the database.
 */
class Scheduler {
  #running: RunningSchedule[] = [];
  #options: SchedulerOptions;

  constructor(options: SchedulerOptions) {
    this.#options = options;
    const { schedules } = this.#options;
    schedules.on('added', this.#add);
    schedules.on('removed', this.#remove);
  }

  // Stops and forgets every job registered under the given schedule id.
  #remove = async (id: string) => {
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    this.#running = this.#running.filter((r) => r.id !== id);
  };

  // (Re)creates the cron job for one schedule, replacing any existing job.
  #add = async (id: string) => {
    const { schedules, runs } = this.#options;
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    const schedule = await schedules.get(id);
    if (!schedule) {
      return;
    }
    const job = new CronJob(schedule.cron, async () => {
      await runs.create({
        loadId: schedule.load,
      });
    });
    // Fix: a CronJob does not run until started (the constructor's `start`
    // flag defaults to false) — without this call no schedule ever fired.
    job.start();
    const stop = async () => {
      job.stop();
    };
    this.#running.push({
      id: schedule.id,
      job,
      stop,
    });
  };

  /** Stops all running jobs and clears the registry. */
  public stop = async () => {
    for (const running of this.#running) {
      await running.stop();
      this.#running = this.#running.filter((r) => r !== running);
    }
  };

  /** Stops everything, then starts a job for every schedule in the DB. */
  public start = async () => {
    const { schedules } = this.#options;
    await this.stop();
    const all = await schedules.find({});
    for (const schedule of all) {
      await this.#add(schedule.id);
    }
  };
}

export { Scheduler };

View File

@@ -4,8 +4,15 @@ import { RootRouter, rootRouter } from '../router/router.js';
import { createContext } from '../router/router.utils.js'; import { createContext } from '../router/router.utils.js';
import { Runtime } from '../runtime/runtime.js'; import { Runtime } from '../runtime/runtime.js';
import { gateway } from '../gateway/gateway.js'; import { gateway } from '../gateway/gateway.js';
import { createRequire } from 'module';
import { readFile } from 'fs/promises';
const require = createRequire(import.meta.url);
const createServer = async (runtime: Runtime) => { const createServer = async (runtime: Runtime) => {
const pkgLocation = require.resolve('#pkg');
const pkg = JSON.parse(await readFile(pkgLocation, 'utf-8'));
const server = fastify({ const server = fastify({
maxParamLength: 10000, maxParamLength: 10000,
bodyLimit: 30 * 1024 * 1024, bodyLimit: 30 * 1024 * 1024,
@@ -13,9 +20,6 @@ const createServer = async (runtime: Runtime) => {
level: 'warn', level: 'warn',
}, },
}); });
server.get('/', async () => {
return { hello: 'world' };
});
server.get('/health', async (req) => { server.get('/health', async (req) => {
let authorized = false; let authorized = false;
@@ -27,7 +31,7 @@ const createServer = async (runtime: Runtime) => {
authorized = true; authorized = true;
} }
} catch (error) {} } catch (error) {}
return { authorized, status: 'ok' }; return { authorized, status: 'ok', version: pkg.version };
}); });
server.register(fastifyTRPCPlugin, { server.register(fastifyTRPCPlugin, {

View File

@@ -2,6 +2,7 @@
"extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json", "extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
"compilerOptions": { "compilerOptions": {
"outDir": "dist/esm", "outDir": "dist/esm",
"rootDir": "src"
}, },
"include": [ "include": [
"src" "src"

42
pnpm-lock.yaml generated
View File

@@ -39,6 +39,9 @@ importers:
'@morten-olsen/mini-loader-runner': '@morten-olsen/mini-loader-runner':
specifier: workspace:^ specifier: workspace:^
version: link:../runner version: link:../runner
'@morten-olsen/mini-loader-server':
specifier: workspace:^
version: link:../server
'@rollup/plugin-auto-install': '@rollup/plugin-auto-install':
specifier: ^3.0.5 specifier: ^3.0.5
version: 3.0.5(rollup@4.9.4) version: 3.0.5(rollup@4.9.4)
@@ -63,6 +66,9 @@ importers:
commander: commander:
specifier: ^11.1.0 specifier: ^11.1.0
version: 11.1.0 version: 11.1.0
dotenv:
specifier: ^16.3.1
version: 16.3.1
env-paths: env-paths:
specifier: ^3.0.0 specifier: ^3.0.0
version: 3.0.0 version: 3.0.0
@@ -88,9 +94,6 @@ importers:
'@morten-olsen/mini-loader-configs': '@morten-olsen/mini-loader-configs':
specifier: workspace:^ specifier: workspace:^
version: link:../configs version: link:../configs
'@morten-olsen/mini-loader-server':
specifier: workspace:^
version: link:../server
'@types/inquirer': '@types/inquirer':
specifier: ^9.0.7 specifier: ^9.0.7
version: 9.0.7 version: 9.0.7
@@ -161,6 +164,9 @@ importers:
'@fastify/reply-from': '@fastify/reply-from':
specifier: ^9.7.0 specifier: ^9.7.0
version: 9.7.0 version: 9.7.0
'@morten-olsen/mini-loader-runner':
specifier: workspace:^
version: link:../runner
'@trpc/client': '@trpc/client':
specifier: ^10.45.0 specifier: ^10.45.0
version: 10.45.0(@trpc/server@10.45.0) version: 10.45.0(@trpc/server@10.45.0)
@@ -170,6 +176,12 @@ importers:
commander: commander:
specifier: ^11.1.0 specifier: ^11.1.0
version: 11.1.0 version: 11.1.0
cron:
specifier: ^3.1.6
version: 3.1.6
env-paths:
specifier: ^3.0.0
version: 3.0.0
eventemitter3: eventemitter3:
specifier: ^5.0.1 specifier: ^5.0.1
version: 5.0.1 version: 5.0.1
@@ -201,9 +213,6 @@ importers:
'@morten-olsen/mini-loader-configs': '@morten-olsen/mini-loader-configs':
specifier: workspace:^ specifier: workspace:^
version: link:../configs version: link:../configs
'@morten-olsen/mini-loader-runner':
specifier: workspace:^
version: link:../runner
'@types/jsonwebtoken': '@types/jsonwebtoken':
specifier: ^9.0.5 specifier: ^9.0.5
version: 9.0.5 version: 9.0.5
@@ -1285,6 +1294,10 @@ packages:
'@types/node': 20.10.8 '@types/node': 20.10.8
dev: true dev: true
/@types/luxon@3.3.8:
resolution: {integrity: sha512-jYvz8UMLDgy3a5SkGJne8H7VA7zPV2Lwohjx0V8V31+SqAjNmurWMkk9cQhfvlcnXWudBpK9xPM1n4rljOcHYQ==}
dev: false
/@types/node@20.10.8: /@types/node@20.10.8:
resolution: {integrity: sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==} resolution: {integrity: sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==}
dependencies: dependencies:
@@ -2080,6 +2093,13 @@ packages:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
dev: false dev: false
/cron@3.1.6:
resolution: {integrity: sha512-cvFiQCeVzsA+QPM6fhjBtlKGij7tLLISnTSvFxVdnFGLdz+ZdXN37kNe0i2gefmdD17XuZA6n2uPVwzl4FxW/w==}
dependencies:
'@types/luxon': 3.3.8
luxon: 3.4.4
dev: false
/cross-spawn@7.0.3: /cross-spawn@7.0.3:
resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
engines: {node: '>= 8'} engines: {node: '>= 8'}
@@ -2189,6 +2209,11 @@ packages:
esutils: 2.0.3 esutils: 2.0.3
dev: true dev: true
/dotenv@16.3.1:
resolution: {integrity: sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==}
engines: {node: '>=12'}
dev: false
/eastasianwidth@0.2.0: /eastasianwidth@0.2.0:
resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
dev: false dev: false
@@ -3744,6 +3769,11 @@ packages:
dependencies: dependencies:
yallist: 4.0.0 yallist: 4.0.0
/luxon@3.4.4:
resolution: {integrity: sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==}
engines: {node: '>=12'}
dev: false
/magic-string@0.25.9: /magic-string@0.25.9:
resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==} resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==}
dependencies: dependencies: