mirror of https://github.com/morten-olsen/mini-loader.git
synced 2026-02-08 01:36:26 +01:00
Compare commits
22 Commits
0.1.12...fix/enable
| Author | SHA1 | Date |
|---|---|---|
| | e6fe8336de | |
| | e5064ca905 | |
| | 1c3b993ab2 | |
| | 161a098c9f | |
| | a08f9e1c91 | |
| | e0c41d9220 | |
| | 028b65587e | |
| | 7436b3439c | |
| | 2109bc3af9 | |
| | eeaad68f6e | |
| | c7ca97f041 | |
| | c8e02d8da4 | |
| | 9a5b27f1be | |
| | 0760328854 | |
| | fa23b325b3 | |
| | 4f183310a6 | |
| | ecce49209f | |
| | f8f0eca320 | |
| | 1115ce2fb3 | |
| | 9c5249956e | |
| | b5d8cf3a51 | |
| | 5154fbb4a5 | |
5 .devcontainer/devcontainer.json Normal file
@@ -0,0 +1,5 @@
{
  "image": "mcr.microsoft.com/devcontainers/universal:2",
  "features": {
  }
}
27 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,27 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here.
20 .github/ISSUE_TEMPLATE/feature_request.md vendored Normal file
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
19 .github/workflows/release.yml vendored
@@ -71,22 +71,41 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to the Container registry
        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Retrieve version
        run: |
          echo "TAG_NAME=$(git describe --tag --abbrev=0)" >> $GITHUB_OUTPUT
        id: version

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            latest
            ${{ steps.version.outputs.TAG_NAME }}

      - name: Build and push Docker image
        uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
        with:
          context: .
          file: ./docker/Dockerfile
          platforms: linux/amd64,linux/arm64
          cache-from: type=gha
          cache-to: type=gha,mode=max
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
43 CODE_OF_CONDUCT.md Normal file
@@ -0,0 +1,43 @@
# Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment include:

- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

- The use of sexualized language or imagery and unwelcome sexual attention or advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned with this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at s56gkgkq@void.black. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org), version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
59 CONTRIBUTING.md Normal file
@@ -0,0 +1,59 @@
# Contributing to mini loader

First off, thank you for considering contributing to mini loader! It's people like you that make mini loader such a great tool.

## Code of Conduct

This project and everyone participating in it is governed by the [mini loader Code of Conduct](CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to s56gkgkq@void.black.

## How Can I Contribute?

### Reporting Bugs

This section guides you through submitting a bug report for mini loader. Following these guidelines helps maintainers and the community understand your report, reproduce the behavior, and find related reports.

**Before Submitting A Bug Report**

- Ensure the bug was not already reported by searching on GitHub under [Issues](https://github.com/morten-olsen/mini-loader/issues).
- If you're unable to find an open issue addressing the problem, open a new one. Be sure to include a title and clear description, as much relevant information as possible, and a code sample or an executable test case demonstrating the expected behavior that is not occurring.

### Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for mini loader, including completely new features and minor improvements to existing functionality.

**Before Submitting An Enhancement Suggestion**

- Check if the enhancement has already been suggested under [Issues](https://github.com/morten-olsen/mini-loader/issues).
- If it hasn't, create a new issue and provide a concise description of the enhancement with as much detail as possible.

### Your First Code Contribution

Unsure where to begin contributing to mini loader? You can start by looking through `beginner` and `help-wanted` issues:

- Beginner issues - issues which should only require a few lines of code, and a test or two.
- Help wanted issues - issues which should be a bit more involved than `beginner` issues.

### Pull Requests

- Fill in the required template
- Do not include issue numbers in the PR title
- Follow the coding style used throughout the project
- Include appropriate test coverage. New features should include new tests.
- Document new code based on the [Documentation Styleguide](#documentation-styleguide)

## Documentation Styleguide

Use this style guide for documentation:

- Use Markdown
- Reference methods and classes in markdown backticks. For example, `ClassName.methodName`
- Document new code or add comments in code to explain parts that might be confusing.

## Use a Consistent Coding Style

* 2 spaces for indentation rather than tabs
* You can try running `pnpm run test:lint` for style unification

## License

By contributing to mini loader, you agree that your contributions will be licensed under its GPL-3 License.
43 README.md
@@ -1,14 +1,8 @@


<p>
<center>
<img src="./assets/logo.png" width="300" height="300" />
</center>
</p>


# Welcome to Mini Loader! 🌐

Welcome to mini loader, a lightweight, Docker-based server solution for managing and executing workloads with ease. Designed for developers, small teams, and anyone in need of a simple yet powerful tool for running tasks, hosting API servers, or scheduling routine jobs.
Welcome to mini loader, a lightweight server solution for managing and executing workloads with ease. Designed for developers, small teams, and anyone in need of a simple yet powerful tool for running tasks, hosting API servers, or scheduling routine jobs.

## Features

@@ -19,18 +13,37 @@ Welcome to mini loader, a lightweight, Docker-based server solution for managing
- **Task Scheduling**: Built-in support for cron-like job scheduling.
- **HTTP Gateway**: Expose a HTTP server from your workloads

Also see [anti-features and limitations](./docs/anti-features.md)
Also see [anti-features and limitations](./docs/02-anti-features.md)

:construction: This project is under active development and has not reached v1.0 yet. Expect some bugs and potential breaking changes in APIs. We appreciate your patience and welcome your feedback as we work towards a stable release!

For an overview of what's coming next, check out our roadmap at [GitHub Milestones](https://github.com/morten-olsen/mini-loader/milestones).

## Quick Start

Get up and running with mini loader in just a few steps:

1. **Install the CLI**: `npm install -g @morten-olsen/mini-loader-cli`
2. **Deploy the Server**: `docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main`.
3. **Push Your First Load**: `mini-loader loads push script.mjs -r -i first`
3. **See the logs**: `mini-loader logs ls -l first`
```bash
# Install the CLI and the server
npm i -g @morten-olsen/mini-loader-cli @morten-olsen/mini-loader-server

For a detailed guide on getting started, please refer to the [Getting Started Tutorial](./docs/getting-started.md).
# Start the server
mini-loader-server start &

# Get your access token
mini-loader-server create-token

# Authenticate the CLI
mini-loader auth login

# Push your first workload
mini-loader loads push -r -ai my-script.js -i first

# See the output logs
mini-loader logs ls -l first
```

For a detailed guide on getting started, please refer to the [Getting Started Tutorial](./docs/01-getting-started.md).

## Support and Contributions

@@ -45,4 +58,4 @@ mini loader is open-source software licensed under the [GPL-3 License](./LICENSE

## Let's Get Started!

Dive into the world of simplified workload management with mini loader. Start with our [Getting Started Tutorial](./docs/getting-started.md) and unleash the full potential of your tasks and applications!
Dive into the world of simplified workload management with mini loader. Start with our [Getting Started Tutorial](./docs/01-getting-started.md) and unleash the full potential of your tasks and applications!
BIN assets/banner.png Normal file (binary file not shown; after: 418 KiB)
@@ -5,5 +5,9 @@ services:
    build:
      context: .
      dockerfile: ./docker/Dockerfile
    volumes:
      - data:/app/data
    ports:
      - 4500:4500
volumes:
  data:
@@ -27,6 +27,11 @@ COPY --from=builder /app/out/full/ .
RUN pnpm turbo run build --filter=@morten-olsen/mini-loader-server

FROM base AS runner
ENV \
  NODE_ENV=production \
  DATA_DIR=/data \
  CACHE_DIR=/cache
RUN apk add --no-cache jq curl
WORKDIR /app

# Don't run production as root
@@ -38,5 +43,12 @@ RUN chmod +x /entrypoint.sh

COPY --from=installer /app .
EXPOSE 4500
VOLUME /data

HEALTHCHECK \
  --interval=10s \
  --start-period=10s \
  CMD curl -f http://localhost:4500/health || exit 1

ENTRYPOINT ["/entrypoint.sh"]
CMD ["mini-loader-server", "start"]
@@ -7,6 +7,8 @@ GID=${GID:-1001}
addgroup --system --gid ${GID} nodejs && \
adduser --system --uid ${UID} -G nodejs miniloader && \

mkdir -p /app/data
chown -R miniloader:nodejs /app/data
mkdir -p ${DATA_DIR}
mkdir -p ${CACHE_DIR}
chown -R miniloader:nodejs ${DATA_DIR}
chown -R miniloader:nodejs ${CACHE_DIR}
su miniloader -s /bin/sh -c "$CMD"
@@ -15,20 +15,18 @@ Before diving into mini loader, ensure you have the following:

## Contents

- [Creating you first workload](./installation.md): Learn how to write workloads and execute them locally with the mini loader CLI
- [Running the server](./pushing-managing-loads.md): Instructions on how to run the server locally using docker.
- [Interacting with the server](./interacting-with-server.md): Learn the basic commands used to manage workloads.
- [Managing secrets](./managing-secrets.md): Upload secrets to the server that can be used inside your scripts.
- [Authorization](./setting-up-oidc.md): Extend the authorization using OIDC
- [Create an API](./creating-an-api.md): Create a workload which exposes a HTTP api
- [Creating you first workload](./03-tutorial/01-first-workload.md): Learn how to write workloads and execute them locally with the mini loader CLI
- [Running the server](./03-tutorial/02-setup-server.md): Instructions on how to run the server locally using docker.
- [Interacting with the server](./03-tutorial/03-interacting-with-server.md): Learn the basic commands used to manage workloads.
- [Managing secrets](./03-tutorial/04-managing-secrets.md): Upload secrets to the server that can be used inside your scripts.
- [Create an API](./03-tutorial/05-creating-an-api.md): Create a workload which exposes a HTTP api

## Getting Help

If you encounter any issues or have questions, please refer to the [FAQs](./faqs.md)
If you encounter any issues or have questions, please refer to the [FAQs](./04-faqs.md)

## Let's Get Started!

Ready to streamline your workload management? Let's jump right into [creating your first workload](./first-workload.md) and set up the mini loader CLI!
Ready to streamline your workload management? Let's jump right into [creating your first workload](./03-tutorial/01-first-workload.md) and set up the mini loader CLI!


[Next: create a workload](./first-workload.md)
[Next: create a workload](./03-tutorial/01-first-workload.md)
@@ -46,4 +46,4 @@ After running the command, you should see an output confirming that a new artifa

Congratulations on setting up and running your first script with mini loader! You're now ready to take the next step.

[Next: Setting Up the Server](./setup-server.md)
[Next: Setting Up the Server](./02-setup-server.md)
@@ -1,14 +1,15 @@
Certainly! Here's a revised version of your documentation page to make it

## Quick Start with mini loader using Docker

This guide will help you quickly set up and run a mini loader server using Docker. Follow these simple steps to deploy your server and start interacting with it using the [mini-loader CLI](./first-workload.md).
This guide will help you quickly set up and run a mini loader server using Docker. Follow these simple steps to deploy your server and start interacting with it using the [mini-loader CLI](./01-first-workload.md).

### Step 1: Deploy the mini loader Container

To begin, let's deploy the mini loader container. Run the following command in your terminal:

```bash
docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main
docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:latest
```

This command downloads the latest mini loader image and runs it, exposing port 4500.
@@ -57,4 +58,4 @@ This command lists all the loads currently on your server, confirming that the C

You've successfully deployed and configured your mini loader server using Docker! You're now ready to start interacting with the server.

[Next: Interacting with the Server](./interacting-with-server.md)
[Next: Interacting with the Server](./03-interacting-with-server.md)
@@ -67,4 +67,4 @@ Replace `<id>` with the identifier of the artifact you wish to download.

You're now equipped to manage loads, runs, logs, and artifacts using the mini loader CLI. For advanced usage, such as managing secrets, proceed to the next section.

[Next: Managing Secrets](./managing-secrets.md)
[Next: Managing Secrets](./04-managing-secrets.md)
81 docs/03-tutorial/04-managing-secrets.md Normal file
@@ -0,0 +1,81 @@
## Managing Secrets

### Introduction

In many workflows, accessing sensitive data such as API tokens or credentials is essential. To handle this securely, you can use secrets management. This tutorial demonstrates how to manage secrets using the CLI and implement them in a simple Node.js workload.

### Creating Secrets with the CLI

To create a new secret, use the `mini-loader` CLI as follows:

```bash
mini-loader secrets set <id>
```

For example, to store a GitHub personal access token, you would use:

```bash
mini-loader secrets set githubtoken
```

Upon execution, you'll be prompted to enter your access token.

### Implementing Secrets in Your Workload

Next, let's create a Node.js script (`github.js`) that uses this token to fetch your GitHub username and saves it as an artifact.

1. **Create `github.js` File:**

```javascript
import { secrets, artifacts } from '@morten-olsen/mini-loader';
import { Octokit } from '@octokit/rest';

// Retrieve the secret
const accessToken = secrets.get('githubtoken');

// Main async function to fetch and save GitHub username
async function run() {
  const octokit = new Octokit({ auth: accessToken });
  const user = await octokit.users.getAuthenticated();
  await artifacts.create('user', JSON.stringify(user.data.login));
}

// Execute the function
run().catch(console.error);
```

This script initializes the Octokit client with the access token, fetches the authenticated user's data, and then saves the username as an artifact.

2. **Run the Script:**

Execute your script with `mini-loader`:

```bash
mini-loader loads push github.js -r -ai
```

### Managing Local Secrets

If you're running the script locally, you can manage secrets either by using a `.secrets` file or setting an environment variable.

1. **Using a `.secrets` File:**

Create a file named `.secrets` and add your token:

```
githubtoken=<your-token>
```

2. **Using Environment Variables:**

Prefix your environment variable with `ML_S_` and run the script:

```bash
ML_S_githubtoken=<your-token> mini-loader local run github.js -ai
```

### Conclusion

By following these steps, you can securely manage and use secrets within your workloads, enhancing the security and integrity of your applications.

[Next: Creating an API](./05-creating-an-api.md)
52 docs/03-tutorial/05-creating-an-api.md Normal file
@@ -0,0 +1,52 @@
## Creating an API Inside Your Workload

Workloads in mini loader can set up simple HTTP servers by connecting to a socket file, a feature supported by many JavaScript server libraries.

### Binding Your Workload to an HTTP Endpoint

To expose your workload as an HTTP server, specify the path parameter using the `getPath()` method provided by the `@morten-olsen/mini-loader` package. This method dynamically assigns a path for your API.

### Important Note

Please be aware that the gateway provided by mini loader isn't fully featured. As such, certain functionalities like streaming and WebSockets may not be supported.

### Example: Setting Up a Server with Fastify

Here's how you can create a simple API server using Fastify in TypeScript:

```typescript
import { http } from '@morten-olsen/mini-loader';
import fastify from 'fastify';

const server = fastify();

// Handling all requests and returning the requested URL
server.all('*', async (req) => {
  return req.url;
});

// Listening on the path provided by mini loader
server.listen({
  path: http.getPath(),
});
```

With this setup, your server will respond to all incoming requests by returning the requested URL.

### Deploying Your Workload

Now, you can push and run your workload just like any other script:

```bash
mini-loader loads push -r my-script.ts
```

### Accessing Your Server

After pushing your workload, mini loader will display the run ID. You can use this ID to access your server. For example, to make a request to your server, you can use `curl`:

```bash
curl http://localhost:4500/gateway/{your-run-id}
```

Replace `{your-run-id}` with the actual run ID provided by mini loader.
@@ -27,6 +27,6 @@
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  }
}
@@ -14,12 +14,17 @@
  "files": [
    "./dist"
  ],
  "imports": {
    "#pkg": "./package.json"
  },
  "exports": {
    ".": {
      "import": "./dist/esm/index.js"
    }
  },
  "dependencies": {
    "@morten-olsen/mini-loader-runner": "workspace:^",
    "@morten-olsen/mini-loader-server": "workspace:^",
    "@rollup/plugin-auto-install": "^3.0.5",
    "@rollup/plugin-commonjs": "^25.0.7",
    "@rollup/plugin-json": "^6.1.0",
@@ -28,6 +33,7 @@
    "@rollup/plugin-sucrase": "^5.0.2",
    "@trpc/client": "^10.45.0",
    "commander": "^11.1.0",
    "dotenv": "^16.3.1",
    "env-paths": "^3.0.0",
    "inquirer": "^9.2.12",
    "ora": "^8.0.1",
@@ -38,14 +44,12 @@
  },
  "devDependencies": {
    "@morten-olsen/mini-loader-configs": "workspace:^",
    "@morten-olsen/mini-loader-runner": "workspace:^",
    "@morten-olsen/mini-loader-server": "workspace:^",
    "@types/inquirer": "^9.0.7",
    "typescript": "^5.3.3"
  },
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  }
}
@@ -17,12 +17,12 @@ const bundle = async ({ entry, autoInstall }: BundleOptions) => {
  const entryFile = resolve(entry);
  const codeBundler = await rollup({
    plugins: [
      fix(json)(),
      fix(sucrase)({
        transforms: ['typescript', 'jsx'],
      }),
      ...[autoInstall ? fix(auto) : []],
      nodeResolve({ extensions: ['.js', '.jsx', '.ts', '.tsx'] }),
      fix(json)(),
      nodeResolve({ preferBuiltins: true, extensions: ['.js', '.jsx', '.ts', '.tsx'] }),
      fix(commonjs)({ include: /node_modules/ }),
    ],
    input: entryFile,
@@ -1,9 +1,14 @@
import { createTRPCProxyClient, httpBatchLink } from '@trpc/client';
import superjson from 'superjson';
import { createRequire } from 'module';
import type { Runtime } from '@morten-olsen/mini-loader-server';
import type { RootRouter } from '@morten-olsen/mini-loader-server';
import { Context } from '../context/context.js';
import { readFile } from 'fs/promises';

const require = createRequire(import.meta.url);

const pkg = JSON.parse(await readFile(require.resolve('#pkg'), 'utf-8'));
const createClient = (context: Context) => {
  if (!context.host || !context.token) {
    throw new Error('Not signed in');
@@ -14,6 +19,7 @@ const createClient = (context: Context) => {
    httpBatchLink({
      url: `${context.host}/trpc`,
      headers: {
        'x-version': pkg.version,
        authorization: `Bearer ${context.token}`,
      },
    }),
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const list = new Command('list');

@@ -21,7 +22,8 @@ list
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { runId, loadId, offset, limit } = list.opts();
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -4,6 +4,7 @@ import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { dirname, resolve } from 'path';
import { mkdir, writeFile } from 'fs/promises';
import { Config } from '../../config/config.js';

const pull = new Command('pull');

@@ -12,7 +13,8 @@ pull
  .argument('<artifact-id>', 'Artifact ID')
  .argument('<file>', 'File to save')
  .action(async (id, file) => {
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const target = resolve(file);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
@@ -3,6 +3,7 @@ import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';

const remove = new Command('remove');

@@ -22,7 +23,8 @@ remove
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { runId, loadId, offset, limit } = remove.opts();
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -2,12 +2,14 @@ import { Command } from 'commander';
import inquerer from 'inquirer';
import { Context } from '../../context/context.js';
import { step } from '../../utils/step.js';
import { Config } from '../../config/config.js';

const login = new Command('login');

login.description('Login to your account');
login.action(async () => {
  const context = new Context();
  const config = new Config();
  const context = new Context(config.context);
  const { host, token } = await inquerer.prompt([
    {
      type: 'input',
10 packages/cli/src/commands/contexts/contexts.current.ts Normal file
@@ -0,0 +1,10 @@
import { Command } from 'commander';
import { Config } from '../../config/config.js';

const current = new Command('current');
current.action(async () => {
  const config = new Config();
  console.log(config.context);
});

export { current };
11 packages/cli/src/commands/contexts/contexts.list.ts Normal file
@@ -0,0 +1,11 @@
import { Command } from 'commander';
import { Context } from '../../context/context.js';

const list = new Command('list');
list.alias('ls').description('List contexts');
list.action(async () => {
  const contexts = await Context.list();
  console.table(contexts);
});

export { list };
12 packages/cli/src/commands/contexts/contexts.ts Normal file
@@ -0,0 +1,12 @@
import { Command } from 'commander';
import { list } from './contexts.list.js';
import { use } from './contexts.use.js';
import { current } from './contexts.current.js';

const contexts = new Command('contexts');
contexts.description('Manage contexts');
contexts.addCommand(list);
contexts.addCommand(use);
contexts.addCommand(current);

export { contexts };
11 packages/cli/src/commands/contexts/contexts.use.ts Normal file
@@ -0,0 +1,11 @@
import { Command } from 'commander';
import { Config } from '../../config/config.js';

const use = new Command('use');

use.argument('<name>').action(async (name) => {
  const config = new Config();
  await config.setContext(name);
});

export { use };
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const list = new Command('list');

@@ -9,7 +10,8 @@ list
  .alias('ls')
  .description('List loads')
  .action(async () => {
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -4,6 +4,7 @@ import { createClient } from '../../client/client.js';
import { bundle } from '../../bundler/bundler.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const push = new Command('push');

@@ -15,7 +16,8 @@ push
  .option('-ai, --auto-install', 'Auto install dependencies', false)
  .action(async (script) => {
    const opts = push.opts();
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const location = resolve(script);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
@@ -23,7 +25,7 @@ push
    const code = await step('Bundling', async () => {
      return await bundle({ entry: location, autoInstall: opts.autoInstall });
    });
    const id = await step('Creating load', async () => {
    const id = await step(`Creating load ${(code.length / 1024).toFixed(0)}`, async () => {
      return await client.loads.set.mutate({
        id: opts.id,
        name: opts.name,
@@ -32,9 +34,10 @@ push
    });
    console.log('created load with id', id);
    if (opts.run) {
      await step('Creating run', async () => {
        await client.runs.create.mutate({ loadId: id });
      const runId = await step('Creating run', async () => {
        return await client.runs.create.mutate({ loadId: id });
      });
      console.log('created run with id', runId);
    }
  });
@@ -3,6 +3,8 @@ import { resolve } from 'path';
import { run as runLoad } from '@morten-olsen/mini-loader-runner';
import { bundle } from '../../bundler/bundler.js';
import { step } from '../../utils/step.js';
import { readSecrets } from './local.utils.js';
import { Config } from '../../config/config.js';

const run = new Command('run');

@@ -11,13 +13,17 @@ run
  .argument('script')
  .action(async (script) => {
    const location = resolve(script);
    const config = new Config();
    const { autoInstall } = run.opts();
    const secrets = await readSecrets();

    const code = await step('Bundling', async () => {
      return await bundle({ entry: location, autoInstall });
    });
    const { promise, emitter } = await runLoad({
      script: code,
      secrets,
      cacheLocation: config.cacheLocation,
    });
    emitter.addListener('message', (message) => {
      switch (message.type) {
25 packages/cli/src/commands/local/local.utils.ts Normal file
@@ -0,0 +1,25 @@
import dotenv from 'dotenv';
import { existsSync } from 'fs';
import { readFile } from 'fs/promises';
import { join } from 'path';

const ENV_PREFIX = 'ML_S_';

const readSecrets = async () => {
  let secretLocation = join(process.cwd(), '.secrets');

  let secrets: Record<string, string> = {};

  if (existsSync(secretLocation)) {
    const content = await readFile(secretLocation, 'utf-8');
    secrets = dotenv.parse(content);
  }
  for (const key in process.env) {
    if (key.startsWith(ENV_PREFIX)) {
      secrets[key.replace(ENV_PREFIX, '')] = process.env[key]!;
    }
  }
  return secrets;
};

export { readSecrets };
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const list = new Command('list');

@@ -23,7 +24,8 @@ list
  .option('-s, --sort <order>', 'Sort', 'desc')
  .action(async () => {
    const { runId, loadId, severities, offset, limit, order } = list.opts();
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -3,6 +3,7 @@ import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';

const remove = new Command('remove');

@@ -24,7 +25,8 @@ remove
  .option('-s, --sort <order>', 'Sort', 'desc')
  .action(async () => {
    const { runId, loadId, severities, offset, limit, order } = remove.opts();
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const create = new Command('create');

@@ -9,7 +10,8 @@ create
  .description('Create a new run')
  .argument('load-id', 'Load ID')
  .action(async (loadId) => {
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const list = new Command('list');

@@ -10,7 +11,8 @@ list
  .description('Find a run')
  .argument('[load-id]', 'Load ID')
  .action(async (loadId) => {
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
59 packages/cli/src/commands/runs/runs.remove.ts Normal file
@@ -0,0 +1,59 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';

const remove = new Command('remove');

const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

remove
  .alias('ls')
  .description('List logs')
  .option('-l, --load-id <loadId>', 'Load ID')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { loadId, offset, limit } = remove.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const response = await step('Preparing to delete', async () => {
      return await client.runs.prepareRemove.query({
        loadId,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });

    if (!response.ids.length) {
      console.log('No logs to delete');
      return;
    }
    const { confirm } = await inquirer.prompt([
      {
        type: 'confirm',
        name: 'confirm',
        message: `Are you sure you want to delete ${response.ids.length} logs?`,
      },
    ]);

    if (!confirm) {
      return;
    }

    await step('Deleting artifacts', async () => {
      await client.runs.remove.mutate(response);
    });
  });

export { remove };
23 packages/cli/src/commands/runs/runs.terminate.ts Normal file
@@ -0,0 +1,23 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const terminate = new Command('terminate');

terminate
  .description('Terminate an in progress run')
  .argument('run-id', 'Run ID')
  .action(async (runId) => {
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    await step('Terminating run', async () => {
      await client.runs.terminate.mutate(runId);
    });
  });

export { terminate };
@@ -1,8 +1,14 @@
import { Command } from 'commander';
import { create } from './runs.create.js';
import { list } from './runs.list.js';
import { remove } from './runs.remove.js';
import { terminate } from './runs.terminate.js';

const runs = new Command('runs');
runs.description('Manage runs').addCommand(create).addCommand(list);
runs.description('Manage runs');
runs.addCommand(create);
runs.addCommand(list);
runs.addCommand(remove);
runs.addCommand(terminate);

export { runs };
32 packages/cli/src/commands/schedules/schedules.add.ts Normal file
@@ -0,0 +1,32 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const add = new Command('add');

add
  .description('Add schedule')
  .argument('<load-id>', 'Load ID')
  .argument('<cron>', 'Cron')
  .option('-n, --name <name>', 'Name')
  .action(async (loadId, cron) => {
    const config = new Config();
    const context = new Context(config.context);
    const { name } = add.opts();
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const id = await step('Adding schedule', async () => {
      return await client.schedules.add.mutate({
        name,
        load: loadId,
        cron,
      });
    });

    console.log(`Schedule added with ID ${id}`);
  });

export { add };
39 packages/cli/src/commands/schedules/schedules.list.ts Normal file
@@ -0,0 +1,39 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const list = new Command('list');

const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

list
  .alias('ls')
  .description('List schedules')
  .option('-l, --load-ids <loadIds...>', 'Load ID')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { loadIds, offset, limit } = list.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const schedules = await step('Getting schedules', async () => {
      return await client.schedules.find.query({
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });
    console.table(schedules);
  });

export { list };
61 packages/cli/src/commands/schedules/schedules.remove.ts Normal file
@@ -0,0 +1,61 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';

const remove = new Command('remove');

const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

remove
  .alias('ls')
  .description('LRemove schedules')
  .option('-i, --ids <ids...>', 'Load IDs')
  .option('-l, --load-ids <loadIds...>', 'Load IDs')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { ids, loadIds, offset, limit } = remove.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const response = await step('Preparing to delete', async () => {
      return await client.schedules.prepareRemove.query({
        ids,
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });

    if (!response.ids.length) {
      console.log('No logs to delete');
      return;
    }
    const { confirm } = await inquirer.prompt([
      {
        type: 'confirm',
        name: 'confirm',
        message: `Are you sure you want to delete ${response.ids.length} schedules?`,
      },
    ]);

    if (!confirm) {
      return;
    }

    await step('Deleting artifacts', async () => {
      await client.artifacts.remove.mutate(response);
    });
  });

export { remove };
11 packages/cli/src/commands/schedules/schedules.ts Normal file
@@ -0,0 +1,11 @@
import { Command } from 'commander';
import { list } from './schedules.list.js';
import { remove } from './schedules.remove.js';
import { add } from './schedules.add.js';

const schedules = new Command('schedules');
schedules.addCommand(list);
schedules.addCommand(remove);
schedules.addCommand(add);

export { schedules };
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const list = new Command('list');

@@ -19,7 +20,8 @@ list
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { offset, limit } = list.opts();
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const remove = new Command('remove');

@@ -9,7 +10,8 @@ remove
  .alias('rm')
  .argument('<id>')
  .action(async (id) => {
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
@@ -2,6 +2,7 @@ import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const set = new Command('set');

@@ -9,7 +10,8 @@ set
  .argument('<id>')
  .argument('[value]')
  .action(async (id, value) => {
    const context = new Context();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
49 packages/cli/src/config/config.ts Normal file
@@ -0,0 +1,49 @@
import envPaths from 'env-paths';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { mkdir } from 'fs/promises';
import { join, dirname } from 'path';

type ConfigValues = {
  context?: string;
};

const paths = envPaths('mini-loader');

class Config {
  #location: string;
  #config?: ConfigValues;

  constructor() {
    this.#location = join(paths.config, 'config.json');
    if (existsSync(this.#location)) {
      this.#config = JSON.parse(readFileSync(this.#location, 'utf-8'));
    }
  }

  public get context() {
    return this.#config?.context || 'default';
  }

  public get cacheLocation() {
    return join(paths.cache, this.context);
  }

  public setContext = (context: string) => {
    this.#config = {
      ...(this.#config || {}),
      context,
    };
    this.save();
  };

  public save = async () => {
    if (!this.#config) {
      return;
    }
    const json = JSON.stringify(this.#config);
    mkdir(dirname(this.#location), { recursive: true });
    writeFileSync(this.#location, json);
  };
}

export { Config };
@@ -1,7 +1,7 @@
import envPaths from 'env-paths';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { mkdir } from 'fs/promises';
import { dirname } from 'path';
import { existsSync, readFileSync } from 'fs';
import { mkdir, readdir, writeFile } from 'fs/promises';
import { dirname, join } from 'path';

type ContextValues = {
  host: string;
@@ -12,9 +12,9 @@ class Context {
  #location: string;
  #config?: ContextValues;

  constructor() {
    const paths = envPaths('dws');
    this.#location = paths.config;
  constructor(name: string) {
    const paths = envPaths('mini-loader');
    this.#location = join(paths.config, 'contexts', name);
    if (existsSync(this.#location)) {
      this.#config = JSON.parse(readFileSync(this.#location, 'utf-8'));
    }
@@ -42,8 +42,17 @@ class Context {
      return;
    }
    const json = JSON.stringify(this.#config);
    mkdir(dirname(this.#location), { recursive: true });
    writeFileSync(this.#location, json);
    await mkdir(dirname(this.#location), { recursive: true });
    await writeFile(this.#location, json);
  };

  public static list = async () => {
    const paths = envPaths('mini-loader');
    const location = join(paths.config, 'contexts');
    if (!existsSync(location)) {
      return [];
    }
    return await readdir(location);
  };
}
@@ -1,4 +1,5 @@
import { program } from 'commander';
import { Command, program } from 'commander';
import { createRequire } from 'module';
import { loads } from './commands/loads/loads.js';
import { runs } from './commands/runs/runs.js';
import { logs } from './commands/logs/logs.js';
@@ -6,6 +7,13 @@ import { artifacts } from './commands/artifacts/artifacts.js';
import { secrets } from './commands/secrets/secrets.js';
import { local } from './commands/local/local.js';
import { auth } from './commands/auth/auth.js';
import { contexts } from './commands/contexts/contexts.js';
import { schedules } from './commands/schedules/schedules.js';
import { readFile } from 'fs/promises';

const require = createRequire(import.meta.url);

const pkg = JSON.parse(await readFile(require.resolve('#pkg'), 'utf-8'));

program.addCommand(loads);
program.addCommand(runs);
@@ -14,5 +22,15 @@ program.addCommand(artifacts);
program.addCommand(secrets);
program.addCommand(local);
program.addCommand(auth);
program.addCommand(contexts);
program.addCommand(schedules);

program.version(pkg.version);

const version = new Command('version');
version.action(() => {
  console.log(pkg.version);
});
program.addCommand(version);

await program.parseAsync();
@@ -2,6 +2,7 @@
  "extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
  "compilerOptions": {
    "outDir": "dist/esm",
    "rootDir": "src"
  },
  "include": [
    "./src/**/*.ts"
@@ -13,6 +13,6 @@
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  }
}
@@ -8,6 +8,7 @@
|
||||
"sourceMap": true,
|
||||
"esModuleInterop": true,
|
||||
"strict": true,
|
||||
"resolveJsonModule": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"jsx": "react"
|
||||
},
|
||||
|
||||

1 packages/examples/.secrets Normal file
@@ -0,0 +1 @@
demo=foobar

@@ -18,15 +18,18 @@
    }
  },
  "devDependencies": {
    "@morten-olsen/mini-loader-configs": "workspace:^",
    "@morten-olsen/mini-loader-cli": "workspace:^",
    "@morten-olsen/mini-loader": "workspace:^",
    "@morten-olsen/mini-loader-cli": "workspace:^",
    "@morten-olsen/mini-loader-configs": "workspace:^",
    "@types/node": "^20.10.8",
    "typescript": "^5.3.3"
  },
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  },
  "dependencies": {
    "fastify": "^4.25.2"
  }
}

12 packages/examples/src/http.ts Normal file
@@ -0,0 +1,12 @@
import { http } from '@morten-olsen/mini-loader';
import fastify from 'fastify';

const server = fastify();

server.all('*', async (req) => {
  return req.url;
});

server.listen({
  path: http.getPath(),
});

3 packages/examples/src/secrets.ts Normal file
@@ -0,0 +1,3 @@
import { secrets } from '@morten-olsen/mini-loader';

console.log(secrets.get('demo'));

@@ -24,6 +24,6 @@
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  }
}

7 packages/mini-loader/src/http/http.ts Normal file
@@ -0,0 +1,7 @@
const getPath = () => process.env.HTTP_GATEWAY_PATH!;

const http = {
  getPath,
};

export { http };

@@ -8,3 +8,4 @@ export { logger } from './logger/logger.js';
export { artifacts } from './artifacts/artifacts.js';
export { input } from './input/input.js';
export { secrets } from './secrets/secrets.js';
export { http } from './http/http.js';

@@ -1,4 +1,5 @@
import { Socket, createConnection } from 'net';
import { Event } from './index.js';

const connect = () =>
  new Promise<Socket>((resolve, reject) => {
@@ -12,7 +13,7 @@ const connect = () =>
    });
  });

const send = async (data: any) =>
const send = async (data: Event) =>
  new Promise<void>(async (resolve, reject) => {
    const connection = await connect();
    const cleaned = JSON.parse(JSON.stringify(data));

@@ -17,18 +17,18 @@
    }
  },
  "devDependencies": {
    "@morten-olsen/mini-loader": "workspace:^",
    "@morten-olsen/mini-loader-configs": "workspace:^",
    "@types/node": "^20.10.8",
    "typescript": "^5.3.3"
  },
  "dependencies": {
    "@morten-olsen/mini-loader": "workspace:^",
    "eventemitter3": "^5.0.1",
    "nanoid": "^5.0.4"
  },
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  }
}

@@ -1,63 +1,25 @@
import { Worker } from 'worker_threads';
import os from 'os';
import { EventEmitter } from 'eventemitter3';
import { Event } from '@morten-olsen/mini-loader';
import { join } from 'path';
import { createServer } from 'http';
import { nanoid } from 'nanoid';
import { chmod, mkdir, rm, writeFile } from 'fs/promises';

type RunEvents = {
  message: (event: Event) => void;
  error: (error: Error) => void;
  exit: () => void;
};
import { setup } from './setup/setup.js';

type RunOptions = {
  script: string;
  input?: Buffer | string;
  secrets?: Record<string, string>;
  cacheLocation: string;
};

const run = async ({ script, input, secrets }: RunOptions) => {
  const dataDir = join(os.tmpdir(), 'mini-loader', nanoid());
  await mkdir(dataDir, { recursive: true });
  await chmod(dataDir, 0o700);
  const hostSocket = join(dataDir, 'host');
  const server = createServer();
  const inputLocation = join(dataDir, 'input');
const run = async ({ script, input, secrets, cacheLocation }: RunOptions) => {
  const info = await setup({ script, input, secrets, cacheLocation });

  if (input) {
    await writeFile(inputLocation, input);
  }

  const emitter = new EventEmitter<RunEvents>();

  server.on('connection', (socket) => {
    socket.on('data', (data) => {
      const message = JSON.parse(data.toString());
      emitter.emit('message', message);
    });
  });
  server.listen(hostSocket);

  const worker = new Worker(script, {
    eval: true,
  const worker = new Worker(info.scriptLocation, {
    stdin: false,
    stdout: false,
    stderr: false,
    env: {
      HOST_SOCKET: hostSocket,
      SECRETS: JSON.stringify(secrets),
      INPUT_PATH: inputLocation,
    },
    workerData: {
      input,
    },
    env: info.env,
  });

  worker.stdout?.on('data', (data) => {
    emitter.emit('message', {
    info.emitter.emit('message', {
      type: 'log',
      payload: {
        severity: 'info',
@@ -67,7 +29,7 @@ const run = async ({ script, input, secrets }: RunOptions) => {
  });

  worker.stderr?.on('data', (data) => {
    emitter.emit('message', {
    info.emitter.emit('message', {
      type: 'log',
      payload: {
        severity: 'error',
@@ -78,20 +40,24 @@ const run = async ({ script, input, secrets }: RunOptions) => {

  const promise = new Promise<void>((resolve, reject) => {
    worker.on('exit', async () => {
      server.close();
      await rm(dataDir, { recursive: true, force: true });
      await info.teardown();
      resolve();
    });
    worker.on('error', async (error) => {
      server.close();
      reject(error);
    });
  });

  return {
    emitter,
    ...info,
    teardown: async () => {
      worker.terminate();
    },
    promise,
  };
};

type RunInfo = Awaited<ReturnType<typeof run>>;

export type { RunInfo };
export { run };

71 packages/runner/src/setup/setup.ts Normal file
@@ -0,0 +1,71 @@
import { join } from 'path';
import { nanoid } from 'nanoid';
import { chmod, mkdir, rm, writeFile } from 'fs/promises';
import { createServer } from 'net';
import { EventEmitter } from 'eventemitter3';

type SetupOptions = {
  input?: Buffer | string;
  script: string;
  secrets?: Record<string, string>;
  cacheLocation: string;
};

type RunEvents = {
  message: (event: any) => void;
  error: (error: Error) => void;
  exit: () => void;
};

const setup = async (options: SetupOptions) => {
  const { input, script, secrets } = options;
  const emitter = new EventEmitter<RunEvents>();
  const dataDir = join(options.cacheLocation, nanoid());

  await mkdir(dataDir, { recursive: true });
  await chmod(dataDir, 0o700);
  const hostSocket = join(dataDir, 'host');
  const httpGatewaySocket = join(dataDir, 'socket');
  const server = createServer();
  const inputLocation = join(dataDir, 'input');
  const scriptLocation = join(dataDir, 'script.js');

  if (input) {
    await writeFile(inputLocation, input);
  }
  await writeFile(scriptLocation, script);
  const env = {
    HOST_SOCKET: hostSocket,
    SECRETS: JSON.stringify(secrets || {}),
    INPUT_PATH: inputLocation,
    HTTP_GATEWAY_PATH: httpGatewaySocket,
  };

  const teardown = async () => {
    server.close();
    await rm(dataDir, { recursive: true, force: true });
  };

  server.on('connection', (socket) => {
    socket.on('data', (data) => {
      const message = JSON.parse(data.toString());
      emitter.emit('message', message);
    });
  });

  server.listen(hostSocket);

  return {
    env,
    emitter,
    teardown,
    httpGatewaySocket,
    scriptLocation,
    hostSocket,
  };
};

type Setup = Awaited<ReturnType<typeof setup>>;

export type { Setup };
export { setup };

@@ -14,6 +14,9 @@
  "files": [
    "./dist"
  ],
  "imports": {
    "#pkg": "./package.json"
  },
  "exports": {
    ".": {
      "import": "./dist/esm/index.js"
@@ -21,15 +24,18 @@
  },
  "devDependencies": {
    "@morten-olsen/mini-loader-configs": "workspace:^",
    "@morten-olsen/mini-loader-runner": "workspace:^",
    "@types/jsonwebtoken": "^9.0.5",
    "@types/node": "^20.10.8",
    "typescript": "^5.3.3"
  },
  "dependencies": {
    "@fastify/reply-from": "^9.7.0",
    "@morten-olsen/mini-loader-runner": "workspace:^",
    "@trpc/client": "^10.45.0",
    "@trpc/server": "^10.45.0",
    "commander": "^11.1.0",
    "cron": "^3.1.6",
    "env-paths": "^3.0.0",
    "eventemitter3": "^5.0.1",
    "fastify": "^4.25.2",
    "jsonwebtoken": "^9.0.2",
@@ -43,6 +49,6 @@
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  }
}

@@ -20,10 +20,10 @@ class Auth {

  #setup = async () => {
    const { config } = this.#options;
    const secretLocation = resolve(config.files.location, 'secret');
    const secretLocation = resolve(config.files.data, 'secret');
    let secret = '';
    await mkdir(config.files.data, { recursive: true });
    if (!existsSync(secretLocation)) {
      await mkdir(config.files.location, { recursive: true });
      secret = nanoid();
      await writeFile(secretLocation, secret);
    } else {

@@ -3,7 +3,8 @@ import { Knex } from 'knex';
type Config = {
  database: Omit<Knex.Config, 'migrations'>;
  files: {
    location: string;
    data: string;
    cache: string;
  };
  auth?: {
    oidc?: {

@@ -1,6 +1,8 @@
import knex, { Knex } from 'knex';

import { source } from './migrations/migrations.source.js';
import { mkdir } from 'fs/promises';
import { dirname } from 'path';

const tableNames = {
  loads: 'loads',
@@ -20,6 +22,15 @@ class Database {
  }

  #setup = async (config: Knex.Config) => {
    if (
      config.connection &&
      typeof config.connection !== 'string' &&
      'filename' in config.connection &&
      typeof config.connection.filename === 'string' &&
      config.connection.filename !== ':memory:'
    ) {
      await mkdir(dirname(config.connection.filename), { recursive: true });
    }
    const db = knex(config);
    await db.migrate.latest();
    return db;

@@ -0,0 +1,22 @@
import { Knex } from 'knex';

const name = 'schedule-support';

const up = async (knex: Knex) => {
  await knex.schema.createTable('schedules', (table) => {
    table.string('id').primary();
    table.string('name').nullable();
    table.string('description').nullable();
    table.string('load').notNullable();
    table.string('cron').notNullable();
    table.string('input').nullable();
    table.timestamp('createdAt').notNullable();
    table.timestamp('updatedAt').notNullable();
  });
};

const down = async (knex: Knex) => {
  await knex.schema.dropTable('schedule');
};

export { name, up, down };

@@ -1,6 +1,7 @@
import { Knex } from 'knex';

import * as init from './migration.init.js';
import * as scheduleSupport from './migration.schedule.js';

type Migration = {
  name: string;
@@ -8,7 +9,7 @@ type Migration = {
  down: (knex: Knex) => Promise<void>;
};

const migrations = [init] satisfies Migration[];
const migrations = [init, scheduleSupport] satisfies Migration[];

const source: Knex.MigrationSource<Migration> = {
  getMigrations: async () => migrations,

34 packages/server/src/gateway/gateway.ts Normal file
@@ -0,0 +1,34 @@
import { FastifyPluginAsync } from 'fastify';
import FastifyReplyFrom from '@fastify/reply-from';
import { escape } from 'querystring';
import { Runtime } from '../runtime/runtime.js';

type Options = {
  runtime: Runtime;
};

const gateway: FastifyPluginAsync<Options> = async (fastify, { runtime }) => {
  await fastify.register(FastifyReplyFrom, {
    http: {},
  });

  fastify.all('/gateway/*', (req, res) => {
    const [runId, ...pathSegments] = (req.params as any)['*'].split('/').filter(Boolean);
    const run = runtime.runner.getInstance(runId);
    if (!run) {
      res.statusCode = 404;
      res.send({ error: 'Run not found' });
      return;
    }
    const socketPath = run.run?.httpGatewaySocket;
    if (!socketPath) {
      res.statusCode = 404;
      res.send({ error: 'No socket path to run' });
      return;
    }
    const path = pathSegments.join('/');
    res.from(`unix+http://${escape(socketPath)}/${path}`);
  });
};

export { gateway };

@@ -6,6 +6,7 @@ const start = new Command('start');
start.action(async () => {
  const port = 4500;
  const runtime = await Runtime.create();
  await runtime.scheduler.start();
  const server = await createServer(runtime);
  await server.listen({
    port,
@@ -18,7 +19,11 @@ start.action(async () => {
const createToken = new Command('create-token');
createToken.action(async () => {
  const runtime = await Runtime.create();
  const token = await runtime.auth.createToken({});
  const token = await runtime.auth.createToken({
    policy: {
      '*:*': ['*'],
    },
  });
  console.log(token);
});

10 packages/server/src/knex.d.ts vendored
@@ -43,5 +43,15 @@ declare module 'knex/types/tables.js' {
      createdAt: Date;
      updatedAt: Date;
    };
    schedules: {
      id: string;
      name?: string;
      description?: string;
      load: string;
      cron: string;
      input?: string;
      createdAt: Date;
      updatedAt: Date;
    };
  }
}

@@ -1,7 +1,7 @@
import { z } from 'zod';

const setLoadSchema = z.object({
  id: z.string(),
  id: z.string().optional(),
  name: z.string().optional(),
  script: z.string(),
});

@@ -60,9 +60,9 @@ class LoadRepo extends EventEmitter<LoadRepoEvents> {
  public set = async (options: SetLoadOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = options.id || nanoid();
    const id = options.id || `auto-id/${nanoid()}`;
    const script = createHash('sha256').update(options.script).digest('hex');
    const scriptDir = resolve(this.#options.config.files.location, 'scripts');
    const scriptDir = resolve(this.#options.config.files.data, 'scripts');
    await mkdir(scriptDir, { recursive: true });
    await writeFile(resolve(scriptDir, `${script}.js`), options.script);

@@ -4,6 +4,7 @@ import { ArtifactRepo } from './artifacts/artifacts.js';
import { LoadRepo } from './loads/loads.js';
import { LogRepo } from './logs/logs.js';
import { RunRepo } from './runs/runs.js';
import { ScheduleRepo } from './schedules/schedules.js';
import { SecretRepo } from './secrets/secrets.js';

type ReposOptions = {
@@ -17,6 +18,7 @@ class Repos {
  #logs: LogRepo;
  #artifacts: ArtifactRepo;
  #secrets: SecretRepo;
  #schedule: ScheduleRepo;

  constructor({ database, config }: ReposOptions) {
    this.#loads = new LoadRepo({
@@ -36,6 +38,9 @@ class Repos {
    this.#secrets = new SecretRepo({
      database,
    });
    this.#schedule = new ScheduleRepo({
      database,
    });
  }

  public get loads() {
@@ -57,8 +62,13 @@ class Repos {
  public get secrets() {
    return this.#secrets;
  }

  public get schedules() {
    return this.#schedule;
  }
}

export { findSchedulesSchema, addScheduleSchema } from './schedules/schedules.js';
export { findLogsSchema, addLogSchema } from './logs/logs.js';
export { setLoadSchema, findLoadsSchema } from './loads/loads.js';
export { createRunSchema, findRunsSchema } from './runs/runs.js';

@@ -3,6 +3,7 @@ import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { CreateRunOptions, FindRunsOptions, UpdateRunOptions } from './runs.schemas.js';
import { LoadRepo } from '../loads/loads.js';
import { createHash } from 'crypto';

type RunRepoEvents = {
  created: (args: { id: string; loadId: string }) => void;
@@ -18,13 +19,22 @@ type RunRepoOptions = {

class RunRepo extends EventEmitter<RunRepoEvents> {
  #options: RunRepoOptions;
  #isReady: Promise<void>;

  constructor(options: RunRepoOptions) {
    super();
    this.#options = options;
    this.#isReady = this.#setup();
  }

  #setup = async () => {
    const { database } = this.#options;
    const db = await database.instance;
    await db('runs').update({ status: 'failed', error: 'server was shut down' }).where({ status: 'running' });
  };

  public getById = async (id: string) => {
    await this.#isReady;
    const { database } = this.#options;
    const db = await database.instance;

@@ -36,6 +46,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
  };

  public getByLoadId = async (loadId: string) => {
    await this.#isReady;
    const { database } = this.#options;
    const db = await database.instance;

@@ -44,6 +55,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
  };

  public find = async (options: FindRunsOptions) => {
    await this.#isReady;
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('runs').select(['id', 'status', 'startedAt', 'status', 'error', 'endedAt']);
@@ -62,19 +74,41 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
    return runs;
  };

  public remove = async (options: FindRunsOptions) => {
  public prepareRemove = async (options: FindRunsOptions) => {
    await this.#isReady;
    const { database } = this.#options;
    const db = await database.instance;
    const query = db('runs');
    const query = db('runs').select('id');

    if (options.loadId) {
      query.where({ loadId: options.loadId });
    }

    await query.del();
    const result = await query;
    const ids = result.map((row) => row.id);
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const hash = createHash('sha256').update(token).digest('hex');
    return {
      ids,
      hash,
    };
  };

  public remove = async (hash: string, ids: string[]) => {
    const { database } = this.#options;
    const db = await database.instance;
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const actualHash = createHash('sha256').update(token).digest('hex');

    if (hash !== actualHash) {
      throw new Error('Invalid hash');
    }

    await db('runs').whereIn('id', ids).delete();
  };

  public started = async (id: string) => {
    await this.#isReady;
    const { database } = this.#options;
    const db = await database.instance;
    const current = await this.getById(id);
@@ -92,6 +126,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
  };

  public finished = async (id: string, options: UpdateRunOptions) => {
    await this.#isReady;
    const { database } = this.#options;
    const db = await database.instance;
    const { loadId } = await this.getById(id);
@@ -114,6 +149,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
  };

  public create = async (options: CreateRunOptions) => {
    await this.#isReady;
    const { database, loads } = this.#options;
    const id = nanoid();
    const db = await database.instance;

22 packages/server/src/repos/schedules/schedules.schemas.ts Normal file
@@ -0,0 +1,22 @@
import { z } from 'zod';

const addScheduleSchema = z.object({
  name: z.string().optional(),
  description: z.string().optional(),
  load: z.string(),
  cron: z.string(),
  input: z.string().optional(),
});

const findSchedulesSchema = z.object({
  ids: z.array(z.string()).optional(),
  loadIds: z.array(z.string()).optional(),
  offset: z.number().optional(),
  limit: z.number().optional(),
});

type AddScheduleOptions = z.infer<typeof addScheduleSchema>;
type FindSchedulesOptions = z.infer<typeof findSchedulesSchema>;

export type { AddScheduleOptions, FindSchedulesOptions };
export { addScheduleSchema, findSchedulesSchema };

118 packages/server/src/repos/schedules/schedules.ts Normal file
@@ -0,0 +1,118 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { nanoid } from 'nanoid';
import { AddScheduleOptions, FindSchedulesOptions } from './schedules.schemas.js';
import { createHash } from 'crypto';

type ScheduleRepoEvents = {
  added: (id: string) => void;
  removed: (id: string) => void;
};

type ScheduleRepoOptions = {
  database: Database;
};

class ScheduleRepo extends EventEmitter<ScheduleRepoEvents> {
  #options: ScheduleRepoOptions;

  constructor(options: ScheduleRepoOptions) {
    super();
    this.#options = options;
  }

  public get = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const result = await db('schedules').where('id', id).first();
    return result;
  };

  public add = async (options: AddScheduleOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = nanoid();

    await db('schedules').insert({
      id,
      name: options.name,
      description: options.description,
      cron: options.cron,
      createdAt: new Date(),
      updatedAt: new Date(),
    });

    this.emit('added', id);

    return id;
  };

  public prepareRemove = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;

    const query = db('schedules').select('id');

    if (options.ids) {
      query.whereIn('id', options.ids);
    }

    if (options.loadIds) {
      query.whereIn('loadId', options.loadIds);
    }

    const result = await query;
    const ids = result.map((row) => row.id);
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const hash = createHash('sha256').update(token).digest('hex');
    return {
      ids,
      hash,
    };
  };

  public remove = async (hash: string, ids: string[]) => {
    const { database } = this.#options;
    const db = await database.instance;
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const actualHash = createHash('sha256').update(token).digest('hex');

    if (hash !== actualHash) {
      throw new Error('Invalid hash');
    }

    await db('schedules').whereIn('id', ids).delete();
    ids.forEach((id) => {
      this.emit('removed', id);
    });
  };

  public find = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;

    const query = db('schedules');

    if (options.ids) {
      query.whereIn('id', options.ids);
    }

    if (options.loadIds) {
      query.whereIn('loadId', options.loadIds);
    }

    if (options.offset) {
      query.offset(options.offset);
    }

    if (options.limit) {
      query.limit(options.limit);
    }

    const results = await query;
    return results;
  };
}

export { addScheduleSchema, findSchedulesSchema } from './schedules.schemas.js';
export { ScheduleRepo };

@@ -1,3 +1,4 @@
import { z } from 'zod';
import { createRunSchema, findRunsSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';

@@ -17,17 +18,50 @@ const find = publicProcedure.input(findRunsSchema).query(async ({ input, ctx })
  return results;
});

const remove = publicProcedure.input(findRunsSchema).mutation(async ({ input, ctx }) => {
const prepareRemove = publicProcedure.input(findRunsSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { runs } = repos;
  await runs.remove(input);
  return await runs.prepareRemove(input);
});

const remove = publicProcedure

  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    const { runs } = repos;
    for (const id of input.ids) {
      const instance = runtime.runner.getInstance(id);
      if (instance) {
        await instance.run?.teardown();
      }
    }
    await runs.remove(input.hash, input.ids);
  });

const terminate = publicProcedure.input(z.string()).mutation(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { runner } = runtime;
  const instance = runner.getInstance(input);
  if (!instance || !instance.run) {
    return;
  }
  await instance.run.teardown();
});

const runsRouter = router({
  create,
  find,
  remove,
  prepareRemove,
  terminate,
});

export { runsRouter };

53 packages/server/src/router/router.schedules.ts Normal file
@@ -0,0 +1,53 @@
import { z } from 'zod';
import { addScheduleSchema, findSchedulesSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';

const add = publicProcedure.input(addScheduleSchema).mutation(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  const result = await schedules.add(input);
  return result;
});

const find = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  const result = await schedules.find(input);
  return result;
});

const prepareRemove = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  return await schedules.prepareRemove(input);
});

const remove = publicProcedure
  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    const { artifacts } = repos;

    await artifacts.remove(input.hash, input.ids);
  });

const schedulesRouter = router({
  add,
  find,
  remove,
  prepareRemove,
});

export { schedulesRouter };

@@ -2,6 +2,7 @@ import { artifactsRouter } from './router.artifacts.js';
import { loadsRouter } from './router.loads.js';
import { logsRouter } from './router.logs.js';
import { runsRouter } from './router.runs.js';
import { schedulesRouter } from './router.schedules.js';
import { secretsRouter } from './router.secrets.js';
import { router } from './router.utils.js';

@@ -11,6 +12,7 @@ const rootRouter = router({
  logs: logsRouter,
  artifacts: artifactsRouter,
  secrets: secretsRouter,
  schedules: schedulesRouter,
});

type RootRouter = typeof rootRouter;

@@ -1,5 +1,5 @@
import { EventEmitter } from 'eventemitter3';
import { run } from '@morten-olsen/mini-loader-runner';
import { RunInfo, run } from '@morten-olsen/mini-loader-runner';
import { Repos } from '../repos/repos.js';
import { LoggerEvent } from '../../../mini-loader/dist/esm/logger/logger.js';
import { ArtifactCreateEvent } from '../../../mini-loader/dist/esm/artifacts/artifacts.js';
@@ -20,12 +20,17 @@ type RunnerInstanceOptions = {

class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
  #options: RunnerInstanceOptions;
  #run?: RunInfo;

  constructor(options: RunnerInstanceOptions) {
    super();
    this.#options = options;
  }

  public get run() {
    return this.#run;
  }

  #addLog = async (event: LoggerEvent['payload']) => {
    const { repos, id, loadId } = this.#options;
    const { logs } = repos;
@@ -54,15 +59,18 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
    const { runs, secrets } = repos;
    try {
      const { script: scriptHash, input } = await runs.getById(id);
      const scriptLocation = resolve(config.files.location, 'scripts', `${scriptHash}.js`);
      const scriptLocation = resolve(config.files.data, 'scripts', `${scriptHash}.js`);
      const script = await readFile(scriptLocation, 'utf-8');
      const allSecrets = await secrets.getAll();
      await runs.started(id);
      const { promise, emitter } = await run({
      const current = await run({
        script,
        secrets: allSecrets,
        input,
        cacheLocation: config.files.cache,
      });
      this.#run = current;
      const { promise, emitter } = current;
      emitter.on('message', (message) => {
        switch (message.type) {
          case 'log': {
@@ -84,9 +92,11 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
      }
      await runs.finished(id, { status: 'failed', error: errorMessage });
    } finally {
      this.#run = undefined;
      this.emit('completed', { id });
    }
  };
}

export type { RunInfo };
export { RunnerInstance };

@@ -36,6 +36,10 @@ class Runner {
    this.#instances.set(args.id, instance);
    await instance.start();
  };

  public getInstance = (id: string) => {
    return this.#instances.get(id);
  };
}

export { Runner };

@@ -1,20 +1,26 @@
import { resolve } from 'path';
import envPaths from 'env-paths';
import { Database } from '../database/database.js';
import { Repos } from '../repos/repos.js';
import { Runner } from '../runner/runner.js';
import { Config } from '../config/config.js';
import { Auth } from '../auth/auth.js';
import { resolve } from 'path';
import { Scheduler } from '../scheduler/scheduler.js';

const paths = envPaths('mini-loader-server');

class Runtime {
  #repos: Repos;
  #runner: Runner;
  #auth: Auth;
  #scheduler: Scheduler;

  constructor(options: Config) {
    const database = new Database(options.database);
    this.#repos = new Repos({ database, config: options });
    this.#runner = new Runner({ repos: this.#repos, config: options });
    this.#auth = new Auth({ config: options });
    this.#scheduler = new Scheduler({ runs: this.#repos.runs, schedules: this.#repos.schedules });
  }

  public get repos() {
@@ -29,17 +35,22 @@ class Runtime {
    return this.#auth;
  }

  public get scheduler() {
    return this.#scheduler;
  }

  public static create = async () => {
    const runtime = new Runtime({
      database: {
        client: 'sqlite3',
        connection: {
          filename: resolve(process.cwd(), 'data', 'database.sqlite'),
          filename: resolve(paths.data, 'database.sqlite'),
        },
        useNullAsDefault: true,
      },
      files: {
        location: resolve(process.cwd(), 'data', 'files'),
        data: process.env.DATA_DIR || resolve(paths.data, 'data', 'files'),
        cache: process.env.CACHE_DIR || resolve(paths.cache, 'data', 'cache'),
      },
    });

73 packages/server/src/scheduler/scheduler.ts Normal file
@@ -0,0 +1,73 @@
import { CronJob } from 'cron';
import { ScheduleRepo } from '../repos/schedules/schedules.js';
import { RunRepo } from '../repos/runs/runs.js';

type SchedulerOptions = {
  runs: RunRepo;
  schedules: ScheduleRepo;
};

type RunningSchedule = {
  id: string;
  job: CronJob;
  stop: () => Promise<void>;
};

class Scheduler {
  #running: RunningSchedule[] = [];
  #options: SchedulerOptions;

  constructor(options: SchedulerOptions) {
    this.#options = options;
    const { schedules } = this.#options;
    schedules.on('added', this.#add);
    schedules.on('removed', this.#remove);
  }

  #remove = async (id: string) => {
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    this.#running = this.#running.filter((r) => r.id !== id);
  };

  #add = async (id: string) => {
    const { schedules, runs } = this.#options;
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    const schedule = await schedules.get(id);
    if (!schedule) {
      return;
    }
    const job = new CronJob(schedule.cron, async () => {
      await runs.create({
        loadId: schedule.load,
      });
    });
    const stop = async () => {
      job.stop();
    };
    this.#running.push({
      id: schedule.id,
      job,
      stop,
    });
  };

  public stop = async () => {
    for (const running of this.#running) {
      await running.stop();
      this.#running = this.#running.filter((r) => r !== running);
    }
  };

  public start = async () => {
    const { schedules } = this.#options;
    await this.stop();
    const all = await schedules.find({});
    for (const schedule of all) {
      await this.#add(schedule.id);
    }
  };
}

export { Scheduler };

@@ -3,11 +3,22 @@ import fastify from 'fastify';
import { RootRouter, rootRouter } from '../router/router.js';
import { createContext } from '../router/router.utils.js';
import { Runtime } from '../runtime/runtime.js';
import { gateway } from '../gateway/gateway.js';
import { createRequire } from 'module';
import { readFile } from 'fs/promises';

const require = createRequire(import.meta.url);

const createServer = async (runtime: Runtime) => {
  const server = fastify({});
  server.get('/', async () => {
    return { hello: 'world' };
  const pkgLocation = require.resolve('#pkg');
  const pkg = JSON.parse(await readFile(pkgLocation, 'utf-8'));

  const server = fastify({
    maxParamLength: 10000,
    bodyLimit: 30 * 1024 * 1024,
    logger: {
      level: 'warn',
    },
  });

  server.get('/health', async (req) => {
@@ -20,7 +31,7 @@ const createServer = async (runtime: Runtime) => {
      authorized = true;
    }
    } catch (error) {}
    return { authorized, status: 'ok' };
    return { authorized, status: 'ok', version: pkg.version };
  });

  server.register(fastifyTRPCPlugin, {
@@ -33,6 +44,14 @@ const createServer = async (runtime: Runtime) => {
    },
    } satisfies FastifyTRPCPluginOptions<RootRouter>['trpcOptions'],
  });

  server.register(gateway, {
    runtime,
  });

  server.addHook('onError', async (request, reply, error) => {
    console.error(error);
  });
  await server.ready();

  return server;

@@ -2,6 +2,7 @@
  "extends": "@morten-olsen/mini-loader-configs/tsconfig.esm.json",
  "compilerOptions": {
    "outDir": "dist/esm",
    "rootDir": "src"
  },
  "include": [
    "src"

95 pnpm-lock.yaml generated
@@ -36,6 +36,12 @@ importers:

  packages/cli:
    dependencies:
      '@morten-olsen/mini-loader-runner':
        specifier: workspace:^
        version: link:../runner
      '@morten-olsen/mini-loader-server':
        specifier: workspace:^
        version: link:../server
      '@rollup/plugin-auto-install':
        specifier: ^3.0.5
        version: 3.0.5(rollup@4.9.4)
@@ -60,6 +66,9 @@ importers:
      commander:
        specifier: ^11.1.0
        version: 11.1.0
      dotenv:
        specifier: ^16.3.1
        version: 16.3.1
      env-paths:
        specifier: ^3.0.0
        version: 3.0.0
@@ -85,12 +94,6 @@ importers:
      '@morten-olsen/mini-loader-configs':
        specifier: workspace:^
        version: link:../configs
      '@morten-olsen/mini-loader-runner':
        specifier: workspace:^
        version: link:../runner
      '@morten-olsen/mini-loader-server':
        specifier: workspace:^
        version: link:../server
      '@types/inquirer':
        specifier: ^9.0.7
        version: 9.0.7
@@ -101,6 +104,10 @@ importers:
  packages/configs: {}

  packages/examples:
    dependencies:
      fastify:
        specifier: ^4.25.2
        version: 4.25.2
    devDependencies:
      '@morten-olsen/mini-loader':
        specifier: workspace:^
@@ -132,6 +139,9 @@ importers:

  packages/runner:
    dependencies:
      '@morten-olsen/mini-loader':
        specifier: workspace:^
        version: link:../mini-loader
      eventemitter3:
        specifier: ^5.0.1
        version: 5.0.1
@@ -139,9 +149,6 @@ importers:
        specifier: ^5.0.4
        version: 5.0.4
    devDependencies:
      '@morten-olsen/mini-loader':
        specifier: workspace:^
        version: link:../mini-loader
      '@morten-olsen/mini-loader-configs':
        specifier: workspace:^
        version: link:../configs
@@ -154,6 +161,12 @@ importers:

  packages/server:
    dependencies:
      '@fastify/reply-from':
        specifier: ^9.7.0
        version: 9.7.0
      '@morten-olsen/mini-loader-runner':
        specifier: workspace:^
        version: link:../runner
      '@trpc/client':
        specifier: ^10.45.0
        version: 10.45.0(@trpc/server@10.45.0)
@@ -163,6 +176,12 @@ importers:
      commander:
        specifier: ^11.1.0
        version: 11.1.0
      cron:
        specifier: ^3.1.6
        version: 3.1.6
      env-paths:
        specifier: ^3.0.0
        version: 3.0.0
      eventemitter3:
        specifier: ^5.0.1
        version: 5.0.1
@@ -194,9 +213,6 @@ importers:
      '@morten-olsen/mini-loader-configs':
        specifier: workspace:^
        version: link:../configs
      '@morten-olsen/mini-loader-runner':
        specifier: workspace:^
        version: link:../runner
      '@types/jsonwebtoken':
        specifier: ^9.0.5
        version: 9.0.5
@@ -476,6 +492,11 @@ packages:
      fast-uri: 2.3.0
    dev: false

  /@fastify/busboy@2.1.0:
    resolution: {integrity: sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==}
    engines: {node: '>=14'}
    dev: false

  /@fastify/deepmerge@1.3.0:
    resolution: {integrity: sha512-J8TOSBq3SoZbDhM9+R/u77hP93gz/rajSA+K2kGyijPpORPWUXHUpTaleoj+92As0S9uPRP7Oi8IqMf0u+ro6A==}
    dev: false
@@ -490,6 +511,19 @@ packages:
      fast-json-stringify: 5.10.0
    dev: false

  /@fastify/reply-from@9.7.0:
    resolution: {integrity: sha512-/F1QBl3FGlTqStjmiuoLRDchVxP967TZh6FZPwQteWhdLsDec8mqSACE+cRzw6qHUj3v9hfdd7JNgmb++fyFhQ==}
    dependencies:
      '@fastify/error': 3.4.1
      end-of-stream: 1.4.4
      fast-content-type-parse: 1.1.0
      fast-querystring: 1.1.2
      fastify-plugin: 4.5.1
      pump: 3.0.0
      tiny-lru: 11.2.5
      undici: 5.28.2
    dev: false

  /@gar/promisify@1.1.3:
    resolution: {integrity: sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==}
    requiresBuild: true
@@ -1260,6 +1294,10 @@ packages:
      '@types/node': 20.10.8
    dev: true

  /@types/luxon@3.3.8:
    resolution: {integrity: sha512-jYvz8UMLDgy3a5SkGJne8H7VA7zPV2Lwohjx0V8V31+SqAjNmurWMkk9cQhfvlcnXWudBpK9xPM1n4rljOcHYQ==}
    dev: false

  /@types/node@20.10.8:
    resolution: {integrity: sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==}
    dependencies:
@@ -2055,6 +2093,13 @@ packages:
    resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
    dev: false

  /cron@3.1.6:
    resolution: {integrity: sha512-cvFiQCeVzsA+QPM6fhjBtlKGij7tLLISnTSvFxVdnFGLdz+ZdXN37kNe0i2gefmdD17XuZA6n2uPVwzl4FxW/w==}
    dependencies:
      '@types/luxon': 3.3.8
      luxon: 3.4.4
    dev: false

  /cross-spawn@7.0.3:
    resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
    engines: {node: '>= 8'}
@@ -2164,6 +2209,11 @@ packages:
      esutils: 2.0.3
    dev: true

  /dotenv@16.3.1:
    resolution: {integrity: sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==}
    engines: {node: '>=12'}
    dev: false

  /eastasianwidth@0.2.0:
    resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
    dev: false
@@ -2683,6 +2733,10 @@ packages:
    resolution: {integrity: sha512-eel5UKGn369gGEWOqBShmFJWfq/xSJvsgDzgLYC845GneayWvXBf0lJCBn5qTABfewy1ZDPoaR5OZCP+kssfuw==}
    dev: false

  /fastify-plugin@4.5.1:
    resolution: {integrity: sha512-stRHYGeuqpEZTL1Ef0Ovr2ltazUT9g844X5z/zEBFLG8RYlpDiOCIG+ATvYEp+/zmc7sN29mcIMp8gvYplYPIQ==}
    dev: false

  /fastify@4.25.2:
    resolution: {integrity: sha512-SywRouGleDHvRh054onj+lEZnbC1sBCLkR0UY3oyJwjD4BdZJUrxBqfkfCaqn74pVCwBaRHGuL3nEWeHbHzAfw==}
    dependencies:
@@ -3715,6 +3769,11 @@ packages:
    dependencies:
      yallist: 4.0.0

  /luxon@3.4.4:
    resolution: {integrity: sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==}
    engines: {node: '>=12'}
    dev: false

  /magic-string@0.25.9:
    resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==}
    dependencies:
@@ -5151,6 +5210,11 @@ packages:
    engines: {node: '>=8'}
    dev: false

  /tiny-lru@11.2.5:
    resolution: {integrity: sha512-JpqM0K33lG6iQGKiigcwuURAKZlq6rHXfrgeL4/I8/REoyJTGU+tEMszvT/oTRVHG2OiylhGDjqPp1jWMlr3bw==}
    engines: {node: '>=12'}
    dev: false

  /tmp@0.0.33:
    resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==}
    engines: {node: '>=0.6.0'}
@@ -5368,6 +5432,13 @@ packages:
  /undici-types@5.26.5:
    resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}

  /undici@5.28.2:
    resolution: {integrity: sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==}
    engines: {node: '>=14.0'}
    dependencies:
      '@fastify/busboy': 2.1.0
    dev: false

  /unique-filename@1.1.1:
    resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==}
    requiresBuild: true

@@ -3,6 +3,6 @@
  "homepage": "https://github.com/morten-olsen/mini-loader",
  "repository": {
    "type": "git",
    "url": "https://github.com/morten-olsen/mini-loader-repo"
    "url": "https://github.com/morten-olsen/mini-loader"
  }
}

@@ -1,15 +1,15 @@
{
  "include": [],
  "references": [
    {
      "path": "./packages/mini-loader/tsconfig.json"
    },
    {
      "path": "./packages/runner/tsconfig.json"
    },
    {
      "path": "./packages/server/tsconfig.json"
    },
    {
      "path": "./packages/mini-loader/tsconfig.json"
    },
    {
      "path": "./packages/cli/tsconfig.json"
    },