Mirror of https://github.com/morten-olsen/mini-loader.git (synced 2026-02-08 01:36:26 +01:00)

Compare commits: 19 commits
| Author | SHA1 | Date |
|---|---|---|
|  | a08f9e1c91 |  |
|  | e0c41d9220 |  |
|  | 028b65587e |  |
|  | 7436b3439c |  |
|  | 2109bc3af9 |  |
|  | eeaad68f6e |  |
|  | c7ca97f041 |  |
|  | c8e02d8da4 |  |
|  | 9a5b27f1be |  |
|  | 0760328854 |  |
|  | fa23b325b3 |  |
|  | 4f183310a6 |  |
|  | ecce49209f |  |
|  | f8f0eca320 |  |
|  | 1115ce2fb3 |  |
|  | 9c5249956e |  |
|  | b5d8cf3a51 |  |
|  | 5154fbb4a5 |  |
|  | 59d6faaafc |  |
.devcontainer/devcontainer.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "image": "mcr.microsoft.com/devcontainers/universal:2",
+  "features": {
+  }
+}

.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: bug
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Additional context**
+Add any other context about the problem here.

.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: enhancement
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.

.github/workflows/release.yml (vendored, 79 lines changed)
@@ -71,53 +71,72 @@ jobs:
     steps:
       - name: Checkout repository
        uses: actions/checkout@v4

       - name: Log in to the Container registry
         uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Retrieve version
+        run: |
+          echo "TAG_NAME=$(git describe --tag --abbrev=0)" >> $GITHUB_OUTPUT
+        id: version

       - name: Extract metadata (tags, labels) for Docker
         id: meta
         uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            latest
+            ${{ steps.version.outputs.TAG_NAME }}

       - name: Build and push Docker image
         uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
         with:
           context: .
           file: ./docker/Dockerfile
+          platforms: linux/amd64,linux/arm64
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
           push: true
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}

-  # release-npm:
-  #   if: github.ref == 'refs/heads/main'
-  #   runs-on: ubuntu-latest
-  #   needs: [build, update-release-draft]
-  #   permissions:
-  #     contents: read
-  #     packages: write
-  #   steps:
-  #     - uses: actions/checkout@v3
-  #       with:
-  #         fetch-depth: 0
-  #     - run: corepack enable
-  #     - uses: actions/setup-node@v3
-  #       with:
-  #         cache: '${{ env.NODE_CACHE }}'
-  #         node-version: '${{ env.NODE_VERSION }}'
-  #         scope: '${{ env.NODE_SCOPE }}'
-  #     - uses: actions/download-artifact@v3
-  #       with:
-  #         name: lib
-  #         path: ./
-  #     - run: |
-  #         pnpm config set //registry.npmjs.org/:_authToken ${NPM_TOKEN}
-  #         pnpm install
-  #         git config user.name "Github Actions Bot"
-  #         git config user.email "<>"
-  #         node scripts/set-version.ts $(git describe --tag --abbrev=0)
-  #         pnpm publish -r --publish-branch main --access public --no-git-checks
-  #       env:
-  #         NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+  release-npm:
+    if: github.ref == 'refs/heads/main'
+    runs-on: ubuntu-latest
+    needs: [build, update-release-draft]
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - run: corepack enable
+      - uses: actions/setup-node@v3
+        with:
+          cache: '${{ env.NODE_CACHE }}'
+          node-version: '${{ env.NODE_VERSION }}'
+          scope: '${{ env.NODE_SCOPE }}'
+      - uses: actions/download-artifact@v3
+        with:
+          name: lib
+          path: ./
+      - run: |
+          pnpm config set //registry.npmjs.org/:_authToken ${NPM_TOKEN}
+          pnpm install
+          git config user.name "Github Actions Bot"
+          git config user.email "<>"
+          node scripts/set-version.mjs $(git describe --tag --abbrev=0)
+          pnpm publish -r --publish-branch main --access public --no-git-checks
+        env:
+          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

CODE_OF_CONDUCT.md (new file, 43 lines)
@@ -0,0 +1,43 @@
+# Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+- Using welcoming and inclusive language
+- Being respectful of differing viewpoints and experiences
+- Gracefully accepting constructive criticism
+- Focusing on what is best for the community
+- Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+- The use of sexualized language or imagery and unwelcome sexual attention or advances
+- Trolling, insulting/derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or electronic address, without explicit permission
+- Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned with this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at s56gkgkq@void.black. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org), version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

CONTRIBUTING.md (new file, 59 lines)
@@ -0,0 +1,59 @@
+# Contributing to mini loader
+
+First off, thank you for considering contributing to mini loader! It's people like you that make mini loader such a great tool.
+
+## Code of Conduct
+
+This project and everyone participating in it is governed by the [mini loader Code of Conduct](CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to s56gkgkq@void.black.
+
+## How Can I Contribute?
+
+### Reporting Bugs
+
+This section guides you through submitting a bug report for mini loader. Following these guidelines helps maintainers and the community understand your report, reproduce the behavior, and find related reports.
+
+**Before Submitting A Bug Report**
+
+- Ensure the bug was not already reported by searching on GitHub under [Issues](https://github.com/morten-olsen/mini-loader/issues).
+- If you're unable to find an open issue addressing the problem, open a new one. Be sure to include a title and clear description, as much relevant information as possible, and a code sample or an executable test case demonstrating the expected behavior that is not occurring.
+
+### Suggesting Enhancements
+
+This section guides you through submitting an enhancement suggestion for mini loader, including completely new features and minor improvements to existing functionality.
+
+**Before Submitting An Enhancement Suggestion**
+
+- Check if the enhancement has already been suggested under [Issues](https://github.com/morten-olsen/mini-loader/issues).
+- If it hasn't, create a new issue and provide a concise description of the enhancement with as much detail as possible.
+
+### Your First Code Contribution
+
+Unsure where to begin contributing to mini loader? You can start by looking through `beginner` and `help-wanted` issues:
+
+- Beginner issues - issues which should only require a few lines of code, and a test or two.
+- Help wanted issues - issues which should be a bit more involved than `beginner` issues.
+
+### Pull Requests
+
+- Fill in the required template
+- Do not include issue numbers in the PR title
+- Follow the coding style used throughout the project
+- Include appropriate test coverage. New features should include new tests.
+- Document new code based on the [Documentation Styleguide](#documentation-styleguide)
+
+## Documentation Styleguide
+
+Use this style guide for documentation:
+
+- Use Markdown
+- Reference methods and classes in markdown backticks. For example, `ClassName.methodName`
+- Document new code or add comments in code to explain parts that might be confusing.
+
+## Use a Consistent Coding Style
+
+* 2 spaces for indentation rather than tabs
+* You can try running `pnpm run test:lint` for style unification
+
+## License
+
+By contributing to mini loader, you agree that your contributions will be licensed under its GPL-3 License.

README.md (14 lines changed)
@@ -1,10 +1,6 @@
-<p>
-<center>
-<img src="./assets/logo.png" width="300" height="300" />
-</center>
-</p>
+![](./assets/banner.png)

 # Welcome to Mini Loader! 🌐

@@ -26,9 +22,11 @@ Also see [anti-features and limitations](./docs/anti-features.md)
 Get up and running with mini loader in just a few steps:

 1. **Install the CLI**: `npm install -g @morten-olsen/mini-loader-cli`
-2. **Deploy the Server**: `docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main`.
+2. **Deploy the Server**: `docker run -p 4500:4500 --name mini-loader ghcr.io/morten-olsen/mini-loader`.
-3. **Push Your First Load**: `mini-loader loads push script.mjs -r -i first`
+3. **Get your access token**: `docker exec mini-loader mini-loader-server create-token`
-3. **See the logs**: `mini-loader logs ls -l first`
+4. **Login**: `mini-loader auth login http://localhost:4500`
+5. **Push Your First Load**: `mini-loader loads push script.mjs -r -i first`
+6. **See the logs**: `mini-loader logs ls -l first`

 For a detailed guide on getting started, please refer to the [Getting Started Tutorial](./docs/getting-started.md).

assets/banner.png (new binary file, 418 KiB; not shown)

@@ -5,5 +5,9 @@ services:
     build:
       context: .
       dockerfile: ./docker/Dockerfile
+    volumes:
+      - data:/app/data
     ports:
       - 4500:4500
+
+volumes:
+  data:

@@ -27,6 +27,11 @@ COPY --from=builder /app/out/full/ .
 RUN pnpm turbo run build --filter=@morten-olsen/mini-loader-server

 FROM base AS runner
+ENV \
+  NODE_ENV=production \
+  DATA_DIR=/data \
+  CACHE_DIR=/cache
+RUN apk add --no-cache jq curl
 WORKDIR /app

 # Don't run production as root
@@ -38,5 +43,12 @@ RUN chmod +x /entrypoint.sh

 COPY --from=installer /app .
 EXPOSE 4500
+VOLUME /data
+
+HEALTHCHECK \
+  --interval=10s \
+  --start-period=10s \
+  CMD curl -f http://localhost:4500/health || exit 1
+
 ENTRYPOINT ["/entrypoint.sh"]
 CMD ["mini-loader-server", "start"]

@@ -7,6 +7,8 @@ GID=${GID:-1001}
 addgroup --system --gid ${GID} nodejs && \
 adduser --system --uid ${UID} -G nodejs miniloader && \

-mkdir -p /app/data
-chown -R miniloader:nodejs /app/data
+mkdir -p ${DATA_DIR}
+mkdir -p ${CACHE_DIR}
+chown -R miniloader:nodejs ${DATA_DIR}
+chown -R miniloader:nodejs ${CACHE_DIR}
 su miniloader -s /bin/sh -c "$CMD"

@@ -15,14 +15,19 @@ npm install -g @morten-olsen/mini-loader-cli
 Now, let's write a basic script that outputs a single artifact named “hello”. Create a new file with the following JavaScript code:

 ```javascript
-import { artifacts } from "@morten-olsen/mini-loader";
+import { artifacts } from '@morten-olsen/mini-loader';

-artifacts.create('hello', 'world');
+const run = async () => {
+  artifacts.create('hello', 'world');
+};
+
+run();
 ```

-Save this file as `script.mjs`.
+Save this file as `script.js`.

 #### A Note on Dependencies

 In this script, we're using the `@morten-olsen/mini-loader` package, which might not be installed in your local environment. No worries though, as mini loader can automatically download necessary packages when preparing the script. Alternatively, for a more structured approach (especially if you're using TypeScript), you can initialize a Node.js project and install the dependencies for complete access to typings.

 ### Step 3: Run the Script Locally

@@ -30,7 +35,7 @@ In this script, we're using the `@morten-olsen/mini-loader` package, which might
 To validate that your script is functioning correctly, execute it locally using the following command:

 ```bash
-mini-loader local run script.mjs -ai
+mini-loader local run script.js -ai
 ```

 The `-ai` flag instructs the CLI to automatically download any referenced packages when bundling the script.

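The updated tutorial script wraps the artifact call in an async `run()` function. As a rough sketch of how a slightly larger load script could look under the same pattern (only `artifacts.create` is taken from the documented API; the second artifact name and the log line are invented for illustration):

```typescript
// Illustrative sketch only: a load script following the async run() pattern
// from the tutorial. The 'report' artifact and the console.log are made up.
import { artifacts } from '@morten-olsen/mini-loader';

const run = async () => {
  // Each call stores one named artifact for the run.
  artifacts.create('hello', 'world');
  artifacts.create('report', JSON.stringify({ generatedAt: new Date().toISOString() }));
  console.log('created 2 artifacts');
};

run();
```
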
@@ -58,7 +58,7 @@ mini-loader artifacts ls
 To download a specific artifact:

 ```bash
-mini-loader artifacts pull <id> > myfile.txt
+mini-loader artifacts pull <id> myfile.txt
 ```

 Replace `<id>` with the identifier of the artifact you wish to download.

@@ -8,7 +8,7 @@ This guide will help you quickly set up and run a mini loader server using Docker
 To begin, let's deploy the mini loader container. Run the following command in your terminal:

 ```bash
-docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:main
+docker run -p 4500:4500 -n mini-loader ghcr.io/morten-olsen/mini-loader:latest
 ```

 This command downloads the latest mini loader image and runs it, exposing port 4500.

@@ -1,6 +1,7 @@
 {
   "name": "@morten-olsen/mini-loader-repo",
   "private": "true",
+  "license": "GPL-3.0",
   "packageManager": "pnpm@8.10.4",
   "version": "1.0.0",
   "scripts": {
@@ -11,7 +12,6 @@
   },
   "keywords": [],
   "author": "",
-  "license": "ISC",
   "devDependencies": {
     "@react-native-community/eslint-config": "^3.2.0",
     "eslint": "^8.53.0",
@@ -23,5 +23,10 @@
     "@pnpm/find-workspace-packages": "^6.0.9",
     "@types/node": "^20.10.8",
     "ts-node": "^10.9.2"
+  },
+  "homepage": "https://github.com/morten-olsen/mini-loader",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/morten-olsen/mini-loader"
   }
 }

packages/cli/README.md (new file, 1 line)
@@ -0,0 +1 @@
+[Go to documentation](https://github.com/morten-olsen/mini-loader)

@@ -1,4 +1,4 @@
 #!/usr/bin/env node

 import 'source-map-support/register.js';
-import '../dist/esm/index.js';
+import '../dist/esm/src/index.js';

@@ -1,8 +1,9 @@
 {
   "name": "@morten-olsen/mini-loader-cli",
   "version": "1.0.0",
-  "main": "./dist/esm/index.js",
-  "types": "./dist/esm/index.d.ts",
+  "main": "./dist/esm/src/index.js",
+  "types": "./dist/esm/src/index.d.ts",
+  "license": "GPL-3.0",
   "bin": {
     "mini-loader": "./bin/index.mjs"
   },
@@ -15,10 +16,12 @@
   ],
   "exports": {
     ".": {
-      "import": "./dist/esm/index.js"
+      "import": "./dist/esm/src/index.js"
     }
   },
   "dependencies": {
+    "@morten-olsen/mini-loader-runner": "workspace:^",
+    "@morten-olsen/mini-loader-server": "workspace:^",
     "@rollup/plugin-auto-install": "^3.0.5",
     "@rollup/plugin-commonjs": "^25.0.7",
     "@rollup/plugin-json": "^6.1.0",
@@ -27,6 +30,8 @@
     "@rollup/plugin-sucrase": "^5.0.2",
     "@trpc/client": "^10.45.0",
     "commander": "^11.1.0",
+    "dotenv": "^16.3.1",
+    "env-paths": "^3.0.0",
     "inquirer": "^9.2.12",
     "ora": "^8.0.1",
     "rollup": "^4.9.4",
@@ -36,9 +41,12 @@
   },
   "devDependencies": {
     "@morten-olsen/mini-loader-configs": "workspace:^",
-    "@morten-olsen/mini-loader-runner": "workspace:^",
-    "@morten-olsen/mini-loader-server": "workspace:^",
     "@types/inquirer": "^9.0.7",
     "typescript": "^5.3.3"
+  },
+  "homepage": "https://github.com/morten-olsen/mini-loader",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/morten-olsen/mini-loader"
   }
 }

@@ -17,12 +17,12 @@ const bundle = async ({ entry, autoInstall }: BundleOptions) => {
   const entryFile = resolve(entry);
   const codeBundler = await rollup({
     plugins: [
+      fix(json)(),
      fix(sucrase)({
         transforms: ['typescript', 'jsx'],
       }),
       ...[autoInstall ? fix(auto) : []],
-      nodeResolve({ extensions: ['.js', '.jsx', '.ts', '.tsx'] }),
-      fix(json)(),
+      nodeResolve({ preferBuiltins: true, extensions: ['.js', '.jsx', '.ts', '.tsx'] }),
       fix(commonjs)({ include: /node_modules/ }),
     ],
     input: entryFile,

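The change moves JSON handling ahead of the TypeScript transform and tells `nodeResolve` to prefer Node built-ins. A rough, self-contained sketch of the same plugin ordering against the plain rollup API, without the repository's `fix()` interop helper or the optional auto-install plugin, and with an assumed ESM output step, could look like this:

```typescript
// Sketch only: mirrors the plugin order from the diff using the public rollup API.
// The repository wraps plugins in a fix() interop helper and may emit output differently.
import { rollup } from 'rollup';
import { nodeResolve } from '@rollup/plugin-node-resolve';
import commonjs from '@rollup/plugin-commonjs';
import json from '@rollup/plugin-json';
import sucrase from '@rollup/plugin-sucrase';
import { resolve } from 'path';

const bundle = async (entry: string) => {
  const bundler = await rollup({
    input: resolve(entry),
    plugins: [
      json(),                                        // parse JSON imports before transpiling
      sucrase({ transforms: ['typescript', 'jsx'] }), // strip types / JSX
      nodeResolve({ preferBuiltins: true, extensions: ['.js', '.jsx', '.ts', '.tsx'] }),
      commonjs({ include: /node_modules/ }),          // convert CommonJS deps to ESM
    ],
  });
  const { output } = await bundler.generate({ format: 'esm' }); // assumed output format
  return output[0].code;
};
```
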
@@ -2,13 +2,22 @@ import { createTRPCProxyClient, httpBatchLink } from '@trpc/client';
 import superjson from 'superjson';
 import type { Runtime } from '@morten-olsen/mini-loader-server';
 import type { RootRouter } from '@morten-olsen/mini-loader-server';
+import pkg from '../../package.json';
+import { Context } from '../context/context.js';

-const createClient = () => {
+const createClient = (context: Context) => {
+  if (!context.host || !context.token) {
+    throw new Error('Not signed in');
+  }
   const client = createTRPCProxyClient<RootRouter>({
     transformer: superjson,
     links: [
       httpBatchLink({
-        url: 'http://localhost:4500/trpc',
+        url: `${context.host}/trpc`,
+        headers: {
+          'x-version': pkg.version,
+          authorization: `Bearer ${context.token}`,
+        },
       }),
     ],
   });

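Every command in this change set now resolves its connection the same way: read the active context name from `Config`, build a `Context`, and hand it to `createClient`, which refuses to run when no host or token is saved. A condensed sketch of that shared wiring, as it recurs across the command files below (the command name and the query it issues are placeholders):

```typescript
// Sketch of the Config -> Context -> createClient pattern repeated in the CLI commands.
// The 'example' command and the loads query are placeholders for illustration.
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import { Config } from '../../config/config.js';

const example = new Command('example');

example.action(async () => {
  const config = new Config();                   // persisted CLI configuration
  const context = new Context(config.context);   // active host/token pair
  const client = await step('Connecting to server', async () => {
    return createClient(context);                // throws 'Not signed in' without host/token
  });
  const loads = await step('Getting data', async () => {
    return await client.loads.find.query({});
  });
  console.table(loads);
});

export { example };
```
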
@@ -1,6 +1,8 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';

 const list = new Command('list');

@@ -20,8 +22,10 @@ list
   .option('-a, --limit <limit>', 'Limit', '1000')
   .action(async () => {
     const { runId, loadId, offset, limit } = list.opts();
+    const config = new Config();
+    const context = new Context(config.context);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
     const artifacts = await step('Getting artifacts', async () => {
       return await client.artifacts.find.query({

packages/cli/src/commands/artifacts/artifacts.pull.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import { Command } from 'commander';
+import { createClient } from '../../client/client.js';
+import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { dirname, resolve } from 'path';
+import { mkdir, writeFile } from 'fs/promises';
+import { Config } from '../../config/config.js';
+
+const pull = new Command('pull');
+
+pull
+  .description('Download artifact')
+  .argument('<artifact-id>', 'Artifact ID')
+  .argument('<file>', 'File to save')
+  .action(async (id, file) => {
+    const config = new Config();
+    const context = new Context(config.context);
+    const target = resolve(file);
+    const client = await step('Connecting to server', async () => {
+      return createClient(context);
+    });
+    const artifact = await step('Getting artifact', async () => {
+      const result = await client.artifacts.get.query(id);
+      if (!result) {
+        throw new Error('Artifact not found');
+      }
+      return result;
+    });
+    await mkdir(dirname(target), { recursive: true });
+    const data = Buffer.from(artifact.data, 'base64').toString('utf-8');
+    await writeFile(target, data, 'utf-8');
+  });
+
+export { pull };

packages/cli/src/commands/artifacts/artifacts.remove.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
+import { Command } from 'commander';
+import { createClient } from '../../client/client.js';
+import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import inquirer from 'inquirer';
+import { Config } from '../../config/config.js';
+
+const remove = new Command('remove');
+
+const toInt = (value?: string) => {
+  if (!value) {
+    return undefined;
+  }
+  return parseInt(value, 10);
+};
+
+remove
+  .alias('ls')
+  .description('List logs')
+  .option('-r, --run-id <runId>', 'Run ID')
+  .option('-l, --load-id <loadId>', 'Load ID')
+  .option('-o, --offset <offset>', 'Offset')
+  .option('-a, --limit <limit>', 'Limit', '1000')
+  .action(async () => {
+    const { runId, loadId, offset, limit } = remove.opts();
+    const config = new Config();
+    const context = new Context(config.context);
+    const client = await step('Connecting to server', async () => {
+      return createClient(context);
+    });
+    const response = await step('Preparing to delete', async () => {
+      return await client.artifacts.prepareRemove.query({
+        runId,
+        loadId,
+        offset: toInt(offset),
+        limit: toInt(limit),
+      });
+    });
+
+    if (!response.ids.length) {
+      console.log('No logs to delete');
+      return;
+    }
+    const { confirm } = await inquirer.prompt([
+      {
+        type: 'confirm',
+        name: 'confirm',
+        message: `Are you sure you want to delete ${response.ids.length} logs?`,
+      },
+    ]);
+
+    if (!confirm) {
+      return;
+    }
+
+    await step('Deleting artifacts', async () => {
+      await client.artifacts.remove.mutate(response);
+    });
+  });
+
+export { remove };

@@ -1,7 +1,11 @@
 import { Command } from 'commander';
 import { list } from './artifacts.list.js';
+import { remove } from './artifacts.remove.js';
+import { pull } from './artifacts.pull.js';

 const artifacts = new Command('artifacts');
 artifacts.addCommand(list);
+artifacts.addCommand(remove);
+artifacts.addCommand(pull);

 export { artifacts };

@@ -1,16 +1,21 @@
 import { Command } from 'commander';
 import inquerer from 'inquirer';
+import { Context } from '../../context/context.js';
+import { step } from '../../utils/step.js';
+import { Config } from '../../config/config.js';

 const login = new Command('login');

 login.description('Login to your account');
 login.action(async () => {
+  const config = new Config();
+  const context = new Context(config.context);
   const { host, token } = await inquerer.prompt([
     {
       type: 'input',
       name: 'host',
       message: 'Enter the host of your server',
-      default: 'http://localhost:4500',
+      default: context.host ?? 'http://localhost:4500',
     },
     {
       type: 'password',
@@ -19,7 +24,25 @@ login.action(async () => {
     },
   ]);

-  console.log(host, token);
+  const healthResponse = await step('Getting auth status', async () => {
+    return await fetch(`${host}/health`, {
+      headers: {
+        authorization: `Bearer ${token}`,
+      },
+    });
+  });
+
+  if (!healthResponse.ok) {
+    throw new Error('Invalid token');
+  }
+  const health = await healthResponse.json();
+  if (!health.authorized) {
+    throw new Error('Invalid token');
+  }
+
+  await step('Saving login', async () => {
+    await context.saveLogin(host, token);
+  });
 });

 export { login };

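The login flow now validates the token by calling the server's `/health` endpoint with a bearer header and checking an `authorized` flag in the JSON body. The endpoint itself is not part of this change set, so the response shape below is an assumption; a typed wrapper around the same check might look like:

```typescript
// Assumed response shape: the diff only shows that /health returns JSON with
// an `authorized` boolean when called with a bearer token.
type HealthResponse = {
  authorized: boolean;
};

// Small helper mirroring the check performed in login.action.
const verifyToken = async (host: string, token: string): Promise<boolean> => {
  const response = await fetch(`${host}/health`, {
    headers: { authorization: `Bearer ${token}` },
  });
  if (!response.ok) {
    return false;
  }
  const health = (await response.json()) as HealthResponse;
  return health.authorized === true;
};
```
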
packages/cli/src/commands/contexts/contexts.current.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
+import { Command } from 'commander';
+import { Config } from '../../config/config.js';
+
+const current = new Command('current');
+current.action(async () => {
+  const config = new Config();
+  console.log(config.context);
+});
+
+export { current };

packages/cli/src/commands/contexts/contexts.list.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
+import { Command } from 'commander';
+import { Context } from '../../context/context.js';
+
+const list = new Command('list');
+list.alias('ls').description('List contexts');
+list.action(async () => {
+  const contexts = await Context.list();
+  console.table(contexts);
+});
+
+export { list };

packages/cli/src/commands/contexts/contexts.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
+import { Command } from 'commander';
+import { list } from './contexts.list.js';
+import { use } from './contexts.use.js';
+import { current } from './contexts.current.js';
+
+const contexts = new Command('contexts');
+contexts.description('Manage contexts');
+contexts.addCommand(list);
+contexts.addCommand(use);
+contexts.addCommand(current);
+
+export { contexts };

packages/cli/src/commands/contexts/contexts.use.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
+import { Command } from 'commander';
+import { Config } from '../../config/config.js';
+
+const use = new Command('use');
+
+use.argument('<name>').action(async (name) => {
+  const config = new Config();
+  await config.setContext(name);
+});
+
+export { use };

@@ -1,6 +1,8 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';

 const list = new Command('list');

@@ -8,11 +10,13 @@ list
   .alias('ls')
   .description('List loads')
   .action(async () => {
+    const config = new Config();
+    const context = new Context(config.context);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
-    const loads = step('Getting data', async () => {
-      await client.loads.find.query({});
+    const loads = await step('Getting data', async () => {
+      return await client.loads.find.query({});
     });
     console.table(loads);
   });

@@ -3,6 +3,8 @@ import { resolve } from 'path';
 import { createClient } from '../../client/client.js';
 import { bundle } from '../../bundler/bundler.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';

 const push = new Command('push');

@@ -14,14 +16,16 @@ push
   .option('-ai, --auto-install', 'Auto install dependencies', false)
   .action(async (script) => {
     const opts = push.opts();
+    const config = new Config();
+    const context = new Context(config.context);
     const location = resolve(script);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
     const code = await step('Bundling', async () => {
       return await bundle({ entry: location, autoInstall: opts.autoInstall });
     });
-    const id = await step('Creating load', async () => {
+    const id = await step(`Creating load ${(code.length / 1024).toFixed(0)}`, async () => {
       return await client.loads.set.mutate({
         id: opts.id,
         name: opts.name,
@@ -30,9 +34,10 @@ push
     });
     console.log('created load with id', id);
     if (opts.run) {
-      await step('Creating run', async () => {
-        await client.runs.create.mutate({ loadId: id });
+      const runId = await step('Creating run', async () => {
+        return await client.runs.create.mutate({ loadId: id });
       });
+      console.log('created run with id', runId);
     }
   });

@@ -3,6 +3,8 @@ import { resolve } from 'path';
 import { run as runLoad } from '@morten-olsen/mini-loader-runner';
 import { bundle } from '../../bundler/bundler.js';
 import { step } from '../../utils/step.js';
+import { readSecrets } from './local.utils.js';
+import { Config } from '../../config/config.js';

 const run = new Command('run');

@@ -11,13 +13,17 @@ run
   .argument('script')
   .action(async (script) => {
     const location = resolve(script);
+    const config = new Config();
     const { autoInstall } = run.opts();
+    const secrets = await readSecrets();

     const code = await step('Bundling', async () => {
       return await bundle({ entry: location, autoInstall });
     });
     const { promise, emitter } = await runLoad({
       script: code,
+      secrets,
+      cacheLocation: config.cacheLocation,
     });
     emitter.addListener('message', (message) => {
       switch (message.type) {

packages/cli/src/commands/local/local.utils.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
+import dotenv from 'dotenv';
+import { existsSync } from 'fs';
+import { readFile } from 'fs/promises';
+import { join } from 'path';
+
+const ENV_PREFIX = 'ML_S_';
+
+const readSecrets = async () => {
+  let secretLocation = join(process.cwd(), '.secret');
+
+  let secrets: Record<string, string> = {};
+
+  if (existsSync(secretLocation)) {
+    const content = await readFile(secretLocation, 'utf-8');
+    secrets = dotenv.parse(content);
+  }
+  for (const key in process.env) {
+    if (key.startsWith(ENV_PREFIX)) {
+      secrets[key.replace(ENV_PREFIX, '')] = process.env[key]!;
+    }
+  }
+  return secrets;
+};
+
+export { readSecrets };

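`readSecrets` merges a `.secret` dotenv file in the working directory with any `ML_S_`-prefixed environment variables, and `local run` passes the result straight to the runner. A rough sketch of that flow in isolation (the secret name and the inline script string are invented for the example, and the optional `cacheLocation` from the command above is omitted):

```typescript
// Sketch: how the secrets collected by readSecrets feed into a local run.
// The ML_S_API_KEY example and the inline script are placeholders, not repository content.
import { run as runLoad } from '@morten-olsen/mini-loader-runner';
import { readSecrets } from './local.utils.js';

const example = async () => {
  // With ML_S_API_KEY=abc in the environment, secrets becomes { API_KEY: 'abc' },
  // merged over anything parsed from ./.secret.
  const secrets = await readSecrets();

  const { promise, emitter } = await runLoad({
    script: 'console.log("hello from a load");', // placeholder bundled code
    secrets,
  });

  emitter.addListener('message', (message) => {
    console.log('runner message', message);
  });

  await promise;
};
```
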
@@ -1,6 +1,8 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';

 const list = new Command('list');

@@ -22,8 +24,10 @@ list
   .option('-s, --sort <order>', 'Sort', 'desc')
   .action(async () => {
     const { runId, loadId, severities, offset, limit, order } = list.opts();
+    const config = new Config();
+    const context = new Context(config.context);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
     const logs = await step('Getting logs', async () => {
       return await client.logs.find.query({
@@ -35,7 +39,7 @@ list
         order,
       });
     });
-    console.table(logs.reverse());
+    console.table(logs);
   });

 export { list };

packages/cli/src/commands/logs/logs.remove.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
+import { Command } from 'commander';
+import { createClient } from '../../client/client.js';
+import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import inquirer from 'inquirer';
+import { Config } from '../../config/config.js';
+
+const remove = new Command('remove');
+
+const toInt = (value?: string) => {
+  if (!value) {
+    return undefined;
+  }
+  return parseInt(value, 10);
+};
+
+remove
+  .alias('ls')
+  .description('List logs')
+  .option('-r, --run-id <runId>', 'Run ID')
+  .option('-l, --load-id <loadId>', 'Load ID')
+  .option('--severities <severities...>', 'Severities')
+  .option('-o, --offset <offset>', 'Offset')
+  .option('-a, --limit <limit>', 'Limit', '1000')
+  .option('-s, --sort <order>', 'Sort', 'desc')
+  .action(async () => {
+    const { runId, loadId, severities, offset, limit, order } = remove.opts();
+    const config = new Config();
+    const context = new Context(config.context);
+    const client = await step('Connecting to server', async () => {
+      return createClient(context);
+    });
+    const response = await step('Preparing to delete', async () => {
+      return await client.logs.prepareRemove.query({
+        runId,
+        loadId,
+        severities,
+        offset: toInt(offset),
+        limit: toInt(limit),
+        order,
+      });
+    });
+
+    if (!response.ids.length) {
+      console.log('No logs to delete');
+      return;
+    }
+    const { confirm } = await inquirer.prompt([
+      {
+        type: 'confirm',
+        name: 'confirm',
+        message: `Are you sure you want to delete ${response.ids.length} logs?`,
+      },
+    ]);
+
+    if (!confirm) {
+      return;
+    }
+
+    await step('Deleting logs', async () => {
+      await client.logs.remove.mutate(response);
+    });
+  });
+
+export { remove };

@@ -1,7 +1,9 @@
 import { Command } from 'commander';
 import { list } from './logs.list.js';
+import { remove } from './logs.remove.js';

 const logs = new Command('logs');
 logs.addCommand(list);
+logs.addCommand(remove);

 export { logs };

@@ -1,6 +1,8 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';

 const create = new Command('create');

@@ -8,8 +10,10 @@ create
   .description('Create a new run')
   .argument('load-id', 'Load ID')
   .action(async (loadId) => {
+    const config = new Config();
+    const context = new Context(config.context);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
     await step('Creating run', async () => {
       await client.runs.create.mutate({ loadId });

@@ -1,16 +1,20 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';

-const list = new Command('create');
+const list = new Command('list');

 list
   .alias('ls')
   .description('Find a run')
   .argument('[load-id]', 'Load ID')
   .action(async (loadId) => {
+    const config = new Config();
+    const context = new Context(config.context);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
     const runs = await step('Getting runs', async () => {
       return await client.runs.find.query({ loadId });

packages/cli/src/commands/runs/runs.remove.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
+import { Command } from 'commander';
+import { createClient } from '../../client/client.js';
+import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import inquirer from 'inquirer';
+import { Config } from '../../config/config.js';
+
+const remove = new Command('remove');
+
+const toInt = (value?: string) => {
+  if (!value) {
+    return undefined;
+  }
+  return parseInt(value, 10);
+};
+
+remove
+  .alias('ls')
+  .description('List logs')
+  .option('-l, --load-id <loadId>', 'Load ID')
+  .option('-o, --offset <offset>', 'Offset')
+  .option('-a, --limit <limit>', 'Limit', '1000')
+  .action(async () => {
+    const { loadId, offset, limit } = remove.opts();
+    const config = new Config();
+    const context = new Context(config.context);
+    const client = await step('Connecting to server', async () => {
+      return createClient(context);
+    });
+    const response = await step('Preparing to delete', async () => {
+      return await client.runs.prepareRemove.query({
+        loadId,
+        offset: toInt(offset),
+        limit: toInt(limit),
+      });
+    });
+
+    if (!response.ids.length) {
+      console.log('No logs to delete');
+      return;
+    }
+    const { confirm } = await inquirer.prompt([
+      {
+        type: 'confirm',
+        name: 'confirm',
+        message: `Are you sure you want to delete ${response.ids.length} logs?`,
+      },
+    ]);
+
+    if (!confirm) {
+      return;
+    }
+
+    await step('Deleting artifacts', async () => {
+      await client.runs.remove.mutate(response);
+    });
+  });
+
+export { remove };

packages/cli/src/commands/runs/runs.terminate.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+import { Command } from 'commander';
+import { createClient } from '../../client/client.js';
+import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';
+
+const terminate = new Command('terminate');
+
+terminate
+  .description('Terminate an in progress run')
+  .argument('run-id', 'Run ID')
+  .action(async (runId) => {
+    const config = new Config();
+    const context = new Context(config.context);
+    const client = await step('Connecting to server', async () => {
+      return createClient(context);
+    });
+    await step('Terminating run', async () => {
+      await client.runs.terminate.mutate(runId);
+    });
+  });
+
+export { terminate };

@@ -1,8 +1,14 @@
 import { Command } from 'commander';
 import { create } from './runs.create.js';
 import { list } from './runs.list.js';
+import { remove } from './runs.remove.js';
+import { terminate } from './runs.terminate.js';

 const runs = new Command('runs');
-runs.description('Manage runs').addCommand(create).addCommand(list);
+runs.description('Manage runs');
+runs.addCommand(create);
+runs.addCommand(list);
+runs.addCommand(remove);
+runs.addCommand(terminate);

 export { runs };

packages/cli/src/commands/schedules/schedules.add.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
+import { Command } from 'commander';
+import { createClient } from '../../client/client.js';
+import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';
+
+const add = new Command('add');
+
+add
+  .description('Add schedule')
+  .argument('<load-id>', 'Load ID')
+  .argument('<cron>', 'Cron')
+  .option('-n, --name <name>', 'Name')
+  .action(async (loadId, cron) => {
+    const config = new Config();
+    const context = new Context(config.context);
+    const { name } = add.opts();
+    const client = await step('Connecting to server', async () => {
+      return createClient(context);
+    });
+    const id = await step('Adding schedule', async () => {
+      return await client.schedules.add.mutate({
+        name,
+        load: loadId,
+        cron,
+      });
+    });
+
+    console.log(`Schedule added with ID ${id}`);
+  });
+
+export { add };

39
packages/cli/src/commands/schedules/schedules.list.ts
Normal file
39
packages/cli/src/commands/schedules/schedules.list.ts
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
import { Command } from 'commander';
|
||||||
|
import { createClient } from '../../client/client.js';
|
||||||
|
import { step } from '../../utils/step.js';
|
||||||
|
import { Context } from '../../context/context.js';
|
||||||
|
import { Config } from '../../config/config.js';
|
||||||
|
|
||||||
|
const list = new Command('list');
|
||||||
|
|
||||||
|
const toInt = (value?: string) => {
|
||||||
|
if (!value) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return parseInt(value, 10);
|
||||||
|
};
|
||||||
|
|
||||||
|
list
|
||||||
|
.alias('ls')
|
||||||
|
.description('List schedules')
|
||||||
|
.option('-l, --load-ids <loadIds...>', 'Load ID')
|
||||||
|
.option('-o, --offset <offset>', 'Offset')
|
||||||
|
.option('-a, --limit <limit>', 'Limit', '1000')
|
||||||
|
.action(async () => {
|
||||||
|
const { loadIds, offset, limit } = list.opts();
|
||||||
|
const config = new Config();
|
||||||
|
const context = new Context(config.context);
|
||||||
|
const client = await step('Connecting to server', async () => {
|
||||||
|
return createClient(context);
|
||||||
|
});
|
||||||
|
const schedules = await step('Getting schedules', async () => {
|
||||||
|
return await client.schedules.find.query({
|
||||||
|
loadIds,
|
||||||
|
offset: toInt(offset),
|
||||||
|
limit: toInt(limit),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
console.table(schedules);
|
||||||
|
});
|
||||||
|
|
||||||
|
export { list };
|
||||||
61 packages/cli/src/commands/schedules/schedules.remove.ts Normal file
@@ -0,0 +1,61 @@
import { Command } from 'commander';
import { createClient } from '../../client/client.js';
import { step } from '../../utils/step.js';
import { Context } from '../../context/context.js';
import inquirer from 'inquirer';
import { Config } from '../../config/config.js';

const remove = new Command('remove');

const toInt = (value?: string) => {
  if (!value) {
    return undefined;
  }
  return parseInt(value, 10);
};

remove
  .alias('rm')
  .description('Remove schedules')
  .option('-i, --ids <ids...>', 'Load IDs')
  .option('-l, --load-ids <loadIds...>', 'Load IDs')
  .option('-o, --offset <offset>', 'Offset')
  .option('-a, --limit <limit>', 'Limit', '1000')
  .action(async () => {
    const { ids, loadIds, offset, limit } = remove.opts();
    const config = new Config();
    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
      return createClient(context);
    });
    const response = await step('Preparing to delete', async () => {
      return await client.schedules.prepareRemove.query({
        ids,
        loadIds,
        offset: toInt(offset),
        limit: toInt(limit),
      });
    });

    if (!response.ids.length) {
      console.log('No schedules to delete');
      return;
    }
    const { confirm } = await inquirer.prompt([
      {
        type: 'confirm',
        name: 'confirm',
        message: `Are you sure you want to delete ${response.ids.length} schedules?`,
      },
    ]);

    if (!confirm) {
      return;
    }

    await step('Deleting schedules', async () => {
      await client.schedules.remove.mutate(response);
    });
  });

export { remove };
11 packages/cli/src/commands/schedules/schedules.ts Normal file
@@ -0,0 +1,11 @@
import { Command } from 'commander';
import { list } from './schedules.list.js';
import { remove } from './schedules.remove.js';
import { add } from './schedules.add.js';

const schedules = new Command('schedules');
schedules.addCommand(list);
schedules.addCommand(remove);
schedules.addCommand(add);

export { schedules };

@@ -1,6 +1,8 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';
 
 const list = new Command('list');
 
@@ -18,8 +20,10 @@ list
   .option('-a, --limit <limit>', 'Limit', '1000')
   .action(async () => {
     const { offset, limit } = list.opts();
+    const config = new Config();
+    const context = new Context(config.context);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
     const secrets = await step('Getting secrets', async () => {
       return await client.secrets.find.query({

@@ -1,6 +1,8 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';
 
 const remove = new Command('remove');
 
@@ -8,8 +10,10 @@ remove
   .alias('rm')
   .argument('<id>')
   .action(async (id) => {
+    const config = new Config();
+    const context = new Context(config.context);
     const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
     });
     await step('Removing', async () => {
       await client.secrets.remove.mutate({

@@ -1,6 +1,8 @@
 import { Command } from 'commander';
 import { createClient } from '../../client/client.js';
 import { step } from '../../utils/step.js';
+import { Context } from '../../context/context.js';
+import { Config } from '../../config/config.js';
 
 const set = new Command('set');
 
@@ -8,8 +10,10 @@ set
   .argument('<id>')
   .argument('[value]')
   .action(async (id, value) => {
+    const config = new Config();
+    const context = new Context(config.context);
    const client = await step('Connecting to server', async () => {
-      return createClient();
+      return createClient(context);
    });
    await step('Setting secret', async () => {
      await client.secrets.set.mutate({
49 packages/cli/src/config/config.ts Normal file
@@ -0,0 +1,49 @@
import envPaths from 'env-paths';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { mkdir } from 'fs/promises';
import { join, dirname } from 'path';

type ConfigValues = {
  context?: string;
};

const paths = envPaths('mini-loader');

class Config {
  #location: string;
  #config?: ConfigValues;

  constructor() {
    this.#location = join(paths.config, 'config.json');
    if (existsSync(this.#location)) {
      this.#config = JSON.parse(readFileSync(this.#location, 'utf-8'));
    }
  }

  public get context() {
    return this.#config?.context || 'default';
  }

  public get cacheLocation() {
    return join(paths.cache, this.context);
  }

  public setContext = (context: string) => {
    this.#config = {
      ...(this.#config || {}),
      context,
    };
    this.save();
  };

  public save = async () => {
    if (!this.#config) {
      return;
    }
    const json = JSON.stringify(this.#config);
    mkdir(dirname(this.#location), { recursive: true });
    writeFileSync(this.#location, json);
  };
}

export { Config };

59 packages/cli/src/context/context.ts Normal file
@@ -0,0 +1,59 @@
import envPaths from 'env-paths';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { mkdir, readdir } from 'fs/promises';
import { dirname, join } from 'path';

type ContextValues = {
  host: string;
  token: string;
};

class Context {
  #location: string;
  #config?: ContextValues;

  constructor(name: string) {
    const paths = envPaths('mini-loader');
    this.#location = join(paths.config, 'contexts', name);
    if (existsSync(this.#location)) {
      this.#config = JSON.parse(readFileSync(this.#location, 'utf-8'));
    }
  }

  public get host() {
    return this.#config?.host;
  }

  public get token() {
    return this.#config?.token;
  }

  public saveLogin = (host: string, token: string) => {
    this.#config = {
      ...(this.#config || {}),
      host,
      token,
    };
    this.save();
  };

  public save = async () => {
    if (!this.#config) {
      return;
    }
    const json = JSON.stringify(this.#config);
    mkdir(dirname(this.#location), { recursive: true });
    writeFileSync(this.#location, json);
  };

  public static list = async () => {
    const paths = envPaths('mini-loader');
    const location = join(paths.config, 'contexts');
    if (!existsSync(location)) {
      return [];
    }
    return await readdir(location);
  };
}

export { Context };
@@ -1,4 +1,5 @@
-import { program } from 'commander';
+import { Command, program } from 'commander';
+import pkg from '../package.json';
 import { loads } from './commands/loads/loads.js';
 import { runs } from './commands/runs/runs.js';
 import { logs } from './commands/logs/logs.js';
@@ -6,6 +7,8 @@ import { artifacts } from './commands/artifacts/artifacts.js';
 import { secrets } from './commands/secrets/secrets.js';
 import { local } from './commands/local/local.js';
 import { auth } from './commands/auth/auth.js';
+import { contexts } from './commands/contexts/contexts.js';
+import { schedules } from './commands/schedules/schedules.js';
 
 program.addCommand(loads);
 program.addCommand(runs);
@@ -14,5 +17,15 @@ program.addCommand(artifacts);
 program.addCommand(secrets);
 program.addCommand(local);
 program.addCommand(auth);
+program.addCommand(contexts);
+program.addCommand(schedules);
 
+program.version(pkg.version);
+
+const version = new Command('version');
+version.action(() => {
+  console.log(pkg.version);
+});
+program.addCommand(version);
+
 await program.parseAsync();

@@ -4,10 +4,10 @@ const step = async <T>(message: string, fn: () => Promise<T>): Promise<T> => {
   const spinner = ora(message).start();
   try {
     const result = await fn();
-    spinner.succeed();
+    await spinner.succeed();
     return result;
   } catch (err) {
-    spinner.fail();
+    await spinner.fail();
     throw err;
   }
 };
@@ -1,6 +1,7 @@
 {
   "name": "@morten-olsen/mini-loader-configs",
   "version": "1.0.0",
+  "private": true,
   "description": "",
   "main": "index.js",
   "scripts": {
@@ -8,5 +9,10 @@
   },
   "keywords": [],
   "author": "",
-  "license": "ISC"
+  "license": "GPL-3.0",
+  "homepage": "https://github.com/morten-olsen/mini-loader",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/morten-olsen/mini-loader"
+  }
 }

@@ -8,6 +8,7 @@
     "sourceMap": true,
     "esModuleInterop": true,
     "strict": true,
+    "resolveJsonModule": true,
     "allowSyntheticDefaultImports": true,
     "jsx": "react"
   },

@@ -1,6 +1,8 @@
 {
   "name": "@morten-olsen/mini-loader-examples",
   "version": "1.0.0",
+  "license": "GPL-3.0",
+  "private": true,
   "main": "./dist/esm/index.js",
   "types": "./dist/esm/index.d.ts",
   "scripts": {
@@ -16,10 +18,18 @@
     }
   },
   "devDependencies": {
-    "@morten-olsen/mini-loader-configs": "workspace:^",
-    "@morten-olsen/mini-loader-cli": "workspace:^",
     "@morten-olsen/mini-loader": "workspace:^",
+    "@morten-olsen/mini-loader-cli": "workspace:^",
+    "@morten-olsen/mini-loader-configs": "workspace:^",
     "@types/node": "^20.10.8",
     "typescript": "^5.3.3"
+  },
+  "homepage": "https://github.com/morten-olsen/mini-loader",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/morten-olsen/mini-loader"
+  },
+  "dependencies": {
+    "fastify": "^4.25.2"
   }
 }
12 packages/examples/src/http.ts Normal file
@@ -0,0 +1,12 @@
import { http } from '@morten-olsen/mini-loader';
import fastify from 'fastify';

const server = fastify();

server.all('*', async (req) => {
  return req.url;
});

server.listen({
  path: http.getPath(),
});

@@ -1,5 +1,9 @@
 import { artifacts, logger } from '@morten-olsen/mini-loader';
 
-logger.info('Hello world');
+const run = async () => {
+  await logger.info('Hello world');
+  await artifacts.create('foo', 'bar');
+  process.exit(0);
+};
 
-artifacts.create('foo', 'bar');
+run();

1 packages/mini-loader/README.md Normal file
@@ -0,0 +1 @@
[Go to documentation](https://github.com/morten-olsen/mini-loader)
@@ -1,6 +1,7 @@
 {
   "name": "@morten-olsen/mini-loader",
   "version": "1.0.0",
+  "license": "GPL-3.0",
   "main": "./dist/esm/index.js",
   "types": "./dist/esm/index.d.ts",
   "scripts": {
@@ -19,5 +20,10 @@
     "@morten-olsen/mini-loader-configs": "workspace:^",
     "@types/node": "^20.10.8",
     "typescript": "^5.3.3"
+  },
+  "homepage": "https://github.com/morten-olsen/mini-loader",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/morten-olsen/mini-loader"
   }
 }

@@ -8,8 +8,8 @@ type ArtifactCreateEvent = {
   };
 };
 
-const create = (name: string, data: Buffer | string) => {
-  send({
+const create = async (name: string, data: Buffer | string) => {
+  await send({
     type: 'artifact:create',
     payload: {
       name,

7 packages/mini-loader/src/http/http.ts Normal file
@@ -0,0 +1,7 @@
const getPath = () => process.env.HTTP_GATEWAY_PATH!;

const http = {
  getPath,
};

export { http };

@@ -8,3 +8,4 @@ export { logger } from './logger/logger.js';
 export { artifacts } from './artifacts/artifacts.js';
 export { input } from './input/input.js';
 export { secrets } from './secrets/secrets.js';
+export { http } from './http/http.js';
@@ -1,7 +1,14 @@
-import { workerData } from 'worker_threads';
+import { existsSync } from 'fs';
+import { readFile } from 'fs/promises';
 
-const get = <T>() => {
-  return workerData as T;
+const path = process.env.INPUT_PATH;
+const hasInput = path ? existsSync(path) : false;
+
+const get = () => {
+  if (!hasInput || !path) {
+    return undefined;
+  }
+  return readFile(path, 'utf-8');
 };
 
 const input = {

@@ -9,31 +9,31 @@ type LoggerEvent = {
   };
 };
 
-const sendLog = (event: LoggerEvent['payload']) => {
-  send({
+const sendLog = async (event: LoggerEvent['payload']) => {
+  await send({
     type: 'log',
     payload: event,
   });
 };
 
-const info = (message: string, data?: unknown) => {
-  sendLog({
+const info = async (message: string, data?: unknown) => {
+  await sendLog({
     severity: 'info',
     message,
     data,
   });
 };
 
-const warn = (message: string, data?: unknown) => {
-  sendLog({
+const warn = async (message: string, data?: unknown) => {
+  await sendLog({
     severity: 'warning',
     message,
     data,
   });
 };
 
-const error = (message: string, data?: unknown) => {
-  sendLog({
+const error = async (message: string, data?: unknown) => {
+  await sendLog({
     severity: 'error',
     message,
     data,

@@ -1,8 +1,7 @@
-import { workerData } from 'worker_threads';
+const secretData = JSON.parse(process.env.SECRETS || '{}');
 
 const get = (id: string) => {
-  const items = workerData?.secrets ?? {};
-  return items[id];
+  return secretData[id];
 };
 
 const secrets = {
@@ -1,8 +1,28 @@
-import { parentPort } from 'worker_threads';
+import { Socket, createConnection } from 'net';
 
-const send = (data: any) => {
+const connect = () =>
+  new Promise<Socket>((resolve, reject) => {
+    const current = createConnection(process.env.HOST_SOCKET!);
+
+    current.on('connect', () => {
+      resolve(current);
+    });
+    current.on('error', (error) => {
+      reject(error);
+    });
+  });
+
+const send = async (data: any) =>
+  new Promise<void>(async (resolve, reject) => {
+    const connection = await connect();
     const cleaned = JSON.parse(JSON.stringify(data));
-  parentPort?.postMessage(cleaned);
-};
+    connection.write(JSON.stringify(cleaned), 'utf-8', (err) => {
+      if (err) {
+        reject(err);
+      } else {
+        resolve();
+      }
+    });
+  });
 
 export { send };

1 packages/runner/README.md Normal file
@@ -0,0 +1 @@
[Go to documentation](https://github.com/morten-olsen/mini-loader)
@@ -1,6 +1,7 @@
 {
   "name": "@morten-olsen/mini-loader-runner",
   "version": "1.0.0",
+  "license": "GPL-3.0",
   "main": "./dist/esm/index.js",
   "types": "./dist/esm/index.d.ts",
   "scripts": {
@@ -17,11 +18,17 @@
   },
   "devDependencies": {
     "@morten-olsen/mini-loader-configs": "workspace:^",
-    "@morten-olsen/mini-loader": "workspace:^",
     "@types/node": "^20.10.8",
     "typescript": "^5.3.3"
   },
   "dependencies": {
-    "eventemitter3": "^5.0.1"
+    "@morten-olsen/mini-loader": "workspace:^",
+    "eventemitter3": "^5.0.1",
+    "nanoid": "^5.0.4"
+  },
+  "homepage": "https://github.com/morten-olsen/mini-loader",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/morten-olsen/mini-loader"
   }
 }

@@ -1,46 +1,63 @@
 import { Worker } from 'worker_threads';
-import { EventEmitter } from 'eventemitter3';
-import { Event } from '@morten-olsen/mini-loader';
-
-type RunEvents = {
-  message: (event: Event) => void;
-  error: (error: Error) => void;
-  exit: () => void;
-};
+import { setup } from './setup/setup.js';
 
 type RunOptions = {
   script: string;
-  input?: unknown;
+  input?: Buffer | string;
   secrets?: Record<string, string>;
+  cacheLocation: string;
 };
 
-const run = async ({ script, input, secrets }: RunOptions) => {
-  const emitter = new EventEmitter<RunEvents>();
-  const worker = new Worker(script, {
-    eval: true,
-    env: secrets,
-    workerData: {
-      input,
-      secrets,
+const run = async ({ script, input, secrets, cacheLocation }: RunOptions) => {
+  const info = await setup({ script, input, secrets, cacheLocation });
+
+  const worker = new Worker(info.scriptLocation, {
+    stdin: false,
+    stdout: false,
+    stderr: false,
+    env: info.env,
+  });
+
+  worker.stdout?.on('data', (data) => {
+    info.emitter.emit('message', {
+      type: 'log',
+      payload: {
+        severity: 'info',
+        message: data.toString(),
+      },
+    });
+  });
+
+  worker.stderr?.on('data', (data) => {
+    info.emitter.emit('message', {
+      type: 'log',
+      payload: {
+        severity: 'error',
+        message: data.toString(),
       },
     });
+  });
 
   const promise = new Promise<void>((resolve, reject) => {
-    worker.on('message', (message: Event) => {
-      emitter.emit('message', message);
-    });
-    worker.on('exit', () => {
+    worker.on('exit', async () => {
+      await info.teardown();
       resolve();
     });
-    worker.on('error', (error) => {
+    worker.on('error', async (error) => {
       reject(error);
     });
   });
 
   return {
-    emitter,
+    ...info,
+    teardown: async () => {
+      worker.terminate();
+    },
     promise,
   };
 };
 
+type RunInfo = Awaited<ReturnType<typeof run>>;
+
+export type { RunInfo };
 export { run };
71 packages/runner/src/setup/setup.ts Normal file
@@ -0,0 +1,71 @@
import { join } from 'path';
import { nanoid } from 'nanoid';
import { chmod, mkdir, rm, writeFile } from 'fs/promises';
import { createServer } from 'net';
import { EventEmitter } from 'eventemitter3';

type SetupOptions = {
  input?: Buffer | string;
  script: string;
  secrets?: Record<string, string>;
  cacheLocation: string;
};

type RunEvents = {
  message: (event: any) => void;
  error: (error: Error) => void;
  exit: () => void;
};

const setup = async (options: SetupOptions) => {
  const { input, script, secrets } = options;
  const emitter = new EventEmitter<RunEvents>();
  const dataDir = join(options.cacheLocation, nanoid());

  await mkdir(dataDir, { recursive: true });
  await chmod(dataDir, 0o700);
  const hostSocket = join(dataDir, 'host');
  const httpGatewaySocket = join(dataDir, 'socket');
  const server = createServer();
  const inputLocation = join(dataDir, 'input');
  const scriptLocation = join(dataDir, 'script.js');

  if (input) {
    await writeFile(inputLocation, input);
  }
  await writeFile(scriptLocation, script);
  const env = {
    HOST_SOCKET: hostSocket,
    SECRETS: JSON.stringify(secrets || {}),
    INPUT_PATH: inputLocation,
    HTTP_GATEWAY_PATH: httpGatewaySocket,
  };

  const teardown = async () => {
    server.close();
    await rm(dataDir, { recursive: true, force: true });
  };

  server.on('connection', (socket) => {
    socket.on('data', (data) => {
      const message = JSON.parse(data.toString());
      emitter.emit('message', message);
    });
  });

  server.listen(hostSocket);

  return {
    env,
    emitter,
    teardown,
    httpGatewaySocket,
    scriptLocation,
    hostSocket,
  };
};

type Setup = Awaited<ReturnType<typeof setup>>;

export type { Setup };
export { setup };

1 packages/server/README.md Normal file
@@ -0,0 +1 @@
[Go to documentation](https://github.com/morten-olsen/mini-loader)
@@ -1,4 +1,4 @@
 #!/usr/bin/env node
 
 import 'source-map-support/register.js';
-import '../dist/esm/index.js';
+import '../dist/esm/src/index.js';

@@ -1,8 +1,9 @@
 {
   "name": "@morten-olsen/mini-loader-server",
   "version": "1.0.0",
-  "main": "./dist/esm/index.js",
-  "types": "./dist/esm/index.d.ts",
+  "license": "GPL-3.0",
+  "main": "./dist/esm/src/index.js",
+  "types": "./dist/esm/src/index.d.ts",
   "bin": {
     "mini-loader-server": "./bin/index.mjs"
   },
@@ -15,7 +16,7 @@
   ],
   "exports": {
     ".": {
-      "import": "./dist/esm/index.js"
+      "import": "./dist/esm/src/index.js"
     }
   },
   "devDependencies": {
@@ -26,9 +27,12 @@
     "typescript": "^5.3.3"
   },
   "dependencies": {
+    "@fastify/reply-from": "^9.7.0",
     "@trpc/client": "^10.45.0",
     "@trpc/server": "^10.45.0",
     "commander": "^11.1.0",
+    "cron": "^3.1.6",
+    "env-paths": "^3.0.0",
     "eventemitter3": "^5.0.1",
     "fastify": "^4.25.2",
     "jsonwebtoken": "^9.0.2",
@@ -38,5 +42,10 @@
     "sqlite3": "^5.1.7",
     "superjson": "^2.2.1",
     "zod": "^3.22.4"
+  },
+  "homepage": "https://github.com/morten-olsen/mini-loader",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/morten-olsen/mini-loader"
   }
 }

@@ -20,10 +20,10 @@ class Auth {
 
   #setup = async () => {
     const { config } = this.#options;
-    const secretLocation = resolve(config.files.location, 'secret');
+    const secretLocation = resolve(config.files.data, 'secret');
     let secret = '';
+    await mkdir(config.files.data, { recursive: true });
     if (!existsSync(secretLocation)) {
-      await mkdir(config.files.location, { recursive: true });
       secret = nanoid();
       await writeFile(secretLocation, secret);
     } else {
@@ -3,7 +3,8 @@ import { Knex } from 'knex';
 type Config = {
   database: Omit<Knex.Config, 'migrations'>;
   files: {
-    location: string;
+    data: string;
+    cache: string;
   };
   auth?: {
     oidc?: {
@@ -0,0 +1,22 @@
import { Knex } from 'knex';

const name = 'schedule-support';

const up = async (knex: Knex) => {
  await knex.schema.createTable('schedules', (table) => {
    table.string('id').primary();
    table.string('name').nullable();
    table.string('description').nullable();
    table.string('load').notNullable();
    table.string('cron').notNullable();
    table.string('input').nullable();
    table.timestamp('createdAt').notNullable();
    table.timestamp('updatedAt').notNullable();
  });
};

const down = async (knex: Knex) => {
  await knex.schema.dropTable('schedules');
};

export { name, up, down };
@@ -1,6 +1,7 @@
 import { Knex } from 'knex';
 
 import * as init from './migration.init.js';
+import * as scheduleSupport from './migration.schedule.js';
 
 type Migration = {
   name: string;
@@ -8,7 +9,7 @@ type Migration = {
   down: (knex: Knex) => Promise<void>;
 };
 
-const migrations = [init] satisfies Migration[];
+const migrations = [init, scheduleSupport] satisfies Migration[];
 
 const source: Knex.MigrationSource<Migration> = {
   getMigrations: async () => migrations,

34 packages/server/src/gateway/gateway.ts Normal file
@@ -0,0 +1,34 @@
import { FastifyPluginAsync } from 'fastify';
import FastifyReplyFrom from '@fastify/reply-from';
import { escape } from 'querystring';
import { Runtime } from '../runtime/runtime.js';

type Options = {
  runtime: Runtime;
};

const gateway: FastifyPluginAsync<Options> = async (fastify, { runtime }) => {
  await fastify.register(FastifyReplyFrom, {
    http: {},
  });

  fastify.all('/gateway/*', (req, res) => {
    const [runId, ...pathSegments] = (req.params as any)['*'].split('/').filter(Boolean);
    const run = runtime.runner.getInstance(runId);
    if (!run) {
      res.statusCode = 404;
      res.send({ error: 'Run not found' });
      return;
    }
    const socketPath = run.run?.httpGatewaySocket;
    if (!socketPath) {
      res.statusCode = 404;
      res.send({ error: 'No socket path to run' });
      return;
    }
    const path = pathSegments.join('/');
    res.from(`unix+http://${escape(socketPath)}/${path}`);
  });
};

export { gateway };

@@ -6,6 +6,7 @@ const start = new Command('start');
 start.action(async () => {
   const port = 4500;
   const runtime = await Runtime.create();
+  await runtime.scheduler.start();
   const server = await createServer(runtime);
   await server.listen({
     port,
@@ -18,7 +19,11 @@ start.action(async () => {
 const createToken = new Command('create-token');
 createToken.action(async () => {
   const runtime = await Runtime.create();
-  const token = await runtime.auth.createToken({});
+  const token = await runtime.auth.createToken({
+    policy: {
+      '*:*': ['*'],
+    },
+  });
   console.log(token);
 });
 
@@ -27,5 +32,9 @@ program.addCommand(createToken);
 
 await program.parseAsync(process.argv);
 
+process.on('unhandledRejection', (reason, p) => {
+  console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
+});
+
 export type { Runtime } from './runtime/runtime.js';
 export type { RootRouter } from './router/router.js';
10 packages/server/src/knex.d.ts vendored
@@ -43,5 +43,15 @@ declare module 'knex/types/tables.js' {
       createdAt: Date;
       updatedAt: Date;
     };
+    schedules: {
+      id: string;
+      name?: string;
+      description?: string;
+      load: string;
+      cron: string;
+      input?: string;
+      createdAt: Date;
+      updatedAt: Date;
+    };
   }
 }

@@ -18,6 +18,13 @@ class ArtifactRepo extends EventEmitter<ArtifactRepoEvents> {
     this.#options = options;
   }
 
+  public get = async (id: string) => {
+    const { database } = this.#options;
+    const db = await database.instance;
+    const result = await db('artifacts').where({ id }).first();
+    return result || null;
+  };
+
   public add = async (options: AddArtifactOptions) => {
     const { database } = this.#options;
     const db = await database.instance;
@@ -59,8 +66,9 @@ class ArtifactRepo extends EventEmitter<ArtifactRepoEvents> {
       query.limit(options.limit);
     }
 
-    const ids = await query;
-    const token = ids.map((id) => Buffer.from(id.id).toString('base64')).join('|');
+    const result = await query;
+    const ids = result.map((row) => row.id);
+    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
     const hash = createHash('sha256').update(token).digest('hex');
     return {
       ids,

@@ -62,7 +62,7 @@ class LoadRepo extends EventEmitter<LoadRepoEvents> {
     const db = await database.instance;
     const id = options.id || nanoid();
     const script = createHash('sha256').update(options.script).digest('hex');
-    const scriptDir = resolve(this.#options.config.files.location, 'scripts');
+    const scriptDir = resolve(this.#options.config.files.data, 'scripts');
     await mkdir(scriptDir, { recursive: true });
     await writeFile(resolve(scriptDir, `${script}.js`), options.script);
 

@@ -56,8 +56,9 @@ class LogRepo extends EventEmitter<LogRepoEvents> {
       query.whereIn('severity', options.severities);
     }
 
-    const ids = await query;
-    const token = ids.map((id) => Buffer.from(id.id).toString('base64')).join('|');
+    const result = await query;
+    const ids = result.map((row) => row.id);
+    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
     const hash = createHash('sha256').update(token).digest('hex');
     return {
       ids,

@@ -4,6 +4,7 @@ import { ArtifactRepo } from './artifacts/artifacts.js';
 import { LoadRepo } from './loads/loads.js';
 import { LogRepo } from './logs/logs.js';
 import { RunRepo } from './runs/runs.js';
+import { ScheduleRepo } from './schedules/schedules.js';
 import { SecretRepo } from './secrets/secrets.js';
 
 type ReposOptions = {
@@ -17,6 +18,7 @@ class Repos {
   #logs: LogRepo;
   #artifacts: ArtifactRepo;
   #secrets: SecretRepo;
+  #schedule: ScheduleRepo;
 
   constructor({ database, config }: ReposOptions) {
     this.#loads = new LoadRepo({
@@ -36,6 +38,9 @@ class Repos {
     this.#secrets = new SecretRepo({
       database,
     });
+    this.#schedule = new ScheduleRepo({
+      database,
+    });
   }
 
   public get loads() {
@@ -57,8 +62,13 @@ class Repos {
   public get secrets() {
     return this.#secrets;
   }
+
+  public get schedules() {
+    return this.#schedule;
+  }
 }
 
+export { findSchedulesSchema, addScheduleSchema } from './schedules/schedules.js';
 export { findLogsSchema, addLogSchema } from './logs/logs.js';
 export { setLoadSchema, findLoadsSchema } from './loads/loads.js';
 export { createRunSchema, findRunsSchema } from './runs/runs.js';
@@ -3,6 +3,7 @@ import { EventEmitter } from 'eventemitter3';
 import { Database } from '../../database/database.js';
 import { CreateRunOptions, FindRunsOptions, UpdateRunOptions } from './runs.schemas.js';
 import { LoadRepo } from '../loads/loads.js';
+import { createHash } from 'crypto';
 
 type RunRepoEvents = {
   created: (args: { id: string; loadId: string }) => void;
@@ -18,13 +19,22 @@ type RunRepoOptions = {
 
 class RunRepo extends EventEmitter<RunRepoEvents> {
   #options: RunRepoOptions;
+  #isReady: Promise<void>;
 
   constructor(options: RunRepoOptions) {
     super();
     this.#options = options;
+    this.#isReady = this.#setup();
   }
 
+  #setup = async () => {
+    const { database } = this.#options;
+    const db = await database.instance;
+    await db('runs').update({ status: 'failed', error: 'server was shut down' }).where({ status: 'running' });
+  };
+
   public getById = async (id: string) => {
+    await this.#isReady;
     const { database } = this.#options;
     const db = await database.instance;
 
@@ -36,6 +46,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
   };
 
   public getByLoadId = async (loadId: string) => {
+    await this.#isReady;
     const { database } = this.#options;
     const db = await database.instance;
 
@@ -44,6 +55,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
   };
 
   public find = async (options: FindRunsOptions) => {
+    await this.#isReady;
     const { database } = this.#options;
     const db = await database.instance;
     const query = db('runs').select(['id', 'status', 'startedAt', 'status', 'error', 'endedAt']);
@@ -62,19 +74,41 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
     return runs;
   };
 
-  public remove = async (options: FindRunsOptions) => {
+  public prepareRemove = async (options: FindRunsOptions) => {
+    await this.#isReady;
     const { database } = this.#options;
     const db = await database.instance;
-    const query = db('runs');
+    const query = db('runs').select('id');
 
     if (options.loadId) {
       query.where({ loadId: options.loadId });
     }
 
-    await query.del();
+    const result = await query;
+    const ids = result.map((row) => row.id);
+    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
+    const hash = createHash('sha256').update(token).digest('hex');
+    return {
+      ids,
+      hash,
+    };
+  };
+
+  public remove = async (hash: string, ids: string[]) => {
+    const { database } = this.#options;
+    const db = await database.instance;
+    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
+    const actualHash = createHash('sha256').update(token).digest('hex');
+
+    if (hash !== actualHash) {
+      throw new Error('Invalid hash');
+    }
+
+    await db('runs').whereIn('id', ids).delete();
   };
 
   public started = async (id: string) => {
+    await this.#isReady;
     const { database } = this.#options;
     const db = await database.instance;
     const current = await this.getById(id);
@@ -92,6 +126,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
   };
 
   public finished = async (id: string, options: UpdateRunOptions) => {
+    await this.#isReady;
     const { database } = this.#options;
     const db = await database.instance;
     const { loadId } = await this.getById(id);
@@ -114,6 +149,7 @@ class RunRepo extends EventEmitter<RunRepoEvents> {
   };
 
   public create = async (options: CreateRunOptions) => {
+    await this.#isReady;
     const { database, loads } = this.#options;
     const id = nanoid();
     const db = await database.instance;
22 packages/server/src/repos/schedules/schedules.schemas.ts Normal file
@@ -0,0 +1,22 @@
import { z } from 'zod';

const addScheduleSchema = z.object({
  name: z.string().optional(),
  description: z.string().optional(),
  load: z.string(),
  cron: z.string(),
  input: z.string().optional(),
});

const findSchedulesSchema = z.object({
  ids: z.array(z.string()).optional(),
  loadIds: z.array(z.string()).optional(),
  offset: z.number().optional(),
  limit: z.number().optional(),
});

type AddScheduleOptions = z.infer<typeof addScheduleSchema>;
type FindSchedulesOptions = z.infer<typeof findSchedulesSchema>;

export type { AddScheduleOptions, FindSchedulesOptions };
export { addScheduleSchema, findSchedulesSchema };
118 packages/server/src/repos/schedules/schedules.ts Normal file
@@ -0,0 +1,118 @@
import { EventEmitter } from 'eventemitter3';
import { Database } from '../../database/database.js';
import { nanoid } from 'nanoid';
import { AddScheduleOptions, FindSchedulesOptions } from './schedules.schemas.js';
import { createHash } from 'crypto';

type ScheduleRepoEvents = {
  added: (id: string) => void;
  removed: (id: string) => void;
};

type ScheduleRepoOptions = {
  database: Database;
};

class ScheduleRepo extends EventEmitter<ScheduleRepoEvents> {
  #options: ScheduleRepoOptions;

  constructor(options: ScheduleRepoOptions) {
    super();
    this.#options = options;
  }

  public get = async (id: string) => {
    const { database } = this.#options;
    const db = await database.instance;
    const result = await db('schedules').where('id', id).first();
    return result;
  };

  public add = async (options: AddScheduleOptions) => {
    const { database } = this.#options;
    const db = await database.instance;
    const id = nanoid();

    await db('schedules').insert({
      id,
      name: options.name,
      description: options.description,
      cron: options.cron,
      createdAt: new Date(),
      updatedAt: new Date(),
    });

    this.emit('added', id);

    return id;
  };

  public prepareRemove = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;

    const query = db('schedules').select('id');

    if (options.ids) {
      query.whereIn('id', options.ids);
    }

    if (options.loadIds) {
      query.whereIn('loadId', options.loadIds);
    }

    const result = await query;
    const ids = result.map((row) => row.id);
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const hash = createHash('sha256').update(token).digest('hex');
    return {
      ids,
      hash,
    };
  };

  public remove = async (hash: string, ids: string[]) => {
    const { database } = this.#options;
    const db = await database.instance;
    const token = ids.map((id) => Buffer.from(id).toString('base64')).join('|');
    const actualHash = createHash('sha256').update(token).digest('hex');

    if (hash !== actualHash) {
      throw new Error('Invalid hash');
    }

    await db('schedules').whereIn('id', ids).delete();
    ids.forEach((id) => {
      this.emit('removed', id);
    });
  };

  public find = async (options: FindSchedulesOptions) => {
    const { database } = this.#options;
    const db = await database.instance;

    const query = db('schedules');

    if (options.ids) {
      query.whereIn('id', options.ids);
    }

    if (options.loadIds) {
      query.whereIn('loadId', options.loadIds);
    }

    if (options.offset) {
      query.offset(options.offset);
    }

    if (options.limit) {
      query.limit(options.limit);
    }

    const results = await query;
    return results;
  };
}

export { addScheduleSchema, findSchedulesSchema } from './schedules.schemas.js';
export { ScheduleRepo };
@@ -11,12 +11,21 @@ const find = publicProcedure.input(findArtifactsSchema).query(async ({ input, ctx
   return result;
 });
 
+const get = publicProcedure.input(z.string()).query(async ({ input, ctx }) => {
+  const { runtime } = ctx;
+  const { repos } = runtime;
+  const { artifacts } = repos;
+
+  const result = await artifacts.get(input);
+  return result;
+});
+
 const prepareRemove = publicProcedure.input(findArtifactsSchema).query(async ({ input, ctx }) => {
   const { runtime } = ctx;
   const { repos } = runtime;
   const { artifacts } = repos;
 
-  await artifacts.prepareRemove(input);
+  return await artifacts.prepareRemove(input);
 });
 
 const remove = publicProcedure
@@ -35,6 +44,7 @@ const remove = publicProcedure
   });
 
 const artifactsRouter = router({
+  get,
   find,
   remove,
   prepareRemove,
@@ -16,7 +16,7 @@ const prepareRemove = publicProcedure.input(findLogsSchema).query(async ({ input
   const { repos } = runtime;
   const { logs } = repos;
 
-  await logs.prepareRemove(input);
+  return await logs.prepareRemove(input);
 });
 
 const remove = publicProcedure
@@ -1,3 +1,4 @@
+import { z } from 'zod';
 import { createRunSchema, findRunsSchema } from '../repos/repos.js';
 import { publicProcedure, router } from './router.utils.js';
 
@@ -17,17 +18,50 @@ const find = publicProcedure.input(findRunsSchema).query(async ({ input, ctx })
   return results;
 });
 
-const remove = publicProcedure.input(findRunsSchema).mutation(async ({ input, ctx }) => {
+const prepareRemove = publicProcedure.input(findRunsSchema).query(async ({ input, ctx }) => {
   const { runtime } = ctx;
   const { repos } = runtime;
   const { runs } = repos;
-  await runs.remove(input);
+  return await runs.prepareRemove(input);
+});
+
+const remove = publicProcedure
+  .input(
+    z.object({
+      hash: z.string(),
+      ids: z.array(z.string()),
+    }),
+  )
+  .mutation(async ({ input, ctx }) => {
+    const { runtime } = ctx;
+    const { repos } = runtime;
+    const { runs } = repos;
+    for (const id of input.ids) {
+      const instance = runtime.runner.getInstance(id);
+      if (instance) {
+        await instance.run?.teardown();
+      }
+    }
+    await runs.remove(input.hash, input.ids);
+  });
+
+const terminate = publicProcedure.input(z.string()).mutation(async ({ input, ctx }) => {
+  const { runtime } = ctx;
+  const { runner } = runtime;
+  const instance = runner.getInstance(input);
+  if (!instance || !instance.run) {
+    return;
+  }
+  await instance.run.teardown();
 });
 
 const runsRouter = router({
   create,
   find,
   remove,
+  prepareRemove,
+  terminate,
 });
 
 export { runsRouter };
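The runs router now mirrors the prepare/confirm removal pattern used by the other routers and adds a terminate mutation for a single running instance. A rough client-side sketch of calling these procedures through @trpc/client v10; the server URL, the auth header value, the empty filter, and the RootRouter import path are assumptions, not taken from this diff:

import { createTRPCProxyClient, httpBatchLink } from '@trpc/client';
// Hypothetical import path; RootRouter is declared in packages/server/src/router/router.js.
import type { RootRouter } from './router.js';

const client = createTRPCProxyClient<RootRouter>({
  links: [
    httpBatchLink({
      url: 'http://localhost:3000/trpc',            // assumed host, port, and prefix
      headers: { authorization: 'Bearer <token>' }, // createContext expects a Bearer token
    }),
  ],
});

// Two-phase removal: prepare returns { ids, hash }, remove verifies the hash before deleting.
// Passing an empty filter assumes findRunsSchema makes all fields optional.
const { ids, hash } = await client.runs.prepareRemove.query({});
await client.runs.remove.mutate({ hash, ids });

// Tear down a single running instance by id.
await client.runs.terminate.mutate('some-run-id');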
packages/server/src/router/router.schedules.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { z } from 'zod';
import { addScheduleSchema, findSchedulesSchema } from '../repos/repos.js';
import { publicProcedure, router } from './router.utils.js';

const add = publicProcedure.input(addScheduleSchema).mutation(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  const result = await schedules.add(input);
  return result;
});

const find = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  const result = await schedules.find(input);
  return result;
});

const prepareRemove = publicProcedure.input(findSchedulesSchema).query(async ({ input, ctx }) => {
  const { runtime } = ctx;
  const { repos } = runtime;
  const { schedules } = repos;

  return await schedules.prepareRemove(input);
});

const remove = publicProcedure
  .input(
    z.object({
      hash: z.string(),
      ids: z.array(z.string()),
    }),
  )
  .mutation(async ({ input, ctx }) => {
    const { runtime } = ctx;
    const { repos } = runtime;
    const { artifacts } = repos;

    await artifacts.remove(input.hash, input.ids);
  });

const schedulesRouter = router({
  add,
  find,
  remove,
  prepareRemove,
});

export { schedulesRouter };
@@ -2,6 +2,7 @@ import { artifactsRouter } from './router.artifacts.js';
 import { loadsRouter } from './router.loads.js';
 import { logsRouter } from './router.logs.js';
 import { runsRouter } from './router.runs.js';
+import { schedulesRouter } from './router.schedules.js';
 import { secretsRouter } from './router.secrets.js';
 import { router } from './router.utils.js';
 
@@ -11,6 +12,7 @@ const rootRouter = router({
   logs: logsRouter,
   artifacts: artifactsRouter,
   secrets: secretsRouter,
+  schedules: schedulesRouter,
 });
 
 type RootRouter = typeof rootRouter;
@@ -14,7 +14,8 @@ const createContext = async ({ runtime }: ContextOptions) => {
   if (!authorization) {
     throw new Error('No authorization header');
   }
-  await auth.validateToken(authorization);
+  const [, token] = authorization.split(' ');
+  await auth.validateToken(token);
   return {
     runtime,
   };
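With this change createContext expects the Authorization header in its usual scheme-prefixed form and validates only the token part. A small standalone illustration of what the destructuring yields (the header value is made up):

// Made-up header value, purely to show the split.
const authorization = 'Bearer eyJhbGciOi...';
const [scheme, token] = authorization.split(' ');
console.log(scheme); // 'Bearer'
console.log(token);  // 'eyJhbGciOi...' -> this is what auth.validateToken now receives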
@@ -1,5 +1,5 @@
 import { EventEmitter } from 'eventemitter3';
-import { run } from '@morten-olsen/mini-loader-runner';
+import { RunInfo, run } from '@morten-olsen/mini-loader-runner';
 import { Repos } from '../repos/repos.js';
 import { LoggerEvent } from '../../../mini-loader/dist/esm/logger/logger.js';
 import { ArtifactCreateEvent } from '../../../mini-loader/dist/esm/artifacts/artifacts.js';
@@ -20,12 +20,17 @@ type RunnerInstanceOptions = {
 
 class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
   #options: RunnerInstanceOptions;
+  #run?: RunInfo;
 
   constructor(options: RunnerInstanceOptions) {
     super();
     this.#options = options;
   }
 
+  public get run() {
+    return this.#run;
+  }
+
   #addLog = async (event: LoggerEvent['payload']) => {
     const { repos, id, loadId } = this.#options;
     const { logs } = repos;
@@ -54,15 +59,18 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
     const { runs, secrets } = repos;
     try {
       const { script: scriptHash, input } = await runs.getById(id);
-      const scriptLocation = resolve(config.files.location, 'script', `${scriptHash}.js`);
+      const scriptLocation = resolve(config.files.data, 'scripts', `${scriptHash}.js`);
       const script = await readFile(scriptLocation, 'utf-8');
       const allSecrets = await secrets.getAll();
       await runs.started(id);
-      const { promise, emitter } = await run({
+      const current = await run({
         script,
         secrets: allSecrets,
         input,
+        cacheLocation: config.files.cache,
       });
+      this.#run = current;
+      const { promise, emitter } = current;
       emitter.on('message', (message) => {
         switch (message.type) {
           case 'log': {
@@ -84,9 +92,11 @@ class RunnerInstance extends EventEmitter<RunnerInstanceEvents> {
       }
       await runs.finished(id, { status: 'failed', error: errorMessage });
     } finally {
+      this.#run = undefined;
      this.emit('completed', { id });
     }
   };
 }
 
+export type { RunInfo };
 export { RunnerInstance };
@@ -36,6 +36,10 @@ class Runner {
     this.#instances.set(args.id, instance);
     await instance.start();
   };
+
+  public getInstance = (id: string) => {
+    return this.#instances.get(id);
+  };
 }
 
 export { Runner };
@@ -1,20 +1,26 @@
+import { resolve } from 'path';
+import envPaths from 'env-paths';
 import { Database } from '../database/database.js';
 import { Repos } from '../repos/repos.js';
 import { Runner } from '../runner/runner.js';
 import { Config } from '../config/config.js';
 import { Auth } from '../auth/auth.js';
-import { resolve } from 'path';
+import { Scheduler } from '../scheduler/scheduler.js';
 
+const paths = envPaths('mini-loader-server');
+
 class Runtime {
   #repos: Repos;
   #runner: Runner;
   #auth: Auth;
+  #scheduler: Scheduler;
 
   constructor(options: Config) {
     const database = new Database(options.database);
     this.#repos = new Repos({ database, config: options });
     this.#runner = new Runner({ repos: this.#repos, config: options });
     this.#auth = new Auth({ config: options });
+    this.#scheduler = new Scheduler({ runs: this.#repos.runs, schedules: this.#repos.schedules });
   }
 
   public get repos() {
@@ -29,17 +35,22 @@ class Runtime {
     return this.#auth;
   }
 
+  public get scheduler() {
+    return this.#scheduler;
+  }
+
   public static create = async () => {
     const runtime = new Runtime({
       database: {
         client: 'sqlite3',
         connection: {
-          filename: resolve(process.cwd(), 'data', 'database.sqlite'),
+          filename: resolve(paths.data, 'database.sqlite'),
         },
         useNullAsDefault: true,
       },
       files: {
-        location: resolve(process.cwd(), 'data', 'files'),
+        data: process.env.DATA_DIR || resolve(paths.data, 'data', 'files'),
+        cache: process.env.CACHE_DIR || resolve(paths.cache, 'data', 'cache'),
       },
     });
 
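The switch from process.cwd() to env-paths moves the database and file storage into the platform's per-user data and cache directories, with DATA_DIR and CACHE_DIR as explicit overrides. A quick sketch of what envPaths returns; exact paths vary by OS and user:

import envPaths from 'env-paths';

const paths = envPaths('mini-loader-server');
// On Linux this is roughly:
//   paths.data  -> ~/.local/share/mini-loader-server-nodejs
//   paths.cache -> ~/.cache/mini-loader-server-nodejs
console.log(paths.data, paths.cache);

// The runtime then prefers explicit env overrides, mirroring the config above:
const dataDir = process.env.DATA_DIR || paths.data;
const cacheDir = process.env.CACHE_DIR || paths.cache;
console.log({ dataDir, cacheDir });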
packages/server/src/scheduler/scheduler.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
import { CronJob } from 'cron';
import { ScheduleRepo } from '../repos/schedules/schedules.js';
import { RunRepo } from '../repos/runs/runs.js';

type SchedulerOptions = {
  runs: RunRepo;
  schedules: ScheduleRepo;
};

type RunningSchedule = {
  id: string;
  job: CronJob;
  stop: () => Promise<void>;
};

class Scheduler {
  #running: RunningSchedule[] = [];
  #options: SchedulerOptions;

  constructor(options: SchedulerOptions) {
    this.#options = options;
    const { schedules } = this.#options;
    schedules.on('added', this.#add);
    schedules.on('removed', this.#remove);
  }

  #remove = async (id: string) => {
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    this.#running = this.#running.filter((r) => r.id !== id);
  };

  #add = async (id: string) => {
    const { schedules, runs } = this.#options;
    const current = this.#running.filter((r) => r.id === id);
    await Promise.all(current.map((r) => r.stop()));
    const schedule = await schedules.get(id);
    if (!schedule) {
      return;
    }
    const job = new CronJob(schedule.cron, async () => {
      await runs.create({
        loadId: schedule.load,
      });
    });
    const stop = async () => {
      job.stop();
    };
    this.#running.push({
      id: schedule.id,
      job,
      stop,
    });
  };

  public stop = async () => {
    for (const running of this.#running) {
      await running.stop();
      this.#running = this.#running.filter((r) => r !== running);
    }
  };

  public start = async () => {
    const { schedules } = this.#options;
    await this.stop();
    const all = await schedules.find({});
    for (const schedule of all) {
      await this.#add(schedule.id);
    }
  };
}

export { Scheduler };
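The Scheduler leans on the cron package for the actual timing. A standalone sketch of that API, showing the lifecycle that the RunningSchedule entries wrap; the cron expression is just an example, and note that a CronJob constructed this way does not fire until start() is called:

import { CronJob } from 'cron';

// Example expression: once a minute. Schedules in mini-loader carry their own `cron` string.
const job = new CronJob('0 * * * * *', async () => {
  console.log('tick at', new Date().toISOString());
});

job.start(); // begin firing on schedule
// ...later, the equivalent of RunningSchedule.stop():
setTimeout(() => job.stop(), 5 * 60 * 1000);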
@@ -1,13 +1,31 @@
+import pkg from '../../package.json';
 import { fastifyTRPCPlugin, FastifyTRPCPluginOptions } from '@trpc/server/adapters/fastify';
 import fastify from 'fastify';
 import { RootRouter, rootRouter } from '../router/router.js';
 import { createContext } from '../router/router.utils.js';
 import { Runtime } from '../runtime/runtime.js';
+import { gateway } from '../gateway/gateway.js';
 
 const createServer = async (runtime: Runtime) => {
-  const server = fastify({});
-  server.get('/', async () => {
-    return { hello: 'world' };
+  const server = fastify({
+    maxParamLength: 10000,
+    bodyLimit: 30 * 1024 * 1024,
+    logger: {
+      level: 'warn',
+    },
+  });
+
+  server.get('/health', async (req) => {
+    let authorized = false;
+    try {
+      const { authorization } = req.headers;
+      if (authorization) {
+        const [, token] = authorization.split(' ');
+        await runtime.auth.validateToken(token);
+        authorized = true;
+      }
+    } catch (error) {}
+    return { authorized, status: 'ok', version: pkg.version };
   });
 
   server.register(fastifyTRPCPlugin, {
@@ -20,6 +38,14 @@ const createServer = async (runtime: Runtime) => {
       },
     } satisfies FastifyTRPCPluginOptions<RootRouter>['trpcOptions'],
   });
+
+  server.register(gateway, {
+    runtime,
+  });
+
+  server.addHook('onError', async (request, reply, error) => {
+    console.error(error);
+  });
   await server.ready();
 
   return server;
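The new /health route reports server status and version, and flips authorized to true when a valid Bearer token is supplied. A rough probe of it using the built-in fetch; host, port, and token are placeholders, not taken from this diff:

// Placeholders: adjust host/port to wherever the mini-loader server is listening.
const response = await fetch('http://localhost:3000/health', {
  headers: { authorization: 'Bearer <token>' },
});
console.log(await response.json());
// -> { authorized: true, status: 'ok', version: '<package version>' }
// Without (or with an invalid) token the same call returns authorized: false.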
pnpm-lock.yaml (generated, 102 changed lines)
@@ -36,6 +36,12 @@ importers:
 
   packages/cli:
     dependencies:
+      '@morten-olsen/mini-loader-runner':
+        specifier: workspace:^
+        version: link:../runner
+      '@morten-olsen/mini-loader-server':
+        specifier: workspace:^
+        version: link:../server
       '@rollup/plugin-auto-install':
         specifier: ^3.0.5
         version: 3.0.5(rollup@4.9.4)
@@ -60,6 +66,12 @@ importers:
       commander:
         specifier: ^11.1.0
         version: 11.1.0
+      dotenv:
+        specifier: ^16.3.1
+        version: 16.3.1
+      env-paths:
+        specifier: ^3.0.0
+        version: 3.0.0
       inquirer:
         specifier: ^9.2.12
         version: 9.2.12
@@ -82,12 +94,6 @@ importers:
       '@morten-olsen/mini-loader-configs':
        specifier: workspace:^
        version: link:../configs
-      '@morten-olsen/mini-loader-runner':
-        specifier: workspace:^
-        version: link:../runner
-      '@morten-olsen/mini-loader-server':
-        specifier: workspace:^
-        version: link:../server
       '@types/inquirer':
         specifier: ^9.0.7
         version: 9.0.7
@@ -98,6 +104,10 @@ importers:
   packages/configs: {}
 
   packages/examples:
+    dependencies:
+      fastify:
+        specifier: ^4.25.2
+        version: 4.25.2
     devDependencies:
       '@morten-olsen/mini-loader':
         specifier: workspace:^
@@ -129,13 +139,16 @@ importers:
 
   packages/runner:
     dependencies:
-      eventemitter3:
-        specifier: ^5.0.1
-        version: 5.0.1
-    devDependencies:
       '@morten-olsen/mini-loader':
         specifier: workspace:^
         version: link:../mini-loader
+      eventemitter3:
+        specifier: ^5.0.1
+        version: 5.0.1
+      nanoid:
+        specifier: ^5.0.4
+        version: 5.0.4
+    devDependencies:
       '@morten-olsen/mini-loader-configs':
         specifier: workspace:^
         version: link:../configs
@@ -148,6 +161,9 @@ importers:
 
   packages/server:
     dependencies:
+      '@fastify/reply-from':
+        specifier: ^9.7.0
+        version: 9.7.0
       '@trpc/client':
         specifier: ^10.45.0
         version: 10.45.0(@trpc/server@10.45.0)
@@ -157,6 +173,12 @@ importers:
       commander:
         specifier: ^11.1.0
         version: 11.1.0
+      cron:
+        specifier: ^3.1.6
+        version: 3.1.6
+      env-paths:
+        specifier: ^3.0.0
+        version: 3.0.0
       eventemitter3:
         specifier: ^5.0.1
         version: 5.0.1
@@ -470,6 +492,11 @@ packages:
       fast-uri: 2.3.0
     dev: false
 
+  /@fastify/busboy@2.1.0:
+    resolution: {integrity: sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==}
+    engines: {node: '>=14'}
+    dev: false
+
   /@fastify/deepmerge@1.3.0:
     resolution: {integrity: sha512-J8TOSBq3SoZbDhM9+R/u77hP93gz/rajSA+K2kGyijPpORPWUXHUpTaleoj+92As0S9uPRP7Oi8IqMf0u+ro6A==}
     dev: false
@@ -484,6 +511,19 @@ packages:
       fast-json-stringify: 5.10.0
     dev: false
 
+  /@fastify/reply-from@9.7.0:
+    resolution: {integrity: sha512-/F1QBl3FGlTqStjmiuoLRDchVxP967TZh6FZPwQteWhdLsDec8mqSACE+cRzw6qHUj3v9hfdd7JNgmb++fyFhQ==}
+    dependencies:
+      '@fastify/error': 3.4.1
+      end-of-stream: 1.4.4
+      fast-content-type-parse: 1.1.0
+      fast-querystring: 1.1.2
+      fastify-plugin: 4.5.1
+      pump: 3.0.0
+      tiny-lru: 11.2.5
+      undici: 5.28.2
+    dev: false
+
   /@gar/promisify@1.1.3:
     resolution: {integrity: sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==}
     requiresBuild: true
@@ -1254,6 +1294,10 @@ packages:
       '@types/node': 20.10.8
     dev: true
 
+  /@types/luxon@3.3.8:
+    resolution: {integrity: sha512-jYvz8UMLDgy3a5SkGJne8H7VA7zPV2Lwohjx0V8V31+SqAjNmurWMkk9cQhfvlcnXWudBpK9xPM1n4rljOcHYQ==}
+    dev: false
+
   /@types/node@20.10.8:
     resolution: {integrity: sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==}
     dependencies:
@@ -2049,6 +2093,13 @@ packages:
     resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
     dev: false
 
+  /cron@3.1.6:
+    resolution: {integrity: sha512-cvFiQCeVzsA+QPM6fhjBtlKGij7tLLISnTSvFxVdnFGLdz+ZdXN37kNe0i2gefmdD17XuZA6n2uPVwzl4FxW/w==}
+    dependencies:
+      '@types/luxon': 3.3.8
+      luxon: 3.4.4
+    dev: false
+
   /cross-spawn@7.0.3:
     resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
     engines: {node: '>= 8'}
@@ -2158,6 +2209,11 @@ packages:
       esutils: 2.0.3
     dev: true
 
+  /dotenv@16.3.1:
+    resolution: {integrity: sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==}
+    engines: {node: '>=12'}
+    dev: false
+
   /eastasianwidth@0.2.0:
     resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
     dev: false
@@ -2206,6 +2262,11 @@ packages:
     dev: false
     optional: true
 
+  /env-paths@3.0.0:
+    resolution: {integrity: sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    dev: false
+
   /err-code@2.0.3:
     resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==}
     requiresBuild: true
@@ -2672,6 +2733,10 @@ packages:
     resolution: {integrity: sha512-eel5UKGn369gGEWOqBShmFJWfq/xSJvsgDzgLYC845GneayWvXBf0lJCBn5qTABfewy1ZDPoaR5OZCP+kssfuw==}
     dev: false
 
+  /fastify-plugin@4.5.1:
+    resolution: {integrity: sha512-stRHYGeuqpEZTL1Ef0Ovr2ltazUT9g844X5z/zEBFLG8RYlpDiOCIG+ATvYEp+/zmc7sN29mcIMp8gvYplYPIQ==}
+    dev: false
+
   /fastify@4.25.2:
     resolution: {integrity: sha512-SywRouGleDHvRh054onj+lEZnbC1sBCLkR0UY3oyJwjD4BdZJUrxBqfkfCaqn74pVCwBaRHGuL3nEWeHbHzAfw==}
     dependencies:
@@ -3704,6 +3769,11 @@ packages:
     dependencies:
       yallist: 4.0.0
 
+  /luxon@3.4.4:
+    resolution: {integrity: sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==}
+    engines: {node: '>=12'}
+    dev: false
+
   /magic-string@0.25.9:
     resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==}
     dependencies:
@@ -5140,6 +5210,11 @@ packages:
     engines: {node: '>=8'}
     dev: false
 
+  /tiny-lru@11.2.5:
+    resolution: {integrity: sha512-JpqM0K33lG6iQGKiigcwuURAKZlq6rHXfrgeL4/I8/REoyJTGU+tEMszvT/oTRVHG2OiylhGDjqPp1jWMlr3bw==}
+    engines: {node: '>=12'}
+    dev: false
+
   /tmp@0.0.33:
     resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==}
     engines: {node: '>=0.6.0'}
@@ -5357,6 +5432,13 @@ packages:
   /undici-types@5.26.5:
     resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
 
+  /undici@5.28.2:
+    resolution: {integrity: sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==}
+    engines: {node: '>=14.0'}
+    dependencies:
+      '@fastify/busboy': 2.1.0
+    dev: false
+
   /unique-filename@1.1.1:
     resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==}
     requiresBuild: true
@@ -1,7 +1,9 @@
 import { findWorkspacePackages } from '@pnpm/find-workspace-packages';
-import { writeFile } from 'fs/promises';
+import { readFile, writeFile } from 'fs/promises';
 import { join } from 'path';
 
+const sharedData = JSON.parse(await readFile(join(process.cwd(), 'scripts/shared-data.json')));
+
 const version = process.argv[2];
 if (!version) {
   throw new Error('Version is required');
@@ -11,6 +13,9 @@ const packages = await findWorkspacePackages(process.cwd());
 
 for (const { manifest, dir } of packages) {
   console.log(dir, version);
+  for (let [key, value] of Object.entries(sharedData || {})) {
+    manifest[key] = value;
+  }
   manifest.version = version;
   await writeFile(join(dir, 'package.json'), JSON.stringify(manifest, null, 2));
 }
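The version script now merges scripts/shared-data.json into every workspace manifest before bumping the version. The actual contents of that file are not part of this diff; a hypothetical example of the kind of fields such a file could carry:

// Hypothetical contents for scripts/shared-data.json; the real file is not shown in this diff.
const sharedDataExample = {
  license: 'MIT',
  repository: 'https://github.com/morten-olsen/mini-loader',
};
// Each key/value pair is copied verbatim onto every package manifest by the loop above.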
Some files were not shown because too many files have changed in this diff.