Compare commits

..

9 Commits

Author SHA1 Message Date
Morten Olsen
f6799586f4 fix: issue in document matching
Some checks failed
Build and release / Build (push) Failing after 1m11s
Build and release / update-release-draft (push) Has been skipped
Build and release / Release (push) Has been skipped
2025-12-11 08:47:58 +01:00
Morten Olsen
68abe3ce79 feat: improved zod handling
Some checks failed
Build and release / Build (push) Failing after 2m41s
Build and release / update-release-draft (push) Has been skipped
Build and release / Release (push) Has been skipped
2025-12-11 08:36:40 +01:00
Morten Olsen
3c475ab5d6 feat: support filtered subscriptions
Some checks failed
Build and release / Build (push) Failing after 1m13s
Build and release / update-release-draft (push) Has been skipped
Build and release / Release (push) Has been skipped
2025-12-11 00:00:24 +01:00
Morten Olsen
c7f9270ef2 feat: support filtering JS objects using QueryFilter
Some checks failed
Build and release / Build (push) Failing after 1m12s
Build and release / update-release-draft (push) Has been skipped
Build and release / Release (push) Has been skipped
2025-12-10 23:30:06 +01:00
Morten Olsen
904b0f783e feat: use postgres for change notification
Some checks failed
Build and release / Release (push) Has been skipped
Build and release / Build (push) Failing after 1m14s
Build and release / update-release-draft (push) Has been skipped
2025-12-10 23:21:25 +01:00
Morten Olsen
3641e86da5 feat: all fields optional
Some checks failed
Build and release / Build (push) Failing after 1m14s
Build and release / update-release-draft (push) Has been skipped
Build and release / Release (push) Has been skipped
2025-12-10 22:09:09 +01:00
Morten Olsen
1255639058 change document to text and binary content
Some checks failed
Build and release / Build (push) Failing after 1m12s
Build and release / update-release-draft (push) Has been skipped
Build and release / Release (push) Has been skipped
2025-12-10 22:06:15 +01:00
Morten Olsen
25f614a730 ci: add server build
Some checks failed
Build and release / Build (push) Failing after 2m28s
Build and release / update-release-draft (push) Has been skipped
Build and release / Release (push) Has been skipped
2025-12-10 21:52:20 +01:00
Morten Olsen
0646390d52 chore: improved schema 2025-12-10 21:12:17 +01:00
54 changed files with 1914 additions and 244 deletions

2
.dockerignore Normal file
View File

@@ -0,0 +1,2 @@
node_modules/
packages/*/dist/

48
.github/release-drafter-config.yml vendored Normal file
View File

@@ -0,0 +1,48 @@
# Release-drafter configuration: groups merged PRs into release-note
# categories by label and resolves the next semver bump from PR labels.
name-template: '$RESOLVED_VERSION 🌈'
tag-template: '$RESOLVED_VERSION'
categories:
  - title: '🚀 Features'
    labels:
      - 'feature'
      - 'enhancement'
  - title: '🐛 Bug Fixes'
    labels:
      - 'fix'
      - 'bugfix'
      - 'bug'
  - title: '🧰 Maintenance'
    label: 'chore'
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
version-resolver:
  major:
    labels:
      - 'major'
  minor:
    labels:
      - 'minor'
  patch:
    labels:
      - 'patch'
  default: patch
# Auto-label PRs from file globs, branch-name patterns, and title patterns.
autolabeler:
  - label: 'chore'
    files:
      - '*.md'
    branch:
      - '/docs{0,1}\/.+/'
  - label: 'bug'
    branch:
      - '/fix\/.+/'
    title:
      - '/fix/i'
  - label: 'enhancement'
    branch:
      - '/feature\/.+/'
      - '/feat\/.+/'
    title:
      - '/feat:.+/'
template: |
  ## Changes
  $CHANGES

View File

@@ -0,0 +1,21 @@
---
# Applies labels to pull requests using the autolabeler rules in
# .github/release-drafter-config.yml. Release drafting itself is disabled
# here (disable-releaser) and handled by the draft-release job instead.
name: Auto Labeler

on:
  pull_request:
    types: [opened, reopened, synchronize]

permissions:
  contents: read

jobs:
  auto-labeler:
    permissions:
      contents: write
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v6
        with:
          config-name: release-drafter-config.yml
          disable-releaser: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

55
.github/workflows/job-build.yaml vendored Normal file
View File

@@ -0,0 +1,55 @@
---
# Reusable build workflow, invoked via workflow_call from the default
# pipeline: install deps with pnpm, build, test, upload dist artifacts.
#
# FIX: a reusable workflow does NOT inherit the caller's `env` context, so
# `${{ env.NODE_VERSION }}` / `${{ env.NODE_REGISTRY }}` /
# `${{ env.PNPM_VERSION }}` evaluated to empty strings here. Tool versions
# are now declared as workflow_call inputs with defaults (backward
# compatible — callers need not pass anything), and the npm token is a
# declared optional secret (callers can also use `secrets: inherit`).
name: Build

on:
  workflow_call:
    inputs:
      node-version:
        description: Node.js version to build with
        type: string
        required: false
        default: "23.x"
      node-registry:
        description: npm registry URL for setup-node
        type: string
        required: false
        default: "https://registry.npmjs.org"
      pnpm-version:
        description: pnpm version to install
        type: string
        required: false
        default: "10.6.0"
    secrets:
      NPM_TOKEN:
        required: false

jobs:
  build:
    name: Build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ inputs.node-version }}
          registry-url: ${{ inputs.node-registry }}
      - uses: pnpm/action-setup@v4
        name: Install pnpm
        with:
          version: ${{ inputs.pnpm-version }}
          run_install: false
      - name: Get pnpm store directory
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path --silent)" >> "$GITHUB_ENV"
      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
          path: ${{ env.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-
      - name: Install dependencies
        run: pnpm install
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: Build
        run: pnpm build
      - name: Run tests
        run: pnpm test
      - uses: actions/upload-artifact@v4
        with:
          name: lib
          retention-days: 5
          path: |
            packages/*/dist
            extensions/*/dist
            server/*/dist
            package.json
            README.md

View File

@@ -0,0 +1,18 @@
---
# Reusable workflow: updates and publishes the release draft from merged
# pull requests, driven by .github/release-drafter-config.yml.
name: Draft release

on:
  workflow_call:

jobs:
  draft-release:
    name: Update release drafter
    permissions:
      contents: write
      pull-requests: write
    environment: release
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v6
        with:
          config-name: release-drafter-config.yml
          publish: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

115
.github/workflows/pipeline-default.yaml vendored Normal file
View File

@@ -0,0 +1,115 @@
---
# Default pipeline: build on pushes to main and on pull requests; on main,
# additionally update the release draft and build/push the Docker image.
name: Build and release

on:
  push:
    branches:
      - main
  pull_request:
    types:
      - opened
      - synchronize

env:
  # NOTE(review): `environment` and `release_channel` are not referenced by
  # any visible step — confirm they are still needed before removing.
  environment: test
  release_channel: latest
  DO_NOT_TRACK: "1"
  NODE_VERSION: "23.x"
  NODE_REGISTRY: "https://registry.npmjs.org"
  NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
  DOCKER_REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
  # Quoted so the value stays a string for consumers.
  PNPM_VERSION: "10.6.0"

permissions:
  contents: write
  packages: read
  pull-requests: write
  id-token: write
  actions: read
  security-events: write

jobs:
  build:
    uses: ./.github/workflows/job-build.yaml
    name: Build
    # FIX: reusable workflows do not receive the caller's secrets (or env)
    # implicitly — without this, secrets.NPM_TOKEN is empty inside job-build.
    secrets: inherit
  update-release-draft:
    needs: build
    if: github.ref == 'refs/heads/main'
    uses: ./.github/workflows/job-draft-release.yaml
    secrets: inherit
  release:
    permissions:
      contents: read
      packages: write
      attestations: write
      id-token: write
      pages: write
    name: Release
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    needs: update-release-draft
    environment: release
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # npm publish flow kept for reference but currently disabled:
      # - uses: actions/setup-node@v4
      #   with:
      #     node-version: '${{ env.NODE_VERSION }}'
      #     registry-url: '${{ env.NODE_REGISTRY }}'
      #
      # - uses: pnpm/action-setup@v4
      #   name: Install pnpm
      #   with:
      #     version: ${{ env.PNPM_VERSION }}
      #     run_install: false
      #
      # - name: Install dependencies
      #   run: pnpm install
      #   env:
      #     NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
      #
      # - uses: actions/download-artifact@v4
      #   with:
      #     name: lib
      #     path: ./
      #
      # - name: Publish to npm
      #   run: |
      #     git config user.name "Github Actions Bot"
      #     git config user.email "<>"
      #     node ./scripts/set-version.mjs $(git describe --tag --abbrev=0)
      #     pnpm publish -r --no-git-checks --access public
      #   env:
      #     NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: Log in to the Container registry
        # Actions pinned by commit SHA for supply-chain safety —
        # TODO: record the matching release tag next to each SHA when bumping.
        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
        with:
          registry: ${{ env.DOCKER_REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
        with:
          images: ${{ env.DOCKER_REGISTRY }}/${{ env.IMAGE_NAME }}
      - name: Build and push Docker image
        id: push
        uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
        with:
          context: .
          file: ./packages/server/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
      # - name: Generate artifact attestation
      #   uses: actions/attest-build-provenance@v2
      #   with:
      #     subject-name: ${{ env.DOCKER_REGISTRY }}/${{ env.IMAGE_NAME}}
      #     subject-digest: ${{ steps.push.outputs.digest }}
      #     push-to-registry: true

View File

@@ -56,6 +56,16 @@
"packageVersion": "1.0.0",
"packageName": "runtime"
}
},
{
"timestamp": "2025-12-10T09:46:52.130Z",
"template": "pkg",
"values": {
"monoRepo": true,
"packagePrefix": "@morten-olsen/stash-",
"packageVersion": "1.0.0",
"packageName": "client"
}
}
]
}

9
docker-compose.yaml Normal file
View File

@@ -0,0 +1,9 @@
---
# Local stack for the stash server, built from the in-repo Dockerfile.
name: stash

services:
  app:
    build:
      context: .
      dockerfile: ./packages/server/Dockerfile
    # Container filesystem is read-only; mount writable paths explicitly if
    # the server ever needs them.
    read_only: true
    ports:
      # FIX: quoted — unquoted NN:NN port mappings are parsed as base-60
      # integers by YAML 1.1 loaders.
      - "3400:3400"

View File

@@ -46,6 +46,6 @@ export default tseslint.config(
},
...compat.extends('plugin:prettier/recommended'),
{
ignores: ['**/node_modules/', '**/dist/', '**/.turbo/', '**/generated/'],
ignores: ['**/node_modules/', '**/dist/', '**/.turbo/', '**/__generated__/'],
},
);

4
packages/client/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
/node_modules/
/dist/
/coverage/
/.env

View File

@@ -0,0 +1,31 @@
{
"type": "module",
"main": "dist/exports.js",
"scripts": {
"build": "tsc --build",
"test:unit": "vitest --run --passWithNoTests",
"test": "pnpm run \"/^test:/\"",
"generate:client": "openapi-typescript http://localhost:3400/docs/openapi.json -o src/__generated__/schema.ts"
},
"packageManager": "pnpm@10.6.0",
"files": [
"dist"
],
"exports": {
".": "./dist/exports.js"
},
"devDependencies": {
"@morten-olsen/stash-configs": "workspace:*",
"@morten-olsen/stash-tests": "workspace:*",
"@types/node": "24.10.2",
"@vitest/coverage-v8": "4.0.15",
"openapi-typescript": "^7.10.1",
"typescript": "5.9.3",
"vitest": "4.0.15"
},
"name": "@morten-olsen/stash-client",
"version": "1.0.0",
"dependencies": {
"openapi-fetch": "^0.15.0"
}
}

View File

@@ -0,0 +1,515 @@
/**
* This file was auto-generated by openapi-typescript.
* Do not make direct changes to the file.
*/
export interface paths {
"/system/ready": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
/** Get system ready state */
get: operations["GET/system/ready"];
put?: never;
post?: never;
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/documents": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
get?: never;
put?: never;
/** Upsert document */
post: operations["POST/documents"];
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/document-filters": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
get?: never;
put?: never;
/** Find documents */
post: operations["POST/documents-filters"];
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
"/document-chunk-filters": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
get?: never;
put?: never;
/** Find document chunks */
post: operations["POST/documents-chunk-filters"];
delete?: never;
options?: never;
head?: never;
patch?: never;
trace?: never;
};
}
export type webhooks = Record<string, never>;
export interface components {
schemas: never;
responses: never;
parameters: never;
requestBodies: never;
headers: never;
pathItems: never;
}
export type $defs = Record<string, never>;
export interface operations {
"GET/system/ready": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
requestBody?: never;
responses: {
/** @description Default Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": {
/** @enum {string} */
status: "ok";
};
};
};
};
};
"POST/documents": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
requestBody?: {
content: {
"application/json": {
id?: string | null;
owner?: string | null;
contentType?: string | null;
content?: string | null;
source?: string | null;
sourceId?: string | null;
type?: string;
typeVersion?: number | null;
searchText?: string | null;
metadata?: unknown;
};
};
};
responses: {
/** @description Default Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": {
/** @enum {string} */
action: "inserted" | "updated" | "skipped";
id: string;
document: {
id: string;
owner: string | null;
/** Format: date-time */
createdAt: string;
/** Format: date-time */
updatedAt: string;
/** Format: date-time */
deletedAt: string | null;
contentType: string | null;
content: string | null;
source: string | null;
sourceId: string | null;
type: string;
typeVersion: number | null;
searchText: string | null;
metadata: unknown;
};
};
};
};
};
};
"POST/documents-filters": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
requestBody: {
content: {
"application/json": {
/** @default 0 */
offset?: number;
/** @default 20 */
limit?: number;
condition: (({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
}) | {
/** @enum {string} */
type: "operator";
/** @enum {string} */
operator: "and" | "or";
conditions: (({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
}) | {
/** @enum {string} */
type: "operator";
/** @enum {string} */
operator: "and" | "or";
conditions: (({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
}) | {
/** @enum {string} */
type: "operator";
/** @enum {string} */
operator: "and" | "or";
conditions: ({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
})[];
})[];
})[];
}) | string;
};
};
};
responses: {
/** @description Default Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": {
items: {
id: string;
owner: string | null;
/** Format: date-time */
createdAt: string;
/** Format: date-time */
updatedAt: string;
/** Format: date-time */
deletedAt: string | null;
contentType: string | null;
content: string | null;
source: string | null;
sourceId: string | null;
type: string;
typeVersion: number | null;
searchText: string | null;
metadata: unknown;
}[];
};
};
};
};
};
"POST/documents-chunk-filters": {
parameters: {
query?: never;
header?: never;
path?: never;
cookie?: never;
};
requestBody?: {
content: {
"application/json": {
/** @default 20 */
limit?: number;
/** @default 0 */
offset?: number;
semanticText?: string;
conditions?: (({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
}) | {
/** @enum {string} */
type: "operator";
/** @enum {string} */
operator: "and" | "or";
conditions: (({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
}) | {
/** @enum {string} */
type: "operator";
/** @enum {string} */
operator: "and" | "or";
conditions: (({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
}) | {
/** @enum {string} */
type: "operator";
/** @enum {string} */
operator: "and" | "or";
conditions: ({
/** @enum {string} */
type: "text";
tableName?: string;
field: string[];
conditions: {
equal?: string | null;
notEqual?: string;
like?: string;
notLike?: string;
in?: string[];
notIn?: string[];
};
} | {
/** @enum {string} */
type: "number";
tableName?: string;
field: string[];
conditions: {
equals?: number | null;
notEquals?: number | null;
greaterThan?: number;
greaterThanOrEqual?: number;
lessThan?: number;
lessThanOrEqual?: number;
in?: number[];
notIn?: number[];
};
})[];
})[];
})[];
}) | string;
};
};
};
responses: {
/** @description Default Response */
200: {
headers: {
[name: string]: unknown;
};
content: {
"application/json": {
items: {
id: string;
owner: string;
content: string;
metadata: unknown;
distance?: number;
}[];
};
};
};
};
};
}

View File

@@ -0,0 +1,19 @@
import createApiClient from 'openapi-fetch';

import type { paths } from './__generated__/schema.js';

/** Options accepted by {@link createStashClient}. */
type CreateStashClientOptions = {
  /** Base URL of the stash server, e.g. `http://localhost:3400`. */
  baseUrl: string;
};

/** A typed openapi-fetch client bound to the generated stash API schema. */
type StashClient = ReturnType<typeof createApiClient<paths>>;

/**
 * Creates a typed HTTP client for the stash server.
 *
 * @param options - connection options (server base URL)
 * @returns an openapi-fetch client for the generated `paths` schema
 */
const createStashClient = (options: CreateStashClientOptions): StashClient =>
  createApiClient<paths>({ baseUrl: options.baseUrl });

export type { StashClient };
export { createStashClient };

View File

@@ -0,0 +1,9 @@
{
"compilerOptions": {
"outDir": "./dist"
},
"include": [
"src/**/*.ts"
],
"extends": "@morten-olsen/stash-configs/tsconfig.json"
}

View File

@@ -0,0 +1,12 @@
import { defineConfig } from 'vitest/config';
import { getAliases } from '@morten-olsen/stash-tests/vitest';

// Vitest config: resolve workspace packages via source aliases from the
// shared test tooling (async because alias discovery is asynchronous).
// eslint-disable-next-line import/no-default-export
export default defineConfig(async () => ({
  resolve: {
    alias: await getAliases(),
  },
}));

View File

@@ -23,9 +23,6 @@
},
"name": "@morten-olsen/stash-query-dsl",
"version": "1.0.0",
"imports": {
"#root/*": "./src/*"
},
"dependencies": {
"chevrotain": "^11.0.3",
"zod": "4.1.13"

View File

@@ -1,2 +1,4 @@
export * from './query-parser.schemas.js';
export { QueryParser } from './query-parser.js';
export { QueryParser, queryParser } from './query-parser.js';
export * from './utils.filter.js';
export * from './query-parser.codec.js';

View File

@@ -0,0 +1,20 @@
import { z } from 'zod';

import { queryFilterSchema } from './query-parser.schemas.js';
import { queryParser } from './query-parser.js';

/**
 * Codec between the query DSL string form (e.g. `metadata.foo = 'bar'`) and
 * the structured QueryFilter: decoding parses the DSL, encoding stringifies
 * a filter back into it.
 *
 * The codec's static type differs from the plain filter schema, so it is
 * deliberately widened with `as any` to expose a uniform schema type.
 */
const queryStringSchema: typeof queryFilterSchema = z
  .codec(z.string(), queryFilterSchema, {
    encode: (filter) => queryParser.stringify(filter),
    decode: (input) => queryParser.parse(input),
  })
  // eslint-disable-next-line
  .meta({ id: 'QueryString', examples: ["metadata.foo = 'bar'"] }) as any;

/** Accepts either a raw DSL string or an already-structured filter object. */
// eslint-disable-next-line
const querySchema: typeof queryFilterSchema = z.union([queryStringSchema, queryFilterSchema]) as any;

export { querySchema };

View File

@@ -1,6 +1,6 @@
import { createToken, Lexer, EmbeddedActionsParser } from 'chevrotain';
import type { QueryFilter, QueryCondition } from './query-parser.schemas.js';
import { type QueryFilter, type QueryCondition, queryFilterSchema } from './query-parser.schemas.js';
// ----------------- Lexer -----------------
@@ -426,7 +426,10 @@ class QueryParserParser extends EmbeddedActionsParser {
return this.SUBRULE(this.#orExpression);
});
public parse = (input: string): QueryFilter => {
public parse = <T extends typeof queryFilterSchema>(
input: string,
schema: T = queryFilterSchema as unknown as T,
): QueryFilter => {
const lexResult = QueryLexer.tokenize(input);
if (lexResult.errors.length > 0) {
@@ -450,7 +453,7 @@ class QueryParserParser extends EmbeddedActionsParser {
throw new Error(`Parse error: ${error.message}`);
}
return result;
return schema.parse(result);
};
}

View File

@@ -1,85 +1,65 @@
import { z } from 'zod';
import { z, ZodArray } from 'zod';
const queryConditionTextSchema = z.object({
type: z.literal('text'),
tableName: z.string().optional(),
field: z.array(z.string()),
conditions: z.object({
equal: z.string().nullish(),
notEqual: z.string().optional(),
like: z.string().optional(),
notLike: z.string().optional(),
in: z.array(z.string()).optional(),
notIn: z.array(z.string()).optional(),
}),
});
const queryConditionTextSchema = z
.object({
type: z.literal('text'),
tableName: z.string().optional(),
field: z.array(z.string()),
conditions: z.object({
equal: z.string().nullish(),
notEqual: z.string().optional(),
like: z.string().optional(),
notLike: z.string().optional(),
in: z.array(z.string()).optional(),
notIn: z.array(z.string()).optional(),
}),
})
.meta({ id: 'QueryConditionText' });
type QueryConditionText = z.infer<typeof queryConditionTextSchema>;
const queryConditionNumberSchema = z.object({
type: z.literal('number'),
tableName: z.string().optional(),
field: z.array(z.string()),
conditions: z.object({
equals: z.number().nullish(),
notEquals: z.number().nullish(),
greaterThan: z.number().optional(),
greaterThanOrEqual: z.number().optional(),
lessThan: z.number().optional(),
lessThanOrEqual: z.number().optional(),
in: z.array(z.number()).optional(),
notIn: z.array(z.number()).optional(),
}),
});
const queryConditionNumberSchema = z
.object({
type: z.literal('number'),
tableName: z.string().optional(),
field: z.array(z.string()),
conditions: z.object({
equals: z.number().nullish(),
notEquals: z.number().nullish(),
greaterThan: z.number().optional(),
greaterThanOrEqual: z.number().optional(),
lessThan: z.number().optional(),
lessThanOrEqual: z.number().optional(),
in: z.array(z.number()).optional(),
notIn: z.array(z.number()).optional(),
}),
})
.meta({ id: 'QueryConditionNumber' });
type QueryConditionNumber = z.infer<typeof queryConditionNumberSchema>;
const queryConditionSchema = z.discriminatedUnion('type', [queryConditionTextSchema, queryConditionNumberSchema]);
const queryConditionSchema = z
.discriminatedUnion('type', [queryConditionTextSchema, queryConditionNumberSchema])
.meta({ id: 'QueryCondition' });
type QueryCondition = z.infer<typeof queryConditionSchema>;
type QueryFilter = QueryCondition | QueryOperator;
type QueryOperator = {
type: 'operator';
operator: 'and' | 'or';
conditions: QueryFilter[];
};
// Create a depth-limited recursive schema for OpenAPI compatibility
// This supports up to 3 levels of nesting, which should be sufficient for most use cases
// OpenAPI cannot handle z.lazy(), so we manually define the nesting
// If you need deeper nesting, you can add more levels (Level3, Level4, etc.)
const queryFilterSchemaLevel0: z.ZodType<QueryFilter> = z.union([
queryConditionSchema,
z.object({
const queryOperatorSchema = z
.object({
type: z.literal('operator'),
operator: z.enum(['and', 'or']),
conditions: z.array(queryConditionSchema),
}),
]);
get conditions(): ZodArray<typeof queryOperatorSchema | typeof queryConditionSchema> {
// eslint-disable-next-line
return z.array(queryFilterSchema) as any;
},
})
.meta({ id: 'QueryOperator' });
const queryFilterSchemaLevel1: z.ZodType<QueryFilter> = z.union([
queryConditionSchema,
z.object({
type: z.literal('operator'),
operator: z.enum(['and', 'or']),
conditions: z.array(queryFilterSchemaLevel0),
}),
]);
type QueryOperator = z.infer<typeof queryOperatorSchema>;
const queryFilterSchemaLevel2: z.ZodType<QueryFilter> = z.union([
queryConditionSchema,
z.object({
type: z.literal('operator'),
operator: z.enum(['and', 'or']),
conditions: z.array(queryFilterSchemaLevel1),
}),
]);
const queryFilterSchema = z.union([queryOperatorSchema, queryConditionSchema]).meta({ id: 'QueryFilter' });
// Export the depth-limited schema (supports 3 levels of nesting)
// This works with OpenAPI schema generation
const queryFilterSchema = queryFilterSchemaLevel2;
type QueryFilter = z.infer<typeof queryFilterSchema>;
export type { QueryConditionText, QueryConditionNumber, QueryOperator, QueryCondition, QueryFilter };
export { queryConditionSchema, queryFilterSchema };

View File

@@ -19,4 +19,6 @@ class QueryParser {
};
}
export { QueryParser };
const queryParser = new QueryParser();
export { QueryParser, queryParser };

View File

@@ -0,0 +1,171 @@
import type { QueryCondition, QueryConditionNumber, QueryConditionText, QueryFilter } from './query-parser.schemas.js';
/**
 * Walks a field path (array of keys) into a nested object.
 * Returns `undefined` as soon as the path hits a missing, null, or
 * non-object intermediate value; an empty path returns the object itself.
 */
const getFieldValue = <T extends Record<string, unknown>>(obj: T, field: string[]): unknown => {
  return field.reduce<unknown>((current, key) => {
    if (current === null || current === undefined || typeof current !== 'object') {
      return undefined;
    }
    return (current as Record<string, unknown>)[key];
  }, obj);
};
/**
 * Converts a SQL LIKE pattern into an anchored RegExp.
 * `%` matches any run of characters and `_` matches exactly one character;
 * all regex metacharacters in the pattern are escaped first so they match
 * literally.
 *
 * NOTE(review): the `i` flag makes matching case-insensitive (closer to
 * Postgres ILIKE than LIKE) — confirm this is the intended semantics.
 */
const likeToRegex = (pattern: string): RegExp => {
  const source = pattern
    .replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
    .replace(/%/g, '.*')
    .replace(/_/g, '.');
  return new RegExp(`^${source}$`, 'i');
};
/**
 * Evaluates a text condition against one object; every present clause must
 * hold. `equal: null` means "value must be null/undefined", while
 * LIKE/NOT LIKE clauses fail outright for non-string values.
 */
const applyQueryConditionText = <T extends Record<string, unknown>>(
  obj: T,
  { field, conditions }: QueryConditionText,
): boolean => {
  const value = getFieldValue(obj, field);
  const { equal, notEqual, like, notLike, in: inList, notIn } = conditions;
  const valueIsNullish = value === null || value === undefined;
  if (equal !== undefined) {
    if (equal === null ? !valueIsNullish : value !== equal) return false;
  }
  if (notEqual !== undefined) {
    if (notEqual === null ? valueIsNullish : value === notEqual) return false;
  }
  if (like !== undefined) {
    if (typeof value !== 'string' || !likeToRegex(like).test(value)) return false;
  }
  if (notLike !== undefined) {
    if (typeof value !== 'string' || likeToRegex(notLike).test(value)) return false;
  }
  if (inList !== undefined && !inList.includes(value as string)) return false;
  if (notIn !== undefined && notIn.includes(value as string)) return false;
  return true;
};
/**
 * Evaluates a numeric condition against one object; every present clause
 * must hold. `equals: null` means "value must be null/undefined"; range
 * comparisons fail outright when the value is not a number.
 */
const applyQueryConditionNumber = <T extends Record<string, unknown>>(
  obj: T,
  { field, conditions }: QueryConditionNumber,
): boolean => {
  const value = getFieldValue(obj, field);
  const { equals, notEquals, greaterThan, greaterThanOrEqual, lessThan, lessThanOrEqual } = conditions;
  const valueIsNullish = value === null || value === undefined;
  if (equals !== undefined) {
    if (equals === null ? !valueIsNullish : value !== equals) return false;
  }
  if (notEquals !== undefined) {
    if (notEquals === null ? valueIsNullish : value === notEquals) return false;
  }
  if (greaterThan !== undefined) {
    if (typeof value !== 'number' || value <= greaterThan) return false;
  }
  if (greaterThanOrEqual !== undefined) {
    if (typeof value !== 'number' || value < greaterThanOrEqual) return false;
  }
  if (lessThan !== undefined) {
    if (typeof value !== 'number' || value >= lessThan) return false;
  }
  if (lessThanOrEqual !== undefined) {
    if (typeof value !== 'number' || value > lessThanOrEqual) return false;
  }
  if (conditions.in !== undefined && !conditions.in.includes(value as number)) return false;
  if (conditions.notIn !== undefined && conditions.notIn.includes(value as number)) return false;
  return true;
};
/**
 * Dispatches a leaf condition to the matching evaluator by its `type`
 * discriminant; throws on an unrecognized type.
 */
const applyQueryCondition = <T extends Record<string, unknown>>(obj: T, options: QueryCondition): boolean => {
  if (options.type === 'text') {
    return applyQueryConditionText(obj, options);
  }
  if (options.type === 'number') {
    return applyQueryConditionNumber(obj, options);
  }
  throw new Error(`Unknown filter type`);
};
/**
 * Recursively evaluates a filter tree against one object.
 * Operator nodes combine their children with `some` (or) / `every` (and);
 * an operator with no children matches everything. Leaf nodes delegate to
 * {@link applyQueryCondition}.
 */
const applyQueryFilter = <T extends Record<string, unknown>>(obj: T, filter: QueryFilter): boolean => {
  if (filter.type !== 'operator') {
    return applyQueryCondition(obj, filter);
  }
  if (filter.conditions.length === 0) {
    return true;
  }
  const matches = (condition: QueryFilter): boolean => applyQueryFilter(obj, condition);
  return filter.operator === 'or' ? filter.conditions.some(matches) : filter.conditions.every(matches);
};
/** Builds a reusable predicate that tests objects against `filter`. */
const createFilterFunction = <T extends Record<string, unknown>>(filter: QueryFilter): ((obj: T) => boolean) => {
  return (obj: T): boolean => applyQueryFilter(obj, filter);
};

/** Returns only the objects from `objects` that satisfy `filter`. */
const filterObjects = <T extends Record<string, unknown>>(objects: T[], filter: QueryFilter): T[] => {
  const predicate = createFilterFunction<T>(filter);
  return objects.filter(predicate);
};

/** Tests a single object against `filter`. */
const isMatch = <T extends Record<string, unknown>>(input: T, filter: QueryFilter) => {
  const predicate = createFilterFunction<T>(filter);
  return predicate(input);
};

export { createFilterFunction, filterObjects, isMatch };

View File

@@ -24,9 +24,6 @@
},
"name": "@morten-olsen/stash-runtime",
"version": "1.0.0",
"imports": {
"#root/*": "./src/*"
},
"dependencies": {
"@electric-sql/pglite": "^0.3.14",
"@huggingface/transformers": "^3.8.1",

View File

@@ -1,4 +1,17 @@
import { env, pipeline } from '@huggingface/transformers';
import { EMBEDDING_MODEL } from './utils/utils.consts.js';
const setModelLocation = (location: string) => {
env.cacheDir = location;
};
const preloadModel = async () => {
await pipeline('feature-extraction', EMBEDDING_MODEL);
};
export { Services } from './utils/utils.services.js';
export { StashRuntime } from './runtime.js';
export * from './services/documents/documents.js';
export * from './services/document-chunks/document-chunks.js';
export { setModelLocation, preloadModel };

View File

@@ -0,0 +1,40 @@
import { z } from 'zod';

/** Change notification for an inserted row; carries the new row data. */
const insertedRecordEventSchema = z.object({
  operation: z.literal('INSERT'),
  table: z.string(),
  schema: z.string(),
  newRecord: z.record(z.string(), z.unknown()),
});
type InsertedRecordEvent = z.infer<typeof insertedRecordEventSchema>;

/** Change notification for a deleted row; carries the removed row data. */
const deletedRecordEventSchema = z.object({
  operation: z.literal('DELETE'),
  table: z.string(),
  schema: z.string(),
  oldRecord: z.record(z.string(), z.unknown()),
});
type DeletedRecordEvent = z.infer<typeof deletedRecordEventSchema>;

/** Change notification for an updated row; carries both row versions. */
const updatedRecordEventSchema = z.object({
  operation: z.literal('UPDATE'),
  table: z.string(),
  schema: z.string(),
  newRecord: z.record(z.string(), z.unknown()),
  oldRecord: z.record(z.string(), z.unknown()),
});
type UpdatedRecordEvent = z.infer<typeof updatedRecordEventSchema>;

/** Any row-change notification, discriminated on `operation`. */
const changedRecordEventSchema = z.discriminatedUnion('operation', [
  insertedRecordEventSchema,
  deletedRecordEventSchema,
  updatedRecordEventSchema,
]);
type ChangedRecordEvent = z.infer<typeof changedRecordEventSchema>;

export type { InsertedRecordEvent, DeletedRecordEvent, UpdatedRecordEvent, ChangedRecordEvent };
export { insertedRecordEventSchema, deletedRecordEventSchema, updatedRecordEventSchema, changedRecordEventSchema };

View File

@@ -1,51 +1,59 @@
import knex, { type Knex } from 'knex';
import ClientPgLite from 'knex-pglite';
import { PGlite } from '@electric-sql/pglite';
import { vector } from '@electric-sql/pglite/vector';
import { destroy, type Services } from '../../utils/utils.services.js';
import { EventEmitter } from '../../utils/utils.event-emitter.js';
import { migrationSource } from './migrations/migrations.js';
import { type ChangedRecordEvent } from './database.schemas.js';
import type { GeneratorOutput } from './generators/generators.types.js';
import { pgLiteGenerator } from './generators/generators.pglite.js';
import { createEmitter } from './generators/generators.js';
import { destroy, Services } from '#root/utils/utils.services.js';
type DatabaseServiceEvents = {
changedRecord: (event: ChangedRecordEvent) => void;
};
class DatabaseService {
class DatabaseService extends EventEmitter<DatabaseServiceEvents> {
#services: Services;
#instance?: Promise<Knex>;
#generated?: Promise<GeneratorOutput>;
constructor(services: Services) {
super();
this.#services = services;
}
#setup = async () => {
const pglite = new PGlite({
extensions: { vector },
});
const instance = knex({
client: ClientPgLite,
dialect: 'postgres',
connection: () => ({ pglite }) as object,
});
await instance.raw(`CREATE EXTENSION IF NOT EXISTS vector`);
const emitter = createEmitter();
const output = await pgLiteGenerator({ emitter, config: {} });
const { instance } = output;
await instance.migrate.latest({
migrationSource: migrationSource({ services: this.#services }),
});
emitter.on('changed', this.emit.bind(this, 'changedRecord'));
return output;
};
#getGenerated = async () => {
if (!this.#generated) {
this.#generated = this.#setup();
}
return this.#generated;
};
public listen = async () => {
const { subscribe } = await this.#getGenerated();
await subscribe();
};
public getInstance = async () => {
const { instance } = await this.#getGenerated();
return instance;
};
public getInstance = () => {
if (!this.#instance) {
this.#instance = this.#setup();
}
return this.#instance;
};
[destroy] = async () => {
if (!this.#instance) {
if (!this.#generated) {
return;
}
const instance = await this.#instance;
const { instance } = await this.#generated;
await instance.destroy();
};
}

View File

@@ -0,0 +1,38 @@
import { PGlite } from '@electric-sql/pglite';
import knex from 'knex';
import ClientPGLite from 'knex-pglite';
import { vector } from '@electric-sql/pglite/vector';
import { changedRecordEventSchema } from '../database.schemas.js';
import type { Generator } from './generators.types.js';
type PGLiteGeneratorOptions = {
  // NOTE(review): declared but not read by the generator below — either wire it
  // into the PGlite constructor (persistent storage path) or remove; confirm intent.
  dataLocation?: string;
};

/**
 * Creates an in-memory PGlite-backed knex instance with the pgvector extension
 * enabled, plus a `subscribe` function that forwards `row_changed` Postgres
 * notifications to the provided emitter as validated `changed` events.
 */
const pgLiteGenerator: Generator<PGLiteGeneratorOptions> = async ({ emitter }) => {
  const pglite = new PGlite({
    extensions: { vector },
  });
  const instance = knex({
    client: ClientPGLite,
    dialect: 'postgres',
    // knex-pglite accepts the PGlite instance through the connection config.
    connection: () => ({ pglite }) as object,
  });
  await instance.raw(`CREATE EXTENSION IF NOT EXISTS vector`);
  // Lazily starts listening; callers invoke this once they want change events.
  const subscribe = async () => {
    pglite.onNotification((channel, data) => {
      // Ignore any channel other than the trigger's `row_changed`.
      if (channel !== 'row_changed') {
        return;
      }
      // Validate the trigger's JSON payload before re-emitting it.
      const payload = changedRecordEventSchema.parse(JSON.parse(data));
      emitter.emit('changed', payload);
    });
    await instance.raw('LISTEN row_changed');
  };
  return { instance, subscribe };
};
export { pgLiteGenerator };

View File

@@ -0,0 +1,7 @@
import { EventEmitter } from '../../../utils/utils.event-emitter.js';
import type { GeneratorEvents } from './generators.types.js';
// Factory for the typed emitter generators use to publish `changed` row events.
const createEmitter = () => new EventEmitter<GeneratorEvents>();
export { createEmitter };

View File

@@ -0,0 +1,21 @@
import type { Knex } from 'knex';
import type { ChangedRecordEvent } from '../database.schemas.js';
import type { EventEmitter } from '../../../utils/utils.event-emitter.js';
// Events a database generator can publish while it is running.
type GeneratorEvents = {
  // Fired for every validated row-change notification from the database.
  changed: (event: ChangedRecordEvent) => void;
};
// What a generator hands back to the database service.
type GeneratorOutput = {
  // Ready-to-use knex instance bound to the generated database.
  instance: Knex;
  // Starts forwarding change notifications to the emitter (idempotency is the caller's concern).
  subscribe: () => Promise<void>;
};
// `T` is the generator-specific configuration shape.
type GeneratorOptions<T> = {
  config: T;
  emitter: EventEmitter<GeneratorEvents>;
};
// A generator asynchronously builds a database connection plus its change feed.
type Generator<T> = (options: GeneratorOptions<T>) => Promise<GeneratorOutput>;
export type { GeneratorEvents, GeneratorOutput, Generator };

View File

@@ -1,7 +1,7 @@
import type { Migration } from './migrations.types.js';
import { EMBEDDING_MODEL } from '../../../utils/utils.consts.js';
import { EmbeddingsService } from '../../embeddings/embeddings.js';
import { EmbeddingsService } from '#root/services/embeddings/embeddings.js';
import { EMBEDDING_MODEL } from '#root/utils/utils.consts.js';
import type { Migration } from './migrations.types.js';
const tableNames = {
documents: 'documents',
@@ -15,6 +15,45 @@ const init: Migration = {
const embedding = services.get(EmbeddingsService);
const embeddingField = await embedding.getFieldType(EMBEDDING_MODEL);
await knex.raw(`
CREATE OR REPLACE FUNCTION notify_changes()
RETURNS trigger AS $$
DECLARE
payload TEXT;
BEGIN
-- Build the JSON payload based on the operation type
IF (TG_OP = 'DELETE') THEN
payload := json_build_object(
'operation', TG_OP,
'table', TG_TABLE_NAME,
'schema', TG_TABLE_SCHEMA,
'oldRecord', row_to_json(OLD)
)::text;
ELSIF (TG_OP = 'INSERT') THEN
payload := json_build_object(
'operation', TG_OP,
'table', TG_TABLE_NAME,
'schema', TG_TABLE_SCHEMA,
'newRecord', row_to_json(NEW)
)::text;
ELSIF (TG_OP = 'UPDATE') THEN
payload := json_build_object(
'operation', TG_OP,
'table', TG_TABLE_NAME,
'schema', TG_TABLE_SCHEMA,
'oldRecord', row_to_json(OLD),
'newRecord', row_to_json(NEW)
)::text;
END IF;
-- Send the notification to the channel
PERFORM pg_notify('row_changed', payload);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
`);
await knex.schema.createTable(tableNames.documents, (table) => {
table.uuid('id').primary();
table.uuid('owner').nullable().references('id').inTable(tableNames.documents).onDelete('CASCADE');
@@ -22,18 +61,25 @@ const init: Migration = {
table.datetime('createdAt').notNullable();
table.datetime('deletedAt').nullable();
table.string('contentType').nullable();
table.text('content').nullable();
table.binary('content').nullable();
table.text('text').nullable();
table.string('source').nullable();
table.string('sourceId').nullable();
table.string('type').notNullable();
table.string('type').nullable();
table.integer('typeVersion').nullable();
table.text('searchText').nullable();
table.jsonb('metadata').nullable();
table.index(['source', 'sourceId']);
table.index(['owner']);
});
await knex.raw(`
CREATE TRIGGER document_changes_trigger
AFTER INSERT OR UPDATE OR DELETE ON documents
FOR EACH ROW
EXECUTE PROCEDURE notify_changes();
`);
await knex.schema.createTable(tableNames.documentChunks, (table) => {
table.uuid('id').primary();
table.uuid('owner').nullable().references('id').inTable(tableNames.documents).onDelete('CASCADE');
@@ -61,6 +107,8 @@ const init: Migration = {
});
},
down: async ({ knex }) => {
await knex.raw('DROP TRIGGER IF EXISTS document_changes_trigger ON documents;');
await knex.raw('DROP FUNCTION IF EXISTS notify_changes();');
await knex.schema.dropTableIfExists(tableNames.relations);
await knex.schema.dropTableIfExists(tableNames.documentChunks);
await knex.schema.dropTableIfExists(tableNames.documents);
@@ -74,13 +122,13 @@ type DocumentRow = {
createdAt: Date;
deletedAt: Date | null;
contentType: string | null;
content: string | null;
content: Buffer | null;
text: string | null;
source: string | null;
sourceId: string | null;
type: string;
type: string | null;
typeVersion: number | null;
searchText: string | null;
metadata: unknown;
metadata: unknown | null;
};
type DocumentChunkRow = {

View File

@@ -1,10 +1,10 @@
import type { Knex } from 'knex';
import type { Services } from '../../../utils/utils.services.js';
import type { Migration } from './migrations.types.js';
import { init } from './migrations.001-init.js';
import type { Services } from '#root/utils/utils.services.js';
const migrations = [init] satisfies Migration[];
type MigrationSourceOptions = {

View File

@@ -1,6 +1,6 @@
import type { Knex } from 'knex';
import type { Services } from '#root/utils/utils.services.js';
import type { Services } from '../../../utils/utils.services.js';
type MigrationOptions = {
knex: Knex;

View File

@@ -1,13 +1,11 @@
import type { TableRows } from '../database/database.js';
import type { DocumentChunk } from './document-chunks.schemas.js';
import { documentChunkWithDistanceSchema } from './document-chunks.schemas.js';
const mapFromDocumentChunkRow = (
row: TableRows['documentChunks'] & {
metadata: unknown;
metadata?: unknown;
},
): DocumentChunk => ({
...row,
});
) => documentChunkWithDistanceSchema.parse(row);
export { mapFromDocumentChunkRow };

View File

@@ -1,33 +1,46 @@
import { z } from 'zod';
import { queryFilterSchema } from '@morten-olsen/stash-query-dsl';
import { querySchema } from '@morten-olsen/stash-query-dsl';
import { createListResultSchema } from '#root/utils/utils.schema.js';
import { createListResultSchema } from '../../utils/utils.schema.js';
const documentChunkSchema = z.object({
id: z.string(),
owner: z.string(),
content: z.string(),
metadata: z.unknown(),
});
const documentChunkSchema = z
.object({
id: z.guid(),
owner: z.string(),
content: z.string(),
metadata: z.unknown(),
})
.meta({ id: 'DocumentChunk' });
type DocumentChunk = z.infer<typeof documentChunkSchema>;
const documentChunkFilterSchema = z.object({
limit: z.number().default(20),
offset: z.number().default(0),
semanticText: z.string().optional(),
conditions: z.union([queryFilterSchema, z.string()]).optional(),
});
const documentChunkFilterSchema = z
.object({
limit: z.number().default(20),
offset: z.number().default(0),
semanticText: z.string().optional(),
conditions: querySchema.optional(),
})
.meta({ id: 'DocumentChunkFilter' });
type DocumentChunkFilter = z.infer<typeof documentChunkFilterSchema>;
const documentChunksFindResultSchema = createListResultSchema(
documentChunkSchema.extend({
const documentChunkWithDistanceSchema = documentChunkSchema
.extend({
distance: z.number().optional(),
}),
);
})
.meta({ id: 'DocumentChunkWithDistance' });
const documentChunksFindResultSchema = createListResultSchema(documentChunkWithDistanceSchema).meta({
id: 'DocumentChunkFindResult',
});
type DocumentChunksFindResult = z.infer<typeof documentChunksFindResultSchema>;
export type { DocumentChunk, DocumentChunkFilter, DocumentChunksFindResult };
export { documentChunkSchema, documentChunkFilterSchema, documentChunksFindResultSchema };
export {
documentChunkSchema,
documentChunkFilterSchema,
documentChunksFindResultSchema,
documentChunkWithDistanceSchema,
};

View File

@@ -1,16 +1,12 @@
import { QueryParser } from '@morten-olsen/stash-query-dsl';
import { DatabaseService, tableNames, type TableRows } from '../database/database.js';
import { EmbeddingsService } from '../embeddings/embeddings.js';
import type { Services } from '../../utils/utils.services.js';
import { EMBEDDING_MODEL } from '../../utils/utils.consts.js';
import { applyQueryFilter } from '../../utils/utils.query.js';
import type { DocumentChunkFilter, DocumentChunksFindResult } from './document-chunks.schemas.js';
import { mapFromDocumentChunkRow } from './document-chunks.mappings.js';
import type { Services } from '#root/utils/utils.services.js';
import { EMBEDDING_MODEL } from '#root/utils/utils.consts.js';
import type { ExplicitAny } from '#root/global.js';
import { applyQueryFilter } from '#root/utils/utils.query.js';
const baseFields = [
`${tableNames.documentChunks}.*`,
`${tableNames.documents}.metadata`,
@@ -45,11 +41,7 @@ class DocumentChunksService {
query = query.orderBy('createdAt', 'desc');
}
if (filter.conditions) {
const parser = this.#services.get(QueryParser);
query = applyQueryFilter(
query,
typeof filter.conditions === 'string' ? parser.parse(filter.conditions) : filter.conditions,
);
query = applyQueryFilter(query, filter.conditions);
}
query = query.limit(filter.limit).offset(filter.offset);
@@ -57,7 +49,7 @@ class DocumentChunksService {
const items = await query;
return {
items: items.map(mapFromDocumentChunkRow as ExplicitAny),
items: items.map(mapFromDocumentChunkRow),
};
};
}

View File

@@ -1,12 +1,7 @@
import type { TableRows } from '../database/database.js';
import type { Document } from './documents.schemas.js';
import { documentSchema, type Document } from './documents.schemas.js';
const mapFromDocumentRow = (row: TableRows['documents']): Document => ({
...row,
createdAt: row.createdAt.toISOString(),
updatedAt: row.updatedAt.toISOString(),
deletedAt: row.deletedAt?.toISOString() || null,
});
const mapFromDocumentRow = (row: TableRows['documents']): Document => documentSchema.parse(row);
export { mapFromDocumentRow };

View File

@@ -1,42 +1,44 @@
import { z } from 'zod';
import { queryFilterSchema } from '@morten-olsen/stash-query-dsl';
import { createListResultSchema } from '#root/utils/utils.schema.js';
import { createListResultSchema, queryDSLSchema } from '../../utils/utils.schema.js';
const documentSchema = z.object({
id: z.string(),
owner: z.string().nullable(),
createdAt: z.iso.datetime(),
updatedAt: z.iso.datetime(),
deletedAt: z.iso.datetime().nullable(),
contentType: z.string().nullable(),
content: z.string().nullable(),
source: z.string().nullable(),
sourceId: z.string().nullable(),
type: z.string(),
typeVersion: z.int().nullable(),
searchText: z.string().nullable(),
metadata: z.unknown(),
});
const documentSchema = z
.object({
id: z.guid(),
owner: z.guid().nullable(),
createdAt: z.coerce.date(),
updatedAt: z.coerce.date(),
deletedAt: z.coerce.date().nullable(),
contentType: z.string().nullable(),
text: z.string().nullable(),
source: z.string().nullable(),
sourceId: z.string().nullable(),
type: z.string().nullable(),
typeVersion: z.int().nullable(),
metadata: z.unknown().nullable(),
})
.meta({ id: 'Document' });
type Document = z.infer<typeof documentSchema>;
const documentUpsertSchema = z
.object({
id: z.string().nullish(),
id: z.guid().optional(),
owner: z.string().nullish(),
contentType: z.string().nullish(),
content: z.string().nullish(),
text: z.string().nullish(),
source: z.string().nullish(),
sourceId: z.string().nullish(),
type: z.string().optional(),
type: z.string().nullish(),
typeVersion: z.int().nullish(),
searchText: z.string().nullish(),
metadata: z.unknown().nullish(),
})
.meta({
id: 'DocumentUpsert',
example: {
content: 'the cat is yellow',
text: 'the cat is yellow',
contentType: 'text/plain',
source: 'test',
sourceId: 'test',
@@ -61,7 +63,7 @@ type DocumentUpsertResult = z.infer<typeof documentUpsertResultSchema>;
const documentFilterSchema = z.object({
offset: z.number().default(0),
limit: z.number().default(20),
condition: z.union([queryFilterSchema, z.string()]),
condition: z.union([queryDSLSchema, queryFilterSchema]),
});
type DocumentFilter = z.infer<typeof documentFilterSchema>;
@@ -70,11 +72,26 @@ const documentFindResultSchema = createListResultSchema(documentSchema);
type DocumentFindResult = z.infer<typeof documentFindResultSchema>;
export type { Document, DocumentUpsert, DocumentUpsertResult, DocumentFilter, DocumentFindResult };
const documentFilterChangedEventSchema = z.object({
action: z.enum(['add', 'remove', 'update']),
document: documentSchema,
});
type DocumentFilterChangedEvent = z.infer<typeof documentFilterChangedEventSchema>;
export type {
Document,
DocumentUpsert,
DocumentUpsertResult,
DocumentFilter,
DocumentFindResult,
DocumentFilterChangedEvent,
};
export {
documentSchema,
documentUpsertSchema,
documentUpsertResultSchema,
documentFilterSchema,
documentFindResultSchema,
documentFilterChangedEventSchema,
};

View File

@@ -1,36 +1,159 @@
import { QueryParser } from '@morten-olsen/stash-query-dsl';
import { isMatch, QueryParser, type QueryFilter } from '@morten-olsen/stash-query-dsl';
import { DatabaseService, tableNames, type TableRows } from '../database/database.js';
import { SplittingService } from '../splitter/splitter.js';
import { EventEmitter } from '../../utils/utils.event-emitter.js';
import { destroy, type Services } from '../../utils/utils.services.js';
import { compareObjectKeys } from '../../utils/utils.compare.js';
import { applyQueryFilter } from '../../utils/utils.query.js';
import { base64ToMaybeBuffer } from '../../utils/utils.binary.js';
import type {
Document,
DocumentFilter,
DocumentFindResult,
DocumentUpsert,
DocumentUpsertResult,
} from './documents.schemas.ts';
import { mapFromDocumentRow } from './documents.mapping.js';
import { EventEmitter } from '#root/utils/utils.event-emitter.js';
import type { Services } from '#root/utils/utils.services.js';
import { compareObjectKeys } from '#root/utils/utils.compare.js';
import { applyQueryFilter } from '#root/utils/utils.query.js';
import {
type Document,
type DocumentFilter,
type DocumentFilterChangedEvent,
type DocumentFindResult,
type DocumentUpsert,
type DocumentUpsertResult,
} from './documents.schemas.js';
type DocumentsServiceEvents = {
upserted: (document: Document) => void;
inserted: (document: Document) => void;
updated: (document: Document) => void;
updated: (next: Document, prev: Document) => void;
deleted: (document: Document) => void;
};
type DocumentServiceFilterSubscriber = {
filter?: QueryFilter | string;
fn: (event: DocumentFilterChangedEvent) => void;
abortSignal?: AbortSignal;
};
class DocumentsService extends EventEmitter<DocumentsServiceEvents> {
#services: Services;
#subscribeListenAbortController: AbortController;
#databaseListenAbortController?: Promise<AbortController>;
constructor(services: Services) {
super();
this.#subscribeListenAbortController = new AbortController();
this.onSubscribe(
async () => {
await this.#listen();
},
{
abortSignal: this.#subscribeListenAbortController.signal,
},
);
this.#services = services;
}
/**
 * Subscribes to the database's `changedRecord` stream and re-emits
 * document-table changes as typed `inserted`/`updated`/`deleted` events
 * (plus `upserted` for inserts and updates).
 *
 * Returns the AbortController that detaches the database listener.
 */
#setupListen = async () => {
  const abortController = new AbortController();
  const databaseService = this.#services.get(DatabaseService);
  // Ensure the database-level LISTEN subscription is active before wiring handlers.
  await databaseService.listen();
  databaseService.on(
    'changedRecord',
    (evt) => {
      // Only the documents table is relevant to this service.
      if (evt.table !== tableNames.documents) {
        return;
      }
      if (evt.operation === 'INSERT') {
        // NOTE(review): records arrive as JSON-decoded rows; the cast relies on
        // mapFromDocumentRow validating/coercing the shape — confirm it handles
        // string-typed dates from row_to_json.
        const newDocument = mapFromDocumentRow(evt.newRecord as TableRows['documents']);
        this.emit('inserted', newDocument);
        this.emit('upserted', newDocument);
      }
      if (evt.operation === 'UPDATE') {
        const newDocument = mapFromDocumentRow(evt.newRecord as TableRows['documents']);
        const oldDocument = mapFromDocumentRow(evt.oldRecord as TableRows['documents']);
        this.emit('updated', newDocument, oldDocument);
        this.emit('upserted', newDocument);
      }
      if (evt.operation === 'DELETE') {
        const oldDocument = mapFromDocumentRow(evt.oldRecord as TableRows['documents']);
        this.emit('deleted', oldDocument);
      }
    },
    { abortSignal: abortController.signal },
  );
  return abortController;
};
// Idempotently starts the database listener; the stored promise guarantees
// #setupListen runs at most once even under concurrent calls.
#listen = async () => {
  if (!this.#databaseListenAbortController) {
    this.#databaseListenAbortController = this.#setupListen();
  }
  return this.#databaseListenAbortController;
};
/**
 * Subscribes to document changes, optionally filtered by a QueryFilter or a
 * query-DSL string. The callback receives `add`/`update`/`remove` actions
 * describing how a document's membership in the filtered set changed.
 *
 * Cancellation: pass `options.abortSignal`, or call the returned function.
 */
public subscribe = async (options: DocumentServiceFilterSubscriber) => {
  const abortController = new AbortController();
  const queryParser = this.#services.get(QueryParser);
  // Accept either a pre-built filter object or a DSL string to parse.
  const filter = typeof options.filter === 'string' ? queryParser.parse(options.filter) : options.filter;
  this.on(
    'inserted',
    (next) => {
      // No filter means every document matches.
      const nextIncluded = !filter || isMatch(next, filter);
      if (!nextIncluded) {
        return;
      }
      options.fn({
        action: 'add',
        document: next,
      });
    },
    { abortSignal: abortController.signal },
  );
  this.on(
    'updated',
    (next, prev) => {
      const nextIncluded = !filter || isMatch(next, filter);
      const prevIncluded = !filter || isMatch(prev, filter);
      if (nextIncluded && prevIncluded) {
        // Still in the set: plain update.
        options.fn({
          action: 'update',
          document: next,
        });
      } else if (nextIncluded && !prevIncluded) {
        // Entered the filtered set.
        options.fn({
          action: 'add',
          document: next,
        });
      } else if (!nextIncluded && prevIncluded) {
        // Left the filtered set.
        // NOTE(review): `remove` is emitted with the *new* document state, not
        // the previously-matching one — confirm subscribers expect this.
        options.fn({
          action: 'remove',
          document: next,
        });
      }
    },
    { abortSignal: abortController.signal },
  );
  this.on(
    'deleted',
    (prev) => {
      const prevIncluded = !filter || isMatch(prev, filter);
      if (!prevIncluded) {
        return;
      }
      options.fn({
        action: 'remove',
        document: prev,
      });
    },
    { abortSignal: abortController.signal },
  );
  // Chain the caller's signal into this subscription's controller.
  options.abortSignal?.addEventListener('abort', () => abortController.abort());
  // Make sure the underlying database change feed is running.
  await this.#listen();
  return () => abortController.abort();
};
public find = async (filter: DocumentFilter): Promise<DocumentFindResult> => {
const databaseService = this.#services.get(DatabaseService);
const db = await databaseService.getInstance();
@@ -68,7 +191,7 @@ class DocumentsService extends EventEmitter<DocumentsServiceEvents> {
const result = await db.transaction(async (trx) => {
let id = document.id || crypto.randomUUID();
if (document.source && document.sourceId) {
if (!document.id && document.source && document.sourceId) {
const [currentSourceDocument] = await trx<TableRows['documents']>(tableNames.documents)
.where('source', document.source)
.andWhere('sourceId', document.sourceId)
@@ -80,16 +203,18 @@ class DocumentsService extends EventEmitter<DocumentsServiceEvents> {
const now = new Date();
const [current] = await trx<TableRows['documents']>(tableNames.documents).where('id', id).limit(1);
if (current) {
id = current.id;
document.id = id;
if (
compareObjectKeys(current, document, [
'sourceId',
'source',
'content',
'contentType',
'searchText',
'type',
'typeVersion',
'metadata',
'text',
])
) {
return {
@@ -101,6 +226,7 @@ class DocumentsService extends EventEmitter<DocumentsServiceEvents> {
await trx<TableRows['documents']>(tableNames.documents)
.update({
...document,
content: base64ToMaybeBuffer(document.content),
id,
updatedAt: now,
})
@@ -108,10 +234,9 @@ class DocumentsService extends EventEmitter<DocumentsServiceEvents> {
const resultDocument: Document = mapFromDocumentRow({
...current,
...document,
content: base64ToMaybeBuffer(document.content ?? current.content) || null,
id,
});
this.emit('updated', resultDocument);
this.emit('upserted', resultDocument);
return {
id,
action: 'updated',
@@ -119,31 +244,28 @@ class DocumentsService extends EventEmitter<DocumentsServiceEvents> {
} as const;
} else {
await trx<TableRows['documents']>(tableNames.documents).insert({
metadata: {},
type: 'raw',
...document,
content: base64ToMaybeBuffer(document.content),
id,
createdAt: now,
updatedAt: now,
});
const resultDocument: Document = mapFromDocumentRow({
type: 'raw',
type: null,
text: null,
owner: null,
contentType: null,
content: null,
source: null,
sourceId: null,
typeVersion: null,
searchText: null,
metadata: {},
metadata: null,
...document,
content: base64ToMaybeBuffer(document.content) || null,
deletedAt: null,
id,
createdAt: now,
updatedAt: now,
});
this.emit('inserted', resultDocument);
this.emit('upserted', resultDocument);
return {
id,
action: 'inserted',
@@ -173,6 +295,13 @@ class DocumentsService extends EventEmitter<DocumentsServiceEvents> {
return result;
};
[destroy] = async () => {
this.#subscribeListenAbortController.abort();
if (this.#databaseListenAbortController) {
(await this.#databaseListenAbortController).abort();
}
};
}
export * from './documents.schemas.js';

View File

@@ -1,8 +1,8 @@
import { pipeline, type FeatureExtractionPipeline } from '@huggingface/transformers';
import { Vector } from './embeddings.vector.js';
import type { ExplicitAny } from '../../global.js';
import type { ExplicitAny } from '#root/global.js';
import { Vector } from './embeddings.vector.js';
type ExtractOptions = {
input: string[];

View File

@@ -1,12 +1,11 @@
import { EmbeddingsService } from '../embeddings/embeddings.js';
import type { Document } from '../documents/documents.schemas.js';
import type { Services } from '../../utils/utils.services.js';
import { EMBEDDING_MODEL } from '../../utils/utils.consts.js';
import type { Chunk, Splitter } from './splitter.types.js';
import { textSplitter } from './splitters/splitters.text.js';
import type { Services } from '#root/utils/utils.services.js';
import { EMBEDDING_MODEL } from '#root/utils/utils.consts.js';
class SplittingService {
#services: Services;
#chunkers: Set<Splitter>;

View File

@@ -3,13 +3,13 @@ import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
import type { Splitter } from '../splitter.types.js';
const textSplitter: Splitter = {
match: (document) => !!document.content,
match: (document) => !!document.text,
chunk: async (document) => {
if (!document.content) {
if (!document.text) {
return [];
}
const splitter = new RecursiveCharacterTextSplitter({ chunkSize: 100, chunkOverlap: 0 });
const texts = await splitter.splitText(document.content);
const texts = await splitter.splitText(document.text);
return texts;
},
};

View File

@@ -1,6 +1,5 @@
import { DatabaseService } from '../database/database.js';
import { Services } from '#root/utils/utils.services.js';
import { Services } from '../../utils/utils.services.js';
class WarmupService {
#services: Services;

View File

@@ -0,0 +1,14 @@
/**
 * Normalizes an optional base64-encoded value into a Buffer.
 *
 * - `null` passes through as `null` (an explicit "clear this value").
 * - `undefined` and the empty string yield `undefined` (nothing to store).
 * - An existing Buffer (any object) passes through untouched.
 * - Any other string is decoded from base64 into a Buffer.
 */
const base64ToMaybeBuffer = (input?: string | null | Buffer) => {
  // Keep the caller's explicit null distinct from "absent".
  if (input === null) {
    return null;
  }
  // Covers undefined and '' — both mean "no content".
  if (!input) {
    return undefined;
  }
  // Strings get decoded; anything else (a Buffer) is already in final form.
  return typeof input === 'string' ? Buffer.from(input, 'base64') : input;
};
export { base64ToMaybeBuffer };

View File

@@ -1,9 +1,16 @@
import deepEqual from 'deep-equal';
const compareObjectKeys = <T extends Record<string, unknown>>(a: T, b: T, keys: (keyof T)[]) => {
const compareObjectKeys = <A extends Record<string, unknown>, B extends Record<string, unknown>>(
a: A,
b: B,
keys: (keyof (A & B))[],
) => {
for (const key of keys) {
const avalue = a[key];
const bvalue = b[key];
const avalue = a[key as keyof A];
const bvalue = b[key as keyof B];
if (bvalue === undefined) {
continue;
}
if (!deepEqual(avalue, bvalue)) {
return false;
}

View File

@@ -1,6 +1,7 @@
import type { ExplicitAny } from '#root/global.js';
import type { ExplicitAny } from '../global.js';
type EventListener<T extends unknown[]> = (...args: T) => void | Promise<void>;
type SubscribeListener<T> = (type: T) => void | Promise<void>;
type OnOptions = {
abortSignal?: AbortSignal;
@@ -8,8 +9,25 @@ type OnOptions = {
class EventEmitter<T extends Record<string, (...args: ExplicitAny[]) => void | Promise<void>>> {
#listeners = new Map<keyof T, Set<EventListener<ExplicitAny>>>();
#subscribeListeners = new Set<SubscribeListener<keyof T>>();
onSubscribe = (callback: SubscribeListener<keyof T>, options: OnOptions = {}) => {
const { abortSignal } = options;
const callbackClone = (type: keyof T) => callback(type);
this.#subscribeListeners.add(callbackClone);
const abortController = new AbortController();
abortSignal?.addEventListener('abort', abortController.abort);
abortController.signal.addEventListener('abort', () => {
this.#subscribeListeners.difference(new Set([callbackClone]));
});
return abortController.abort;
};
on = <K extends keyof T>(event: K, callback: EventListener<Parameters<T[K]>>, options: OnOptions = {}) => {
for (const subscribeListener of this.#subscribeListeners) {
subscribeListener(event);
}
const { abortSignal } = options;
if (!this.#listeners.has(event)) {
this.#listeners.set(event, new Set());

View File

@@ -1,8 +1,26 @@
import { QueryParser } from '@morten-olsen/stash-query-dsl';
import { z, type ZodType } from 'zod';
const parser = new QueryParser();
const createListResultSchema = <T extends ZodType>(schema: T) =>
z.object({
items: z.array(schema),
});
export { createListResultSchema };
const queryDSLSchema = z
.string()
.describe('Query DSL based filter')
.superRefine((value, context) => {
try {
parser.parse(value);
} catch (err) {
context.addIssue(String(err));
}
})
.meta({
id: 'QueryDQLString',
examples: ["metadata.foo = 'bar'"],
});
export { createListResultSchema, queryDSLSchema };

View File

@@ -0,0 +1,32 @@
# Multi-stage build for @morten-olsen/stash-server.

# Shared base: Node 23 with corepack (provides pnpm) and the model cache dir.
FROM node:23-slim AS base
ENV \
MODEL_DIR=/models
RUN corepack enable
WORKDIR /app

# builder: prune the monorepo down to what the server package needs.
FROM base AS builder
RUN npm i -g turbo
COPY . .
RUN turbo prune @morten-olsen/stash-server --docker

# installer: install deps from the pruned lockfile, build, and pre-download
# the embedding model into /models so the runtime image starts offline.
FROM base AS installer
COPY --from=builder /app/out/json/ .
RUN pnpm install --frozen-lockfile
COPY --from=builder /app/out/full/ .
RUN \
pnpm build \
&& node /app/packages/server/dist/preload-data.js

# runner: minimal runtime image running as a non-root user.
FROM base AS runner
ENV \
SERVER_HOST=0.0.0.0
RUN \
addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 nodejs \
&& mkdir /data \
&& chown nodejs:nodejs /data
# NOTE(review): /models and /app are copied with root ownership; confirm the
# nodejs user only needs read access to them at runtime.
COPY --from=installer /models /models
COPY --from=installer /app /app
USER nodejs
CMD ["node", "/app/packages/server/dist/start.js"]

View File

@@ -27,9 +27,6 @@
},
"name": "@morten-olsen/stash-server",
"version": "1.0.0",
"imports": {
"#root/*": "./src/*"
},
"dependencies": {
"@fastify/cors": "11.1.0",
"@fastify/swagger": "9.6.1",

View File

@@ -5,10 +5,12 @@ import {
hasZodFastifySchemaValidationErrors,
isResponseSerializationError,
jsonSchemaTransform,
jsonSchemaTransformObject,
serializerCompiler,
validatorCompiler,
type ZodTypeProvider,
} from 'fastify-type-provider-zod';
import scalar from '@scalar/fastify-api-reference';
import { StashRuntime } from '@morten-olsen/stash-runtime';
import { systemEndpoints } from './endpoints/system/system.js';
@@ -26,6 +28,12 @@ class BaseError extends Error {
}
const createApi = async (runtime: StashRuntime = new StashRuntime()) => {
runtime.documents.subscribe({
filter: "metadata.foo = 'bar'",
fn: (document) => {
// console.log(document);
},
});
const app = fastify().withTypeProvider<ZodTypeProvider>();
app.setValidatorCompiler(validatorCompiler);
app.setSerializerCompiler(serializerCompiler);
@@ -41,10 +49,20 @@ const createApi = async (runtime: StashRuntime = new StashRuntime()) => {
},
},
transform: jsonSchemaTransform,
transformObject: jsonSchemaTransformObject,
});
await app.register(import('@scalar/fastify-api-reference'), {
await app.register(scalar, {
routePrefix: '/docs',
configuration: {
pageTitle: 'Foo',
title: 'Hello World!',
telemetry: false,
hideClientButton: true,
theme: 'laserwave',
persistAuth: true,
orderRequiredPropertiesFirst: false,
},
});
app.setErrorHandler((err, req, reply) => {

View File

@@ -11,21 +11,21 @@ const documents: DocumentUpsert[] = [
foo: 'bar',
},
type: 'demo',
content: 'the cat is yellow',
text: 'the cat is yellow',
},
{
metadata: {
foo: 'bar',
},
type: 'demo',
content: 'the dog is blue',
text: 'the dog is blue',
},
{
metadata: {
foo: 'baz',
},
source: 'test',
content: 'the pig says hi',
text: 'the pig says hi',
type: 'demo',
},
];

View File

@@ -0,0 +1,11 @@
import { mkdir } from 'fs/promises';
import { resolve } from 'path';
import { preloadModel, setModelLocation } from '@morten-olsen/stash-runtime';
// Build-time helper: when MODEL_DIR is set, point the transformers cache at it
// and download the embedding model so the runtime image can start offline.
if (process.env.MODEL_DIR) {
const modelDir = resolve(process.env.MODEL_DIR);
await mkdir(modelDir, { recursive: true });
setModelLocation(modelDir);
await preloadModel();
}

View File

@@ -1,7 +1,19 @@
import { resolve } from 'path';
import { mkdir } from 'fs/promises';
import { setModelLocation } from '@morten-olsen/stash-runtime';
import { createApi } from './api.js';
const server = await createApi();
if (process.env.MODEL_DIR) {
const modelDir = resolve(process.env.MODEL_DIR);
await mkdir(modelDir, { recursive: true });
setModelLocation(modelDir);
}
const server = await createApi();
await server.listen({
port: 3400,
host: process.env.SERVER_HOST,
});
console.log('Server started');

203
pnpm-lock.yaml generated
View File

@@ -48,6 +48,34 @@ importers:
specifier: 4.0.15
version: 4.0.15(@types/node@24.10.2)(tsx@4.21.0)(yaml@2.8.2)
packages/client:
dependencies:
openapi-fetch:
specifier: ^0.15.0
version: 0.15.0
devDependencies:
'@morten-olsen/stash-configs':
specifier: workspace:*
version: link:../configs
'@morten-olsen/stash-tests':
specifier: workspace:*
version: link:../tests
'@types/node':
specifier: 24.10.2
version: 24.10.2
'@vitest/coverage-v8':
specifier: 4.0.15
version: 4.0.15(vitest@4.0.15(@types/node@24.10.2)(tsx@4.21.0)(yaml@2.8.2))
openapi-typescript:
specifier: ^7.10.1
version: 7.10.1(typescript@5.9.3)
typescript:
specifier: 5.9.3
version: 5.9.3
vitest:
specifier: 4.0.15
version: 4.0.15(@types/node@24.10.2)(tsx@4.21.0)(yaml@2.8.2)
packages/configs: {}
packages/query-dsl:
@@ -1022,6 +1050,16 @@ packages:
'@protobufjs/utf8@1.1.0':
resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}
'@redocly/ajv@8.17.1':
resolution: {integrity: sha512-EDtsGZS964mf9zAUXAl9Ew16eYbeyAFWhsPr0fX6oaJxgd8rApYlPBf0joyhnUHz88WxrigyFtTaqqzXNzPgqw==}
'@redocly/config@0.22.2':
resolution: {integrity: sha512-roRDai8/zr2S9YfmzUfNhKjOF0NdcOIqF7bhf4MVC5UxpjIysDjyudvlAiVbpPHp3eDRWbdzUgtkK1a7YiDNyQ==}
'@redocly/openapi-core@1.34.6':
resolution: {integrity: sha512-2+O+riuIUgVSuLl3Lyh5AplWZyVMNuG2F98/o6NrutKJfW4/GTZdPpZlIphS0HGgcOHgmWcCSHj+dWFlZaGSHw==}
engines: {node: '>=18.17.0', npm: '>=9.5.0'}
'@rollup/rollup-android-arm-eabi@4.53.3':
resolution: {integrity: sha512-mRSi+4cBjrRLoaal2PnqH82Wqyb+d3HsPUN/W+WslCXsZsyHa9ZeQQX/pQsZaVIWDkPcpV6jJ+3KLbTbgnwv8w==}
cpu: [arm]
@@ -1312,6 +1350,10 @@ packages:
engines: {node: '>=0.4.0'}
hasBin: true
agent-base@7.1.4:
resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==}
engines: {node: '>= 14'}
ajv-draft-04@1.0.0:
resolution: {integrity: sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==}
peerDependencies:
@@ -1337,6 +1379,10 @@ packages:
ansi-align@3.0.1:
resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==}
ansi-colors@4.1.3:
resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==}
engines: {node: '>=6'}
ansi-diff@1.2.0:
resolution: {integrity: sha512-BIXwHKpjzghBjcwEV10Y4b17tjHfK4nhEqK3LqyQ3JgcMcjmi3DIevozNgrOpfvBMmrq9dfvrPJSu5/5vNUBQg==}
@@ -1498,6 +1544,9 @@ packages:
resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
engines: {node: '>=10'}
change-case@5.4.4:
resolution: {integrity: sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==}
char-regex@1.0.2:
resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==}
engines: {node: '>=10'}
@@ -1531,6 +1580,9 @@ packages:
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
colorette@1.4.0:
resolution: {integrity: sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==}
colorette@2.0.19:
resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==}
@@ -2047,6 +2099,10 @@ packages:
html-escaper@2.0.2:
resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==}
https-proxy-agent@7.0.6:
resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==}
engines: {node: '>= 14'}
human-signals@2.1.0:
resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==}
engines: {node: '>=10.17.0'}
@@ -2070,6 +2126,10 @@ packages:
resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
engines: {node: '>=0.8.19'}
index-to-position@1.2.0:
resolution: {integrity: sha512-Yg7+ztRkqslMAS2iFaU+Oa4KTSidr63OsFGlOrJoW981kIYO3CGCS3wA95P1mUi/IVSJkn0D479KTJpVpvFNuw==}
engines: {node: '>=18'}
individual@3.0.0:
resolution: {integrity: sha512-rUY5vtT748NMRbEMrTNiFfy29BgGZwGXUi2NFUVMWQrogSLzlJvQV9eeMWi+g1aVaQ53tpyLAQtd5x/JH0Nh1g==}
@@ -2244,6 +2304,10 @@ packages:
resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==}
engines: {node: '>=8'}
js-levenshtein@1.1.6:
resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==}
engines: {node: '>=0.10.0'}
js-tiktoken@1.0.21:
resolution: {integrity: sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==}
@@ -2439,6 +2503,10 @@ packages:
minimatch@3.1.2:
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
minimatch@5.1.6:
resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==}
engines: {node: '>=10'}
minimatch@9.0.5:
resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==}
engines: {node: '>=16 || 14 >=14.17'}
@@ -2558,9 +2626,21 @@ packages:
onnxruntime-web@1.22.0-dev.20250409-89f8206ba4:
resolution: {integrity: sha512-0uS76OPgH0hWCPrFKlL8kYVV7ckM7t/36HfbgoFw6Nd0CZVVbQC4PkrR8mBX8LtNUFZO25IQBqV2Hx2ho3FlbQ==}
openapi-fetch@0.15.0:
resolution: {integrity: sha512-OjQUdi61WO4HYhr9+byCPMj0+bgste/LtSBEcV6FzDdONTs7x0fWn8/ndoYwzqCsKWIxEZwo4FN/TG1c1rI8IQ==}
openapi-types@12.1.3:
resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==}
openapi-typescript-helpers@0.0.15:
resolution: {integrity: sha512-opyTPaunsklCBpTK8JGef6mfPhLSnyy5a0IN9vKtx3+4aExf+KxEqYwIy3hqkedXIB97u357uLMJsOnm3GVjsw==}
openapi-typescript@7.10.1:
resolution: {integrity: sha512-rBcU8bjKGGZQT4K2ekSTY2Q5veOQbVG/lTKZ49DeCyT9z62hM2Vj/LLHjDHC9W7LJG8YMHcdXpRZDqC1ojB/lw==}
hasBin: true
peerDependencies:
typescript: ^5.x
optionator@0.9.4:
resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
engines: {node: '>= 0.8.0'}
@@ -2609,6 +2689,10 @@ packages:
resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==}
engines: {node: '>=8'}
parse-json@8.3.0:
resolution: {integrity: sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ==}
engines: {node: '>=18'}
parse-ms@2.1.0:
resolution: {integrity: sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==}
engines: {node: '>=6'}
@@ -2703,6 +2787,10 @@ packages:
platform@1.3.6:
resolution: {integrity: sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==}
pluralize@8.0.0:
resolution: {integrity: sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==}
engines: {node: '>=4'}
possible-typed-array-names@1.1.0:
resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==}
engines: {node: '>= 0.4'}
@@ -3071,6 +3159,10 @@ packages:
resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
engines: {node: '>=8'}
supports-color@10.2.2:
resolution: {integrity: sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==}
engines: {node: '>=18'}
supports-color@7.2.0:
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
engines: {node: '>=8'}
@@ -3205,6 +3297,10 @@ packages:
resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==}
engines: {node: '>=8'}
type-fest@4.41.0:
resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==}
engines: {node: '>=16'}
type-fest@5.0.0:
resolution: {integrity: sha512-GeJop7+u7BYlQ6yQCAY1nBQiRSHR+6OdCEtd8Bwp9a3NK3+fWAVjOaPKJDteB9f6cIJ0wt4IfnScjLG450EpXA==}
engines: {node: '>=20'}
@@ -3409,6 +3505,9 @@ packages:
resolution: {integrity: sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==}
engines: {node: '>=18'}
yaml-ast-parser@0.0.43:
resolution: {integrity: sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A==}
yaml@2.8.0:
resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==}
engines: {node: '>= 14.6'}
@@ -3419,6 +3518,10 @@ packages:
engines: {node: '>= 14.6'}
hasBin: true
yargs-parser@21.1.1:
resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==}
engines: {node: '>=12'}
yocto-queue@0.1.0:
resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==}
engines: {node: '>=10'}
@@ -3649,7 +3752,7 @@ snapshots:
'@eslint/config-array@0.21.1':
dependencies:
'@eslint/object-schema': 2.1.7
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
minimatch: 3.1.2
transitivePeerDependencies:
- supports-color
@@ -3665,7 +3768,7 @@ snapshots:
'@eslint/eslintrc@3.3.3':
dependencies:
ajv: 6.12.6
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
espree: 10.4.0
globals: 14.0.0
ignore: 5.3.2
@@ -4159,6 +4262,29 @@ snapshots:
'@protobufjs/utf8@1.1.0': {}
'@redocly/ajv@8.17.1':
dependencies:
fast-deep-equal: 3.1.3
fast-uri: 3.1.0
json-schema-traverse: 1.0.0
require-from-string: 2.0.2
'@redocly/config@0.22.2': {}
'@redocly/openapi-core@1.34.6(supports-color@10.2.2)':
dependencies:
'@redocly/ajv': 8.17.1
'@redocly/config': 0.22.2
colorette: 1.4.0
https-proxy-agent: 7.0.6(supports-color@10.2.2)
js-levenshtein: 1.1.6
js-yaml: 4.1.1
minimatch: 5.1.6
pluralize: 8.0.0
yaml-ast-parser: 0.0.43
transitivePeerDependencies:
- supports-color
'@rollup/rollup-android-arm-eabi@4.53.3':
optional: true
@@ -4321,7 +4447,7 @@ snapshots:
'@typescript-eslint/types': 8.49.0
'@typescript-eslint/typescript-estree': 8.49.0(typescript@5.9.3)
'@typescript-eslint/visitor-keys': 8.49.0
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
eslint: 9.39.1
typescript: 5.9.3
transitivePeerDependencies:
@@ -4331,7 +4457,7 @@ snapshots:
dependencies:
'@typescript-eslint/tsconfig-utils': 8.49.0(typescript@5.9.3)
'@typescript-eslint/types': 8.49.0
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
@@ -4350,7 +4476,7 @@ snapshots:
'@typescript-eslint/types': 8.49.0
'@typescript-eslint/typescript-estree': 8.49.0(typescript@5.9.3)
'@typescript-eslint/utils': 8.49.0(eslint@9.39.1)(typescript@5.9.3)
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
eslint: 9.39.1
ts-api-utils: 2.1.0(typescript@5.9.3)
typescript: 5.9.3
@@ -4365,7 +4491,7 @@ snapshots:
'@typescript-eslint/tsconfig-utils': 8.49.0(typescript@5.9.3)
'@typescript-eslint/types': 8.49.0
'@typescript-eslint/visitor-keys': 8.49.0
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
minimatch: 9.0.5
semver: 7.7.3
tinyglobby: 0.2.15
@@ -4458,6 +4584,8 @@ snapshots:
acorn@8.15.0: {}
agent-base@7.1.4: {}
ajv-draft-04@1.0.0(ajv@8.17.1):
optionalDependencies:
ajv: 8.17.1
@@ -4484,6 +4612,8 @@ snapshots:
dependencies:
string-width: 4.2.3
ansi-colors@4.1.3: {}
ansi-diff@1.2.0:
dependencies:
ansi-split: 1.0.1
@@ -4681,6 +4811,8 @@ snapshots:
ansi-styles: 4.3.0
supports-color: 7.2.0
change-case@5.4.4: {}
char-regex@1.0.2: {}
chevrotain@11.0.3:
@@ -4711,6 +4843,8 @@ snapshots:
color-name@1.1.4: {}
colorette@1.4.0: {}
colorette@2.0.19: {}
commander@10.0.1: {}
@@ -4764,9 +4898,11 @@ snapshots:
dependencies:
ms: 2.1.2
debug@4.4.3:
debug@4.4.3(supports-color@10.2.2):
dependencies:
ms: 2.1.3
optionalDependencies:
supports-color: 10.2.2
decamelize@1.2.0: {}
@@ -5094,7 +5230,7 @@ snapshots:
ajv: 6.12.6
chalk: 4.1.2
cross-spawn: 7.0.6
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
escape-string-regexp: 4.0.0
eslint-scope: 8.4.0
eslint-visitor-keys: 4.2.1
@@ -5384,6 +5520,13 @@ snapshots:
html-escaper@2.0.2: {}
https-proxy-agent@7.0.6(supports-color@10.2.2):
dependencies:
agent-base: 7.1.4
debug: 4.4.3(supports-color@10.2.2)
transitivePeerDependencies:
- supports-color
human-signals@2.1.0: {}
ieee754@1.2.1: {}
@@ -5399,6 +5542,8 @@ snapshots:
imurmurhash@0.1.4: {}
index-to-position@1.2.0: {}
individual@3.0.0: {}
inherits@2.0.4: {}
@@ -5559,7 +5704,7 @@ snapshots:
istanbul-lib-source-maps@5.0.6:
dependencies:
'@jridgewell/trace-mapping': 0.3.31
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
istanbul-lib-coverage: 3.2.2
transitivePeerDependencies:
- supports-color
@@ -5569,6 +5714,8 @@ snapshots:
html-escaper: 2.0.2
istanbul-lib-report: 3.0.1
js-levenshtein@1.1.6: {}
js-tiktoken@1.0.21:
dependencies:
base64-js: 1.5.1
@@ -5591,7 +5738,7 @@ snapshots:
json-schema-resolver@3.0.0:
dependencies:
debug: 4.4.3
debug: 4.4.3(supports-color@10.2.2)
fast-uri: 3.1.0
rfdc: 1.4.1
transitivePeerDependencies:
@@ -5737,6 +5884,10 @@ snapshots:
dependencies:
brace-expansion: 1.1.12
minimatch@5.1.6:
dependencies:
brace-expansion: 2.0.2
minimatch@9.0.5:
dependencies:
brace-expansion: 2.0.2
@@ -5854,8 +6005,24 @@ snapshots:
platform: 1.3.6
protobufjs: 7.5.4
openapi-fetch@0.15.0:
dependencies:
openapi-typescript-helpers: 0.0.15
openapi-types@12.1.3: {}
openapi-typescript-helpers@0.0.15: {}
openapi-typescript@7.10.1(typescript@5.9.3):
dependencies:
'@redocly/openapi-core': 1.34.6(supports-color@10.2.2)
ansi-colors: 4.1.3
change-case: 5.4.4
parse-json: 8.3.0
supports-color: 10.2.2
typescript: 5.9.3
yargs-parser: 21.1.1
optionator@0.9.4:
dependencies:
deep-is: 0.1.4
@@ -5909,6 +6076,12 @@ snapshots:
json-parse-even-better-errors: 2.3.1
lines-and-columns: 1.2.4
parse-json@8.3.0:
dependencies:
'@babel/code-frame': 7.27.1
index-to-position: 1.2.0
type-fest: 4.41.0
parse-ms@2.1.0: {}
path-absolute@1.0.1: {}
@@ -5994,6 +6167,8 @@ snapshots:
platform@1.3.6: {}
pluralize@8.0.0: {}
possible-typed-array-names@1.1.0: {}
postcss@8.5.6:
@@ -6435,6 +6610,8 @@ snapshots:
strip-json-comments@3.1.1: {}
supports-color@10.2.2: {}
supports-color@7.2.0:
dependencies:
has-flag: 4.0.0
@@ -6560,6 +6737,8 @@ snapshots:
type-fest@0.6.0: {}
type-fest@4.41.0: {}
type-fest@5.0.0:
dependencies:
tagged-tag: 1.0.0
@@ -6770,10 +6949,14 @@ snapshots:
yallist@5.0.0: {}
yaml-ast-parser@0.0.43: {}
yaml@2.8.0: {}
yaml@2.8.2: {}
yargs-parser@21.1.1: {}
yocto-queue@0.1.0: {}
zod-to-json-schema@3.25.0(zod@4.1.13):

View File

@@ -9,6 +9,9 @@
},
{
"path": "./packages/server/tsconfig.json"
},
{
"path": "./packages/client/tsconfig.json"
}
]
}