init
30
.github/workflows/publish.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
# Builds the site in a LaTeX-capable container and publishes ./out to the
# gh-pages branch on every push to main (or on manual dispatch).
name: Build and Deploy
on:
  workflow_dispatch:
  push:
    branches:
      - main
jobs:
  build-and-deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v2.3.1
        with:
          # The deploy action pushes with its own token; don't persist this one.
          persist-credentials: false
      - name: Install
        uses: docker://ghcr.io/morten-olsen/node-latex:main
        with:
          args: pnpm install
      - name: Build 🔧
        uses: docker://ghcr.io/morten-olsen/node-latex:main
        env:
          NODE_ENV: production
        with:
          args: pnpm build
      - name: Deploy 🚀
        uses: JamesIves/github-pages-deploy-action@4.0.0
        with:
          # v4 of this action renamed its inputs to lowercase `token`/`branch`/
          # `folder`; the previous uppercase GITHUB_TOKEN/BRANCH/FOLDER inputs
          # are the v3 API and are ignored by v4.
          token: ${{ secrets.GITHUB_TOKEN }}
          branch: gh-pages
          folder: out
|
||||
3
.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/node_modules/
|
||||
/out
|
||||
/dist
|
||||
116
bin/build/index.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { resolve } from "path";
|
||||
import { createReact } from "../resources/react";
|
||||
import { Observable, getCollectionItems } from "../observable";
|
||||
import { createPage } from "../resources/page";
|
||||
import { createArticles } from "../data/articles";
|
||||
import { Bundler } from "../bundler";
|
||||
import { forEach } from "../utils/observable";
|
||||
import { createEjs } from "../resources/ejs";
|
||||
import { createLatex } from "../resources/latex";
|
||||
import { markdownToLatex } from "../utils/markdown";
|
||||
import { createPositions } from "../data/positions";
|
||||
import { createProfile } from "../data/profile";
|
||||
import { Position } from "../../types";
|
||||
|
||||
// Wires every content source (articles, positions, profile) through the
// template pipelines (LaTeX PDFs + React pages) and registers all resulting
// assets on a single Bundler, which is returned to the caller.
const build = async () => {
  const bundler = new Bundler();
  const articles = createArticles({
    bundler,
  });
  const positions = createPositions({
    bundler,
  });
  const profile = createProfile({
    bundler,
  });

  // EJS-compiled LaTeX templates.
  const latex = {
    article: createEjs(resolve("content/templates/latex/article.tex")),
    resume: createEjs(resolve("content/templates/latex/resume.tex")),
  };

  // Rollup-bundled React templates.
  const react = {
    article: createReact(resolve("content/templates/react/article.tsx")),
    frontpage: createReact(resolve("content/templates/react/frontpage.tsx")),
  };

  // Data for the resume PDF: positions have their markdown bodies converted
  // to LaTeX before templating.
  const resumeProps = Observable.combine({
    articles: articles.pipe(getCollectionItems),
    positions: positions.pipe(async (positions) => {
      const result: Position[] = [];
      // Sequential on purpose? NOTE(review): could be Promise.all — confirm
      // markdownToLatex has no ordering side effects before parallelizing.
      for (const a of positions) {
        const item = await a.data;
        const content = markdownToLatex({
          root: resolve("content"),
          content: item.raw,
        });
        result.push({
          ...item,
          content,
        });
      }
      return result;
    }),
    profile,
  });

  // Registers /resume.pdf and returns its bundler URL.
  const resumeUrl = createLatex({
    bundler,
    path: "/resume",
    data: resumeProps,
    template: latex.resume,
  });

  // Front page at "/".
  {
    const props = Observable.combine({
      articles: articles.pipe(getCollectionItems),
      positions: positions.pipe(getCollectionItems),
      profile,
      resumeUrl: new Observable(async () => resumeUrl),
    });
    createPage({
      path: "/",
      props,
      template: react.frontpage,
      bundler,
    });
  }

  // One PDF + one HTML page per article.
  await forEach(articles, async (article) => {
    const { slug } = await article.data;
    const pdfUrl = createLatex({
      bundler,
      // resolve() is used here for URL-path joining, not filesystem paths.
      path: resolve("/articles", slug),
      template: latex.article,
      data: Observable.combine({
        article: article.pipe(async ({ title, cover, root, raw }) => {
          const body = markdownToLatex({
            root,
            content: raw,
          });
          return {
            title,
            body,
            // LaTeX needs the absolute on-disk cover path, not the asset URL.
            cover: resolve(root, cover),
          };
        }),
      }),
    });
    const props = Observable.combine({
      article,
      profile,
      pdfUrl: new Observable(async () => pdfUrl),
      resumeUrl: new Observable(async () => resumeUrl),
    });
    createPage({
      path: `/articles/${slug}`,
      props,
      template: react.article,
      bundler,
    });
  });

  return bundler;
};

export { build };
|
||||
34
bin/bundler/index.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { resolve } from "path";
|
||||
import { Observable } from "../observable";
|
||||
|
||||
type Asset = {
|
||||
content: string | Buffer;
|
||||
};
|
||||
|
||||
class Bundler {
|
||||
#assets: Map<string, Observable<Asset>>;
|
||||
|
||||
constructor() {
|
||||
this.#assets = new Map();
|
||||
}
|
||||
|
||||
public get paths() {
|
||||
return [...this.#assets.keys()];
|
||||
}
|
||||
|
||||
public register = (path: string, asset: Observable<Asset>) => {
|
||||
const realPath = resolve("/", path);
|
||||
if (!this.#assets.has(realPath)) {
|
||||
this.#assets.set(realPath, asset);
|
||||
}
|
||||
return realPath;
|
||||
};
|
||||
|
||||
public get = (path: string) => {
|
||||
const realPath = resolve("/", path);
|
||||
return this.#assets.get(realPath);
|
||||
};
|
||||
}
|
||||
|
||||
export type { Asset };
|
||||
export { Bundler };
|
||||
59
bin/data/articles/index.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { createGlob } from "../../resources/glob";
|
||||
import { createFile } from "../../resources/file";
|
||||
import grayMatter from "gray-matter";
|
||||
import { Article } from "../../../types/article";
|
||||
import { Bundler } from "../../bundler";
|
||||
import { markdownBundleImages } from "../../utils/markdown";
|
||||
import { dirname, resolve } from "path";
|
||||
import { createImage } from "../../resources/image";
|
||||
|
||||
type ArticleOptions = {
  bundler: Bundler;
};

// Watches content/articles/**/*.md and exposes each article as an observable
// that re-parses whenever the underlying markdown file changes. Cover images
// are registered with the bundler in full size and as a 400px thumbnail.
const createArticles = ({ bundler }: ArticleOptions) => {
  const files = createGlob({
    pattern: "content/articles/**/*.md",
    create: (path) => {
      const file = createFile({ path });
      const article = file.pipe(async (raw) => {
        // Front matter supplies title/slug/cover/color; `content` is the body.
        const { data, content } = grayMatter(raw);
        const { title, slug, cover, color } = data;
        const cwd = dirname(path);
        // Rewrites relative image links in the markdown to bundled asset URLs.
        const markdown = await markdownBundleImages({
          cwd,
          content,
          bundler,
        });
        const coverUrl = createImage({
          image: resolve(cwd, cover),
          format: "avif",
          bundler,
        });
        const thumbUrl = createImage({
          image: resolve(cwd, cover),
          format: "avif",
          width: 400,
          bundler,
        });
        const result: Article = {
          title,
          raw: content,
          cover,
          root: cwd,
          content: markdown,
          coverUrl,
          thumbUrl,
          color,
          slug,
          // NOTE(review): the `as any` cast below defeats the Article
          // annotation above — confirm the Article type and drop the cast.
        } as any;
        return result;
      });
      return article;
    },
  });

  return files;
};

export { createArticles };
|
||||
45
bin/data/positions/index.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { createGlob } from "../../resources/glob";
|
||||
import { createFile } from "../../resources/file";
|
||||
import grayMatter from "gray-matter";
|
||||
import { Bundler } from "../../bundler";
|
||||
import { markdownBundleImages } from "../../utils/markdown";
|
||||
import { dirname } from "path";
|
||||
import { Position } from "../../../types";
|
||||
import { Observable } from "../../observable";
|
||||
|
||||
type PositionOptions = {
  bundler: Bundler;
};

// Watches content/resume/positions/**/*.md and exposes each position (a job
// entry for the resume) as an observable that re-parses on file change.
const createPositions = ({ bundler }: PositionOptions) => {
  const files = createGlob<Observable<Position>>({
    pattern: "content/resume/positions/**/*.md",
    create: (path) => {
      const file = createFile({ path });
      const position = file.pipe(async (raw) => {
        // Front matter supplies title/company/from/to; `content` is the body.
        const { data, content } = grayMatter(raw);
        const { title } = data;
        const cwd = dirname(path);
        // Rewrites relative image links in the markdown to bundled asset URLs.
        const markdown = await markdownBundleImages({
          cwd,
          content,
          bundler,
        });
        const result = {
          company: data.company,
          title,
          from: data.from,
          to: data.to,
          raw: content,
          content: markdown,
          // NOTE(review): `as any` bypasses the Position type promised by
          // createGlob<Observable<Position>> — confirm the shape and type it.
        } as any;
        return result;
      });
      return position;
    },
  });

  return files;
};

export { createPositions };
|
||||
35
bin/data/profile/index.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { resolve } from "path";
|
||||
import { createFile } from "../../resources/file";
|
||||
import YAML from "yaml";
|
||||
import { Bundler } from "../../bundler";
|
||||
import { Profile } from "../../../types";
|
||||
import { createImage } from "../../resources/image";
|
||||
|
||||
type ProfileOptions = {
|
||||
bundler: Bundler;
|
||||
};
|
||||
|
||||
const createProfile = ({ bundler }: ProfileOptions) => {
|
||||
const file = createFile({
|
||||
path: resolve("content/profile.yml"),
|
||||
});
|
||||
|
||||
const profile = file.pipe(async (yaml) => {
|
||||
const data = YAML.parse(yaml);
|
||||
const imagePath = resolve("content", data.image);
|
||||
const result: Profile = {
|
||||
...data,
|
||||
imageUrl: createImage({
|
||||
image: imagePath,
|
||||
format: "avif",
|
||||
bundler,
|
||||
}),
|
||||
imagePath,
|
||||
};
|
||||
return result;
|
||||
});
|
||||
|
||||
return profile;
|
||||
};
|
||||
|
||||
export { createProfile };
|
||||
46
bin/dev/server.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import express, { Express } from "express";
|
||||
import { Bundler } from "../bundler";
|
||||
import { extname } from "path";
|
||||
|
||||
// Dev server: serves bundler assets directly from memory. HTML responses are
// held open (the closing </html> tag is stripped) so that when the underlying
// asset changes, a reload <script> can be appended to the still-open response,
// triggering a browser refresh — a minimal live-reload mechanism.
const createServer = (bundler: Bundler): Express => {
  const app = express();
  app.use((req, res) => {
    let path = req.path;
    let asset = bundler.get(path);
    if (!asset) {
      // Directory-style URLs fall back to their index.html asset.
      path = path.endsWith("/") ? path + "index.html" : path + "/index.html";
      asset = bundler.get(path);
    }
    if (asset) {
      const ext = extname(path);
      // NOTE(review): no .catch here — a failing asset loader becomes an
      // unhandled rejection and the request hangs; consider a 500 response.
      asset.data.then((data) => {
        if (ext === ".html") {
          // On the first change notification, wait for the rebuilt asset,
          // then end the response with a script that reloads the page.
          const unsubscribe = asset!.subscribe(async () => {
            await asset?.data;
            unsubscribe();
            res.end(`<script>window.location.reload()</script>`);
          });
          // Always detach the observer once the response is done.
          res.on("close", unsubscribe);
          res.on("finish", unsubscribe);
          res.on("error", unsubscribe);
          res.writeHead(200, {
            "content-type": "text/html;charset=utf-8",
            "Cache-Control": "no-cache, no-store, must-revalidate",
            Pragma: "no-cache",
            Expires: "0",
            "keep-alive": "timeout=5, max=100",
          });
          // Strip </html> so the connection stays open for the reload script.
          res.write(data.content.toString().replace("</html>", ""));
        } else {
          res.send(data.content);
        }
      });
    } else {
      res.status(404).send("Not found");
    }
  });

  return app;
};

export { createServer };
|
||||
40
bin/index.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { program } from "commander";
|
||||
import { build } from "./build";
|
||||
import { createServer } from "./dev/server";
|
||||
import { dirname, join, resolve } from "path";
|
||||
import { mkdir, rm, writeFile } from "fs/promises";
|
||||
import { existsSync } from "fs";
|
||||
|
||||
const dev = program.command("dev");
|
||||
dev.action(async () => {
|
||||
const bundler = await build();
|
||||
const server = createServer(bundler);
|
||||
server.listen(3000);
|
||||
});
|
||||
|
||||
const bundle = program.command("build");
|
||||
bundle.action(async () => {
|
||||
const bundler = await build();
|
||||
const outputDir = resolve("out");
|
||||
if (existsSync(outputDir)) {
|
||||
rm(outputDir, { recursive: true });
|
||||
}
|
||||
for (let path of bundler.paths) {
|
||||
await bundler.get(path)?.data;
|
||||
}
|
||||
for (let path of bundler.paths) {
|
||||
const asset = bundler.get(path);
|
||||
if (!asset) {
|
||||
throw new Error(`Asset not found for path: ${path}`);
|
||||
}
|
||||
const content = await asset.data;
|
||||
const target = join(outputDir, path);
|
||||
const targetDir = dirname(target);
|
||||
await mkdir(targetDir, { recursive: true });
|
||||
await writeFile(target, content.content);
|
||||
console.log(`Wrote ${target}`);
|
||||
}
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
70
bin/observable/index.test.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { Observable } from "./observable";
|
||||
import { getCollectionItems } from "./utils";
|
||||
|
||||
// Unit tests for the Observable primitive: lazy loading, combination,
// re-loading via set(), collection flattening, and change notification.
describe("observable", () => {
  it("should be able to create an observable", async () => {
    const observable = new Observable(() => Promise.resolve(1));
    expect(observable).toBeDefined();
    const data = await observable.data;
    expect(data).toBe(1);
  });

  it("should be able to combine observables", async () => {
    const observable1 = new Observable(() => Promise.resolve(1));
    const observable2 = new Observable(() => Promise.resolve(2));
    const combined = Observable.combine({ observable1, observable2 });
    const data = await combined.data;
    expect(data.observable1).toBe(1);
    expect(data.observable2).toBe(2);
  });

  it("should be able to update observable", async () => {
    const observable = new Observable(() => Promise.resolve(1));
    const data = await observable.data;
    expect(data).toBe(1);
    // set() swaps the loader and invalidates the cached value.
    observable.set(() => Promise.resolve(2));
    const data2 = await observable.data;
    expect(data2).toBe(2);
  });

  it("should be able to extract collection items", async () => {
    // An observable of observables flattens to an array of resolved values.
    const observable = new Observable(() =>
      Promise.resolve([
        new Observable(() => Promise.resolve(1)),
        new Observable(() => Promise.resolve(2)),
        new Observable(() => Promise.resolve(3)),
      ])
    );
    const flatten = observable.pipe(getCollectionItems);
    const data = await flatten.data;
    expect(data).toEqual([1, 2, 3]);
  });

  it("should update observable when subscribed", async () => {
    const observable = new Observable(() => Promise.resolve(1));
    const spy = jest.fn();
    observable.subscribe(spy);
    // Subscribing alone must not trigger a notification...
    expect(spy).not.toHaveBeenCalled();
    // ...but replacing the loader must, exactly once.
    observable.set(() => Promise.resolve(2));
    expect(spy).toHaveBeenCalled();
    expect(spy).toHaveBeenCalledTimes(1);
  });

  it("should update combined observable when subscribed", async () => {
    const observable1 = new Observable(() => Promise.resolve(1));
    const observable2 = new Observable(() => Promise.resolve(2));
    const combined = Observable.combine({ observable1, observable2 });
    const spy = jest.fn();
    const data1 = await combined.data;
    expect(data1.observable1).toBe(1);
    expect(data1.observable2).toBe(2);
    combined.subscribe(spy);
    expect(spy).not.toHaveBeenCalled();
    // A change in any source observable propagates to the combination.
    observable2.set(() => Promise.resolve(3));
    expect(spy).toHaveBeenCalled();
    expect(spy).toHaveBeenCalledTimes(1);
    const data2 = await combined.data;
    expect(data2.observable1).toBe(1);
    expect(data2.observable2).toBe(3);
  });
});
|
||||
2
bin/observable/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { Observable } from "./observable";
|
||||
export { getCollectionItems } from "./utils";
|
||||
81
bin/observable/observable.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
type Observer = () => void;
|
||||
|
||||
type ObservableRecord<T extends Record<string, Observable<any>>> = {
|
||||
[K in keyof T]: T[K] extends Observable<infer U> ? U : never;
|
||||
};
|
||||
|
||||
class Observable<T> {
|
||||
#observers: Observer[] = [];
|
||||
#data?: Promise<T>;
|
||||
#loader: (current?: T) => Promise<T>;
|
||||
|
||||
constructor(loader: () => Promise<T>) {
|
||||
this.#loader = loader;
|
||||
}
|
||||
|
||||
public get ready() {
|
||||
return this.#data;
|
||||
}
|
||||
|
||||
public get data() {
|
||||
if (!this.#data) {
|
||||
this.#data = this.#loader(this.#data);
|
||||
}
|
||||
return this.#data;
|
||||
}
|
||||
|
||||
public recreate = () => {
|
||||
this.#data = undefined;
|
||||
this.notify();
|
||||
};
|
||||
|
||||
public set(loader: (current?: T) => Promise<T>) {
|
||||
this.#data = undefined;
|
||||
this.#loader = loader;
|
||||
this.notify();
|
||||
}
|
||||
|
||||
public notify = () => {
|
||||
this.#observers.forEach((observer) => observer());
|
||||
};
|
||||
|
||||
subscribe = (observer: Observer) => {
|
||||
this.#observers.push(observer);
|
||||
return () => this.unsubscribe(observer);
|
||||
};
|
||||
|
||||
unsubscribe = (observer: Observer) => {
|
||||
this.#observers = this.#observers.filter((o) => o !== observer);
|
||||
};
|
||||
|
||||
pipe = <U>(fn: (data: T) => Promise<U>) => {
|
||||
const loader = async () => fn(await this.data);
|
||||
const observable = new Observable<U>(loader);
|
||||
this.subscribe(() => {
|
||||
observable.set(loader);
|
||||
});
|
||||
return observable;
|
||||
};
|
||||
|
||||
static combine = <U extends Record<string, Observable<any>>>(
|
||||
record: U
|
||||
): Observable<ObservableRecord<U>> => {
|
||||
const loader = () =>
|
||||
Object.entries(record).reduce(
|
||||
async (accP, [key, value]) => ({
|
||||
...(await accP),
|
||||
[key]: await value.data,
|
||||
}),
|
||||
{} as any
|
||||
);
|
||||
const observable = new Observable<ObservableRecord<U>>(loader);
|
||||
Object.values(record).forEach((item) => {
|
||||
item.subscribe(async () => {
|
||||
observable.set(loader);
|
||||
});
|
||||
});
|
||||
return observable;
|
||||
};
|
||||
}
|
||||
|
||||
export { Observable };
|
||||
7
bin/observable/utils.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { Observable } from "./observable";
|
||||
|
||||
const getCollectionItems = async <T>(items: Observable<T>[]) => {
|
||||
return Promise.all(items.map((item) => item.data));
|
||||
};
|
||||
|
||||
export { getCollectionItems };
|
||||
13
bin/resources/ejs/index.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { createFile } from "../file";
|
||||
import ejs from "ejs";
|
||||
|
||||
const createEjs = (path: string) => {
|
||||
const file = createFile({ path });
|
||||
const template = file.pipe(async (tmpl) => {
|
||||
const compiled = ejs.compile(tmpl.toString());
|
||||
return compiled;
|
||||
});
|
||||
return template;
|
||||
};
|
||||
|
||||
export { createEjs };
|
||||
19
bin/resources/file/index.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { readFile } from "fs/promises";
|
||||
import { Observable } from "../../observable";
|
||||
import { watch } from "fs";
|
||||
|
||||
type FileOptions = {
|
||||
path: string;
|
||||
};
|
||||
|
||||
const createFile = ({ path }: FileOptions) => {
|
||||
const file = new Observable(async () => readFile(path, "utf-8"));
|
||||
|
||||
watch(path, () => {
|
||||
file.recreate();
|
||||
});
|
||||
|
||||
return file;
|
||||
};
|
||||
|
||||
export { createFile };
|
||||
33
bin/resources/glob/index.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import fastGlob from "fast-glob";
|
||||
import watchGlob from "glob-watcher";
|
||||
import { Observable } from "../../observable";
|
||||
|
||||
type GlobOptions<T> = {
|
||||
cwd?: string;
|
||||
pattern: string;
|
||||
create?: (path: string) => T;
|
||||
};
|
||||
|
||||
const defaultCreate = (a: any) => a;
|
||||
|
||||
const createGlob = <T = string>({
|
||||
cwd,
|
||||
pattern,
|
||||
create = defaultCreate,
|
||||
}: GlobOptions<T>) => {
|
||||
const glob = new Observable(async () => {
|
||||
const files = await fastGlob(pattern, { cwd });
|
||||
return files.map(create);
|
||||
});
|
||||
|
||||
const watcher = watchGlob(pattern, { cwd });
|
||||
watcher.on("add", (path) => {
|
||||
glob.set((current) => Promise.resolve([...(current || []), create(path)]));
|
||||
|
||||
return glob;
|
||||
});
|
||||
|
||||
return glob;
|
||||
};
|
||||
|
||||
export { createGlob };
|
||||
40
bin/resources/image/index.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { createHash } from "crypto";
|
||||
import { Asset, Bundler } from "../../bundler";
|
||||
import { Observable } from "../../observable";
|
||||
import sharp, { FormatEnum } from "sharp";
|
||||
|
||||
type ImageOptions = {
|
||||
format: keyof FormatEnum;
|
||||
name?: string;
|
||||
image: string;
|
||||
width?: number;
|
||||
height?: number;
|
||||
bundler: Bundler;
|
||||
};
|
||||
|
||||
const createImage = (options: ImageOptions) => {
|
||||
let path =
|
||||
options.name || createHash("sha256").update(options.image).digest("hex");
|
||||
if (options.width) {
|
||||
path += `-w${options.width}`;
|
||||
}
|
||||
if (options.height) {
|
||||
path += `-h${options.height}`;
|
||||
}
|
||||
path += `.${options.format}`;
|
||||
const loader = async () => {
|
||||
const item = sharp(options.image);
|
||||
if (options.width || options.height) {
|
||||
item.resize(options.width, options.height);
|
||||
}
|
||||
item.toFormat(options.format);
|
||||
const content = await item.toBuffer();
|
||||
return {
|
||||
content,
|
||||
};
|
||||
};
|
||||
const observable = new Observable<Asset>(loader);
|
||||
return options.bundler.register(path, observable);
|
||||
};
|
||||
|
||||
export { createImage };
|
||||
29
bin/resources/latex/index.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { Asset, Bundler } from "../../bundler";
|
||||
import { Observable } from "../../observable";
|
||||
import { createEjs } from "../ejs";
|
||||
import { latexToPdf } from "./utils";
|
||||
|
||||
type LatexOptions = {
  path: string;
  bundler: Bundler;
  template: ReturnType<typeof createEjs>;
  data: Observable<any>;
};

// Renders an EJS LaTeX template with `data`, compiles the result to a PDF,
// and registers it with the bundler as "<path>.pdf". Returns the normalized
// asset URL. The PDF is regenerated whenever the template or data changes.
const createLatex = ({ template, data, path, bundler }: LatexOptions) => {
  const pdf = Observable.combine({
    template,
    data,
  })
    // Stage 1: fill the compiled EJS template with the current data.
    .pipe(async ({ template, data }) => template(data))
    // Stage 2: run the produced LaTeX source through the PDF toolchain.
    .pipe(async (latex) => {
      const pdf = await latexToPdf(latex);

      const asset: Asset = {
        content: pdf,
      };
      return asset;
    });
  return bundler.register(`${path}.pdf`, pdf);
};

export { createLatex };
|
||||
23
bin/resources/latex/utils.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import latex from "node-latex";
|
||||
import { Readable } from "stream";
|
||||
|
||||
const latexToPdf = (doc: string) =>
|
||||
new Promise<Buffer>((resolve, reject) => {
|
||||
const chunks: Buffer[] = [];
|
||||
const input = new Readable();
|
||||
input.push(doc);
|
||||
input.push(null);
|
||||
const latexStream = latex(input);
|
||||
latexStream.on("data", (chunk) => {
|
||||
chunks.push(Buffer.from(chunk));
|
||||
});
|
||||
latexStream.on("finish", () => {
|
||||
const result = Buffer.concat(chunks);
|
||||
resolve(result);
|
||||
});
|
||||
latexStream.on("error", (err) => {
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
export { latexToPdf };
|
||||
0
bin/resources/markdown/index.ts
Normal file
62
bin/resources/page/index.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import React, { ComponentType } from "react";
|
||||
import { renderToStaticMarkup } from "react-dom/server";
|
||||
import { HelmetProvider, FilledContext } from "react-helmet-async";
|
||||
import { Asset, Bundler } from "../../bundler";
|
||||
import { Observable } from "../../observable";
|
||||
import { ServerStyleSheet } from "styled-components";
|
||||
import { resolve } from "path";
|
||||
|
||||
type PageOptions = {
  path: string;
  template: Observable<ComponentType<any>>;
  props: Observable<any>;
  bundler: Bundler;
};

// Server-side renders a React template with its props into a static HTML
// document and registers it with the bundler as "<path>/index.html". The
// page re-renders whenever the template or props observables change.
const createPage = (options: PageOptions) => {
  const data = Observable.combine({
    template: options.template,
    props: options.props,
  });
  const page = data.pipe(async ({ template, props }) => {
    // styled-components sheet collects the CSS emitted during render.
    const sheet = new ServerStyleSheet();
    // react-helmet-async fills this context with <head> content during render.
    const helmetContext: FilledContext = {} as any;
    const body = sheet.collectStyles(
      React.createElement(
        HelmetProvider,
        { context: helmetContext },
        React.createElement(template, props)
      )
    );
    const bodyHtml = renderToStaticMarkup(body);
    const { helmet } = helmetContext;

    // Assemble <head>: style tags first, then whatever helmet collected.
    const css = sheet.getStyleTags();
    const headHtml = [
      css,
      helmet.title?.toString(),
      helmet.priority?.toString(),
      helmet.meta?.toString(),
      helmet.link?.toString(),
      helmet.script?.toString(),
    ]
      .filter(Boolean)
      .join("");
    const html = `<!DOCTYPE html>
<html lang="en">
<head>
${headHtml}
</head>
<body>
${bodyHtml}
</body>
</html>
`;
    const asset: Asset = { content: html };
    return asset;
  });

  // resolve() is used for URL-path joining here, not filesystem access.
  const path = resolve("/", options.path, "index.html");
  return options.bundler.register(path, page);
};

export { createPage };
|
||||
82
bin/resources/react/index.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import vm from "vm";
|
||||
import React, { ComponentType } from "react";
|
||||
import { nodeResolve } from "@rollup/plugin-node-resolve";
|
||||
import commonjs from "@rollup/plugin-commonjs";
|
||||
import json from "@rollup/plugin-json";
|
||||
import replace from "@rollup/plugin-replace";
|
||||
import sucrase from "@rollup/plugin-sucrase";
|
||||
import alias from "@rollup/plugin-alias";
|
||||
import externalGlobals from "rollup-plugin-external-globals";
|
||||
import { createScript } from "../script";
|
||||
import orgStyled from "styled-components";
|
||||
import * as styledExports from "styled-components";
|
||||
import ReactHelmetAsync from "react-helmet-async";
|
||||
import { resolve } from "path";
|
||||
|
||||
// styled-components exposes a callable default export plus named helpers.
// Because the bundle maps the whole "styled-components" module to a single
// global (see externalGlobals below), build one object that is both callable
// (the bound default) and carries every named export.
const styled = orgStyled.bind(null);
for (let key of Object.keys(orgStyled)) {
  if (key === "default") {
    continue;
  }
  (styled as any)[key] = (orgStyled as any)[key];
}
// NOTE(review): this second pass copies the same names from the namespace
// import and overwrites the first — presumably to cover CJS/ESM interop
// differences between the two import forms; confirm both loops are needed.
for (let key of Object.keys(styledExports)) {
  if (key === "default") {
    continue;
  }
  (styled as any)[key] = (styledExports as any)[key];
}
|
||||
|
||||
// Bundles a React/TSX template with Rollup (watch mode) and evaluates the
// resulting CJS bundle in a vm sandbox, exposing the module's exports as an
// observable component that refreshes on rebuild. React, styled-components
// and react-helmet-async are injected as globals rather than bundled so the
// rendered output shares the server's singletons (style sheet, helmet ctx).
const createReact = <TProps = any>(path: string) => {
  const script = createScript({
    path,
    format: "cjs",
    plugins: [
      replace({
        preventAssignment: true,
        "process.env.NODE_ENV": JSON.stringify("production"),
      }),
      // "@/…" resolves to the shared React template directory.
      alias({
        entries: [
          { find: "@", replacement: resolve("content/templates/react") },
        ],
      }),
      sucrase({
        exclude: ["node_modules/**"],
        transforms: ["jsx", "typescript"],
      }),
      nodeResolve({
        browser: true,
        preferBuiltins: false,
        extensions: [".js", ".ts", ".tsx"],
      }),
      json(),
      commonjs({
        include: /node_modules/,
      }),
      // Leave these imports as references to the globals injected below.
      externalGlobals({
        react: "React",
        "styled-components": "StyledComponents",
        "react-helmet-async": "ReactHelmetAsync",
      }),
    ],
  });
  const template = script.pipe<ComponentType<TProps>>(async () => {
    const scriptContent = await script.data;
    // Minimal CJS module environment for the evaluated bundle.
    const exports: any = {};
    const module = { exports };
    const globals = {
      module,
      exports,
      React,
      StyledComponents: styled,
      ReactHelmetAsync,
    };
    // NOTE(review): vm.runInNewContext offers no real isolation for
    // untrusted code — acceptable here since templates are first-party.
    vm.runInNewContext(scriptContent, globals);
    return module.exports;
  });

  return template;
};

export { createReact };
|
||||
52
bin/resources/script/index.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { Observable } from "../../observable";
|
||||
import { InputPluginOption, ModuleFormat, watch } from "rollup";
|
||||
|
||||
type ScriptOptions = {
  path: string;
  format: ModuleFormat;
  plugins?: InputPluginOption;
};

// Starts a Rollup watcher for `options.path` and resolves with the first
// successful bundle's code. Later rebuilds are delivered through `update`.
// The returned promise therefore settles exactly once; the watcher keeps
// running for the lifetime of the process (it is never closed here).
const build = (options: ScriptOptions, update: (code: string) => void) =>
  new Promise<string>((resolve, reject) => {
    // Distinguishes the first bundle (resolve) from rebuilds (update).
    let compiled = false;
    const watcher = watch({
      input: options.path,
      plugins: options.plugins,
      // Silence Rollup warnings (e.g. circular deps in node_modules).
      onwarn: () => { },
      output: {
        format: options.format,
      },
      watch: {
        // Code is generated in-memory below; nothing is written to disk.
        skipWrite: true,
      },
    });

    watcher.on("event", async (event) => {
      if (event.code === "BUNDLE_END") {
        const { output } = await event.result.generate({
          format: options.format,
        });
        // Single-input bundle: the entry chunk is the first output item.
        const { code } = output[0];
        if (!compiled) {
          resolve(code);
          compiled = true;
        } else {
          update(code);
        }
      }
      if (event.code === "ERROR") {
        // NOTE(review): after the first resolve, reject() is a no-op, so
        // rebuild errors are silently dropped — confirm that is intended.
        reject(event.error);
      }
    });
  });
|
||||
|
||||
const createScript = (options: ScriptOptions) => {
|
||||
const script: Observable<string> = new Observable(() =>
|
||||
build(options, (code) => script.set(() => Promise.resolve(code)))
|
||||
);
|
||||
|
||||
return script;
|
||||
};
|
||||
|
||||
export { createScript };
|
||||
56
bin/utils/markdown/index.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { resolve } from "path";
|
||||
import { decode } from "html-entities";
|
||||
import { marked } from "marked";
|
||||
import remark from "remark";
|
||||
import visit from "unist-util-visit";
|
||||
import { Bundler } from "../../bundler";
|
||||
import { createImage } from "../../resources/image";
|
||||
import { renderer } from "./latex";
|
||||
|
||||
type MarkdownBundleImagesOptions = {
  cwd: string;
  content: string;
  bundler: Bundler;
};

// Rewrites every image reference in the markdown: the referenced file is
// registered with the bundler as a webp asset and the node's URL is replaced
// with the resulting bundler URL. Returns the transformed markdown source.
const markdownBundleImages = async ({
  bundler,
  cwd,
  content,
}: MarkdownBundleImagesOptions) => {
  const result = await remark()
    .use(() => (tree) => {
      visit(tree, "image", (node) => {
        if (!("url" in node)) {
          return;
        }
        const url = node.url as string;
        // Image paths are resolved relative to the markdown file's directory.
        const path = resolve(cwd, url);
        // createImage returns the registered asset's URL (encoding is lazy).
        const image = createImage({
          image: path,
          bundler,
          format: "webp",
        });
        const newUrl = image;
        node.url = newUrl;
      });
    })
    .process(content);
  return String(result);
};
|
||||
|
||||
type MarkdownToLatexOptions = {
|
||||
root: string;
|
||||
content: string;
|
||||
};
|
||||
|
||||
const markdownToLatex = ({ root, content }: MarkdownToLatexOptions) => {
|
||||
const render: any = {
|
||||
...renderer(0),
|
||||
};
|
||||
const latex = marked(content, {
|
||||
renderer: render,
|
||||
});
|
||||
return decode(latex);
|
||||
};
|
||||
export { markdownBundleImages, markdownToLatex };
|
||||
91
bin/utils/markdown/latex.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import { decode } from "html-entities";
|
||||
import { existsSync } from "fs";
|
||||
|
||||
const latexTypes = ["", "section", "subsection", "paragraph", "subparagraph"];
|
||||
|
||||
const sanitize = (text?: string) => {
|
||||
if (!text) {
|
||||
return "";
|
||||
}
|
||||
return decode(text)
|
||||
.replace("&", "\\&")
|
||||
.replace("_", "\\_")
|
||||
.replace(/([^\\])\}/g, "$1\\}")
|
||||
.replace(/([^\\])\{/g, "$1\\{")
|
||||
.replace(/[^\\]\[/g, "\\[")
|
||||
.replace(/#/g, "\\#");
|
||||
};
|
||||
|
||||
// Shape of the marked-compatible renderer produced below; `depth` offsets
// heading levels so nested content can start deeper in the section hierarchy.
type Renderer = (depth: number) => {
  heading?: (text: string, depth: number) => string;
  code?: (input: string) => string;
  text?: (input: string) => string;
  paragraph?: (input: string) => string;
  list?: (input: string) => string;
  listitem?: (input: string) => string;
  link?: (href: string, text: string) => string;
  strong?: (text: string) => string;
  em?: (text: string) => string;
  codespan?: (code: string) => string;
  image?: (link: string) => string;
};

// A marked renderer that emits LaTeX instead of HTML. `outerDepth` shifts
// markdown heading levels (e.g. outerDepth=1 maps "# x" to \subsection).
const renderer = (outerDepth: number) => ({
  heading: (text: string, depth: number) => {
    // NOTE(review): outerDepth + depth > 4 indexes past latexTypes and
    // yields "\undefined{...}" — confirm inputs stay within range.
    return `\\${latexTypes[outerDepth + depth]}{${sanitize(text)}}\n\n`;
  },
  code: (input: string) => {
    // Verbatim listing; content is intentionally NOT sanitized.
    return `
\\begin{lstlisting}
${input}
\\end{lstlisting}
`;
  },
  text: (input: string) => {
    return sanitize(input);
  },
  blockquote: (input: string) => {
    return sanitize(input);
  },
  paragraph: (input: string) => {
    return `${input}\n\n`;
  },
  list: (input: string) => {
    return `
\\begin{itemize}
${input}
\\end{itemize}
`;
  },
  listitem: (input: string) => {
    return `\\item{${input}}`;
  },
  link: (href: string, text: string) => {
    // Bare links render as \url{...}; labelled links append the URL.
    if (!text || text === href) {
      return `\\url{${sanitize(href)}}`;
    }
    return `${sanitize(text)} (\\url{${sanitize(href)}})`;
  },
  strong: (text: string) => {
    return `\\textbf{${sanitize(text)}}`;
  },
  em: (text: string) => {
    // NOTE(review): emphasis renders as \textbf, same as strong — \textit
    // may have been intended; confirm before changing output.
    return `\\textbf{${sanitize(text)}}`;
  },
  codespan: (code: string) => {
    return `\\texttt{${sanitize(code)}}`;
  },
  image: (link: string) => {
    // Only local files can be embedded; remote URLs get a placeholder.
    if (!existsSync(link)) {
      return "Online image not supported";
    }
    return `\\begin{figure}[h!]
\\includegraphics[width=0.5\\textwidth]{${link}}
\\centering
\\end{figure}
`;
  },
});

export type { Renderer };
export { sanitize, renderer };
|
||||
27
bin/utils/observable/index.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { Observable } from "../../observable";
|
||||
|
||||
const forEach = async <T extends Observable<any[]>>(
|
||||
observable: T,
|
||||
fn: (
|
||||
value: T extends Observable<infer U>
|
||||
? U extends Array<infer A>
|
||||
? A
|
||||
: never
|
||||
: never
|
||||
) => Promise<void>
|
||||
) => {
|
||||
const knownValues = new Set();
|
||||
const update = async () => {
|
||||
for (let value of await observable.data) {
|
||||
if (knownValues.has(value)) {
|
||||
continue;
|
||||
}
|
||||
await fn(value);
|
||||
knownValues.add(value);
|
||||
}
|
||||
};
|
||||
await update();
|
||||
observable.subscribe(update);
|
||||
};
|
||||
|
||||
export { forEach };
|
||||
BIN
content/articles/2022-03-15-my-home-runs-redux/cover.png
Normal file
|
After Width: | Height: | Size: 1.8 MiB |
BIN
content/articles/2022-03-15-my-home-runs-redux/graph.png
Normal file
|
After Width: | Height: | Size: 29 KiB |
92
content/articles/2022-03-15-my-home-runs-redux/main.md
Normal file
@@ -0,0 +1,92 @@
|
||||
---
|
||||
title: My home runs Redux
|
||||
cover: cover.png
|
||||
slug: my-home-runs-redux
|
||||
published: 2022-03-15
|
||||
color: '#e80ccf'
|
||||
---
|
||||
|
||||
I have been playing around with smart homes for a long time; I have used most of the platforms out there, I have developed quite a few myself, and one thing I keep coming back to is Redux.
|
||||
|
||||
Those who know what Redux is may find this a weird choice, but for those who don't know Redux, I'll give a brief introduction to get up to speed.
|
||||
|
||||
Redux is a state management framework, initially built for a React talk by Dan Abramov and is still primarily associated with managing React applications. Redux has a declarative state derived through a "reducer"-function. This reducer function takes in the current state and an event, and, based on that event, it gives back an updated state. So you have an initial state inside Redux, and then you dispatch events into it, each getting the current state and updating it. That means that the resulting state will always be the same given the same set of events.
|
||||
|
||||
So why is a framework primarily used to keep track of application state for React-based frontends a good fit for a smart home? Well, your smart home platform most likely closely mimics this architecture already!
|
||||
|
||||
First, an event goes in, such as a motion sensor triggering, or you set the bathroom light to 75% brightness in the interface. This event then goes into the platform and hits some automation or routine, resulting in an update request getting sent to the correct devices, which then change the state to correspond to the new state.
|
||||
|
||||
...But that is not quite what happens on most platforms. Deterministic events may go into the system, but this usually doesn't cause a change to a deterministic state. Instead, it gets dispatched to the device, the device updates, the platform sees this change, and then it updates its state to represent that new state.
|
||||
|
||||
This distinction is essential because it comes with a few drawbacks:
|
||||
|
||||
* Because the event does not change the state but sends a request to the device that does it, everything becomes asynchronous and can happen out of order. This behaviour can be seen either as an issue or a feature, but it does make integrating with it a lot harder from a technical point of view.
|
||||
* The request is sent to the device as a "fire-and-forget" event. It then relies on the success of that request and the subsequent state change to be reported back from the device before the state gets updated. This behaviour means that if this request fails (something you often see with ZigBee-based devices), the device and the state don't get updated.
|
||||
* Since the device is responsible for reporting the state change, you are dependent on having that actual device there to make the change. Without sending the changes to the actual device, you cannot test the setup.
|
||||
|
||||
So can we create a setup that gets away from these issues?
|
||||
|
||||
Another thing to add here is more terminology/philosophy, but most smart home setups are, in my opinion, not really smart, just connected and, to some extent, automated. I want a design that has some actual smartness to it. In this article, I will outline a setup closer to that of the connected, automated home, and at the end, I will give some thoughts on how to take this to the next level and make it smart.
|
||||
|
||||
We know what we want to achieve, and Redux can help us solve this. Remember that Redux takes actions and applies them in a deterministic way to produce a deterministic state.
|
||||
|
||||
Time to go a bit further down the React rabbit hole because another thing from React-land comes in handy here: the concept of reconciliation.
|
||||
|
||||
Instead of dispatching events to the devices waiting for them to update and report their state back, we can rely on reconciliation to update our device. For example, let's say we have a device state for our living room light that says it is at 80% brightness in our Redux store. So now we dispatch an event that sets it to 20% brightness.
|
||||
|
||||
Instead of sending this event to the device, we update the Redux state.
|
||||
|
||||
We have a state listener that detects when the state changes and compares it to the state of the actual device. In our case, it seems that the state indicates that the living room light should be at 20% but are, in fact, at 80%, so it sends a request to the actual device to update it to the correct value.
|
||||
|
||||
We can also do scheduled reconciliation to compare our Redux state to that of the actual devices. If a device fails to update its state after a change, it will automatically get updated on our next scheduled run, ensuring that our smart home devices always reflect our state.
|
||||
|
||||
_Sidenote: Yes, of course, I have done a proof of concept using React with a home build reconciliation that reflected the virtual dom unto physical devices, just to have had a house that ran React-Redux_
|
||||
|
||||
Let's go through our list of issues with how most platforms handle this. We can see that we have eliminated all of them by switching to this Redux-reconciliation approach: we update the state directly to run it synchronously. We can re-run the reconciliation so failed or dropped device updates get re-run. We don't require any physical devices as our state is directly updated.
|
||||
|
||||
We now have a robust, reliable, state management mechanism for our smart home, time to add some smarts to it. It is a little outside the article's main focus as this is just my way of doing it; there may be way better ways, so use it at your discretion.
|
||||
|
||||
Redux has the concept of middlewares which are stateful functions that live between the event going into Redux and the reducer updating the state. These middlewares allow Redux to deal with side effects and do event transformations.
|
||||
|
||||
Time for another piece of my smart home philosophy: Most smart homes act on events, and I have used the word throughout this article, but to me, events are not the most valuable thing when creating a smart home; instead I would argue that the goal is to deal with intents rather than events. For instance, an event could be that I started to play a video on the TV. But that states a fact; what we want to do instead is capture what I am trying to achieve, the "intent", so let's split this event into two intents: if the video is less than one hour, I want to watch a TV show; if it is more, I want to watch a movie.
|
||||
|
||||
These intents allow us to not deal with weak-meaning events to do complex operations but instead split our concern into two separate concepts: intent classification and intent execution.
|
||||
|
||||
So last thing we need is a direct way of updating devices, as we can not capture everything through our intent classifier. For instance, if I sit down to read a book that does not generate any sensor data for our system to react to, I will still need a way to adjust device states manually. (I could add a button that would dispatch a reading intent)
|
||||
|
||||
I have separated the events going into Redux into two types:
|
||||
|
||||
* control events, which directly controls a device
|
||||
* environment events represent sensor data coming in (push on a button, motion sensor triggering, TV playing, etc.)
|
||||
|
||||
Now comes the part I have feared, where I need to draw a diagram.
|
||||
|
||||
...sorry
|
||||
|
||||
|
||||

|
||||
|
||||
So this shows our final setup.
|
||||
|
||||
Events go into our Redux setup, either environment or control.
|
||||
|
||||
Control events go straight to the reducer, and the state is updated.
|
||||
|
||||
Environment events first go to the intent classifier, which uses previous events, the current state, and the incoming event to derive the correct intent. The intent then goes into our intent executor, which converts the intent into a set of actual device changes, which gets sent to our reducer, and the state is then updated.
|
||||
|
||||
Lastly, we invoke the reconciliation to update our real devices to reflect our new state.
|
||||
|
||||
There we go! Now we have ended up with a self-contained setup. We can run it without the reconciliation or mock it to create tests for our setup and work without changing any real devices, and we can re-run the reconciliation on our state to ensure our state gets updated correctly, even if a device should miss an update.
|
||||
|
||||
**Success!!!**
|
||||
|
||||
But I promised to give an idea of how to take this smart home and make it actually "smart."
|
||||
|
||||
Let's imagine that we did not want to "program" our smart home. Instead, we wanted to use it; turning the lights on and off using the switches when we entered and exited a room, dimming the lights for movie time, and so on, and over time we want our smart home to pick up on those routines and start to do them for us.
|
||||
|
||||
We have a setup where we both have control events and environments coming in. Control events represent how we want the state of our home to be in a given situation. Environment events represent what happened in our home. So we could store those historically with some machine learning and look for patterns.
|
||||
|
||||
Let's say you always dim the light when playing a movie that is more than one hour long; your smart home would be able to recognize this pattern and automatically start to do this routine for you.
|
||||
|
||||
Would this work? I don't know. I am trying to get more skilled at machine learning to find out.
|
||||
|
||||
BIN
content/articles/2022-03-16-hiring/cover.png
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
37
content/articles/2022-03-16-hiring/main.md
Normal file
@@ -0,0 +1,37 @@
|
||||
---
|
||||
title: How to hire engineers, by an engineer
|
||||
cover: cover.png
|
||||
slug: hiring
|
||||
published: 2022-03-16
|
||||
color: "#8bae8c"
|
||||
---
|
||||
|
||||
It has been a few years since I have been part of the recruitment process. Still, I recently went through the hiring process myself when looking for a new job, so I will mix a bit from both sides for this article: you get both some experience from hiring and what worked, and experience from the other side of the table and what caused me not to consider a company, because spoiler alert: Engineers are contacted a lot!
|
||||
|
||||
So first I need to introduce a hard truth, as it will be underpinning a lot of my points and is most likely the most important takeaway from this: Your company is not unique.
|
||||
|
||||
Unless your tech brand is among the X highest regarded in the world, your company alone isn't a selling point. I have been contacted by so many companies which thought that because they were leaders in their field or had a "great product", candidates would come banging at their door. If I could disclose all those messages, it would be really easy to see that, except for the order of information, they all say almost the same thing, and chances are your job listing is the same. Sorry.
|
||||
The takeaway from this is that if everything is equal, any misstep in your hiring process can cost you that candidate, so if you are not amongst the strongest of tech brands you need to be extremely aware or you will NOT fill the position.
|
||||
|
||||
Okay after that slap in the face we can take a second to look at something...
|
||||
|
||||
A lot of people focus on skills when hiring, and of course the candidate should have the skills for the position, but I will make a case for putting less focus on the hard skills and more focus on passion.
|
||||
|
||||
Usually screening skills through an interview is hard and techniques like code challenges have their own issues, but more on that later.
|
||||
Screening for passion is easier, usually you can get a good feeling if a candidate is passionate about a specific topic, and passionate people want to learn! So even if the candidate has limited skills, if they have passion they will learn and they will outgrow a candidate with experience but no passion.
|
||||
Filling a team with technical skills can solve an immediate requirement, but companies, teams and products change, and your requirements will change along with them. A passionate team will adjust and evolve along with the product, whereas a team consisting of skilled people without passion will stay where they were when you hired them.
|
||||
|
||||
Another issue I see in many job postings is requiring a long list of skills. It would be awesome to find someone skilled in everything and who could solve all tasks. In the real world, when ever you add another skill to that list you are limiting the list of candidates that would fit so chances are you are not going to find anyone or the actual skills of any candidate in that very narrow list will be way lower than in a wider pool.
|
||||
A better way is to just add the most important skills, and teach the candidate any less important skills on the job. If you hire passionate people this should be possible (remember to screen for passion about learning new things)
|
||||
|
||||
While we are on the expected skill list: A lot of companies has this list of "it would be really nice if you had these skills". Well those could definitely be framed as learning experiences instead. If you have recruited passionate people, seeing that you will learn new cool skills count as a plus and any candidate who already have the skill will see it and think "awesome, I am already uniquely suited for the job!"
|
||||
|
||||
I promised to talk a bit about code challenges: They can be useful to screen a candidate's ability to just go in and start to work from day one, and if done correctly can help a manager organise their process to best suit the team's unique skills but...
|
||||
Hiring at the moment is hard! And as stated pretty much any job listing I have seen are identical, so as in a competitive job market where a small outlier on your resumé lands you in the pile never read through, as likely is it in a competitive hiring market that your listing never gets acted upon.
|
||||
Engineers are contacted a lot by recruiters and speaking to all would require a lot of work so if a company has a prolonged process it quickly gets sorted out, especially by the best candidates whom most likely get contacted the most and most likely have a full time job so time is a scarce resource.
|
||||
So be aware that if you use time consuming processes such as the code challenge you might miss out on the best candidates.
|
||||
|
||||
Please just disclose the salary range. From being connected to a few hundred recruiters here on LinkedIn I can see that this isn't just me but a general issue. As mentioned before, it takes very little to have your listings ignored and most likely most of your strongest potential candidates already has full time jobs, and would not want to move to a position paying less unless the position where absolutely unique (which again, yours most likely isn't). Therefore if you choose not to disclose the salary range be aware that you miss out on most of the best candidates. A company will get an immediate no from me if not disclosing the salary range.
|
||||
|
||||
Lastly, I have spent a lot of words telling you that your company or position isn't unique, and well we both know that is not accurate, your company most likely has something unique to offer! Be that soft values or hard benefits. Be sure to put them in your job listing, to bring out this uniqueness, it is what is going to set you apart from the other listings. There are a lot of other companies with the same tech stack, using an agile approach, with a high degree of autonomy, with a great team... But what can you offer that no one else can? Get it front and center... Recruiting is marketing and good copywriting
|
||||
|
||||
BIN
content/articles/2022-04-15-coding-challange/cover.png
Normal file
|
After Width: | Height: | Size: 1.4 MiB |
44
content/articles/2022-04-15-coding-challange/main.md
Normal file
@@ -0,0 +1,44 @@
|
||||
---
|
||||
title: A defence for the coding challenge
|
||||
cover: cover.png
|
||||
published: 2022-04-15
|
||||
slug: a-defence-for-the-code-challange
|
||||
color: "#3d91ef"
|
||||
---
|
||||
|
||||
Let's talk about code challenges. Code challenges are a topic with many opinions and something I have been unsure if I liked or hated. Still, I would like to make a case for why I think there are situations where this practise is beneficial, not only for the interviewer but the candidate as well.
|
||||
|
||||
But before getting that far, I would like to point out some of the downsides to code challenges because it isn't one-size-fits-all, and you may want to steer completely clear of them or only use them in specific circumstances.
|
||||
|
||||
## Downside 1
|
||||
The primary issue with coding challenges is that they may be built in a way that prevents the candidate from showing their strength. I have, for instance, often seen those logic-style code challenges being applied to all development positions, so a front-end developer would be quizzed on his ability to solve sorting algorithms. What he would be supposed to do after being hired was to align stuff correctly with CSS. This skill test, which ultimately assesses an entirely different set of skills than what is needed, will alienate the candidate and allow a candidate with skills in the quizzed topic to overshine one with the basic skills required.
|
||||
|
||||
Later I will talk a bit about some requirements that I think need to be considered in a good code test, so if used, at least would give a better indication of a candidate's skill concerning the specific role, not just as a "guy who does computer stuff".
|
||||
|
||||
## Downside 2
|
||||
|
||||
The second one is one I have mentioned before, but in a competitive hiring market, being the company with the most prolonged hiring process means that you might very well miss out on some of the best candidates due to them not having the available time to complete these tasks in their spare time, or because another company was able to close the hire quicker.
|
||||
|
||||
# Why you may want to use code challenges
|
||||
|
||||
Unfortunately, many people don't perform well in interviews. Without a technical assessment, the only place for a candidate to showcase their skills is in the interview itself.
|
||||
|
||||
The IT space has historically been associated with an introvert stereotype. While not always the case, they are definitely out there, and there is nothing wrong with that, but they are usually not the strongest at selling themself, and that is basically what most job interviews are. So if we give a candidate only the ability to showcase their skills through an interview, it stands to reason that the guy we end out hiring isn't necessarily the strongest candidate for the job but the best at showcasing hers/his skill.
|
||||
|
||||
Using a code challenge alongside the interview allows you to use the interview part to assess the person, get an idea about how they would interact on the team, have time to explain to them what the job would be like, without having the "hidden" agenda, of trying to trip them up with random technical questions, to try to see if they can answer correctly on the spot.
|
||||
|
||||
So instead of the on the spot question style, the candidate would get the time to seek information and solve the tasks more reminiscent of how they would work in the real world.
|
||||
|
||||
Additionally, if done right, the code challenge can also help the company/team prepare for the new candidate after the hire. For example, suppose your code challenge can indicate the candidate's strengths, weaknesses and knowledge level with various technologies. This can help put the "training"-program together to support the new hire to be up and running and comfortable in the position as quickly as possible.
|
||||
|
||||
## What makes a good code challenge
|
||||
|
||||
It isn't easy to answer, as it would vary from position to position, team to team, and company to company. Some jobs may require a specific knowledge set, where the "implement a sorting algorithm" may be the proper test and be something you would expect any candidate to be able to.
|
||||
|
||||
But here are a few questions I would use to evaluate the value of a code challenge:
|
||||
|
||||
1. Does it cover all the areas you are interested in in a candidate? This is not to evaluate if the candidate has ALL skills but rather to see if he has some skills which would add value to a team. For instance, if the role is for a front-end team that does both the front-end development, back-end for front-end, QA, DevOps, etc., the test should allow a candidate to showcase skills. If, for instance, your test is too heavily focused on one aspect, let's say front-end development, you may miss a candidate that could have elevated the entire team's ability at QA.
|
||||
1. Does it allow for flexible timeframes? Some candidates may not have time to spend 20 hours completing your code challenge, and the test should respect that. So if you have a lot of different tasks, as in the example above, you shouldn't expect the candidate to complete all, even if he has the time. Instead, make a suggested time frame, and give the candidate the possibility of picking particular focus areas to complete. That way, you respect their time, and you also allow them to showcase the skills they feel they are strongest at.
|
||||
|
||||
Another bonus thing to add is to give the candidate the ability to submit additional considerations and caveats to their solution. For example, a candidate may have chosen a particular path because the "right" approach wasn't clear from the context, have made suboptimal solutions to keep within the timeframe, or even skipped parts because of scope but still want to elaborate. This way, you get closer to the complete picture, not just the code-to-repo.
|
||||
|
||||
BIN
content/articles/2022-05-06-bob-the-algorithm/Frame1.png
Normal file
|
After Width: | Height: | Size: 39 KiB |
BIN
content/articles/2022-05-06-bob-the-algorithm/Graph1.png
Normal file
|
After Width: | Height: | Size: 17 KiB |
BIN
content/articles/2022-05-06-bob-the-algorithm/Graph2.png
Normal file
|
After Width: | Height: | Size: 31 KiB |
BIN
content/articles/2022-05-06-bob-the-algorithm/GraphStep1.png
Normal file
|
After Width: | Height: | Size: 3.1 KiB |
BIN
content/articles/2022-05-06-bob-the-algorithm/GraphStep2.png
Normal file
|
After Width: | Height: | Size: 3.9 KiB |
BIN
content/articles/2022-05-06-bob-the-algorithm/Planned.png
Normal file
|
After Width: | Height: | Size: 165 KiB |
BIN
content/articles/2022-05-06-bob-the-algorithm/TaskBounds.png
Normal file
|
After Width: | Height: | Size: 4.3 KiB |
BIN
content/articles/2022-05-06-bob-the-algorithm/cover.png
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
80
content/articles/2022-05-06-bob-the-algorithm/main.md
Normal file
@@ -0,0 +1,80 @@
|
||||
---
|
||||
title: My day is being planned by an algorithm
|
||||
cover: ./cover.png
|
||||
slug: bob-the-algorithm
|
||||
published: 2022-05-06
|
||||
color: '#e7d9ac'
|
||||
---
|
||||
|
||||
Allow me to introduce Bob. Bob is an algorithm, and he has just accepted a role as my assistant.
|
||||
|
||||
I am not very good when it comes to planning my day, and the many apps out there that promise to help haven't solved the problem for me, usually due to three significant shortcomings:
|
||||
|
||||
> Some cool quote!
|
||||
|
||||
1. Most day planner apps do what their paper counterparts would do: record the plan you create. I don't want to make the plan; someone should do that for me.
|
||||
2. They help you create a plan at the start of the day that you have to follow throughout the day. My days aren't that static, so my schedule needs to change throughout the day.
|
||||
3. They can't handle transits between locations very well.
|
||||
|
||||
So to solve those issues, I decided that the piece of silicon in my pocket, capable of doing a million calculations a second, should be able to help me do something other than waste time doom scrolling. It should let me get more done throughout the day and help me get more time for stuff I want to do. That is why I created Bob.
|
||||
|
||||
Also, I wanted a planning algorithm that was not only for productivity. I did not want to get into the same situation as poor Kiki in the book "The circle", who gets driven insane by a planning algorithm that tries to hyper-optimize her day. Bob also needs to plan downtime.
|
||||
|
||||
Bob is still pretty young and still learning new things, but he has gotten to the point where I believe he is good enough to start to use on a day to day basis.
|
||||
|
||||

|
||||
|
||||
How does Bob work? Bob gets a list of tasks, some from my calendar (both my work and my personal calendar), some from "routines" (which are daily tasks that I want to do most days, such as eating breakfast or picking up the kid), and some tasks come from "goals" which are a list of completable items. These tasks go into Bob, and he tries to create a plan for the next couple of days where I get everything done that I set out to do.
|
||||
|
||||
Tasks have a bit more data than your standard calendar events to allow for good scheduling
|
||||
* An "earliest start time" and a "latest start time". These define when the task can be added to the schedule.
|
||||
* A list of locations where the task can be completed.
|
||||
* A duration.
|
||||
* If the task is required.
|
||||
* A priority
|
||||
|
||||

|
||||
|
||||
Bob uses a graph walk to create the optimal plan, where each node contains a few different things
|
||||
* A list of remaining tasks
|
||||
* A list of tasks that are impossible to complete in the current plan
|
||||
* A score
|
||||
* The current location
|
||||
* The present time
|
||||
|
||||
Bob starts by figuring out which locations I can go to complete the remaining tasks and then creates new leaf nodes for all of those transits. Next, he figures out if some of the remaining tasks become impossible to complete and when I will arrive at the location and calculate a score for that node.
|
||||
|
||||
He then gets a list of all the remaining tasks for the current node which can be completed at the current location, again figuring out when I would be done with the task, updating the list of impossible tasks and scoring the node.
|
||||
If any node adds a required task to the impossible list, that node is considered dead, and Bob will not analyze it further.
|
||||
|
||||

|
||||
|
||||
Now we have a list of active leaves, and from that list, we find the node with the highest score and redo the process from above.
|
||||
|
||||

|
||||
|
||||
Bob has four different strategies for finding a plan.
|
||||
|
||||
* First valid: this finds the first plan that satisfies all constraints but may lead to non-required tasks getting removed, even though it would be possible to find a plan that included all tasks. This strategy is the fastest and least precise strategy.
|
||||
* First complete: this does the same as "First valid" but only exits early if it finds a plan that includes all tasks. This strategy will generally create pretty good plans but can contain excess transits. If it does not find any plans that contain all tasks, it will switch to the "All valid" strategy.
|
||||
* All valid: this explores all paths until the path is either dead or completed. Then it finds the plan with the highest score. If there are no valid plans, it will switch to the "All" strategy.
|
||||
* All: This explores all paths, even dead ones, and at the end returns the one with the highest score. This strategy allows a plan to be created even if it needs to remove some required tasks.
|
||||
|
||||
Scoring is quite simple at the moment, but something I plan to expand on a lot. Currently, the score gets increased when a task gets completed, and it gets decreased when a task becomes impossible. How much it is increased or decreased is influenced by the task's priority and if the task is required. It also decreases based on minutes spent transiting.
|
||||
|
||||
The leaf picked for analysis is the one with the highest score. This approach allows the two first strategies to create decent results, though they aren't guaranteed to be the best. It all comes down to how well tuned the scoring variables are tweaked. Currently, they aren't, but at some point, I plan to create a training algorithm for Bob, which will create plans, score them through "All", and then try to tweak the variables to arrive at the correct one with as few nodes analyzed as possible when running the same plan through "First valid"/"First complete".
|
||||
|
||||
This approach also allows me to calculate a plan with any start time, so I can re-plan it later in the day if I can't follow the original plan or if stuff gets added or removed. So this becomes a tool that helps me get the most out of my day without dictating it.
|
||||
|
||||
Bob can also do multi-day planning. Here, he gets a list of tasks for the different days as he usually would and a "shared" list of goals. So he runs the same calculation, adding in the tasks for that day, along with the shared goal list, and everything remaining from the shared list then gets carried over to the next day. This process repeats for all the remaining days.
|
||||
|
||||
I have created a proof of concept app that houses Bob. I can manage tasks, generate plans, and update my calendar with those plans in this app.
|
||||
|
||||
There are also a few features that I want to add later. The most important one is an "asset" system. For instance, when calculating transits, it needs to know if I have brought the bike along because if I took public transit to work, it doesn't make sense to calculate a bike transit later in the day. This system would work by "assets" being tied to a task and location, and then when Bob creates plans, he knows to consider if the asset is there or not. Assets could also be tied to tasks, so one task may be to pick up something, another to drop it off. In those cases, assets would act as dependencies, so I have to have picked up the asset before being able to drop it off. The system is pretty simple to implement but causes the graph to grow a lot, so I need to do some optimizations before it makes sense to put it in.
|
||||
|
||||
Wrapping up; I have only been using Bob for a few days, but so far, he seems to create good plans and has helped me achieve more both productive tasks, also scheduling downtime such as reading, meditation, playing console etc. and ensuring that I had time for that in the plan.
|
||||
|
||||
There is still a lot of stuff that needs to be done, and I will add in features and fix the code base slowly over time.
|
||||
|
||||
You can find the source for this algorithm and the app it lives in at [Github](https://github.com/morten-olsen/bob-the-algorithm), but beware, it is a proof of concept, so readability or maintainability hasn't been a goal.
|
||||
|
||||
BIN
content/articles/2022-12-05-git-strategy/cover.png
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
71
content/articles/2022-12-05-git-strategy/main.md
Normal file
@@ -0,0 +1,71 @@
|
||||
---
|
||||
title: A meta talk about Git strategies
|
||||
cover: cover.png
|
||||
slug: a-meta-talk-about-git-strategies
|
||||
published: 2022-12-05
|
||||
color: '#7ed6df'
|
||||
---
|
||||
|
||||
Let me start with a (semi) fictional story; It is Friday, and you and your team have spent the last five weeks working on this excellent new feature. You have written a bunch of unit tests to ensure that you maintain your project's impressive 100% test coverage, and both you, your product owner and the QA testers have all verified that everything is tip-top and ready to go for the launch! You hit the big "Deploy" button. 3-2-1 Success! it is released to production, and everyone gets their glass of Champagne!
|
||||
|
||||
You go home for the weekend satisfied with the great job you did.
|
||||
|
||||
On Monday, you open your email to find it flooded with customers screaming that nothing is working! Oh no, you must have made a mistake!!! So you set about debugging and quickly locate the error message in your monitoring, so you checkout the code from Git and start investigating. But the error that happens isn't even possible. So you spend the entire day debugging, again and again, coming to the same conclusion; This is not possible.
|
||||
|
||||
> Git Rocks!
|
||||
|
||||
So finally, you decide to go and read the deployment log line-by-painstakingly-line, and there, on line 13.318, you see it! One of your 12 microservices failed deployment! The deployment used a script with a pipe in it. Unfortunately, the script did not have pipefail configured. The script, therefore, did not generate a non-zero exit code, so the deployment just kept humming along, deploying the remaining 11 with success. This chain of events resulted in a broken infrastructure state and unhappy customers, and you spend the entire Monday debugging and potentially the ENTIRE EXISTENCE coming to an end!
|
||||
|
||||
|
||||
I think most developers would have a story similar to the one above, so why is getting release management right so damn hard? Modern software architecture and the tools that help us are complex machineries, which goes for our deployment tools. Therefore ensuring that every little thing is as planned means that we would have to check hundreds, if not thousands of items, each more to decipher than the last (anyone who has ever tried to solve a broken Xcode builds from an output log will know this).
|
||||
|
||||
So is there a better way? Unfortunately, when things break, any of those thousands of items could be the reason, so when stuff does break, the answer is most likely no, but what about just answering the simple question: "Is something broken?". Well, I am glad you asked because I do believe that there is a better way, and it is a way that revolves around Git.
|
||||
|
||||
# Declaring your expected state
|
||||
|
||||
So I am going to talk about Kubernetes, yet again - A technology I use less and less but, for some reason, ends up being part of my examples more and more often.
|
||||
|
||||
At its core Kubernetes has two conceptually simple tasks; one, it stores an expected state of the resources that it is supposed to keep track of; two, if any of those resources are, in fact, not in the expected state, it tries to right the wrong.
|
||||
|
||||
This approach means that when we interact with Kubernetes, we don't ask it to perform a specific task - We never tell it, "create three additional instances of service X," but rather ", There should be five instances of service X".
|
||||
|
||||
This approach also means that instead of actions and events, we can use reconciliation - no tracking of what was and what is, just what we expect; the rest is the tool's responsibility.
|
||||
|
||||
It also makes it very easy for Kubernetes to track the health of the infrastructure - it knows the expected state. If the actual state differs, it is in some unhealthy state, and if it is unhealthy, it should either fix it or, failing that, raise the alarm for manual intervention.
|
||||
|
||||
# Git as the expected state
|
||||
|
||||
So how does this relate to Git? Well, Git is a version control system. As such, it should keep track of the state of the code. That, to me, doesn't just include when and why but also where - to elaborate: Git is already great at telling when something happened and also why (provided that you write good commit messages), but it should also be able to answer what is the code state in a given context.
|
||||
|
||||
So let's say you have a production environment; a good Git strategy, in my opinion, should be able to answer the question, "What is the expected code state on production right now?" And note the word "expected" here; it is crucial because Git is, of course, not able to do deployments or sync environments (in most cases) but what it can do is serve as our expected state that I talked about with Kubernetes.
|
||||
|
||||
The target is to be able to compare what we expect, with what is actually there, completely independent of all the tooling that sits in between, as we want to remove those as a source of error or complexity.
|
||||
|
||||
We want to have something with the simplicity of the Kubernetes approach - we declare an expected state, and the tooling enforces this or alerts us if it can not.
|
||||
|
||||
We also need to ensure that we can compare our expected state to the actual state.
|
||||
|
||||
To achieve this we are going to focus on Git SHAs, so we will be tracking if a deployed resource is a deployment of our expected SHA.
|
||||
|
||||
For a web resource, an excellent way to do this could be through a `/.well-known/deployment-meta.json` while if you are running something like Terraform and AWS, you could tag your resources with this SHA - Try to have as few different methods of exposing this information as possible to keep monitoring simple.
|
||||
|
||||
With this piece of information, we are ready to create our monitor. Let's say we have a Git ref called `environments/production`, and its HEAD points to what we expect to be in production, now comparing is simply getting the SHA of the HEAD commit of that ref and comparing it to our `./well-known/deployment-meta.json`. If they match, the environment is in the expected state. If not, it is unhealthy.
|
||||
|
||||
Let's extend on this a bit; we can add a scheduled task that checks the monitor. If it is unhealthy, it retriggers a deployment and, if that fails, raises the alarm - So even if a deployment failed and no one noticed it yet, it will get auto-corrected the next time our simple reconciler runs. This can be done simply using something like a GitHub workflow.
|
||||
|
||||
You could also go all in and write a crossplane controller and use the actual Kubernetes reconciler to ensure your environments are in a healthy state - Go as crazy as you like, just remember to make the tool work for you, not the other way around.
|
||||
|
||||
So, now we have a setup where Git tracks the expected state, and we can easily compare the expected state and the actual state. Lastly, we have a reconciliation loop that tries to rectify any discrepancy.
|
||||
|
||||
# Conclusion
|
||||
|
||||
So as a developer, the only thing I need to keep track of is that my Git refs are pointing to the right stuff. Everything else is reconciliation that I don't have to worry about - unless it is unreconcilable - and in which case, I will get alerted.
|
||||
|
||||
As someone responsible for the infrastructure, the only thing I need to keep track of is that the expected state matches the actual state.
|
||||
|
||||
No more multi-tool lookup, complex log dives or timeline reconstruction (until something fails, of course)
|
||||
|
||||
I believe that the switch from Git being just the code to being the code state makes a lot of daily tasks more straightforward and more transparent, builds a more resilient infrastructure and is worth considering when deciding how you want to do Git.
|
||||
|
||||
|
||||
|
||||
BIN
content/assets/me.jpg
Normal file
|
After Width: | Height: | Size: 1.2 MiB |
36
content/profile.yml
Normal file
@@ -0,0 +1,36 @@
|
||||
name: Morten Olsen
|
||||
image: assets/me.jpg
|
||||
tagline: Hi, I'm Morten And I make software
|
||||
about: |
|
||||
  Hell-bent on a conquest to take over
|
||||
the digital world (and the physical,
|
||||
should the chance arise). Am I over
|
||||
ambitious? Perhaps, but with a
|
||||
proven track record in most aspects
|
||||
of things which can process 1s and
|
||||
0s and a mind which runs at a speed
|
||||
which can battle a well-trained race-
|
||||
  horse, I believe I am equipped to un-
|
||||
dertake this voyage!
|
||||
info:
|
||||
- name: E-mail
|
||||
value: morten@olsen.pro
|
||||
- name: Location
|
||||
value: Copenhagen, DK
|
||||
- name: Github
|
||||
value: https://github.com/morten-olsen
|
||||
skills:
|
||||
- name: Typescript
|
||||
level: 5
|
||||
- name: React
|
||||
level: 5
|
||||
- name: React Native
|
||||
level: 4
|
||||
- name: NodeJS
|
||||
level: 5
|
||||
- name: Docker
|
||||
level: 4
|
||||
- name: NextJS
|
||||
level: 3
|
||||
- name: Fastify
|
||||
level: 3
|
||||
8
content/resume/positions/bilzonen/main.md
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
company: BilZonen
|
||||
title: Web Developer
|
||||
from: 2010
|
||||
to: 2012
|
||||
---
|
||||
|
||||
I work as a part-time web developer on bilzonen.dk. I have worked with both day-to-day maintenance and large scale projects (new search module, integration of new data catalog, mobile site, new-car-catalog and the entire dealer solution). The page is an Umbraco solution, with all .NET (C#) code. I have introduced a new custom build provider-model system, which allows data-providers to move data between data stores, external services, and the site. (search, caching and external car data is running through the provider system). Also, I have set up the development environment, from setting up virtual server hosts to building custom tools for building and unit testing.
|
||||
8
content/resume/positions/haastrupit/main.md
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
company: Haastrup IT
|
||||
title: Web developer
|
||||
from: 2009
|
||||
to: 2010
|
||||
---
|
||||
|
||||
I have worked as a part-time project coordinator and systems developer, with responsibility for a wide variety of projects including projects for "Københavns Kommune" (Navision reporting software) and "Syddanmarks kommune" (electronic application processing system). Most projects were made in C#, but also PHP, VB and ActionScript. In addition to that I maintained the in-house hosting setup.
|
||||
12
content/resume/positions/sampension/main.md
Normal file
@@ -0,0 +1,12 @@
|
||||
---
|
||||
company: Sampension
|
||||
title: Senior Frontend Developer
|
||||
from: 2018
|
||||
to: 2021
|
||||
---
|
||||
|
||||
Sampension is a danish pension fund and my work has been to design and help to build a frontend architecture that would run natively on iOS and Android as well as on the web on both desktop and mobile devices.
|
||||
|
||||
It was important to ensure that the project felt at home on all platforms and that it was maintainable by a small team of developers.
|
||||
|
||||
To achieve this we used React Native and React Native for Web to create a unified codebase for all platforms, as well as create a component library which would deal with ensuring the best UX on all platforms.
|
||||
8
content/resume/positions/sydbank/main.md
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
company: Sydbank
|
||||
title: IT Hotline
|
||||
from: 2007
|
||||
to: 2009
|
||||
---
|
||||
|
||||
I work as a part-time supporter of customers (private and business) and staff, on Sydbank's different electronic banking systems. Mostly telephonic bug finding and PC setup.
|
||||
8
content/resume/positions/trendsales-1/main.md
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
company: Trendsales
|
||||
title: Web Developer
|
||||
from: 2012
|
||||
to: 2012
|
||||
---
|
||||
|
||||
I got a part-time job at Trendsales, where my primary responsibility was maintaining the API which powered the iOS app. Quickly my tasks became more diverse, and I ended up using about 25-50 percent of my time on the API, while the remainder was spent doing work on the platform in general.
|
||||
16
content/resume/positions/trendsales-2/main.md
Normal file
@@ -0,0 +1,16 @@
|
||||
---
|
||||
company: Trendsales
|
||||
title: iOS and Android Developer
|
||||
from: 2012
|
||||
to: 2015
|
||||
---
|
||||
|
||||
I became responsible for the iOS platform, which was a task that required a new app to be built from the ground up using _Xamarin_. In addition to that, a new API to support the app along with support for our larger vendors was needed which had to be build using something closely similar to _Microsoft MVC_ so that other people could join the project at a later stage.
|
||||
|
||||
The project started in October with the initial version available to our users in late December.
|
||||
|
||||
This project represented my first adventure into mobile development and became an app with more than 15 million screen views and 1.5 million sessions per month.
|
||||
|
||||
After that, I joined two other colleagues, who were working on an Android version of the app, to form a joint mobile development team.
|
||||
|
||||
Throughout the period I also worked on the backend for the web page from time to time.
|
||||
13
content/resume/positions/trendsales-3/main.md
Normal file
@@ -0,0 +1,13 @@
|
||||
---
|
||||
company: Trendsales
|
||||
title: Frontend Technical Lead
|
||||
from: 2016
|
||||
to: 2018
|
||||
---
|
||||
|
||||
In 2015 Trendsales decided to build an entirely new platform. It became my responsibility to create a modernized frontend architecture. The work began in 2016 with just me on the project and consisted of a proof of concept version containing everything from framework selection, structure, style guides, build chain, continuous deployment, and an actual initial working version. The result was the platform which I was given technical ownership over and which I, along with two others, worked on expanding over the next year. The platform is currently powering _m.trendsales.dk_. The project is built using React and state management is done using Redux. In addition to the off-the-shelf frameworks, we also needed to develop quite a few bespoke frameworks, in order to meet demands. Among others, these were created to solve the following issues:
|
||||
|
||||
- Introducing a new navigational paradigm
|
||||
- Create a more flexible routing mechanism
|
||||
- Be able to serve skeleton page, for page transitions while still being able to create complete server-side pages
|
||||
- Ensure project flows between multiple systems such as Github, Jira, Octopus Deploy, AppVeyor and Docker
|
||||
8
content/resume/positions/zeronorth/main.md
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
company: ZeroNorth
|
||||
title: Senior Software Engineer
|
||||
from: 2022
|
||||
to: Present
|
||||
---
|
||||
|
||||
Hello world
|
||||
9
content/templates/latex/article.tex
Normal file
@@ -0,0 +1,9 @@
|
||||
\documentclass{article}
|
||||
\usepackage{graphicx}
|
||||
\usepackage{hyperref}
|
||||
\title{<%-article.title%>}
|
||||
\begin{document}
|
||||
\maketitle
|
||||
\includegraphics[width=0.5\textwidth]{<%-article.cover%>}
|
||||
<%-article.body%>
|
||||
\end{document}
|
||||
149
content/templates/latex/resume.tex
Normal file
@@ -0,0 +1,149 @@
|
||||
\documentclass[10pt, a4paper]{article}
|
||||
\usepackage[top=2cm, bottom=2cm, left=2cm, right=2cm]{geometry}
|
||||
\usepackage{graphicx}
|
||||
\usepackage{hyperref}
|
||||
\usepackage{calc}
|
||||
\usepackage{multicol}
|
||||
\usepackage{fancyhdr}
|
||||
|
||||
\setlength{\columnseprule}{0.1pt}
|
||||
%\setlength{\columnsep}{1.5cm}
|
||||
\def \columncount {2}
|
||||
\def \skillcolumncount {2}
|
||||
|
||||
\pagestyle{fancy}
|
||||
\fancyhf{}
|
||||
\rhead{<%-profile.name%> \today}
|
||||
\lhead{Curriculum Vitae}
|
||||
\rfoot{Page \thepage}
|
||||
|
||||
\newenvironment{columns}{
|
||||
\ifnum\columncount>1
|
||||
\begin{multicols}{\columncount}
|
||||
\fi
|
||||
}{
|
||||
\ifnum\columncount>1
|
||||
\end{multicols}
|
||||
\fi
|
||||
\vspace{0.5cm}
|
||||
\hrule
|
||||
}
|
||||
|
||||
\newcommand{\cvinfo}[2]{
|
||||
\noindent \textbf{#1}\dotfill#2
|
||||
}
|
||||
|
||||
\newenvironment{cvtitle}[3]{
|
||||
\noindent\begin{minipage}{\textwidth}
|
||||
\noindent\begin{minipage}{\textwidth - 3.2cm}
|
||||
\Huge #1\newline\large #3
|
||||
\end{minipage}
|
||||
\noindent\begin{minipage}{3cm}
|
||||
\begin{flushright}
|
||||
\includegraphics[height=3cm]{#2}
|
||||
\end{flushright}
|
||||
\end{minipage}
|
||||
\vspace{0.5cm}
|
||||
\hrule
|
||||
\vspace{0.5cm}
|
||||
\ifnum\skillcolumncount>1
|
||||
\begin{multicols}{\skillcolumncount}
|
||||
\fi
|
||||
}{
|
||||
\ifnum\skillcolumncount>1
|
||||
\end{multicols}
|
||||
\fi
|
||||
\end{minipage}
|
||||
\hfill
|
||||
\begin{minipage}{\textwidth/3-2cm}
|
||||
\end{minipage}
|
||||
\vspace{1cm}
|
||||
\hrule
|
||||
}
|
||||
|
||||
\newenvironment{cvskills}{
|
||||
\noindent\begin{minipage}{\textwidth}
|
||||
\ifnum\skillcolumncount>1
|
||||
\begin{multicols}{\skillcolumncount}
|
||||
\fi
|
||||
}{
|
||||
\ifnum\skillcolumncount>1
|
||||
\end{multicols}
|
||||
\fi
|
||||
\vspace{0.5cm}
|
||||
\hrule
|
||||
\end{minipage}
|
||||
}
|
||||
|
||||
\newenvironment{cvbox}[3]
|
||||
{
|
||||
\noindent\begin{columns}
|
||||
\noindent{\Large \textbf{#1}} \hfill {\small #2} \\
|
||||
\textit{#3}
|
||||
\ifnum\columncount>2
|
||||
\vfill\null\columnbreak
|
||||
\else
|
||||
\\\\
|
||||
\fi
|
||||
}
|
||||
{
|
||||
\end{columns}
|
||||
%\end{minipage}
|
||||
\vspace{0.5cm}
|
||||
}
|
||||
|
||||
\newcommand{\cvskill}[2]{
|
||||
\textbf{#1}\dotfill
|
||||
\textit{#2}
|
||||
}
|
||||
|
||||
\newenvironment{cvexp}[4]
|
||||
{ \begin{cvbox}{#1}{#2 - #3}{#4} }
|
||||
{\end{cvbox}}
|
||||
|
||||
\newenvironment{cvproj}[3]
|
||||
{
|
||||
\noindent
|
||||
\begin{columns}
|
||||
\noindent{\Large \textbf{#1}} \\ {\small #3} \\
|
||||
{\tiny\textit{#2}}
|
||||
\ifnum\columncount>2
|
||||
\vfill\null\columnbreak
|
||||
\else
|
||||
\\\\
|
||||
\fi
|
||||
}
|
||||
{
|
||||
\end{columns}
|
||||
\vspace{0.5cm}
|
||||
}
|
||||
|
||||
\begin{document}
|
||||
|
||||
\begin{cvtitle}{<%-profile.name%>}{<%-profile.imagePath%>}{<%-profile.tagline%>}
|
||||
<% for (let info of profile.info) { %>
|
||||
\cvinfo{<%-info.name%>}{<%-info.value%>}
|
||||
<% } %>
|
||||
\end{cvtitle}
|
||||
|
||||
\begin{columns}
|
||||
\section*{Who am I?}
|
||||
<%-profile.about%>
|
||||
\end{columns}
|
||||
|
||||
\section*{Platform and languages}
|
||||
Platforms and languages which I have worked with. The list is a shortened down version\\\\
|
||||
\begin{cvskills}
|
||||
<% for (let skill of profile.skills.sort((a, b) => b.level - a.level)) { %>
|
||||
\cvskill{<%-skill.name%>}{<%-skill.level%>}
|
||||
<% } %>
|
||||
\end{cvskills}
|
||||
|
||||
\section*{Experience}
|
||||
<% for (let exp of positions.sort((a, b) => new Date(b.from) - new Date(a.from))) { %>
|
||||
\begin{cvexp}{<%-exp.company%>}{<%-exp.from%>}{<%-exp.to%>}{<%-exp.title%>}
|
||||
<%-exp.content%>
|
||||
\end{cvexp}
|
||||
<% } %>
|
||||
|
||||
\end{document}
|
||||
292
content/templates/react/article.tsx
Normal file
@@ -0,0 +1,292 @@
|
||||
import styled, { createGlobalStyle } from "styled-components";
|
||||
import ReactMarkdown from "react-markdown";
|
||||
import { Jumbo } from "./typography";
|
||||
import { createTheme, ThemeProvider } from "./theme";
|
||||
import { Helmet } from "react-helmet-async";
|
||||
import { Page } from "types";
|
||||
|
||||
const GlobalStyle = createGlobalStyle`
|
||||
* { box-sizing: border-box; }
|
||||
body, html { height: 100%; margin: 0; }
|
||||
body {
|
||||
font-size: 17px;
|
||||
background-color: ${({ theme }) => theme.colors.background};
|
||||
color: ${({ theme }) => theme.colors.foreground};
|
||||
}
|
||||
`;
|
||||
|
||||
const Title = styled(Jumbo)`
|
||||
display: block;
|
||||
padding: 5rem 2rem;
|
||||
display: flex;
|
||||
align-items: flex-end;
|
||||
flex-wrap: wrap;
|
||||
min-height: 100%;
|
||||
margin-bottom: 10rem;
|
||||
margin-top: 10rem;
|
||||
@media only screen and (max-width: 900px) {
|
||||
padding-bottom: 2rem;
|
||||
}
|
||||
`;
|
||||
|
||||
const ArticleTitleWord = styled(Jumbo)`
|
||||
font-size: 4rem;
|
||||
line-height: 4.1rem;
|
||||
display: inline-block;
|
||||
padding: 0 15px;
|
||||
text-transform: uppercase;
|
||||
margin: 10px;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
color: ${({ theme }) => theme.colors.foreground};
|
||||
@media only screen and (max-width: 900px) {
|
||||
font-size: 2.5rem;
|
||||
line-height: 3.1rem;
|
||||
}
|
||||
@media only screen and (max-width: 700px) {
|
||||
padding: 5px;
|
||||
font-size: 2rem;
|
||||
line-height: 2.1rem;
|
||||
}
|
||||
`;
|
||||
|
||||
const Wrapper = styled.div`
|
||||
display: flex;
|
||||
height: 100%;
|
||||
`;
|
||||
|
||||
const ArticleWrapper = styled.article`
|
||||
font-size: 1.1rem;
|
||||
font-family: "Merriweather", serif;
|
||||
|
||||
> p,
|
||||
ul,
|
||||
ol {
|
||||
letter-spacing: 0.08rem;
|
||||
line-height: 2.1rem;
|
||||
text-align: justify;
|
||||
max-width: 700px;
|
||||
margin: 2rem 4rem;
|
||||
list-style-position: inside;
|
||||
background: ${({ theme }) => theme.colors.background};
|
||||
|
||||
@media only screen and (max-width: 700px) {
|
||||
margin: 2rem 2rem;
|
||||
}
|
||||
}
|
||||
|
||||
> p:first-of-type {
|
||||
display: block;
|
||||
margin-bottom: 2rem;
|
||||
padding-bottom: 2rem;
|
||||
}
|
||||
|
||||
> p:first-of-type::first-letter {
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
border: solid 5px ${({ theme }) => theme.colors.foreground};
|
||||
margin: 0 1rem 0 0;
|
||||
font-size: 6rem;
|
||||
float: left;
|
||||
clear: left;
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
li {
|
||||
padding: 0.5rem 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
img {
|
||||
max-width: 100%;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
> p + p::first-letter {
|
||||
margin-left: 1.8rem;
|
||||
}
|
||||
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6 {
|
||||
max-width: 350px;
|
||||
margin-left: -100px;
|
||||
float: left;
|
||||
padding: 20px;
|
||||
padding-right: 40px;
|
||||
shape-outside: padding-box;
|
||||
position: relative;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
text-transform: uppercase;
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
|
||||
@media only screen and (max-width: 900px) {
|
||||
width: 100%;
|
||||
margin-left: 0;
|
||||
shape-outside: inherit;
|
||||
float: none;
|
||||
}
|
||||
|
||||
&:after {
|
||||
position: absolute;
|
||||
content: "";
|
||||
right: 20px;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
border: solid 5px ${({ theme }) => theme.colors.foreground};
|
||||
left: 0;
|
||||
|
||||
@media only screen and (max-width: 900px) {
|
||||
border: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
blockquote {
|
||||
width: 350px;
|
||||
font-size: 3rem;
|
||||
margin-left: -100px;
|
||||
float: left;
|
||||
padding: 50px;
|
||||
shape-outside: padding-box;
|
||||
position: relative;
|
||||
text-transform: uppercase;
|
||||
|
||||
@media only screen and (max-width: 900px) {
|
||||
width: 100%;
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
&:before {
|
||||
color: ${({ theme }) => theme.colors.primary};
|
||||
content: "\\00BB";
|
||||
float: left;
|
||||
font-size: 6rem;
|
||||
}
|
||||
|
||||
&:after {
|
||||
position: absolute;
|
||||
content: "";
|
||||
right: 20px;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
border-right: 5px solid;
|
||||
border-color: ${({ theme }) => theme.colors.primary};
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const Content = styled.div`
|
||||
margin-right: 40%;
|
||||
max-width: 700px;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
right: 0;
|
||||
|
||||
@media only screen and (max-width: 900px) {
|
||||
margin-right: 0;
|
||||
}
|
||||
`;
|
||||
|
||||
const Side = styled.aside`
|
||||
position: fixed;
|
||||
right: 0;
|
||||
top: 0;
|
||||
width: 40%;
|
||||
height: 100%;
|
||||
clip-path: polygon(40% 0%, 100% 0%, 100% 100%, 0% 100%, 0% 50%);
|
||||
|
||||
@media only screen and (max-width: 900px) {
|
||||
position: static;
|
||||
width: 100%;
|
||||
height: 350px;
|
||||
clip-path: none;
|
||||
}
|
||||
`;
|
||||
|
||||
const Cover = styled.div<{ src: string }>`
|
||||
background: url(${({ src }) => src});
|
||||
background-size: cover;
|
||||
background-position: center;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
`;
|
||||
|
||||
const Download = styled.a`
|
||||
display: inline-block;
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
color: ${({ theme }) => theme.colors.foreground};
|
||||
text-align: center;
|
||||
padding: 1rem;
|
||||
font-size: 1rem;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
text-transform: uppercase;
|
||||
text-decoration: none;
|
||||
`;
|
||||
|
||||
const Author = styled.a`
|
||||
text-transform: uppercase;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
font-size: 2rem;
|
||||
margin: 1rem;
|
||||
display: inline-block;
|
||||
position: relative;
|
||||
text-decoration: none;
|
||||
color: ${({ theme }) => theme.colors.foreground};
|
||||
|
||||
&:after {
|
||||
content: "";
|
||||
border-bottom: solid 15px ${({ theme }) => theme.colors.primary};
|
||||
display: block;
|
||||
width: 100%;
|
||||
height: 5px;
|
||||
bottom: 0px;
|
||||
z-index: -1;
|
||||
position: absolute;
|
||||
}
|
||||
`;
|
||||
|
||||
const ArticlePage: Page<"article"> = ({ article, profile, pdfUrl }) => {
|
||||
return (
|
||||
<ThemeProvider theme={createTheme({ baseColor: article.color })}>
|
||||
<Helmet>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link
|
||||
rel="preconnect"
|
||||
href="https://fonts.gstatic.com"
|
||||
crossOrigin="anonymous"
|
||||
/>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Archivo+Black&family=Black+Ops+One&family=Merriweather:wght@400;700&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
</Helmet>
|
||||
<GlobalStyle />
|
||||
<Wrapper>
|
||||
<Content>
|
||||
<Title>
|
||||
{article.title.split(" ").map((word, index) => (
|
||||
<ArticleTitleWord key={index}>{word}</ArticleTitleWord>
|
||||
))}
|
||||
<Author href="/">by {profile.name}</Author>
|
||||
</Title>
|
||||
<Download href={pdfUrl} target="_blank" rel="noreferrer">
|
||||
Download PDF
|
||||
</Download>
|
||||
<ArticleWrapper>
|
||||
<ReactMarkdown>{article.content}</ReactMarkdown>
|
||||
</ArticleWrapper>
|
||||
</Content>
|
||||
<Side>
|
||||
<Cover src={article.coverUrl} />
|
||||
</Side>
|
||||
</Wrapper>
|
||||
</ThemeProvider>
|
||||
);
|
||||
};
|
||||
|
||||
export default ArticlePage;
|
||||
77
content/templates/react/components/article/grid/index.tsx
Normal file
@@ -0,0 +1,77 @@
|
||||
import React, { useMemo } from "react";
|
||||
import styled from "styled-components";
|
||||
import ArticlePreview from "../preview";
|
||||
import { JumboArticlePreview } from "../preview/jumbo";
|
||||
import { MiniArticlePreview } from "../preview/mini";
|
||||
import { Article } from "types";
|
||||
|
||||
type Props = {
|
||||
articles: Article[];
|
||||
};
|
||||
|
||||
const Wrapper = styled.div`
|
||||
width: 100%;
|
||||
`;
|
||||
|
||||
const FeaturedArticle = styled.div`
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
margin: 0 auto;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 100%;
|
||||
`;
|
||||
|
||||
const FeaturedArticles = styled.div`
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
margin: 0 auto;
|
||||
width: 100%;
|
||||
flex-wrap: wrap;
|
||||
`;
|
||||
|
||||
const RemainingArticles = styled.div`
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
flex-wrap: wrap;
|
||||
margin: 0 auto;
|
||||
width: 100%;
|
||||
`;
|
||||
|
||||
const ArticleGrid: React.FC<Props> = ({ articles }) => {
|
||||
const sorted = useMemo(
|
||||
() => articles,
|
||||
// TODO:
|
||||
// articles.sort(
|
||||
// (a, b) =>
|
||||
// new Date(b.published).getTime() -
|
||||
// new Date(a.published).getTime()
|
||||
// ),
|
||||
[articles]
|
||||
);
|
||||
const featured1 = useMemo(() => sorted.slice(0, 1)[0], [sorted]);
|
||||
|
||||
const featured2 = useMemo(() => sorted.slice(1, 4), [sorted]);
|
||||
|
||||
const remaining = useMemo(() => sorted.slice(4, 12), [sorted]);
|
||||
|
||||
return (
|
||||
<Wrapper>
|
||||
<FeaturedArticle>
|
||||
<JumboArticlePreview article={featured1} />
|
||||
</FeaturedArticle>
|
||||
<FeaturedArticles>
|
||||
{featured2.map((article) => (
|
||||
<ArticlePreview key={article.title} article={article} />
|
||||
))}
|
||||
</FeaturedArticles>
|
||||
<RemainingArticles>
|
||||
{remaining.map((article) => (
|
||||
<MiniArticlePreview key={article.title} article={article} />
|
||||
))}
|
||||
</RemainingArticles>
|
||||
</Wrapper>
|
||||
);
|
||||
};
|
||||
|
||||
export { ArticleGrid };
|
||||
82
content/templates/react/components/article/preview/index.tsx
Normal file
@@ -0,0 +1,82 @@
|
||||
import React, { useMemo } from "react";
|
||||
import styled from "styled-components";
|
||||
import { Title1 } from "@/typography";
|
||||
import { createTheme } from "@/theme/create";
|
||||
import { ThemeProvider } from "@/theme/provider";
|
||||
import { Article } from "types";
|
||||
|
||||
type Props = {
|
||||
article: Article;
|
||||
};
|
||||
|
||||
const Wrapper = styled.a`
|
||||
height: 500px;
|
||||
border-right: 2px solid rgba(0, 0, 0, 0.1);
|
||||
flex: 1;
|
||||
min-width: 200px;
|
||||
position: relative;
|
||||
margin: 15px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
@media only screen and (max-width: 700px) {
|
||||
max-height: 300px;
|
||||
}
|
||||
`;
|
||||
|
||||
const Title = styled(Title1)`
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
line-height: 40px;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
font-size: 25px;
|
||||
padding: 0 5px;
|
||||
margin: 5px 0;
|
||||
`;
|
||||
|
||||
const MetaWrapper = styled.div`
|
||||
top: 10px;
|
||||
left: 10px;
|
||||
right: 10px;
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
`;
|
||||
|
||||
const AsideWrapper = styled.aside<{
|
||||
image?: string;
|
||||
}>`
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
background-size: cover;
|
||||
background-position: center;
|
||||
${({ image }) => (image ? `background-image: url(${image});` : "")}
|
||||
flex: 1;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
left: 0;
|
||||
}
|
||||
`;
|
||||
|
||||
const ArticlePreview: React.FC<Props> = ({ article }) => {
|
||||
const theme = useMemo(
|
||||
() =>
|
||||
createTheme({
|
||||
baseColor: article.color,
|
||||
}),
|
||||
[article.color]
|
||||
);
|
||||
return (
|
||||
<ThemeProvider theme={theme}>
|
||||
<Wrapper href={`/articles/${article.slug}`}>
|
||||
<AsideWrapper image={article.thumbUrl} />
|
||||
<MetaWrapper>
|
||||
{article.title.split(" ").map((word, index) => (
|
||||
<Title key={index}>{word}</Title>
|
||||
))}
|
||||
</MetaWrapper>
|
||||
</Wrapper>
|
||||
</ThemeProvider>
|
||||
);
|
||||
};
|
||||
|
||||
export default ArticlePreview;
|
||||
79
content/templates/react/components/article/preview/jumbo.tsx
Normal file
@@ -0,0 +1,79 @@
|
||||
import React from "react";
|
||||
import styled from "styled-components";
|
||||
import { Title1, Body1 } from "@/typography";
|
||||
import { Article } from "types";
|
||||
|
||||
type Props = {
|
||||
article: Article;
|
||||
};
|
||||
|
||||
const Wrapper = styled.a`
|
||||
height: 300px;
|
||||
flex: 1;
|
||||
position: relative;
|
||||
margin: 15px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
background: ${({ theme }) => theme.colors.background};
|
||||
|
||||
@media only screen and (max-width: 700px) {
|
||||
flex-direction: column;
|
||||
height: 500px;
|
||||
}
|
||||
`;
|
||||
|
||||
const Title = styled(Title1)`
|
||||
line-height: 40px;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
font-size: 25px;
|
||||
padding: 0 5px;
|
||||
margin: 5px 0;
|
||||
`;
|
||||
|
||||
const Summery = styled(Body1)`
|
||||
max-width: 300px;
|
||||
padding: 0 5px;
|
||||
margin: 5px 0;
|
||||
overflow: hidden;
|
||||
letter-spacing: 0.5px;
|
||||
line-height: 2.1rem;
|
||||
|
||||
@media only screen and (max-width: 700px) {
|
||||
max-height: 100px;
|
||||
}
|
||||
`;
|
||||
|
||||
const MetaWrapper = styled.div`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: 40px;
|
||||
`;
|
||||
|
||||
const AsideWrapper = styled.aside<{
|
||||
image?: string;
|
||||
}>`
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
background-size: cover;
|
||||
background-position: center;
|
||||
${({ image }) => (image ? `background-image: url(${image});` : "")}
|
||||
flex: 1;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
left: 0;
|
||||
}
|
||||
`;
|
||||
|
||||
const JumboArticlePreview: React.FC<Props> = ({ article }) => {
|
||||
return (
|
||||
<Wrapper href={`/articles/${article.slug}`}>
|
||||
<AsideWrapper image={article.coverUrl} />
|
||||
<MetaWrapper>
|
||||
<Title>{article.title}</Title>
|
||||
<Summery>{article.content}</Summery>
|
||||
</MetaWrapper>
|
||||
</Wrapper>
|
||||
);
|
||||
};
|
||||
|
||||
export { JumboArticlePreview };
|
||||
80
content/templates/react/components/article/preview/mini.tsx
Normal file
@@ -0,0 +1,80 @@
|
||||
import React, { useMemo } from "react";
|
||||
import styled from "styled-components";
|
||||
import { Title1 } from "@/typography";
|
||||
import { createTheme } from "@/theme/create";
|
||||
import { ThemeProvider } from "@/theme/provider";
|
||||
import { Article } from "types";
|
||||
|
||||
type Props = {
|
||||
article: Article;
|
||||
};
|
||||
|
||||
const Wrapper = styled.a`
|
||||
position: relative;
|
||||
margin: 15px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
width: 220px;
|
||||
height: 200px;
|
||||
|
||||
@media only screen and (max-width: 700px) {
|
||||
width: 100%;
|
||||
}
|
||||
`;
|
||||
|
||||
const Title = styled(Title1)`
|
||||
line-height: 20px;
|
||||
font-size: 20px;
|
||||
padding: 5px 5px;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
margin: 5px 0;
|
||||
background: ${({ theme }) => theme.colors.background};
|
||||
`;
|
||||
|
||||
const MetaWrapper = styled.div`
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
flex-wrap: wrap;
|
||||
padding: 10px;
|
||||
max-width: 220px;
|
||||
position: absolute;
|
||||
`;
|
||||
|
||||
const AsideWrapper = styled.aside<{
|
||||
image?: string;
|
||||
}>`
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
background-size: cover;
|
||||
background-position: center;
|
||||
${({ image }) => (image ? `background-image: url(${image});` : "")}
|
||||
position: absolute;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
left: 0;
|
||||
}
|
||||
`;
|
||||
|
||||
const MiniArticlePreview: React.FC<Props> = ({ article }) => {
|
||||
const theme = useMemo(
|
||||
() =>
|
||||
createTheme({
|
||||
baseColor: article.color,
|
||||
}),
|
||||
[article.color]
|
||||
);
|
||||
return (
|
||||
<ThemeProvider theme={theme}>
|
||||
<Wrapper href={`/articles/${article.slug}`}>
|
||||
<AsideWrapper image={article.thumbUrl} />
|
||||
<MetaWrapper>
|
||||
{article.title.split(" ").map((word, index) => (
|
||||
<Title key={index}>{word}</Title>
|
||||
))}
|
||||
</MetaWrapper>
|
||||
</Wrapper>
|
||||
</ThemeProvider>
|
||||
);
|
||||
};
|
||||
|
||||
export { MiniArticlePreview };
|
||||
30
content/templates/react/components/html/index.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
import { FC, ReactNode } from "react"
|
||||
|
||||
type HtmlProps = {
|
||||
body: ReactNode;
|
||||
head: ReactNode;
|
||||
scripts: string[];
|
||||
};
|
||||
|
||||
const Html: FC<HtmlProps> = ({ body, head, scripts }) => {
|
||||
return (
|
||||
<html>
|
||||
<head>
|
||||
<title>My App</title>
|
||||
{head}
|
||||
{scripts.map((script, index) => (
|
||||
<script key={index} src={script} />
|
||||
))}
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossOrigin="anonymous"/>
|
||||
<link href="https://fonts.googleapis.com/css2?family=Archivo+Black&family=Black+Ops+One&family=Merriweather:wght@400;700&display=swap" rel="stylesheet" />
|
||||
</head>
|
||||
<body>
|
||||
<div id="root">{body}</div>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
};
|
||||
|
||||
export { Html };
|
||||
65
content/templates/react/components/sheet/index.tsx
Normal file
@@ -0,0 +1,65 @@
|
||||
import React, { ReactNode, useMemo } from "react";
|
||||
import styled from "styled-components";
|
||||
import { createTheme } from "@/theme/create";
|
||||
import { ThemeProvider } from "@/theme/provider";
|
||||
|
||||
const Wrapper = styled.div`
|
||||
background: ${({ theme }) => theme.colors.background};
|
||||
color: ${({ theme }) => theme.colors.foreground};
|
||||
min-height: 100%;
|
||||
position: relative;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-direction: column;
|
||||
`;
|
||||
|
||||
const BackgroundWrapper = styled.div<{
|
||||
image?: string;
|
||||
}>`
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
background-size: cover;
|
||||
background-position: center;
|
||||
opacity: 0.2;
|
||||
${({ image }) => (image ? `background-image: url(${image});` : "")}
|
||||
`;
|
||||
|
||||
const Content = styled.div`
|
||||
z-index: 1;
|
||||
display: flex;
|
||||
max-width: 1000px;
|
||||
width: 100%;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-direction: column;
|
||||
`;
|
||||
|
||||
type Props = {
|
||||
children: ReactNode;
|
||||
background?: string;
|
||||
color?: string;
|
||||
};
|
||||
|
||||
const Sheet: React.FC<Props> = ({ color, background, children }) => {
|
||||
const theme = useMemo(
|
||||
() =>
|
||||
createTheme({
|
||||
baseColor: color,
|
||||
}),
|
||||
[color]
|
||||
);
|
||||
return (
|
||||
<ThemeProvider theme={theme}>
|
||||
<Wrapper>
|
||||
<BackgroundWrapper image={background} />
|
||||
<Content>{children}</Content>
|
||||
</Wrapper>
|
||||
</ThemeProvider>
|
||||
);
|
||||
};
|
||||
|
||||
export { Sheet };
|
||||
160
content/templates/react/frontpage.tsx
Normal file
@@ -0,0 +1,160 @@
|
||||
import styled, { createGlobalStyle } from "styled-components";
|
||||
import { ArticleGrid } from "@/components/article/grid";
|
||||
import { Jumbo } from "@/typography";
|
||||
import { useMemo } from "react";
|
||||
import { Sheet } from "./components/sheet";
|
||||
import { ThemeProvider, createTheme } from "./theme";
|
||||
import chroma from "chroma-js";
|
||||
import { Helmet } from "react-helmet-async";
|
||||
import { Page } from "../../../types";
|
||||
|
||||
const GlobalStyle = createGlobalStyle`
|
||||
* { box-sizing: border-box; }
|
||||
body, html { height: 100%; margin: 0; }
|
||||
body {
|
||||
font-size: 17px;
|
||||
background-color: ${({ theme }) => theme.colors.background};
|
||||
color: ${({ theme }) => theme.colors.foreground};
|
||||
}
|
||||
`;
|
||||
|
||||
const Hero = styled.div`
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
`;
|
||||
|
||||
const Download = styled.a`
|
||||
font-size: 30px;
|
||||
line-height: 40px;
|
||||
display: inline-block;
|
||||
background: ${({ theme }) => theme.colors.foreground};
|
||||
color: ${({ theme }) => theme.colors.primary};
|
||||
padding: 0 15px;
|
||||
text-transform: uppercase;
|
||||
margin: 10px;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
@media only screen and (max-width: 700px) {
|
||||
margin: 5px;
|
||||
font-size: 3rem;
|
||||
line-height: 3.1rem;
|
||||
}
|
||||
`;
|
||||
|
||||
const Title = styled(Jumbo)`
|
||||
font-size: 60px;
|
||||
line-height: 80px;
|
||||
display: inline-block;
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
color: ${({ theme }) => theme.colors.foreground};
|
||||
padding: 0 15px;
|
||||
text-transform: uppercase;
|
||||
margin: 10px;
|
||||
font-family: "Black Ops One", sans-serif;
|
||||
@media only screen and (max-width: 700px) {
|
||||
margin: 5px;
|
||||
font-size: 3rem;
|
||||
line-height: 3.1rem;
|
||||
}
|
||||
`;
|
||||
|
||||
const Arrow = styled.div`
|
||||
position: absolute;
|
||||
bottom: 20px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
:after {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
background: ${({ theme }) => theme.colors.primary};
|
||||
border-radius: 50%;
|
||||
width: 80px;
|
||||
height: 80px;
|
||||
content: "↓";
|
||||
font-size: 50px;
|
||||
@media only screen and (max-width: 700px) {
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const ImageBg = styled.picture`
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
object-position: center;
|
||||
z-index: -1;
|
||||
opacity: 0.5;
|
||||
img {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
object-position: center;
|
||||
}
|
||||
`;
|
||||
|
||||
const FrontPage: Page<"frontpage"> = ({ articles, profile }) => {
|
||||
const theme = useMemo(
|
||||
() =>
|
||||
createTheme({
|
||||
baseColor: chroma.random().brighten(1).hex(),
|
||||
}),
|
||||
[]
|
||||
);
|
||||
|
||||
return (
|
||||
<ThemeProvider theme={theme}>
|
||||
<Helmet>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link
|
||||
rel="preconnect"
|
||||
href="https://fonts.gstatic.com"
|
||||
crossOrigin="anonymous"
|
||||
/>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Archivo+Black&family=Black+Ops+One&family=Merriweather:wght@400;700&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
</Helmet>
|
||||
<GlobalStyle />
|
||||
<Sheet color="#c85279">
|
||||
<ImageBg>
|
||||
<img src={profile.imageUrl} loading="lazy" />
|
||||
</ImageBg>
|
||||
<Arrow />
|
||||
<Hero>
|
||||
{"Hi, I'm Morten".split(" ").map((char, index) => (
|
||||
<Title key={index}>{char}</Title>
|
||||
))}
|
||||
</Hero>
|
||||
<Hero>
|
||||
{"And I make software".split(" ").map((char, index) => (
|
||||
<Title key={index}>{char}</Title>
|
||||
))}
|
||||
</Hero>
|
||||
<Hero>
|
||||
<Download href="/resume.pdf" download>
|
||||
Download resumé
|
||||
</Download>
|
||||
</Hero>
|
||||
</Sheet>
|
||||
<Sheet color="#ef23e2">
|
||||
<Hero>
|
||||
{"Table of Content".split(" ").map((char, index) => (
|
||||
<Title key={index}>{char}</Title>
|
||||
))}
|
||||
</Hero>
|
||||
<ArticleGrid articles={articles} />
|
||||
</Sheet>
|
||||
</ThemeProvider>
|
||||
);
|
||||
};
|
||||
|
||||
export default FrontPage;
|
||||
60
content/templates/react/theme/create.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import chroma from 'chroma-js';
|
||||
import { Theme } from './theme';
|
||||
|
||||
const WHITE = chroma('white');
|
||||
const BLACK = chroma('black');
|
||||
|
||||
type CreateOptions = {
|
||||
baseColor?: string;
|
||||
};
|
||||
|
||||
const isBright = (color: chroma.Color) => color.luminance() > 0.4;
|
||||
|
||||
const createTheme = (options: CreateOptions = {}) => {
|
||||
const baseColor = options.baseColor
|
||||
? chroma(options.baseColor)
|
||||
: chroma.random();
|
||||
const text = isBright(baseColor) ? BLACK : WHITE;
|
||||
const bg = isBright(baseColor)
|
||||
? baseColor.luminance(0.9)
|
||||
: baseColor.luminance(0.01);
|
||||
const theme: Theme = {
|
||||
typography: {
|
||||
Jumbo: {
|
||||
weight: 'bold',
|
||||
size: 2.8,
|
||||
},
|
||||
Title1: {
|
||||
weight: 'bold',
|
||||
},
|
||||
Title2: {
|
||||
weight: 'bold',
|
||||
size: 1.3,
|
||||
},
|
||||
Body1: {},
|
||||
Overline: {
|
||||
size: 0.8,
|
||||
upperCase: true,
|
||||
},
|
||||
Caption: {
|
||||
size: 0.8,
|
||||
},
|
||||
Link: {
|
||||
upperCase: true,
|
||||
weight: 'bold',
|
||||
},
|
||||
},
|
||||
colors: {
|
||||
primary: baseColor.hex(),
|
||||
foreground: text.hex(),
|
||||
background: bg.hex(),
|
||||
},
|
||||
font: {
|
||||
baseSize: 16,
|
||||
},
|
||||
};
|
||||
return theme;
|
||||
};
|
||||
|
||||
export { createTheme };
|
||||
|
||||
6
content/templates/react/theme/global.d.ts
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
// Module augmentation: styled-components types the `theme` prop and the
// `theme` tag-function argument via DefaultTheme, so extending it with our
// Theme gives fully typed themes throughout these templates.
import {} from 'styled-components';
import { Theme } from './theme';
declare module 'styled-components' {
  // styled-components reads this interface as its default theme type.
  export interface DefaultTheme extends Theme {}
}
|
||||
|
||||
5
content/templates/react/theme/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { createTheme } from './create';
|
||||
import { ThemeProvider } from './provider';
|
||||
|
||||
export * from './theme';
|
||||
export { createTheme, ThemeProvider };
|
||||
14
content/templates/react/theme/provider.tsx
Normal file
@@ -0,0 +1,14 @@
|
||||
import React, { ReactNode } from 'react';
import { ThemeProvider as StyledThemeProvider } from 'styled-components';
import { Theme } from './theme';

type Props = {
  theme: Theme; // theme built by createTheme
  children: ReactNode;
};

/**
 * Thin wrapper around styled-components' ThemeProvider so consumers only
 * deal with our own Theme type.
 */
const ThemeProvider: React.FC<Props> = (props) => {
  const { theme, children } = props;
  return <StyledThemeProvider theme={theme}>{children}</StyledThemeProvider>;
};

export { ThemeProvider };
|
||||
30
content/templates/react/theme/theme.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
// Settings for a single typography variant; unset fields fall back to the
// base text defaults (see typography/index.tsx).
type Typography = {
  family?: string;
  // Multiplier applied to Theme.font.baseSize.
  size?: number;
  // NOTE(review): not read by typography/index.tsx in this chunk — confirm usage.
  spacing?: number;
  weight?: string;
  upperCase?: boolean;
};

// Full theme shape, consumed through styled-components' DefaultTheme
// augmentation (see global.d.ts).
type Theme = {
  typography: {
    Jumbo: Typography;
    Title2: Typography;
    Title1: Typography;
    Body1: Typography;
    Caption: Typography;
    Overline: Typography;
    Link: Typography;
  };
  colors: {
    primary: string;
    foreground: string;
    background: string;
  };
  font: {
    // Base font size in px; variant sizes scale from this.
    baseSize: number;
    family?: string;
  };
};

export type { Theme, Typography };
|
||||
28
content/templates/react/tsconfig.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2018",
|
||||
"module": "commonjs",
|
||||
"strict": true,
|
||||
"jsx": "react-jsx",
|
||||
"esModuleInterop": true,
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"sourceMap": true,
|
||||
"skipLibCheck": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"experimentalDecorators": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["./*"],
|
||||
"types/*": ["../../../types/*"],
|
||||
"types": ["../../../types"]
|
||||
}
|
||||
},
|
||||
"ts-node": {
|
||||
"files": true
|
||||
},
|
||||
"include": ["./**/*", "../../../types/**/*"]
|
||||
}
|
||||
57
content/templates/react/typography/index.tsx
Normal file
@@ -0,0 +1,57 @@
|
||||
import styled from "styled-components";
|
||||
import { Theme, Typography } from "../theme";
|
||||
|
||||
interface TextProps {
|
||||
color?: keyof Theme["colors"];
|
||||
bold?: boolean;
|
||||
theme: Theme;
|
||||
}
|
||||
|
||||
const BaseText = styled.span<TextProps>`
|
||||
${({ theme }) =>
|
||||
theme.font.family ? `font-family: ${theme.font.family};` : ""}
|
||||
color: ${({ color, theme }) =>
|
||||
color ? theme.colors[color] : theme.colors.foreground};
|
||||
font-weight: ${({ bold }) => (bold ? "bold" : "normal")};
|
||||
font-size: ${({ theme }) => theme.font.baseSize}px;
|
||||
`;
|
||||
|
||||
const get = (name: keyof Theme["typography"], theme: Theme): Typography => {
|
||||
const typography = theme.typography[name];
|
||||
return typography;
|
||||
};
|
||||
|
||||
const createTypography = (name: keyof Theme["typography"]) => {
|
||||
const Component = styled(BaseText) <TextProps>`
|
||||
font-size: ${({ theme }) =>
|
||||
theme.font.baseSize * (get(name, theme).size || 1)}px;
|
||||
font-weight: ${({ bold, theme }) =>
|
||||
typeof bold !== "undefined"
|
||||
? "bold"
|
||||
: get(name, theme).weight || "normal"};
|
||||
${({ theme }) =>
|
||||
get(name, theme).upperCase ? "text-transform: uppercase;" : ""}
|
||||
`;
|
||||
return Component;
|
||||
};
|
||||
|
||||
const Jumbo = createTypography("Jumbo");
|
||||
const Title2 = createTypography("Title2");
|
||||
const Title1 = createTypography("Title1");
|
||||
const Body1 = createTypography("Body1");
|
||||
const Overline = createTypography("Overline");
|
||||
const Caption = createTypography("Caption");
|
||||
const Link = createTypography("Link");
|
||||
|
||||
const types: { [key in keyof Theme["typography"]]: typeof BaseText } = {
|
||||
Jumbo,
|
||||
Title2,
|
||||
Title1,
|
||||
Body1,
|
||||
Overline,
|
||||
Caption,
|
||||
Link,
|
||||
};
|
||||
|
||||
export type { TextProps };
|
||||
export { types, Jumbo, Title2, Title1, Body1, Overline, Caption, Link };
|
||||
5
jest.config.js
Normal file
@@ -0,0 +1,5 @@
|
||||
/** @type {import('ts-jest').JestConfigWithTsJest} */
// Run TypeScript tests directly through ts-jest in a plain Node environment.
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
};
|
||||
62
package.json
Normal file
@@ -0,0 +1,62 @@
|
||||
{
|
||||
"name": "web2",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"build": "ts-node ./bin/index.ts build",
|
||||
"dev": "ts-node ./bin/index.ts dev"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@rollup/plugin-alias": "^4.0.3",
|
||||
"@rollup/plugin-commonjs": "^24.0.1",
|
||||
"@rollup/plugin-json": "^6.0.0",
|
||||
"@rollup/plugin-node-resolve": "^15.0.1",
|
||||
"@rollup/plugin-replace": "^5.0.2",
|
||||
"@rollup/plugin-sucrase": "^5.0.1",
|
||||
"@rollup/plugin-typescript": "^11.0.0",
|
||||
"chroma-js": "^2.4.2",
|
||||
"commander": "^10.0.0",
|
||||
"ejs": "^3.1.9",
|
||||
"eventemitter3": "^5.0.0",
|
||||
"express": "^4.18.2",
|
||||
"fast-glob": "^3.2.12",
|
||||
"glob-watcher": "^5.0.5",
|
||||
"gray-matter": "^4.0.3",
|
||||
"html-entities": "^2.3.3",
|
||||
"marked": "^4.2.12",
|
||||
"node-latex": "^3.1.0",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-helmet-async": "^1.3.0",
|
||||
"react-markdown": "^6.0.3",
|
||||
"remark": "^13",
|
||||
"rollup": "^3.20.0",
|
||||
"rollup-plugin-external-globals": "^0.7.3",
|
||||
"sharp": "^0.31.3",
|
||||
"styled-components": "^5.3.9",
|
||||
"tslib": "^2.5.0",
|
||||
"unist-util-visit": "^2.0.3",
|
||||
"yaml": "^2.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/chroma-js": "^2.4.0",
|
||||
"@types/ejs": "^3.1.2",
|
||||
"@types/express": "^4.17.17",
|
||||
"@types/glob-watcher": "^5.0.2",
|
||||
"@types/jest": "^29.5.0",
|
||||
"@types/marked": "^4.0.8",
|
||||
"@types/node": "^18.15.3",
|
||||
"@types/react": "^18.0.28",
|
||||
"@types/react-dom": "^18.0.11",
|
||||
"@types/sharp": "^0.31.1",
|
||||
"@types/styled-components": "^5.1.26",
|
||||
"jest": "^29.5.0",
|
||||
"ts-jest": "^29.0.5",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "^5.0.2"
|
||||
}
|
||||
}
|
||||
4978
pnpm-lock.yaml
generated
Normal file
23
tsconfig.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2018",
|
||||
"module": "commonjs",
|
||||
"strict": true,
|
||||
"jsx": "react-jsx",
|
||||
"esModuleInterop": true,
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"sourceMap": true,
|
||||
"skipLibCheck": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"experimentalDecorators": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"outDir": "dist"
|
||||
},
|
||||
"ts-node": {
|
||||
"files": true
|
||||
},
|
||||
"include": ["bin", "content", "types"]
|
||||
}
|
||||
15
types/article.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
// An article entry plus the asset URLs produced for it by the build.
type Article = {
  title: string;
  // NOTE(review): presumably the source path of the cover image, while
  // coverUrl/thumbUrl below are generated output URLs — confirm in the loader.
  cover: string;
  raw: string;
  published?: Date;
  coverUrl: string;
  // Used by the preview cards as their backdrop image.
  thumbUrl: string;
  // URL segment: preview cards link to `/articles/<slug>`.
  slug: string;
  // NOTE(review): presumably the article's source directory — confirm.
  root: string;
  content: string;
  // Optional per-article base color; previews feed it to createTheme.
  color?: string;
  pdfUrl: string;
};

export type { Article };
|
||||
4
types/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
// Barrel export for all shared content types.
export * from "./position";
export * from "./article";
export * from "./profile";
export * from "./pages";
|
||||
19
types/pages.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { ComponentType } from "react";
|
||||
import { Article } from "./article";
|
||||
import { Profile } from "./profile";
|
||||
|
||||
interface Pages {
|
||||
frontpage: ComponentType<{
|
||||
articles: Article[];
|
||||
profile: Profile;
|
||||
}>;
|
||||
article: ComponentType<{
|
||||
article: Article;
|
||||
profile: Profile;
|
||||
pdfUrl: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
type Page<TName extends keyof Pages> = Pages[TName];
|
||||
|
||||
export { Pages, Page };
|
||||
7
types/position.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
type Position = {
|
||||
title: string;
|
||||
raw: string;
|
||||
content: string;
|
||||
};
|
||||
|
||||
export { Position };
|
||||
17
types/profile.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
type Profile = {
|
||||
name: string;
|
||||
about: string;
|
||||
tagline: string;
|
||||
imageUrl: string;
|
||||
imagePath: string;
|
||||
info: {
|
||||
name: string;
|
||||
value: string;
|
||||
}[];
|
||||
skills: {
|
||||
name: string;
|
||||
level: number;
|
||||
}[];
|
||||
};
|
||||
|
||||
export { Profile };
|
||||