chore: cleanup

This commit is contained in:
Morten Olsen
2022-04-15 00:26:08 +02:00
parent 40457007ad
commit 32d31b4371
103 changed files with 0 additions and 15658 deletions

View File

@@ -1,45 +0,0 @@
package mortenolsen
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
// Dagger plan for building this project inside Docker.
dagger.#Plan & {
// Reusable persistent cache mount for node_modules, keyed per project.
_nodeModulesMount: "/node_modules": {
dest: "/node_modules"
type: "cache"
contents: core.#CacheDir & {
id: "morten-olsen.github-io-modules-cache"
}
}
client: {
filesystem: {
// Project source, excluding dependencies and build artifacts.
"./": read: {
contents: dagger.#FS
exclude: [
"node_modules",
"out",
".next"
]
}
"./docker": read: contents: dagger.#FS
"./docker/Dockerfile": read: contents: dagger.#FS
// Build result is exported back to the client at ./_output.
"./_output": write: contents: actions.build.contents.output
}
}
actions: {
// Build the dependency image from the ./docker context.
deps: docker.#Build & {
steps: [
docker.#Dockerfile & {
source: client.filesystem."./docker".read.contents
// NOTE(review): inline Dockerfile contents is the placeholder "foo" —
// confirm whether this should reference the Dockerfile read above.
dockerfile: {
contents: "foo"
}
}
]
}
}
}

View File

@@ -1 +0,0 @@
module: ""

View File

@@ -1,2 +0,0 @@
# generated by dagger
** linguist-generated=true

View File

@@ -1 +0,0 @@
module: "dagger.io"

View File

@@ -1,4 +0,0 @@
package dagger
// Deprecated: use #Socket instead. Kept as an alias for backward compatibility.
#Service: #Socket

View File

@@ -1,83 +0,0 @@
package core
import "dagger.io/dagger"
// Execute a command in a container
#Exec: {
$dagger: task: _name: "Exec"
// Container filesystem
input: dagger.#FS
// Transient filesystem mounts
// Key is an arbitrary name, for example "app source code"
// Value is mount configuration
mounts: [name=string]: #Mount
// Command to execute
// Example: ["echo", "hello, world!"]
args: [...string]
// Environment variables
env: [key=string]: string | dagger.#Secret
// Working directory
workdir: string | *"/"
// User ID or name
user: string | *"root"
// If set, always execute even if the operation could be cached
always: true | *false
// Inject hostname resolution into the container
// key is hostname, value is IP
hosts: [hostname=string]: string
// Modified filesystem
output: dagger.#FS
// Command exit code
// Currently this field can only ever be zero.
// If the command fails, DAG execution is immediately terminated.
// FIXME: expand API to allow custom handling of failed commands
exit: int & 0
}
// A transient filesystem mount.
// The `type` field discriminates which branch of the disjunction applies.
#Mount: {
dest: string
type: string
{
type: "cache"
contents: #CacheDir
} | {
type: "tmp"
contents: #TempDir
} | {
type: "socket"
contents: dagger.#Socket
} | {
type: "fs"
contents: dagger.#FS
source?: string
ro?: true | *false
} | {
type: "secret"
contents: dagger.#Secret
uid: int | *0
gid: int | *0
// Default file mode 0o400: readable by owner only.
mask: int | *0o400
}
}
// A (best effort) persistent cache dir
#CacheDir: {
id: string
// "shared" permits concurrent use; "locked" serializes access.
concurrency: *"shared" | "private" | "locked"
}
// A temporary directory for command execution
#TempDir: {
// NOTE(review): 0 presumably means no size limit — confirm against engine docs.
size: int64 | *0
}

View File

@@ -1,125 +0,0 @@
package core
import "dagger.io/dagger"
// Access the source directory for the current CUE package
// This may safely be called from any package
#Source: {
$dagger: task: _name: "Source"
// Relative path to source.
path: string
// Optionally include certain files
include: [...string]
// Optionally exclude certain files
exclude: [...string]
output: dagger.#FS
}
// Create one or multiple directories in a container
#Mkdir: {
$dagger: task: _name: "Mkdir"
// Container filesystem
input: dagger.#FS
// Path of the directory to create
// It can be nested (e.g : "/foo" or "/foo/bar")
path: string
// Permissions of the directory
permissions: *0o755 | int
// If set, it creates parents' directory if they do not exist
parents: *true | false
// Modified filesystem
output: dagger.#FS
}
// Read a file from a filesystem tree
#ReadFile: {
$dagger: task: _name: "ReadFile"
// Filesystem tree holding the file
input: dagger.#FS
// Path of the file to read
path: string
// Contents of the file
contents: string
}
// Write a file to a filesystem tree, creating it if needed
#WriteFile: {
$dagger: task: _name: "WriteFile"
// Input filesystem tree
input: dagger.#FS
// Path of the file to write
path: string
// Contents to write
contents: string
// Permissions of the file
permissions: *0o600 | int
// Output filesystem tree
output: dagger.#FS
}
// Copy files from one FS tree to another
#Copy: {
$dagger: task: _name: "Copy"
// Input of the operation
input: dagger.#FS
// Contents to copy
contents: dagger.#FS
// Source path (optional)
source: string | *"/"
// Destination path (optional)
dest: string | *"/"
// Output of the operation
output: dagger.#FS
}
// Source/destination pair describing a single copy operation
#CopyInfo: {
source: {
root: dagger.#FS
path: string | *"/"
}
dest: string
}
// Merge multiple FS trees into one
#Merge: {
$dagger: task: _name: "Merge"
inputs: [...dagger.#FS]
output: dagger.#FS
}
// Extract the difference from lower FS to upper FS as its own FS
#Diff: {
$dagger: task: _name: "Diff"
lower: dagger.#FS
upper: dagger.#FS
output: dagger.#FS
}
// Select a subdirectory from a filesystem tree
// Implemented as a #Copy from `path` onto an empty (#Scratch) root.
#Subdir: {
// Input tree
input: dagger.#FS
// Path of the subdirectory
// Example: "/build"
path: string
// Copy action
_copy: #Copy & {
"input": dagger.#Scratch
contents: input
source: path
dest: "/"
}
// Subdirectory tree
output: dagger.#FS & _copy.output
}

View File

@@ -1,32 +0,0 @@
package core
import "dagger.io/dagger"
// Push a directory to a git remote
// NOTE: tagged @dagger(notimplemented) — declared but not yet executable.
#GitPush: {
@dagger(notimplemented)
$dagger: task: _name: "GitPush"
input: dagger.#FS
remote: string
ref: string
}
// Pull a directory from a git remote
// Warning: do NOT embed credentials in the remote url as this will expose them in logs.
// By using username and password Dagger will handle this for you in a secure manner.
#GitPull: {
$dagger: task: _name: "GitPull"
remote: string
ref: string
// If set, keep the .git directory in the output tree
keepGitDir: true | *false
// Authentication: exactly one of the three disjunction branches applies
auth?: {
username: string
password: dagger.#Secret // can be password or personal access token
} | {
authToken: dagger.#Secret
} | {
authHeader: dagger.#Secret
}
output: dagger.#FS
}

View File

@@ -1,49 +0,0 @@
package core
// HTTP operations
// Raw buildkit API this task maps onto, kept for reference:
//
// package llb // import "github.com/moby/buildkit/client/llb"
//
// func HTTP(url string, opts ...HTTPOption) State
//
// type HTTPOption interface {
// SetHTTPOption(*HTTPInfo)
// }
// func Checksum(dgst digest.Digest) HTTPOption
// func Chmod(perm os.FileMode) HTTPOption
// func Chown(uid, gid int) HTTPOption
// func Filename(name string) HTTPOption
import "dagger.io/dagger"
// Fetch a file over HTTP
#HTTPFetch: {
$dagger: task: _name: "HTTPFetch"
// Source url
// Example: https://www.dagger.io/index.html
source: string
// Destination path of the downloaded file
// Example: "/downloads/index.html"
dest: string
// Optionally verify the file checksum
// FIXME: what is the best format to encode checksum?
checksum?: string
// Optionally set file permissions on the downloaded file
// FIXME: find a more developer-friendly way to input file permissions
permissions?: int
// Optionally set UID of the downloaded file
uid?: int
// Optionally set GID of the downloaded file
gid?: int
// New filesystem state containing the downloaded file
output: dagger.#FS
}

View File

@@ -1,182 +0,0 @@
package core
import (
"list"
"dagger.io/dagger"
)
// Upload a container image to a remote repository
#Push: {
$dagger: task: _name: "Push"
// Target repository address
dest: dagger.#Ref
// Filesystem contents to push
input: dagger.#FS
// Container image config
config: dagger.#ImageConfig
// Authentication
auth?: {
username: string
secret: dagger.#Secret
}
// Complete ref of the pushed image, including digest
result: dagger.#Ref
}
// Download a container image from a remote repository
#Pull: {
$dagger: task: _name: "Pull"
// Repository source ref
source: dagger.#Ref
// Authentication
auth?: {
username: string
secret: dagger.#Secret
}
// Root filesystem of downloaded image
output: dagger.#FS
// Image digest
digest: string
// Downloaded container image config
config: dagger.#ImageConfig
}
// Build a container image using a Dockerfile
#Dockerfile: {
$dagger: task: _name: "Dockerfile"
// Source directory to build
source: dagger.#FS
// Dockerfile location: a path in `source` (default "Dockerfile"),
// or inline contents supplied as a string.
dockerfile: *{
path: string | *"Dockerfile"
} | {
contents: string
}
// Authentication, keyed by registry hostname
auth: [registry=string]: {
username: string
secret: dagger.#Secret
}
platforms?: [...string]
target?: string
buildArg?: [string]: string
label?: [string]: string
hosts?: [string]: string
// Root filesystem produced
output: dagger.#FS
// Container image config produced
config: dagger.#ImageConfig
}
// Export an image as a tar archive
#Export: {
$dagger: task: _name: "Export"
// Filesystem contents to export
input: dagger.#FS
// Container image config
config: dagger.#ImageConfig
// Name and optionally a tag in the 'name:tag' format
tag: string
// Type of export
type: *"docker" | "oci"
// Path to the exported file inside `output`
path: string | *"/image.tar"
// Exported image ID
imageID: string
// Root filesystem with exported file
output: dagger.#FS
}
// Change image config
// Merges `config` over `input`, field by field: struct fields are
// deep-merged, list fields are concatenated, everything else is replaced.
#Set: {
// The source image config
input: dagger.#ImageConfig
// The config to merge
config: dagger.#ImageConfig
// Resulting config
output: dagger.#ImageConfig & {
// Fields treated as mergeable structs vs. concatenable lists.
let structs = ["env", "label", "volume", "expose"]
let lists = ["onbuild"]
// doesn't exist in config, copy away
for field, value in input if config[field] == _|_ {
"\(field)": value
}
// only exists in config, just copy as is
for field, value in config if input[field] == _|_ {
"\(field)": value
}
// these should exist in both places
for field, value in config if input[field] != _|_ {
"\(field)": {
// handle structs that need merging
if list.Contains(structs, field) {
_#mergeStructs & {
#a: input[field]
#b: config[field]
}
}
// handle lists that need concatenation
if list.Contains(lists, field) {
list.Concat([
input[field],
config[field],
])
}
// replace anything else
if !list.Contains(structs+lists, field) {
value
}
}
}
}
}
// Merge two structs by overwriting or adding values
_#mergeStructs: {
// Struct with defaults
#a: [string]: _
// Struct with overrides
#b: [string]: _
{
// FIXME: we need exists() in if because this matches any kind of error (cue-lang/cue#943)
// add anything not in b
for field, value in #a if #b[field] == _|_ {
"\(field)": value
}
// safely add all of b
for field, value in #b {
"\(field)": value
}
}
}

View File

@@ -1,10 +0,0 @@
package core
// A core action that does nothing
// Useful to work around bugs in the DAG resolver.
// See for example https://github.com/dagger/dagger/issues/1789
#Nop: {
$dagger: task: _name: "Nop"
// Accepts any value and passes it through unchanged.
input: _
output: input
}

View File

@@ -1,42 +0,0 @@
package core
import "dagger.io/dagger"
// Decode the contents of a secret without leaking it.
// Supported formats: json, yaml
#DecodeSecret: {
$dagger: task: _name: "DecodeSecret"
// A dagger.#Secret whose plain text is a JSON or YAML string
input: dagger.#Secret
format: "json" | "yaml"
// A new secret or (map of secrets) derived from unmarshaling the input secret's plain text
// Note: the output type is recursive, so nested structures decode to nested maps of secrets.
output: dagger.#Secret | {[string]: output}
}
// Create a new secret from a filesystem tree
#NewSecret: {
$dagger: task: _name: "NewSecret"
// Filesystem tree holding the secret
input: dagger.#FS
// Path of the secret to read
path: string
// Whether to trim leading and trailing space characters from secret value
trimSpace: *true | false
// Contents of the secret
output: dagger.#Secret
}
// Trim leading and trailing space characters from a secret
#TrimSecret: {
$dagger: task: _name: "TrimSecret"
// Original secret
input: dagger.#Secret
// New trimmed secret
output: dagger.#Secret
}

View File

@@ -1,36 +0,0 @@
package dagger
// A ref is an address for a remote container image
//
// Examples:
// - "index.docker.io/dagger"
// - "dagger"
// - "index.docker.io/dagger:latest"
// - "index.docker.io/dagger:latest@sha256:a89cb097693dd354de598d279c304a1c73ee550fbfff6d9ee515568e0c749cfe"
#Ref: string
// Container image config. See [OCI](https://www.opencontainers.org/).
#ImageConfig: {
user?: string
expose?: [string]: {}
env?: [string]: string
entrypoint?: [...string]
cmd?: [...string]
volume?: [string]: {}
workdir?: string
label?: [string]: string
stopsignal?: string
healthcheck?: #HealthCheck
argsescaped?: bool
onbuild?: [...string]
stoptimeout?: int
shell?: [...string]
}
// Image health-check settings, mirroring the OCI/Docker HEALTHCHECK config.
#HealthCheck: {
test?: [...string]
// NOTE(review): duration units (seconds vs nanoseconds) are not specified
// here — confirm against the OCI image-spec before relying on them.
interval?: int
timeout?: int
startperiod?: int
retries?: int
}

View File

@@ -1,153 +0,0 @@
package dagger
// A special kind of program which `dagger` can execute.
#Plan: {
// Access client machine
client: {
// Access client filesystem
// Path may be absolute, or relative to client working directory
filesystem: [path=string]: {
// Read data from that path
read?: _#clientFilesystemRead & {
"path": path
}
// If set, Write to that path
write?: _#clientFilesystemWrite & {
"path": path
// avoid race condition
if read != _|_ {
_after: read
}
}
}
// Access client network endpoints
network: [address=#Address]: _#clientNetwork & {
"address": address
}
// Access client environment variables
env: _#clientEnv
// Execute commands in the client
commands: [id=string]: _#clientCommand
// Platform of the client machine
platform: _#clientPlatform
}
// Configure platform execution
platform?: string
// Execute actions in containers
actions: {
...
}
}
// Task: read a file, directory, or secret from the client filesystem
_#clientFilesystemRead: {
$dagger: task: _name: "ClientFilesystemRead"
// Path may be absolute, or relative to client working directory
path: string
{
// CUE type defines expected content:
// string: contents of a regular file
// #Secret: secure reference to the file contents
contents: string | #Secret
} | {
// CUE type defines expected content:
// #FS: contents of a directory
contents: #FS
// Filename patterns to include
// Example: ["*.go", "Dockerfile"]
include?: [...string]
// Filename patterns to exclude
// Example: ["node_modules"]
exclude?: [...string]
}
}
// Task: write a file or directory to the client filesystem
_#clientFilesystemWrite: {
$dagger: task: _name: "ClientFilesystemWrite"
// Path may be absolute, or relative to client working directory
path: string
{
// File contents to export (as a string or secret)
contents: string | #Secret
// File permissions (defaults to 0o644)
permissions?: int
} | {
// Filesystem contents to export
// Reference an #FS field produced by an action
contents: #FS
}
}
// Task: connect to a socket on the client machine
_#clientNetwork: {
$dagger: task: _name: "ClientNetwork"
// URL to the socket
// Example: unix:///var/run/docker.sock
address: #Address
{
// unix socket or npipe
connect: #Socket
// The `listen` branch below is intentionally commented out:
// } | {
// // FIXME: not yet implemented
// listen: #Socket
}
}
// Task: expose client environment variables to the plan
_#clientEnv: {
$dagger: task: _name: "ClientEnv"
// CUE type defines expected content
// The pattern excludes the internal "$dagger" field itself.
[!~"\\$dagger"]: *string | #Secret
}
// Task: execute a command on the client machine
_#clientCommand: {
$dagger: task: _name: "ClientCommand"
// Name of the command to execute
// Examples: "ls", "/bin/bash"
name: string
// Positional arguments to the command
// Examples: ["/tmp"]
args: [...string]
// Command-line flags represented in a civilized form
// Example: {"-l": true, "-c": "echo hello world"}
flags: [string]: bool | string
// Environment variables
// Example: {"DEBUG": "1"}
env: [string]: string | #Secret
// Capture standard output (as a string or secret)
stdout?: *string | #Secret
// Capture standard error (as a string or secret)
stderr?: *string | #Secret
// Inject standard input (from a string or secret)
stdin?: string | #Secret
}
// Task: report the client machine's platform
_#clientPlatform: {
$dagger: task: _name: "ClientPlatform"
// Operating system of the client machine
os: string
// Hardware architecture of the client machine
arch: string
}

View File

@@ -1,38 +0,0 @@
package dagger
// A reference to a filesystem tree.
// For example:
// - The root filesystem of a container
// - A source code repository
// - A directory containing binary artifacts
// Rule of thumb: if it fits in a tar archive, it fits in a #FS.
#FS: {
// Opaque engine-managed identifier; null denotes the empty tree.
$dagger: fs: _id: string | null
}
// An empty directory
#Scratch: #FS & {
$dagger: fs: _id: null
}
// A reference to an external secret, for example:
// - A password
// - A SSH private key
// - An API token
// Secrets are never merged in the Cue tree. They can only be used
// by a special filesystem mount designed to minimize leak risk.
#Secret: {
$dagger: secret: _id: string
}
// A reference to a network socket, for example:
// - A UNIX socket
// - A TCP or UDP port
// - A Windows named pipe
#Socket: {
$dagger: service: _id: string
}
// A network service address
#Address: string & =~"^(unix://|npipe://).+"
// TODO: #Address: string & =~"^(tcp://|unix://|npipe://|udp://).+"

View File

@@ -1,76 +0,0 @@
# Europa Universe
## About this directory
`europa-universe/` is a staging area for the upcoming `universe.dagger.io` package namespace,
which will be shipped as part of the [Europa release](https://github.com/dagger/dagger/issues/1088).
## What is Universe?
The Dagger Universe is a catalog of reusable Cue packages, curated by Dagger but possibly authored by third parties. Most packages in Universe contain reusable actions; some may also contain entire configuration templates.
The import domain for Universe will be `universe.dagger.io`. It will deprecate the current domain `alpha.dagger.io`.
## Where is the `dagger` package?
Europa will also introduce a new package for the Dagger Core API: `dagger.io/dagger`.
This is a core package, and is *not* part of Universe (note the import domain).
The development version of the Europa core API can be imported as [alpha.dagger.io/europa/dagger](../stdlib/europa/dagger).
## Where is the `dagger/engine` package?
Europa will also introduce a new package for the Low-Level Dagger Engine API : `dagger.io/dagger/engine`.
This is a core package, and is *not* part of Universe (note the import domain).
The development version of the Europa Low-Level Engine API can be imported as either:
* [alpha.dagger.io/europa/dagger/engine/spec/engine](../stdlib/europa/dagger/engine/spec/engine) for the full spec
* [alpha.dagger.io/europa/dagger/engine](../stdlib/europa/dagger/engine) for the implemented subset of the spec
## Universe vs other packages
This table compares Dagger core packages, Dagger Universe packages, and the overall CUE package ecosystem.
| | *Dagger core* | *Dagger Universe* | *CUE ecosystem* |
|---|----------------|-------------------|-----------------|
| Import path | `dagger.io` | `universe.dagger.io` | Everything else |
| Purpose | Access core Dagger features | Safely reuse code from the Dagger community | Reuse any CUE code from anyone |
| Author | Dagger team | Dagger community, curated by Dagger | Anyone |
| Release cycle | Released with Dagger engine | Released continuously | No release cycle |
| Size | Small | Large | Very large |
| Growth rate | Grows slowly, with engine features | Grows fast, with Dagger community | Grows even faster, with CUE ecosystem |
## Notable packages
### Docker API
*Import path: [`universe.dagger.io/docker`](./universe/docker)*
The `docker` package is a native Cue API for Docker. You can use it to build, run, push and pull Docker containers directly from Cue.
The Dagger container API defines the following types:
* `#Image`: a container image
* `#Run`: run a command in a container
* `#Push`: upload an image to a repository
* `#Pull`: download an image from a repository
* `#Build`: build an image
### Examples
*Import path: [`universe.dagger.io/examples`](./examples)*
This package contains examples of complete Dagger configurations, including the result of following tutorials in the documentation.
For example, [the todoapp example](./examples/todoapp) corresponds to the [Getting Started tutorial](https://docs.dagger.io/1003/get-started/)
## TODO LIST
* Support native language dev in `docker.#Run` with good DX (Python, Go, Typescript etc.)
* Coding style. When to use verbs vs. nouns?
* Easy file injection API (`container.#Image.files` ?)
* Use file injection instead of inline for `#Command.script` (to avoid hitting arg character limits)
* Organize universe packages in sub-categories?

View File

@@ -1,42 +0,0 @@
// Base package for Alpine Linux
package alpine
import (
"universe.dagger.io/docker"
)
// Build an Alpine Linux container image
// Pulls the pinned base image, then runs one `apk add` per requested package.
#Build: {
// Alpine version to install.
version: string | *"3.15.0@sha256:21a3deaa0d32a8057914f36584b5288d2e5ecc984380bc0118285c70fa8c9300"
// List of packages to install
packages: [pkgName=string]: {
// NOTE(samalba, gh issue #1532):
// it's not recommended to pin the version as it is already pinned by the major Alpine version
// version pinning is for future use (as soon as we support custom repositories like `community`,
// `testing` or `edge`)
// NOTE(review): when set, the value is concatenated directly to the package
// name below — a pin presumably needs to include the "=" separator itself
// (e.g. "=1.2.3"); confirm before use.
version: string | *""
}
docker.#Build & {
steps: [
docker.#Pull & {
source: "index.docker.io/alpine:\(version)"
},
// One docker.#Run per package, so each install is cached independently.
for pkgName, pkg in packages {
docker.#Run & {
command: {
name: "apk"
args: ["add", "\(pkgName)\(pkg.version)"]
flags: {
"-U": true
"--no-cache": true
}
}
}
},
]
}
}

View File

@@ -1,8 +0,0 @@
# Bats setup hook: load the shared helpers and perform common test setup.
setup() {
load '../../bats_helpers'
common_setup
}
# Integration test: run the `test` action of the alpine package plan.
@test "alpine" {
dagger "do" -p ./test.cue test
}

View File

@@ -1,53 +0,0 @@
package alpine
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/alpine"
"universe.dagger.io/docker"
)
// Integration tests for the alpine package.
dagger.#Plan & {
actions: test: {
// Test: customize alpine version
alpineVersion: {
build: alpine.#Build & {
// install an old version on purpose
version: "3.10.9"
}
// Assert the built image reports the requested release.
verify: core.#ReadFile & {
input: build.output.rootfs
path: "/etc/alpine-release"
contents: "3.10.9\n"
}
}
// Test: install packages
packageInstall: {
build: alpine.#Build & {
packages: {
jq: {}
curl: {}
}
}
// Run both tools and capture their version output for verification.
check: docker.#Run & {
input: build.output
command: {
name: "sh"
flags: "-c": """
jq --version > /jq-version.txt
curl --version > /curl-version.txt
"""
}
// Exported files must start with the respective tool name.
export: files: {
"/jq-version.txt": contents: =~"^jq"
"/curl-version.txt": contents: =~"^curl"
}
}
}
}
}

View File

@@ -1,104 +0,0 @@
// AWS base package
package aws
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
// Pinned Amazon Linux 2 base image used by #Build.
#DefaultLinuxVersion: "amazonlinux:2.0.20220121.0@sha256:f3a37f84f2644095e2c6f6fdf2bf4dbf68d5436c51afcfbfa747a5de391d5d62"
// Default AWS CLI v2 version installed by #Build.
#DefaultCliVersion: "2.4.12"
// Build provides a docker.#Image with the aws cli pre-installed to Amazon Linux 2.
// Can be customized with packages, and can be used with docker.#Run for executing custom scripts.
// Used by default with aws.#Run
#Build: {
docker.#Build & {
steps: [
docker.#Pull & {
source: #DefaultLinuxVersion
},
// cache yum install separately
docker.#Run & {
command: {
name: "yum"
args: ["install", "unzip", "-y"]
}
},
// Run the bundled install script with the requested CLI version.
docker.#Run & {
command: {
name: "/scripts/install.sh"
args: [version]
}
mounts: scripts: {
dest: "/scripts"
contents: _scripts.output
}
},
]
}
// Scripts shipped alongside this package (mounted read-only above).
_scripts: core.#Source & {
path: "_scripts"
}
// The version of the AWS CLI to install
version: string | *#DefaultCliVersion
}
// Credentials provides long or short-term credentials.
#Credentials: {
// AWS access key
accessKeyId?: dagger.#Secret
// AWS secret key
secretAccessKey?: dagger.#Secret
// AWS session token (provided with temporary credentials)
sessionToken?: dagger.#Secret
}
// Region provides a schema to validate acceptable region value.
#Region: "us-east-2" | "us-east-1" | "us-west-1" | "us-west-2" | "af-south-1" | "ap-east-1" | "ap-southeast-3" | "ap-south-1" | "ap-northeast-3" | "ap-northeast-2" | "ap-southeast-1" | "ap-southeast-2" | "ap-northeast-1" | "ca-central-1" | "cn-north-1" | "cn-northwest-1" | "eu-central-1" | "eu-west-1" | "eu-west-2" | "eu-south-1" | "eu-west-3" | "eu-north-1" | "me-south-1" | "sa-east-1"
// Container a standalone environment pre-configured with credentials and .aws/config
#Container: {
// _build provides the default image
_build: #Build
// configFile provides access to a config file, typically found in ~/.aws/config
configFile?: dagger.#FS
// credentials provides long or short-term credentials
credentials: #Credentials
docker.#Run & {
input: docker.#Image | *_build.output
env: {
// pass credentials as env vars
if credentials.accessKeyId != _|_ {
AWS_ACCESS_KEY_ID: credentials.accessKeyId
}
if credentials.secretAccessKey != _|_ {
AWS_SECRET_ACCESS_KEY: credentials.secretAccessKey
}
if credentials.sessionToken != _|_ {
AWS_SESSION_TOKEN: credentials.sessionToken
}
}
// Optionally mount the config file read-only and point the CLI at it.
if configFile != _|_ {
mounts: aws: {
contents: configFile
dest: "/aws"
ro: true
}
env: AWS_CONFIG_FILE: "/aws/config"
}
}
}

View File

@@ -1,129 +0,0 @@
package cli
import (
"list"
"strings"
"encoding/json"
"universe.dagger.io/aws"
)
// Command provides a declarative interface to the AWS CLI
// The command's stdout is redirected to /output.txt inside the container,
// exported, and optionally json.Unmarshal'ed into `result`.
#Command: {
// "register" output.txt
export: files: "/output.txt": _
// Global arguments passed to the aws cli.
options: {
// Turn on debug logging.
debug?: bool
// Override command's default URL with the given URL.
"endpoint-url"?: string
// By default, the AWS CLI uses SSL when communicating with AWS services.
// For each SSL connection, the AWS CLI will verify SSL certificates. This
// option overrides the default behavior of verifying SSL certificates.
"no-verify-ssl"?: bool
// Disable automatic pagination.
"no-paginate"?: bool
// The formatting style for command output.
output: *"json" | "text" | "table" | "yaml" | "yaml-stream"
// A JMESPath query to use in filtering the response data.
query?: string
// Use a specific profile from your credential file.
profile?: string
// The region to use. Overrides config/env settings.
region?: string
// Display the version of this tool.
version?: bool
// Turn on/off color output.
color?: "off" | "on" | "auto"
// Do not sign requests. Credentials will not be loaded if this argument
// is provided.
"no-sign-request"?: bool
// The CA certificate bundle to use when verifying SSL certificates. Over-
// rides config/env settings.
"ca-bundle"?: string
// The maximum socket read time in seconds. If the value is set to 0, the
// socket read will be blocking and not timeout. The default value is 60
// seconds.
"cli-read-timeout"?: int
// The maximum socket connect time in seconds. If the value is set to 0,
// the socket connect will be blocking and not timeout. The default value
// is 60 seconds.
"cli-connect-timeout"?: int
// The formatting style to be used for binary blobs. The default format is
// base64. The base64 format expects binary blobs to be provided as a
// base64 encoded string. The raw-in-base64-out format preserves compati-
// bility with AWS CLI V1 behavior and binary values must be passed liter-
// ally. When providing contents from a file that map to a binary blob
// fileb:// will always be treated as binary and use the file contents
// directly regardless of the cli-binary-format setting. When using
// file:// the file contents will need to properly formatted for the con-
// figured cli-binary-format.
"cli-binary-format"?: "base64" | "raw-in-base64-out"
// Disable cli pager for output.
"no-cli-pager": true
// Automatically prompt for CLI input parameters.
"cli-auto-prompt"?: bool
// Disable automatically prompt for CLI input parameters.
"no-cli-auto-prompt"?: bool
}
// Result will contain the cli output. If unmarshal is set to false this will be the raw string as provided by the aws cli command. If unmarshal is set to true this will be a map as returned by json.Unmarshal.
// Recursive type covering any JSON-decodable value.
_unmarshalable: string | number | bool | null | [..._unmarshalable] | {[string]: _unmarshalable}
result: _unmarshalable
if unmarshal != false {
options: output: "json"
result: json.Unmarshal(export.files["/output.txt"])
}
if unmarshal == false {
result: export.files["/output.txt"]
}
// The service to run the command against.
service: {
args: [...string]
name: "accessanalyzer" | "account" | "acm" | "acm-pca" | "alexaforbusiness" | "amp" | "amplify" | "amplifybackend" | "amplifyuibuilder" | "apigateway" | "apigatewaymanagementapi" | "apigatewayv2" | "appconfig" | "appconfigdata" | "appflow" | "appintegrations" | "application-autoscaling" | "application-insights" | "applicationcostprofiler" | "appmesh" | "apprunner" | "appstream" | "appsync" | "athena" | "auditmanager" | "autoscaling" | "autoscaling-plans" | "backup" | "backup-gateway" | "batch" | "braket" | "budgets" | "ce" | "chime" | "chime-sdk-identity" | "chime-sdk-meetings" | "chime-sdk-messaging" | "cli-dev" | "cloud9" | "cloudcontrol" | "clouddirectory" | "cloudformation" | "cloudfront" | "cloudhsm" | "cloudtrail" | "cloudwatch" | "codeartifact" | "codebuild" | "codecommit" | "codeguru-reviewer" | "codeguruprofiler" | "codepipeline" | "codestar" | "codestar-connections" | "codestar-notifications" | "cognito-identity" | "cognito-idp" | "cognito-sync" | "comprehend" | "comprehendmedical" | "compute-optimizer" | "configservice" | "configure" | "connect" | "connect-contact-lens" | "connectparticipant" | "cur" | "customer-profiles" | "databrew" | "dataexchange" | "datapipeline" | "datasync" | "dax" | "ddb" | "deploy" | "detective" | "devicefarm" | "devops-guru" | "directconnect" | "discovery" | "dlm" | "dms" | "docdb" | "drs" | "ds" | "dynamodb" | "dynamodbstreams" | "ebs" | "ec2" | "ec2-instance-connect" | "ecr" | "ecr-public" | "ecs" | "efs" | "eks" | "elastic-inference" | "elasticache" | "elasticbeanstalk" | "elastictranscoder" | "elb" | "elbv2" | "emr" | "emr-containers" | "es" | "events" | "evidently" | "finspace" | "finspace-data" | "firehose" | "fis" | "fms" | "forecast" | "forecastquery" | "frauddetector" | "fsx" | "gamelift" | "glacier" | "globalaccelerator" | "glue" | "grafana" | "greengrass" | "greengrassv2" | "groundstation" | "guardduty" | "health" | "healthlake" | "help" | "history" | "honeycode" | "iam" | "identitystore" | "imagebuilder" | 
"importexport" | "inspector" | "inspector2" | "iot" | "iot-data" | "iot-jobs-data" | "iot1click-devices" | "iot1click-projects" | "iotanalytics" | "iotdeviceadvisor" | "iotevents" | "iotevents-data" | "iotfleethub" | "iotsecuretunneling" | "iotsitewise" | "iotthingsgraph" | "iottwinmaker" | "iotwireless" | "ivs" | "kafka" | "kafkaconnect" | "kendra" | "kinesis" | "kinesis-video-archived-media" | "kinesis-video-media" | "kinesis-video-signaling" | "kinesisanalytics" | "kinesisanalyticsv2" | "kinesisvideo" | "kms" | "lakeformation" | "lambda" | "lex-models" | "lex-runtime" | "lexv2-models" | "lexv2-runtime" | "license-manager" | "lightsail" | "location" | "logs" | "lookoutequipment" | "lookoutmetrics" | "lookoutvision" | "machinelearning" | "macie" | "macie2" | "managedblockchain" | "marketplace-catalog" | "marketplace-entitlement" | "marketplacecommerceanalytics" | "mediaconnect" | "mediaconvert" | "medialive" | "mediapackage" | "mediapackage-vod" | "mediastore" | "mediastore-data" | "mediatailor" | "memorydb" | "meteringmarketplace" | "mgh" | "mgn" | "migration-hub-refactor-spaces" | "migrationhub-config" | "migrationhubstrategy" | "mobile" | "mq" | "mturk" | "mwaa" | "neptune" | "network-firewall" | "networkmanager" | "nimble" | "opensearch" | "opsworks" | "opsworks-cm" | "organizations" | "outposts" | "panorama" | "personalize" | "personalize-events" | "personalize-runtime" | "pi" | "pinpoint" | "pinpoint-email" | "pinpoint-sms-voice" | "polly" | "pricing" | "proton" | "qldb" | "qldb-session" | "quicksight" | "ram" | "rbin" | "rds" | "rds-data" | "redshift" | "redshift-data" | "rekognition" | "resiliencehub" | "resource-groups" | "resourcegroupstaggingapi" | "robomaker" | "route53" | "route53-recovery-cluster" | "route53-recovery-control-config" | "route53-recovery-readiness" | "route53domains" | "route53resolver" | "rum" | "s3" | "s3api" | "s3control" | "s3outposts" | "sagemaker" | "sagemaker-a2i-runtime" | "sagemaker-edge" | "sagemaker-featurestore-runtime" | 
"sagemaker-runtime" | "savingsplans" | "schemas" | "sdb" | "secretsmanager" | "securityhub" | "serverlessrepo" | "service-quotas" | "servicecatalog" | "servicecatalog-appregistry" | "servicediscovery" | "ses" | "sesv2" | "shield" | "signer" | "sms" | "snow-device-management" | "snowball" | "sns" | "sqs" | "ssm" | "ssm-contacts" | "ssm-incidents" | "sso" | "sso-admin" | "sso-oidc" | "stepfunctions" | "storagegateway" | "sts" | "support" | "swf" | "synthetics" | "textract" | "timestream-query" | "timestream-write" | "transcribe" | "transfer" | "translate" | "voice-id" | "waf" | "waf-regional" | "wafv2" | "wellarchitected" | "wisdom" | "workdocs" | "worklink" | "workmail" | "workmailmessageflow" | "workspaces" | "workspaces-web" | "xray" | ""
command: string
}
// unmarshal determines whether to automatically json.Unmarshal() the command result. If set to true, the output field will be set to "json" and the command output will be Unmarshaled to result:
unmarshal: false | *true
aws.#Container & {
always: true
// Render set options as ["--flag"] (bool) or ["--flag", value] (string).
_optionArgs: list.FlattenN([
for k, v in options {
if (v & bool) != _|_ {
["--\(k)"]
}
if (v & string) != _|_ {
["--\(k)", v]
}
},
], 1)
// Assemble the final shell command, redirecting stdout to /output.txt.
command: {
name: "/bin/sh"
flags: "-c": strings.Join(["aws"]+_optionArgs+[service.name, service.command]+service.args+[">", "/output.txt"], " ")
}
}
}

View File

@@ -1,41 +0,0 @@
package test
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/aws"
"universe.dagger.io/aws/cli"
)
dagger.#Plan & {
client: commands: sops: {
name: "sops"
args: ["-d", "--extract", "[\"AWS\"]", "../../../secrets_sops.yaml"]
stdout: dagger.#Secret
}
actions: {
sopsSecrets: core.#DecodeSecret & {
format: "yaml"
input: client.commands.sops.stdout
}
getCallerIdentity: cli.#Command & {
credentials: aws.#Credentials & {
accessKeyId: sopsSecrets.output.AWS_ACCESS_KEY_ID.contents
secretAccessKey: sopsSecrets.output.AWS_SECRET_ACCESS_KEY.contents
}
options: region: "us-east-2"
service: {
name: "sts"
command: "get-caller-identity"
}
}
verify: getCallerIdentity.result & {
UserId: !~"^$"
Account: !~"^$"
Arn: !~"^$"
}
}
}

View File

@@ -1,9 +0,0 @@
setup() {
load '../../../bats_helpers'
common_setup
}
@test "aws/cli" {
dagger "do" -p ./sts_get_caller_identity.cue verify
}

View File

@@ -1,4 +0,0 @@
[profile ci]
credential_source = Environment
region = us-east-2
role_arn = arn:aws:iam::125635003186:role/dagger-ci

View File

@@ -1,52 +0,0 @@
package test
import (
"encoding/json"
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/aws"
)
dagger.#Plan & {
client: {
filesystem: ".": read: {
contents: dagger.#FS
include: ["config"]
}
commands: sops: {
name: "sops"
args: ["-d", "--extract", "[\"AWS\"]", "../../secrets_sops.yaml"]
stdout: dagger.#Secret
}
}
actions: {
sopsSecrets: core.#DecodeSecret & {
format: "yaml"
input: client.commands.sops.stdout
}
getCallerIdentity: aws.#Container & {
always: true
configFile: client.filesystem.".".read.contents
credentials: aws.#Credentials & {
accessKeyId: sopsSecrets.output.AWS_ACCESS_KEY_ID.contents
secretAccessKey: sopsSecrets.output.AWS_SECRET_ACCESS_KEY.contents
}
command: {
name: "sh"
flags: "-c": "aws --profile ci sts get-caller-identity > /output.txt"
}
export: files: "/output.txt": _
}
verify: json.Unmarshal(getCallerIdentity.export.files."/output.txt") & {
UserId: string
Account: =~"^12[0-9]{8}86$"
Arn: =~"^arn:aws:sts::(12[0-9]{8}86):assumed-role/dagger-ci"
}
}
}

View File

@@ -1,44 +0,0 @@
package test
import (
"encoding/json"
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/aws"
)
dagger.#Plan & {
client: commands: sops: {
name: "sops"
args: ["-d", "--extract", "[\"AWS\"]", "../../secrets_sops.yaml"]
stdout: dagger.#Secret
}
actions: {
sopsSecrets: core.#DecodeSecret & {
format: "yaml"
input: client.commands.sops.stdout
}
getCallerIdentity: aws.#Container & {
always: true
credentials: aws.#Credentials & {
accessKeyId: sopsSecrets.output.AWS_ACCESS_KEY_ID.contents
secretAccessKey: sopsSecrets.output.AWS_SECRET_ACCESS_KEY.contents
}
command: {
name: "sh"
flags: "-c": "aws --region us-east-2 sts get-caller-identity > /output.txt"
}
export: files: "/output.txt": _
}
verify: json.Unmarshal(getCallerIdentity.export.files."/output.txt") & {
UserId: string & !~"^$"
Account: =~"^12[0-9]{8}86$"
Arn: =~"(12[0-9]{8}86)"
}
}
}

View File

@@ -1,23 +0,0 @@
package test
import (
"dagger.io/dagger"
"universe.dagger.io/aws"
"universe.dagger.io/docker"
)
dagger.#Plan & {
actions: {
build: aws.#Build
getVersion: docker.#Run & {
always: true
input: build.output
command: {
name: "sh"
flags: "-c": "aws --version > /output.txt"
}
export: files: "/output.txt": =~"^aws-cli/\(aws.#DefaultCliVersion)"
}
}
}

View File

@@ -1,11 +0,0 @@
setup() {
load '../../bats_helpers'
common_setup
}
@test "aws" {
dagger "do" -p ./default_version.cue getVersion
dagger "do" -p ./credentials.cue verify
dagger "do" -p ./config_file.cue verify
}

View File

@@ -1,60 +0,0 @@
// Helpers to run bash commands in containers
package bash
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
// Run a bash script in a Docker container
// Since this is a thin wrapper over docker.#Run, we embed it.
// Whether to embed or wrap is a case-by-case decision, like in Go.
#Run: {
// The script to execute
script: {
// A directory containing one or more bash scripts
directory: dagger.#FS
// Name of the file to execute
filename: string
_directory: directory
_filename: filename
} | {
// Script contents
contents: string
_filename: "run.sh"
_write: core.#WriteFile & {
input: dagger.#Scratch
path: _filename
"contents": contents
}
_directory: _write.output
}
// Arguments to the script
args: [...string]
// Where in the container to mount the scripts directory
_mountpoint: "/bash/scripts"
docker.#Run & {
command: {
name: "bash"
"args": ["\(_mountpoint)/\(script._filename)"] + args
// FIXME: make default flags overrideable
flags: {
"--norc": true
"-e": true
"-o": "pipefail"
}
}
mounts: "Bash scripts": {
contents: script._directory
dest: _mountpoint
}
}
}

View File

@@ -1,3 +0,0 @@
#!/bin/sh
echo Hello, world > /out.txt

View File

@@ -1,10 +0,0 @@
setup() {
load '../../bats_helpers'
common_setup
}
@test "bash" {
dagger "do" -p ./test.cue test
}

View File

@@ -1,50 +0,0 @@
package bash
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
"universe.dagger.io/bash"
)
dagger.#Plan & {
actions: test: {
_pull: docker.#Pull & {
source: "index.docker.io/debian"
}
_image: _pull.output
// Run a script from source directory + filename
runFile: {
dir: _load.output
_load: core.#Source & {
path: "./data"
include: ["*.sh"]
}
run: bash.#Run & {
input: _image
export: files: "/out.txt": _
script: {
directory: dir
filename: "hello.sh"
}
}
output: run.export.files."/out.txt" & "Hello, world\n"
}
// Run a script from string
runString: {
run: bash.#Run & {
input: _image
export: files: "/output.txt": _
script: contents: "echo 'Hello, inlined world!' > /output.txt"
}
output: run.export.files."/output.txt" & "Hello, inlined world!\n"
}
}
}

View File

@@ -1,25 +0,0 @@
common_setup() {
load "$(dirname "${BASH_SOURCE[0]}")/node_modules/bats-support/load.bash"
load "$(dirname "${BASH_SOURCE[0]}")/node_modules/bats-assert/load.bash"
# Dagger Binary
# FIXME: `command -v` must be wrapped in a sub-bash,
# otherwise infinite recursion when DAGGER_BINARY is not set.
export DAGGER="${DAGGER_BINARY:-$(bash -c 'command -v dagger')}"
# Disable telemetry
DAGGER_TELEMETRY_DISABLE="1"
export DAGGER_TELEMETRY_DISABLE
# Force plain printing for error reporting
DAGGER_LOG_FORMAT="plain"
export DAGGER_LOG_FORMAT
# cd into the directory containing the bats file
cd "$BATS_TEST_DIRNAME" || exit 1
}
# dagger helper to execute the right binary
dagger() {
"${DAGGER}" "$@"
}

View File

@@ -1 +0,0 @@
module: "universe.dagger.io"

View File

@@ -1,104 +0,0 @@
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
)
// Modular build API for Docker containers
#Build: {
steps: [#Step, ...#Step]
output: #Image
// Generate build DAG from linear steps
_dag: {
for idx, step in steps {
"\(idx)": step & {
// connect input to previous output
if idx > 0 {
// FIXME: the intermediary `output` is needed because of a possible CUE bug.
// `._dag."0".output: 1 errors in empty disjunction::`
// See: https://github.com/cue-lang/cue/issues/1446
// input: _dag["\(idx-1)"].output
_output: _dag["\(idx-1)"].output
input: _output
}
}
}
}
if len(_dag) > 0 {
output: _dag["\(len(_dag)-1)"].output
}
}
// A build step is anything that produces a docker image
#Step: {
input?: #Image
output: #Image
...
}
// Build step that copies files into the container image
#Copy: {
input: #Image
contents: dagger.#FS
source: string | *"/"
dest: string | *"/"
// Execute copy operation
_copy: core.#Copy & {
"input": input.rootfs
"contents": contents
"source": source
"dest": dest
}
output: #Image & {
config: input.config
rootfs: _copy.output
}
}
// Build step that executes a Dockerfile
#Dockerfile: {
source: dagger.#FS
// Dockerfile definition or path into source
dockerfile: *{
path: string | *"Dockerfile"
} | {
contents: string
}
// Registry authentication
// Key must be registry address
auth: [registry=string]: {
username: string
secret: dagger.#Secret
}
platforms: [...string]
target?: string
buildArg: [string]: string
label: [string]: string
hosts: [string]: string
_build: core.#Dockerfile & {
"source": source
"auth": auth
"dockerfile": dockerfile
"platforms": platforms
if target != _|_ {
"target": target
}
"buildArg": buildArg
"label": label
"hosts": hosts
}
output: #Image & {
rootfs: _build.output
config: _build.config
}
}

View File

@@ -1,92 +0,0 @@
package cli
import (
"dagger.io/dagger"
"universe.dagger.io/docker"
)
// See https://github.com/dagger/dagger/discussions/1874
// Default image
#Image: docker.#Pull & {
source: "docker:20.10.13-alpine3.15"
}
// Run a docker CLI command
#Run: {
#RunSocket | #RunSSH | #RunTCP
_defaultImage: #Image
// As a convenience, input defaults to a ready-to-use docker environment
input: docker.#Image | *_defaultImage.output
}
// Connect via local docker socket
#RunSocket: {
host: dagger.#Socket
docker.#Run & {
mounts: docker: {
dest: "/var/run/docker.sock"
contents: host
}
}
}
// Connect via SSH
#RunSSH: {
host: =~"^ssh://.+"
ssh: {
// Private SSH key
key?: dagger.#Secret
// Known hosts file contents
knownHosts?: dagger.#Secret
// FIXME: implement keyPassphrase
}
docker.#Run & {
env: DOCKER_HOST: host
if ssh.key != _|_ {
mounts: ssh_key: {
dest: "/root/.ssh/id_rsa"
contents: ssh.key
}
}
if ssh.knownHosts != _|_ {
mounts: ssh_hosts: {
dest: "/root/.ssh/known_hosts"
contents: ssh.knownHosts
}
}
}
}
// Connect via HTTP/HTTPS
#RunTCP: {
host: =~"^tcp://.+"
docker.#Run & {
env: DOCKER_HOST: host
// Directory with certificates to verify ({ca,cert,key}.pem files).
// This enables HTTPS.
certs?: dagger.#FS
if certs != _|_ {
env: {
DOCKER_TLS_VERIFY: "1"
DOCKER_CERT_PATH: "/certs/client"
}
mounts: "certs": {
dest: "/certs/client"
contents: certs
}
}
}
}

View File

@@ -1,38 +0,0 @@
package cli
import (
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
// Load an image into a docker daemon
#Load: {
// Image to load
image: docker.#Image
// Name and optionally a tag in the 'name:tag' format
tag: docker.#Ref
// Exported image ID
imageID: _export.imageID
// Root filesystem with exported file
result: _export.output
_export: core.#Export & {
"tag": tag
input: image.rootfs
config: image.config
}
#Run & {
mounts: src: {
dest: "/src"
contents: _export.output
}
command: {
name: "load"
flags: "-i": "/src/image.tar"
}
}
}

View File

@@ -1,55 +0,0 @@
package test
import (
"dagger.io/dagger"
"universe.dagger.io/alpine"
"universe.dagger.io/bash"
"universe.dagger.io/docker"
"universe.dagger.io/docker/cli"
)
dagger.#Plan & {
client: network: "unix:///var/run/docker.sock": connect: dagger.#Socket
actions: test: {
_cli: alpine.#Build & {
packages: {
bash: {}
"docker-cli": {}
}
}
_image: docker.#Run & {
input: _cli.output
command: {
name: "touch"
args: ["/foo.bar"]
}
}
load: cli.#Load & {
image: _image.output
host: client.network."unix:///var/run/docker.sock".connect
tag: "dagger:load"
}
verify: bash.#Run & {
input: _cli.output
mounts: docker: {
contents: client.network."unix:///var/run/docker.sock".connect
dest: "/var/run/docker.sock"
}
env: {
IMAGE_NAME: load.tag
IMAGE_ID: load.imageID
// FIXME: without this forced dependency, load.command might not run
DEP: "\(load.success)"
}
script: contents: #"""
test "$(docker image inspect $IMAGE_NAME -f '{{.Id}}')" = "$IMAGE_ID"
docker run --rm $IMAGE_NAME stat /foo.bar
"""#
}
}
}

View File

@@ -1,48 +0,0 @@
package test
import (
"dagger.io/dagger"
"universe.dagger.io/alpine"
"universe.dagger.io/docker"
"universe.dagger.io/docker/cli"
)
dagger.#Plan & {
client: network: "unix:///var/run/docker.sock": connect: dagger.#Socket
actions: test: {
run: cli.#Run & {
host: client.network."unix:///var/run/docker.sock".connect
command: name: "info"
}
differentImage: {
_cli: docker.#Build & {
steps: [
alpine.#Build & {
packages: "docker-cli": {}
},
docker.#Run & {
command: {
name: "sh"
flags: "-c": "echo -n foobar > /test.txt"
}
},
]
}
run: cli.#Run & {
input: _cli.output
host: client.network."unix:///var/run/docker.sock".connect
command: {
name: "docker"
args: ["info"]
}
export: files: "/test.txt": "foobar"
}
}
// FIXME: test remote connections with `docker:dind` image
// when we have long running tasks
}
}

View File

@@ -1,10 +0,0 @@
setup() {
load '../../../bats_helpers'
common_setup
}
@test "docker/cli" {
dagger "do" -p ./run.cue test
dagger "do" -p ./load.cue test
}

View File

@@ -1,23 +0,0 @@
package docker
import (
"dagger.io/dagger"
)
// A container image
#Image: {
// Root filesystem of the image.
rootfs: dagger.#FS
// Image config
config: dagger.#ImageConfig
}
// A ref is an address for a remote container image
// Examples:
// - "index.docker.io/dagger"
// - "dagger"
// - "index.docker.io/dagger:latest"
// - "index.docker.io/dagger:latest@sha256:a89cb097693dd354de598d279c304a1c73ee550fbfff6d9ee515568e0c749cfe"
// FIXME: add formatting constraints
#Ref: dagger.#Ref

View File

@@ -1,35 +0,0 @@
// Build, ship and run Docker containers in Dagger
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
)
// Download an image from a remote registry
#Pull: {
// Source ref.
source: #Ref
// Registry authentication
auth?: {
username: string
secret: dagger.#Secret
}
_op: core.#Pull & {
"source": source
if auth != _|_ {
"auth": auth
}
}
// Downloaded image
image: #Image & {
rootfs: _op.output
config: _op.config
}
// FIXME: compat with Build API
output: image
}

View File

@@ -1,33 +0,0 @@
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
)
// Upload an image to a remote repository
#Push: {
// Destination ref
dest: #Ref
// Complete ref after pushing (including digest)
result: #Ref & _push.result
// Registry authentication
auth?: {
username: string
secret: dagger.#Secret
}
// Image to push
image: #Image
_push: core.#Push & {
"dest": dest
if auth != _|_ {
"auth": auth
}
input: image.rootfs
config: image.config
}
}

View File

@@ -1,183 +0,0 @@
package docker
import (
"list"
"dagger.io/dagger"
"dagger.io/dagger/core"
)
// Run a command in a container
#Run: {
// Docker image to execute
input: #Image
always: bool | *false
// Filesystem mounts
mounts: [name=string]: core.#Mount
// Expose network ports
// FIXME: investigate feasibility
ports: [name=string]: {
frontend: dagger.#Socket
backend: {
protocol: *"tcp" | "udp"
address: string
}
}
// Entrypoint to prepend to command
entrypoint?: [...string]
// Command to execute
command?: {
// Name of the command to execute
// Examples: "ls", "/bin/bash"
name: string
// Positional arguments to the command
// Examples: ["/tmp"]
args: [...string]
// Command-line flags represented in a civilized form
// Example: {"-l": true, "-c": "echo hello world"}
flags: [string]: (string | true)
_flatFlags: list.FlattenN([
for k, v in flags {
if (v & bool) != _|_ {
[k]
}
if (v & string) != _|_ {
[k, v]
}
},
], 1)
}
// Environment variables
// Example: {"DEBUG": "1"}
env: [string]: string | dagger.#Secret
// Working directory for the command
// Example: "/src"
workdir: string
// Username or UID to ad
// User identity for this command
// Examples: "root", "0", "1002"
user: string
// Add defaults to image config
// This ensures these values are present
_defaults: core.#Set & {
"input": {
entrypoint: []
cmd: []
workdir: "/"
user: "root"
}
config: input.config
}
// Override with user config
_config: core.#Set & {
input: _defaults.output
config: {
if entrypoint != _|_ {
"entrypoint": entrypoint
}
if command != _|_ {
cmd: [command.name] + command._flatFlags + command.args
}
if workdir != _|_ {
"workdir": workdir
}
if user != _|_ {
"user": user
}
}
}
// Output fields
{
// Has the command completed?
completed: bool & (_exec.exit != _|_)
// Was completion successful?
success: bool & (_exec.exit == 0)
// Details on error, if any
error: {
// Error code
code: _exec.exit
// Error message
message: string | *null
}
export: {
rootfs: dagger.#FS & _exec.output
files: [path=string]: string
_files: {
for path, _ in files {
"\(path)": {
contents: string & _read.contents
_read: core.#ReadFile & {
input: _exec.output
"path": path
}
}
}
}
for path, output in _files {
files: "\(path)": output.contents
}
directories: [path=string]: dagger.#FS
_directories: {
for path, _ in directories {
"\(path)": {
contents: dagger.#FS & _subdir.output
_subdir: core.#Subdir & {
input: _exec.output
"path": path
}
}
}
}
for path, output in _directories {
directories: "\(path)": output.contents
}
}
}
// For compatibility with #Build
output: #Image & {
rootfs: _exec.output
config: input.config
}
// Actually execute the command
_exec: core.#Exec & {
"input": input.rootfs
"always": always
"mounts": mounts
args: _config.output.entrypoint + _config.output.cmd
workdir: _config.output.workdir
user: _config.output.user
"env": env
// env may contain secrets so we can't use core.#Set
if input.config.env != _|_ {
for key, val in input.config.env {
if env[key] == _|_ {
env: "\(key)": val
}
}
}
}
// Command exit code
exit: _exec.exit
}

View File

@@ -1,26 +0,0 @@
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
)
// Change image config
#Set: {
// The source image
input: #Image
// The image config to change
config: dagger.#ImageConfig
_set: core.#Set & {
"input": input.config
"config": config
}
// Resulting image with the config changes
output: #Image & {
rootfs: input.rootfs
config: _set.output
}
}

View File

@@ -1,120 +0,0 @@
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/alpine"
"universe.dagger.io/docker"
)
dagger.#Plan & {
actions: test: build: {
// Test: simple docker.#Build
simple: {
#testValue: "hello world"
image: docker.#Build & {
steps: [
alpine.#Build,
docker.#Run & {
command: {
name: "sh"
flags: "-c": "echo -n $TEST >> /test.txt"
}
env: TEST: #testValue
},
]
}
verify: core.#ReadFile & {
input: image.output.rootfs
path: "/test.txt"
}
verify: contents: #testValue
}
// Test: docker.#Build with multiple steps
multiSteps: {
image: docker.#Build & {
steps: [
alpine.#Build,
docker.#Run & {
command: {
name: "sh"
flags: "-c": "echo -n hello > /bar.txt"
}
},
docker.#Run & {
command: {
name: "sh"
flags: "-c": "echo -n $(cat /bar.txt) world > /foo.txt"
}
},
docker.#Run & {
command: {
name: "sh"
flags: "-c": "echo -n $(cat /foo.txt) >> /test.txt"
}
},
]
}
verify: core.#ReadFile & {
input: image.output.rootfs
path: "/test.txt"
}
verify: contents: "hello world"
}
// Test: simple nesting of docker.#Build
nested: {
build: docker.#Build & {
steps: [
docker.#Build & {
steps: [
docker.#Pull & {
source: "alpine"
},
docker.#Run & {
command: name: "ls"
},
]
},
docker.#Run & {
command: name: "ls"
},
]
}
}
// Test: nested docker.#Build with 3+ levels of depth
// FIXME: this test currently fails.
nestedDeep: {
// build: docker.#Build & {
// steps: [
// docker.#Build & {
// steps: [
// docker.#Build & {
// steps: [
// docker.#Pull & {
// source: "alpine"
// },
// docker.#Run & {
// command: name: "ls"
// },
// ]
// },
// docker.#Run & {
// command: name: "ls"
// },
// ]
// },
// docker.#Run & {
// command: name: "ls"
// },
// ]
// }
}
}
}

View File

@@ -1,70 +0,0 @@
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
dagger.#Plan & {
client: filesystem: "./testdata": read: contents: dagger.#FS
actions: test: dockerfile: {
simple: {
build: docker.#Build & {
steps: [
docker.#Dockerfile & {
source: dagger.#Scratch
dockerfile: contents: """
FROM alpine:3.15
RUN echo -n hello world >> /test.txt
"""
},
docker.#Run & {
command: {
name: "/bin/sh"
args: ["-c", """
# Verify that docker.#Dockerfile correctly connect output
# into other steps
grep -q "hello world" /test.txt
"""]
}
},
]
}
verify: core.#ReadFile & {
input: build.output.rootfs
path: "/test.txt"
} & {
contents: "hello world"
}
}
withInput: {
build: docker.#Build & {
steps: [
docker.#Dockerfile & {
source: client.filesystem."./testdata".read.contents
},
docker.#Run & {
command: {
name: "/bin/sh"
args: ["-c", """
hello >> /test.txt
"""]
}
},
]
}
verify: core.#ReadFile & {
input: build.output.rootfs
path: "/test.txt"
} & {
contents: "hello world"
}
}
}
}

View File

@@ -1,121 +0,0 @@
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
dagger.#Plan & {
actions: test: image: {
// Test: change image config with docker.#Set
set: {
image: output: docker.#Image & {
rootfs: dagger.#Scratch
config: {
cmd: ["/bin/sh"]
env: PATH: "/sbin:/bin"
onbuild: ["COPY . /app"]
}
}
set: docker.#Set & {
input: image.output
config: {
env: FOO: "bar"
workdir: "/root"
onbuild: ["RUN /app/build.sh"]
}
}
verify: set.output.config & {
env: {
PATH: "/sbin:/bin"
FOO: "bar"
}
cmd: ["/bin/sh"]
workdir: "/root"
onbuild: [
"COPY . /app",
"RUN /app/build.sh",
]
}
}
// Test: image config behavior is correct
config: {
build: core.#Dockerfile & {
source: dagger.#Scratch
dockerfile: contents: """
FROM alpine:3.15.0
RUN echo -n 'not hello from dagger' > /dagger.txt
RUN echo '#!/bin/sh' > /bin/dagger
ENV HELLO_FROM=dagger
RUN echo 'echo -n "hello from $HELLO_FROM" > /dagger.txt' >> /bin/dagger
RUN chmod +x /bin/dagger
WORKDIR /bin
CMD /bin/dagger
"""
}
myimage: docker.#Image & {
rootfs: build.output
config: build.config
}
run: docker.#Run & {
input: myimage
command: name: "ls"
export: files: {
"/dagger.txt": _ & {
contents: "not hello from dagger"
}
"/bin/dagger": _ & {
contents: """
#!/bin/sh
echo -n "hello from $HELLO_FROM" > /dagger.txt
"""
}
}
}
verify_cmd_is_run: docker.#Run & {
input: myimage
export: files: "/dagger.txt": _ & {
contents: "hello from dagger"
}
}
verify_env_is_overridden: docker.#Run & {
input: myimage
export: files: "/dagger.txt": _ & {
contents: "hello from europa"
}
env: HELLO_FROM: "europa"
}
verify_working_directory: docker.#Run & {
input: myimage
command: {
name: "sh"
flags: "-c": #"""
pwd > dir.txt
"""#
}
export: files: "/bin/dir.txt": _ & {
contents: "/bin\n"
}
}
verify_working_directory_is_overridden: docker.#Run & {
input: myimage
workdir: "/"
command: {
name: "sh"
flags: "-c": #"""
pwd > dir.txt
"""#
}
export: files: "/dir.txt": _ & {
contents: "/\n"
}
}
}
}
}

View File

@@ -1,108 +0,0 @@
package docker
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
"universe.dagger.io/alpine"
)
dagger.#Plan & {
actions: test: run: {
_build: alpine.#Build & {
packages: bash: _
}
_image: _build.output
// Test: run a simple shell command
simpleShell: {
run: docker.#Run & {
input: _image
command: {
name: "/bin/sh"
args: ["-c", "echo -n hello world >> /output.txt"]
}
}
verify: core.#ReadFile & {
input: run.output.rootfs
path: "/output.txt"
}
verify: contents: "hello world"
}
// Test: export a file
exportFile: {
run: docker.#Run & {
input: _image
command: {
name: "sh"
flags: "-c": #"""
echo -n hello world >> /output.txt
"""#
}
export: files: "/output.txt": string & "hello world"
}
}
// Test: export a directory
exportDirectory: {
run: docker.#Run & {
input: _image
command: {
name: "sh"
flags: "-c": #"""
mkdir -p /test
echo -n hello world >> /test/output.txt
"""#
}
export: directories: "/test": _
}
verify: core.#ReadFile & {
input: run.export.directories."/test"
path: "/output.txt"
}
verify: contents: "hello world"
}
// Test: configs overriding image defaults
configs: {
_base: docker.#Set & {
input: _image
config: {
user: "nobody"
workdir: "/sbin"
entrypoint: ["sh"]
cmd: ["-c", "echo -n $0 $PWD $(whoami) > /tmp/output.txt"]
}
}
// check defaults not overriden by image config
runDefaults: docker.#Run & {
input: _image
command: {
name: "sh"
flags: "-c": "echo -n $PWD $(whoami) > /output.txt"
}
export: files: "/output.txt": "/ root"
}
// check image defaults
imageDefaults: docker.#Run & {
input: _base.output
export: files: "/tmp/output.txt": "sh /sbin nobody"
}
// check overrides by user
overrides: docker.#Run & {
input: _base.output
entrypoint: ["bash"]
workdir: "/root"
user: "root"
export: files: "/tmp/output.txt": "bash /root root"
}
}
}
}

View File

@@ -1,12 +0,0 @@
setup() {
load '../../bats_helpers'
common_setup
}
@test "docker" {
dagger "do" -p ./build.cue test
dagger "do" -p ./dockerfile.cue test
dagger "do" -p ./run.cue test
dagger "do" -p ./image.cue test
}

View File

@@ -1,3 +0,0 @@
## Dagger examples
A collection of examples to help Dagger developers get started.

View File

@@ -1,101 +0,0 @@
package changelog
actions: {
// Reuse in all mix commands
// prod: assets: docker.#Build & {
// steps: [
// // 1. Start from dev assets :)
// dev.assets,
// // 2. Mix magical command
// mix.#Run & {
// script: "mix phx.digest"
// mix: {
// env: "prod"
// app: _appName
// depsCache: "private"
// buildCache: "private"
// }
// workdir: _
// // FIXME: remove copy-pasta
// mounts: nodeModules: {
// contents: core.#CacheDir & {
// // FIXME: do we need an ID here?
// id: "\(mix.app)_assets_node_modules"
// // FIXME: does this command need write access to node_modules cache?
// concurrency: "private"
// }
// dest: "\(workdir)/node_modules"
// }
// },
// ]
// }
// dev: {
// compile: mix.#Compile & {
// env: "dev"
// app: "thechangelog"
// base: inputs.params.runtimeImage
// source: inputs.directories.app.contents
// }
// assets: docker.#Build & {
// steps: [
// // 1. Start from dev runtime build
// {
// output: build.output
// },
// // 2. Build web assets
// mix.#Run & {
// mix: {
// env: "dev"
// app: _appName
// depsCache: "private"
// buildCache: "private"
// }
// // FIXME: move this to a reusable def (yarn package? or private?)
// mounts: nodeModules: {
// contents: core.#CacheDir & {
// // FIXME: do we need an ID here?
// id: "\(mix.app)_assets_node_modules"
// // FIXME: will there be multiple writers?
// concurrency: "locked"
// }
// dest: "\(workdir)/node_modules"
// }
// // FIXME: run 'yarn install' and 'yarn run compile' separately, with different caching?
// // FIXME: can we reuse universe.dagger.io/yarn ???? 0:-)
// script: "yarn install --frozen-lockfile && yarn run compile"
// workdir: "/app/assets"
// },
// ]
// }
// }
// test: {
// build: mix.#Build & {
// env: "test"
// app: _appName
// base: inputs.params.runtimeImage
// source: inputs.directories.app.contents
// }
// // Run tests
// run: docker.#Run & {
// image: build.output
// script: "mix test"
// }
// db: {
// // Pull test DB image
// pull: docker.#Pull & {
// source: inputs.params.test_db_image
// }
// // Run test DB
// // FIXME: kill once no longer needed (when tests are done running)
// run: docker.#Run & {
// image: pull.output
// }
// }
// }
}

View File

@@ -1,87 +0,0 @@
package mix
import (
"dagger.io/dagger"
"universe.dagger.io/docker"
)
#Get: #Run & {
// Applies to all environments
env: null
cache: {
build: null
deps: "locked"
}
container: command: {
name: "sh"
flags: "-c": "mix do deps.get"
}
}
// Compile Elixir dependencies, including the app
#Compile: #Run & {
cache: {
build: "locked"
deps: "locked"
}
container: command: {
name: "sh"
flags: "-c": "mix do deps.compile, compile"
}
}
// Run mix task with all necessary mounts so compiled artefacts get cached
// FIXME: add default image to hexpm/elixir:1.13.2-erlang-23.3.4.11-debian-bullseye-20210902
#Run: {
app: {
// Application name
name: string
// Application source code
source: dagger.#FS
}
// Mix environment
env: string | null
// Configure mix caching
// FIXME: simpler interface, eg. "ro" | "rw"
cache: {
// Dependencies cache
deps: null | "locked"
// Build cache
build: null | "locked"
}
// Run mix in a docker container
container: docker.#Run & {
if env != null {
"env": MIX_ENV: env
}
workdir: mounts.app.dest
mounts: "app": {
contents: app.source
dest: "/mix/app"
}
if cache.deps != null {
mounts: deps: {
contents: core.#CacheDir & {
id: "\(app.name)_deps"
concurrency: cache.deps
}
dest: "\(mounts.app.dest)/deps"
}
}
if cache.build != null {
mounts: buildCache: {
contents: core.#CacheDir & {
id: "\(app.name)_build_\(env)"
concurrency: cache.build
}
dest: "\(mounts.app.dest)/_build/\(env)"
}
}
}
}

View File

@@ -1,9 +0,0 @@
package changelog
import (
"dagger.io/dagger"
)
dagger.#Plan & {
inputs: directories: app: path: "/Users/gerhard/github.com/thechangelog/changelog.com/"
}

View File

@@ -1,83 +0,0 @@
package changelog
import (
"dagger.io/dagger"
"universe.dagger.io/docker"
"universe.dagger.io/git"
"universe.dagger.io/examples/changelog.com/elixir/mix"
)
dagger.#Plan & {
// Receive things from client
inputs: {
directories: {
// App source code
app?: _
}
secrets: {
// Docker ID password
docker: _
}
params: {
app: {
// App name
name: string | *"changelog"
// Address of app base image
image: docker.#Ref | *"thechangelog/runtime:2021-05-29T10.17.12Z"
}
test: {
// Address of test db image
db: image: docker.#Ref | *"circleci/postgres:12.6"
}
}
}
// Do things
actions: {
app: {
name: inputs.params.app.name
// changelog.com source code
source: dagger.#FS
if inputs.directories.app != _|_ {
source: inputs.directories.app.contents
}
if inputs.directories.app == _|_ {
fetch: git.#Pull & {
remote: "https://github.com/thechangelog/changelog.com"
ref: "master"
}
source: fetch.output
}
// Assemble base image
base: docker.#Pull & {
source: inputs.params.app.image
}
image: base.output
// Download Elixir dependencies
deps: mix.#Get & {
app: {
"name": name
"source": source
}
container: "image": image
}
// Compile dev environment
dev: mix.#Compile & {
env: "dev"
app: {
"name": name
"source": source
}
container: "image": image
}
}
}
}

View File

@@ -1,25 +0,0 @@
// dagger do hello --log-format=plain
//
// 9:06AM INF actions._alpine | computing
// 9:06AM INF actions._alpine | completed duration=1s
// 9:06AM INF actions.hello | computing
// 9:06AM INF actions.hello | #3 0.073 hello, world!
// 9:06AM INF actions.hello | completed duration=100ms
package helloworld
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
)
dagger.#Plan & {
actions: {
_alpine: core.#Pull & {source: "alpine:3"}
// Hello world
hello: core.#Exec & {
input: _alpine.output
args: ["echo", "hello, world!"]
always: true
}
}
}

View File

@@ -1,3 +0,0 @@
# Todo APP
[Dagger documentation website](https://docs.dagger.io/)

View File

@@ -1,38 +0,0 @@
{
"name": "moz-todo-react",
"version": "0.1.0",
"private": true,
"homepage": "./",
"dependencies": {
"@testing-library/jest-dom": "^4.2.4",
"@testing-library/react": "^9.3.2",
"@testing-library/user-event": "^7.1.2",
"gh-pages": "^3.2.3",
"nanoid": "^3.1.31",
"react": "^16.13.1",
"react-dom": "^16.13.1",
"react-scripts": "3.4.1"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test --watchAll=false --passWithNoTests",
"gh-pages": "gh-pages -d build -u 'github-actions-bot <support+actions@github.com>'",
"eject": "react-scripts eject"
},
"eslintConfig": {
"extends": "react-app"
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.1 KiB

View File

@@ -1,43 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Web site created using create-react-app"
/>
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>My Todo app</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.4 KiB

View File

@@ -1,25 +0,0 @@
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View File

@@ -1,3 +0,0 @@
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:

View File

@@ -1,120 +0,0 @@
import React, { useState, useRef, useEffect } from "react";
import Form from "./components/Form";
import FilterButton from "./components/FilterButton";
import Todo from "./components/Todo";
import { nanoid } from "nanoid";
function usePrevious(value) {
const ref = useRef();
useEffect(() => {
ref.current = value;
});
return ref.current;
}
const FILTER_MAP = {
All: () => true,
Active: task => !task.completed,
Completed: task => task.completed
};
const FILTER_NAMES = Object.keys(FILTER_MAP);
function App(props) {
const [tasks, setTasks] = useState(props.tasks);
const [filter, setFilter] = useState('All');
function toggleTaskCompleted(id) {
const updatedTasks = tasks.map(task => {
// if this task has the same ID as the edited task
if (id === task.id) {
// use object spread to make a new obkect
// whose `completed` prop has been inverted
return {...task, completed: !task.completed}
}
return task;
});
setTasks(updatedTasks);
}
function deleteTask(id) {
const remainingTasks = tasks.filter(task => id !== task.id);
setTasks(remainingTasks);
}
function editTask(id, newName) {
const editedTaskList = tasks.map(task => {
// if this task has the same ID as the edited task
if (id === task.id) {
//
return {...task, name: newName}
}
return task;
});
setTasks(editedTaskList);
}
const taskList = tasks
.filter(FILTER_MAP[filter])
.map(task => (
<Todo
id={task.id}
name={task.name}
completed={task.completed}
key={task.id}
toggleTaskCompleted={toggleTaskCompleted}
deleteTask={deleteTask}
editTask={editTask}
/>
));
const filterList = FILTER_NAMES.map(name => (
<FilterButton
key={name}
name={name}
isPressed={name === filter}
setFilter={setFilter}
/>
));
function addTask(name) {
const newTask = { id: "todo-" + nanoid(), name: name, completed: false };
setTasks([...tasks, newTask]);
}
const tasksNoun = taskList.length !== 1 ? 'tasks' : 'task';
const headingText = `${taskList.length} ${tasksNoun} remaining`;
const listHeadingRef = useRef(null);
const prevTaskLength = usePrevious(tasks.length);
useEffect(() => {
if (tasks.length - prevTaskLength === -1) {
listHeadingRef.current.focus();
}
}, [tasks.length, prevTaskLength]);
return (
<div className="todoapp stack-large">
<Form addTask={addTask} />
<div className="filters btn-group stack-exception">
{filterList}
</div>
<h2 id="list-heading" tabIndex="-1" ref={listHeadingRef}>
{headingText}
</h2>
<ul
className="todo-list stack-large stack-exception"
aria-labelledby="list-heading"
>
{taskList}
</ul>
</div>
);
}
export default App;

View File

@@ -1,18 +0,0 @@
import React from "react";
function FilterButton(props) {
return (
<button
type="button"
className="btn toggle-btn"
aria-pressed={props.isPressed}
onClick={() => props.setFilter(props.name)}
>
<span className="visually-hidden">Show </span>
<span>{props.name}</span>
<span className="visually-hidden"> tasks</span>
</button>
);
}
export default FilterButton;

View File

@@ -1,45 +0,0 @@
import React, { useState } from "react";
function Form(props) {
const [name, setName] = useState('');
function handleSubmit(e) {
e.preventDefault();
if (!name.trim()) {
return;
}
props.addTask(name);
setName("");
}
function handleChange(e) {
setName(e.target.value);
}
return (
<form onSubmit={handleSubmit}>
<h2 className="label-wrapper">
<label htmlFor="new-todo-input" className="label__lg">
What needs to be done?
</label>
</h2>
<input
type="text"
id="new-todo-input"
className="input input__lg"
name="text"
autoComplete="off"
value={name}
onChange={handleChange}
/>
<button type="submit" className="btn btn__primary btn__lg">
Add
</button>
</form>
);
}
export default Form;

View File

@@ -1,113 +0,0 @@
import React, { useEffect, useRef, useState } from "react";
function usePrevious(value) {
const ref = useRef();
useEffect(() => {
ref.current = value;
});
return ref.current;
}
export default function Todo(props) {
const [isEditing, setEditing] = useState(false);
const [newName, setNewName] = useState('');
const editFieldRef = useRef(null);
const editButtonRef = useRef(null);
const wasEditing = usePrevious(isEditing);
function handleChange(e) {
setNewName(e.target.value);
}
function handleSubmit(e) {
e.preventDefault();
if (!newName.trim()) {
return;
}
props.editTask(props.id, newName);
setNewName("");
setEditing(false);
}
const editingTemplate = (
<form className="stack-small" onSubmit={handleSubmit}>
<div className="form-group">
<label className="todo-label" htmlFor={props.id}>
New name for {props.name}
</label>
<input
id={props.id}
className="todo-text"
type="text"
value={newName || props.name}
onChange={handleChange}
ref={editFieldRef}
/>
</div>
<div className="btn-group">
<button
type="button"
className="btn todo-cancel"
onClick={() => setEditing(false)}
>
Cancel
<span className="visually-hidden">renaming {props.name}</span>
</button>
<button type="submit" className="btn btn__primary todo-edit">
Save
<span className="visually-hidden">new name for {props.name}</span>
</button>
</div>
</form>
);
const viewTemplate = (
<div className="stack-small">
<div className="c-cb">
<input
id={props.id}
type="checkbox"
defaultChecked={props.completed}
onChange={() => props.toggleTaskCompleted(props.id)}
/>
<label className="todo-label" htmlFor={props.id}>
{props.name}
</label>
</div>
<div className="btn-group">
<button
type="button"
className="btn"
onClick={() => setEditing(true)}
ref={editButtonRef}
>
Edit <span className="visually-hidden">{props.name}</span>
</button>
<button
type="button"
className="btn btn__danger"
onClick={() => props.deleteTask(props.id)}
>
Delete <span className="visually-hidden">{props.name}</span>
</button>
</div>
</div>
);
useEffect(() => {
if (!wasEditing && isEditing) {
editFieldRef.current.focus();
}
if (wasEditing && !isEditing) {
editButtonRef.current.focus();
}
}, [wasEditing, isEditing]);
return <li className="todo">{isEditing ? editingTemplate : viewTemplate}</li>;
}

View File

@@ -1,293 +0,0 @@
/* RESETS */
*,
*::before,
*::after {
box-sizing: border-box;
}
*:focus {
outline: 3px dashed #228bec;
outline-offset: 0;
}
html {
font: 62.5% / 1.15 sans-serif;
}
h1,
h2 {
margin-bottom: 0;
}
ul {
list-style: none;
padding: 0;
}
button {
border: none;
margin: 0;
padding: 0;
width: auto;
overflow: visible;
background: transparent;
color: inherit;
font: inherit;
line-height: normal;
-webkit-font-smoothing: inherit;
-moz-osx-font-smoothing: inherit;
-webkit-appearance: none;
}
button::-moz-focus-inner {
border: 0;
}
button,
input,
optgroup,
select,
textarea {
font-family: inherit;
font-size: 100%;
line-height: 1.15;
margin: 0;
}
button,
input {
overflow: visible;
}
input[type="text"] {
border-radius: 0;
}
body {
width: 100%;
max-width: 68rem;
margin: 0 auto;
font: 1.6rem/1.25 Arial, sans-serif;
background-color: #f5f5f5;
color: #4d4d4d;
}
@media screen and (min-width: 620px) {
body {
font-size: 1.9rem;
line-height: 1.31579;
}
}
/*END RESETS*/
/* GLOBAL STYLES */
.form-group > input[type="text"] {
display: inline-block;
margin-top: 0.4rem;
}
.btn {
padding: 0.8rem 1rem 0.7rem;
border: 0.2rem solid #4d4d4d;
cursor: pointer;
text-transform: capitalize;
}
.btn.toggle-btn {
border-width: 1px;
border-color: #d3d3d3;
}
.btn.toggle-btn[aria-pressed="true"] {
text-decoration: underline;
border-color: #4d4d4d;
}
.btn__danger {
color: #fff;
background-color: #ca3c3c;
border-color: #bd2130;
}
.btn__filter {
border-color: lightgrey;
}
.btn__primary {
color: #fff;
background-color: #000;
}
.btn-group {
display: flex;
justify-content: space-between;
}
.btn-group > * {
flex: 1 1 49%;
}
.btn-group > * + * {
margin-left: 0.8rem;
}
.label-wrapper {
margin: 0;
flex: 0 0 100%;
text-align: center;
}
.visually-hidden {
position: absolute !important;
height: 1px;
width: 1px;
overflow: hidden;
clip: rect(1px 1px 1px 1px);
clip: rect(1px, 1px, 1px, 1px);
white-space: nowrap;
}
[class*="stack"] > * {
margin-top: 0;
margin-bottom: 0;
}
.stack-small > * + * {
margin-top: 1.25rem;
}
.stack-large > * + * {
margin-top: 2.5rem;
}
@media screen and (min-width: 550px) {
.stack-small > * + * {
margin-top: 1.4rem;
}
.stack-large > * + * {
margin-top: 2.8rem;
}
}
.stack-exception {
margin-top: 1.2rem;
}
/* END GLOBAL STYLES */
.todoapp {
background: #fff;
margin: 2rem 0 4rem 0;
padding: 1rem;
position: relative;
box-shadow: 0 2px 4px 0 rgba(0, 0, 0, 0.2), 0 2.5rem 5rem 0 rgba(0, 0, 0, 0.1);
}
@media screen and (min-width: 550px) {
.todoapp {
padding: 4rem;
}
}
.todoapp > * {
max-width: 50rem;
margin-left: auto;
margin-right: auto;
}
.todoapp > form {
max-width: 100%;
}
.todoapp > h1 {
display: block;
max-width: 100%;
text-align: center;
margin: 0;
margin-bottom: 1rem;
}
.label__lg {
line-height: 1.01567;
font-weight: 300;
padding: 0.8rem;
margin-bottom: 1rem;
text-align: center;
}
.input__lg {
padding: 2rem;
border: 2px solid #000;
}
.input__lg:focus {
border-color: #4d4d4d;
box-shadow: inset 0 0 0 2px;
}
[class*="__lg"] {
display: inline-block;
width: 100%;
font-size: 1.9rem;
}
[class*="__lg"]:not(:last-child) {
margin-bottom: 1rem;
}
@media screen and (min-width: 620px) {
[class*="__lg"] {
font-size: 2.4rem;
}
}
.filters {
width: 100%;
margin: unset auto;
}
/* Todo item styles */
.todo {
display: flex;
flex-direction: row;
flex-wrap: wrap;
}
.todo > * {
flex: 0 0 100%;
}
.todo-text {
width: 100%;
min-height: 4.4rem;
padding: 0.4rem 0.8rem;
border: 2px solid #565656;
}
.todo-text:focus {
box-shadow: inset 0 0 0 2px;
}
/* CHECKBOX STYLES */
.c-cb {
box-sizing: border-box;
font-family: Arial, sans-serif;
-webkit-font-smoothing: antialiased;
font-weight: 400;
font-size: 1.6rem;
line-height: 1.25;
display: block;
position: relative;
min-height: 44px;
padding-left: 40px;
clear: left;
}
.c-cb > label::before,
.c-cb > input[type="checkbox"] {
box-sizing: border-box;
top: -2px;
left: -2px;
width: 44px;
height: 44px;
}
.c-cb > input[type="checkbox"] {
-webkit-font-smoothing: antialiased;
cursor: pointer;
position: absolute;
z-index: 1;
margin: 0;
opacity: 0;
}
.c-cb > label {
font-size: inherit;
font-family: inherit;
line-height: inherit;
display: inline-block;
margin-bottom: 0;
padding: 8px 15px 5px;
cursor: pointer;
touch-action: manipulation;
}
.c-cb > label::before {
content: "";
position: absolute;
border: 2px solid currentColor;
background: transparent;
}
.c-cb > input[type="checkbox"]:focus + label::before {
border-width: 4px;
outline: 3px dashed #228bec;
}
.c-cb > label::after {
box-sizing: content-box;
content: "";
position: absolute;
top: 11px;
left: 9px;
width: 18px;
height: 7px;
transform: rotate(-45deg);
border: solid;
border-width: 0 0 5px 5px;
border-top-color: transparent;
opacity: 0;
background: transparent;
}
.c-cb > input[type="checkbox"]:checked + label::after {
opacity: 1;
}

View File

@@ -1,18 +0,0 @@
import React from 'react';
import ReactDOM from 'react-dom';
import './index.css';
import App from './App';
const DATA = [
{ id: "todo-0", name: "Eat", completed: true },
{ id: "todo-1", name: "Sleep", completed: false },
{ id: "todo-2", name: "Repeat", completed: false }
];
ReactDOM.render(
<React.StrictMode>
<App tasks={DATA} />
</React.StrictMode>,
document.getElementById('root')
);

View File

@@ -1,106 +0,0 @@
package todoapp
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/alpine"
"universe.dagger.io/bash"
"universe.dagger.io/docker"
"universe.dagger.io/netlify"
)
dagger.#Plan & {
_nodeModulesMount: "/src/node_modules": {
dest: "/src/node_modules"
type: "cache"
contents: core.#CacheDir & {
id: "todoapp-modules-cache"
}
}
client: {
filesystem: {
"./": read: {
contents: dagger.#FS
exclude: [
"README.md",
"_build",
"todoapp.cue",
"node_modules",
]
}
"./_build": write: contents: actions.build.contents.output
}
env: {
APP_NAME: string
NETLIFY_TEAM: string
NETLIFY_TOKEN: dagger.#Secret
}
}
actions: {
deps: docker.#Build & {
steps: [
alpine.#Build & {
packages: {
bash: {}
yarn: {}
git: {}
}
},
docker.#Copy & {
contents: client.filesystem."./".read.contents
dest: "/src"
},
bash.#Run & {
workdir: "/src"
mounts: {
"/cache/yarn": {
dest: "/cache/yarn"
type: "cache"
contents: core.#CacheDir & {
id: "todoapp-yarn-cache"
}
}
_nodeModulesMount
}
script: contents: #"""
yarn config set cache-folder /cache/yarn
yarn install
"""#
},
]
}
test: bash.#Run & {
input: deps.output
workdir: "/src"
mounts: _nodeModulesMount
script: contents: #"""
yarn run test
"""#
}
build: {
run: bash.#Run & {
input: test.output
mounts: _nodeModulesMount
workdir: "/src"
script: contents: #"""
yarn run build
"""#
}
contents: core.#Subdir & {
input: run.output.rootfs
path: "/src/build"
}
}
deploy: netlify.#Deploy & {
contents: build.contents.output
site: client.env.APP_NAME
token: client.env.NETLIFY_TOKEN
team: client.env.NETLIFY_TEAM
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,8 +0,0 @@
package git
import (
"dagger.io/dagger/core"
)
#Pull: core.#GitPull
#Push: core.#GitPush

View File

@@ -1,51 +0,0 @@
package go
import (
"dagger.io/dagger"
)
// Build a go binary
#Build: {
// Source code
source: dagger.#FS
// Target package to build
package: *"." | string
// Target architecture
arch: *"amd64" | string
// Target OS
os: *"linux" | string
// Build tags to use for building
tags: *"" | string
// LDFLAGS to use for linking
ldflags: *"" | string
env: [string]: string
container: #Container & {
"source": source
"env": {
env
GOOS: os
GOARCH: arch
}
command: {
args: [package]
flags: {
build: true
"-v": true
"-tags": tags
"-ldflags": ldflags
"-o": "/output/"
}
}
export: directories: "/output": _
}
// Directory containing the output of the build
output: container.export.directories."/output"
}

View File

@@ -1,42 +0,0 @@
// Go operation
package go
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
// A standalone go environment to run go command
#Container: {
// Container app name
name: *"go_builder" | string
// Source code
source: dagger.#FS
// Use go image
_image: #Image
_sourcePath: "/src"
_cachePath: "/root/.cache/gocache"
docker.#Run & {
input: *_image.output | docker.#Image
workdir: "/src"
command: name: "go"
mounts: {
"source": {
dest: _sourcePath
contents: source
}
"go assets cache": {
contents: core.#CacheDir & {
id: "\(name)_assets"
}
dest: _cachePath
}
}
env: GOMODCACHE: _cachePath
}
}

View File

@@ -1,37 +0,0 @@
package go
import (
"universe.dagger.io/docker"
)
// Go image default version
#DefaultVersion: "1.16"
// Build a go base image
#Image: {
version: *#DefaultVersion | string
packages: [pkgName=string]: version: string | *""
// FIXME Basically a copy of alpine.#Build with a different image
// Should we create a special definition?
docker.#Build & {
steps: [
docker.#Pull & {
source: "index.docker.io/golang:\(version)-alpine"
},
for pkgName, pkg in packages {
docker.#Run & {
command: {
name: "apk"
args: ["add", "\(pkgName)\(pkg.version)"]
flags: {
"-U": true
"--no-cache": true
}
}
}
},
]
}
}

View File

@@ -1,17 +0,0 @@
package go
// Test a go package
#Test: {
// Package to test
package: *"." | string
#Container & {
command: {
args: [package]
flags: {
test: true
"-v": true
}
}
}
}

View File

@@ -1,44 +0,0 @@
package go
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/go"
"universe.dagger.io/docker"
"universe.dagger.io/alpine"
)
dagger.#Plan & {
client: filesystem: "./data/hello": read: contents: dagger.#FS
actions: test: {
_baseImage: alpine.#Build
simple: {
build: go.#Build & {
source: client.filesystem."./data/hello".read.contents
}
exec: docker.#Run & {
input: _baseImage.output
command: {
name: "/bin/sh"
args: ["-c", "/bin/hello >> /output.txt"]
}
env: NAME: "dagger"
mounts: binary: {
dest: "/bin/hello"
contents: build.output
source: "/test"
}
}
verify: core.#ReadFile & {
input: exec.output.rootfs
path: "/output.txt"
} & {
contents: "Hi dagger!"
}
}
}
}

View File

@@ -1,30 +0,0 @@
package go
import (
"dagger.io/dagger"
"universe.dagger.io/go"
"universe.dagger.io/alpine"
)
dagger.#Plan & {
actions: test: {
_source: dagger.#Scratch & {}
simple: go.#Container & {
source: _source
command: args: ["version"]
}
override: {
base: alpine.#Build & {
packages: go: _
}
command: go.#Container & {
input: base.output
source: _source
command: args: ["version"]
}
}
}
}

View File

@@ -1,44 +0,0 @@
package go
import (
"dagger.io/dagger"
"universe.dagger.io/go"
"universe.dagger.io/docker"
)
dagger.#Plan & {
actions: test: {
_source: dagger.#Scratch & {}
simple: {
_image: go.#Image & {}
verify: docker.#Run & {
input: _image.output
command: {
name: "/bin/sh"
args: ["-c", """
go version | grep "1.16"
"""]
}
}
}
custom: {
_image: go.#Image & {
version: "1.17"
packages: bash: _
}
verify: docker.#Run & {
input: _image.output
command: {
name: "/bin/bash"
args: ["-c", """
go version | grep "1.17"
"""]
}
}
}
}
}

View File

@@ -1,12 +0,0 @@
setup() {
load '../../bats_helpers'
common_setup
}
@test "bash" {
dagger "do" -p ./build.cue test
dagger "do" -p ./container.cue test
dagger "do" -p ./image.cue test
dagger "do" -p ./test.cue test
}

View File

@@ -1,15 +0,0 @@
package go
import (
"dagger.io/dagger"
"universe.dagger.io/go"
)
dagger.#Plan & {
client: filesystem: "./data/hello": read: contents: dagger.#FS
actions: test: go.#Test & {
source: client.filesystem."./data/hello".read.contents
package: "./greeting"
}
}

View File

@@ -1,62 +0,0 @@
#!/bin/bash
set -e -o pipefail
NETLIFY_AUTH_TOKEN="$(cat /run/secrets/token)"
export NETLIFY_AUTH_TOKEN
create_site() {
url="https://api.netlify.com/api/v1/${NETLIFY_ACCOUNT:-}/sites"
curl -s -S --fail-with-body -H "Authorization: Bearer $NETLIFY_AUTH_TOKEN" \
-X POST -H "Content-Type: application/json" \
"$url" \
-d "{\"name\": \"${NETLIFY_SITE_NAME}\", \"custom_domain\": \"${NETLIFY_DOMAIN}\"}" -o body
# shellcheck disable=SC2181
if [ $? -ne 0 ]; then
>&2 echo "Error creating site [${NETLIFY_SITE_NAME}] for account [${NETLIFY_ACCOUNT}]"
cat body >&2
exit 1
fi
jq -r '.site_id' body
}
site_id=$(
curl -s -S -f -H "Authorization: Bearer $NETLIFY_AUTH_TOKEN" \
"https://api.netlify.com/api/v1/sites?filter=all" |
jq -r ".[] | select(.name==\"$NETLIFY_SITE_NAME\") | .id"
)
if [ -z "$site_id" ]; then
if [ "${NETLIFY_SITE_CREATE:-}" != 1 ]; then
echo "Site $NETLIFY_SITE_NAME does not exist"
exit 1
fi
site_id=$(create_site)
if [ -z "$site_id" ]; then
echo "create site failed"
exit 1
else
echo "clean create site API response..."
rm -f body
fi
fi
netlify link --id "$site_id"
netlify deploy \
--build \
--site="$site_id" \
--prod |
tee /tmp/stdout
url="$(grep </tmp/stdout Website | grep -Eo 'https://[^ >]+' | head -1)"
deployUrl="$(grep </tmp/stdout Unique | grep -Eo 'https://[^ >]+' | head -1)"
logsUrl="$(grep </tmp/stdout Logs | grep -Eo 'https://[^ >]+' | head -1)"
# Write output files
mkdir -p /netlify
echo -n "$url" >/netlify/url
echo -n "$deployUrl" >/netlify/deployUrl
echo -n "$logsUrl" >/netlify/logsUrl

View File

@@ -1,110 +0,0 @@
// Deploy to Netlify
// https://netlify.com
package netlify
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/alpine"
"universe.dagger.io/docker"
"universe.dagger.io/bash"
)
// Deploy a site to Netlify
#Deploy: {
// Contents of the site
contents: dagger.#FS
// Name of the Netlify site
// Example: "my-super-site"
site: string
// Netlify API token
token: dagger.#Secret
// Name of the Netlify team (optional)
// Example: "acme-inc"
// Default: use the Netlify account's default team
team: string | *""
// Domain at which the site should be available (optional)
// If not set, Netlify will allocate one under netlify.app.
// Example: "www.mysupersite.tld"
domain: string | *null
// Create the site if it doesn't exist
create: *true | false
// Build a docker image to run the netlify client
_build: docker.#Build & {
steps: [
alpine.#Build & {
packages: {
bash: {}
curl: {}
jq: {}
npm: {}
}
},
// FIXME: make this an alpine custom package, that would be so cool.
docker.#Run & {
command: {
name: "npm"
args: ["-g", "install", "netlify-cli@8.6.21"]
}
},
]
}
// Run the netlify client in a container
container: bash.#Run & {
input: *_build.output | docker.#Image
script: {
_load: core.#Source & {
path: "."
include: ["*.sh"]
}
directory: _load.output
filename: "deploy.sh"
}
always: true
env: {
NETLIFY_SITE_NAME: site
if (create) {
NETLIFY_SITE_CREATE: "1"
}
if domain != null {
NETLIFY_DOMAIN: domain
}
NETLIFY_ACCOUNT: team
}
workdir: "/src"
mounts: {
"Site contents": {
dest: "/src"
"contents": contents
}
"Netlify token": {
dest: "/run/secrets/token"
contents: token
}
}
export: files: {
"/netlify/url": _
"/netlify/deployUrl": _
"/netlify/logsUrl": _
}
}
// URL of the deployed site
url: container.export.files."/netlify/url"
// URL of the latest deployment
deployUrl: container.export.files."/netlify/deployUrl"
// URL for logs of the latest deployment
logsUrl: container.export.files."/netlify/logsUrl"
}

View File

@@ -1,9 +0,0 @@
setup() {
load '../../bats_helpers'
common_setup
}
@test "netlify" {
dagger "do" test
}

View File

@@ -1,101 +0,0 @@
package netlify
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
"universe.dagger.io/netlify"
"universe.dagger.io/netlify/test/testutils"
)
dagger.#Plan & {
client: commands: sops: {
name: "sops"
args: ["-d", "../../test_secrets.yaml"]
stdout: dagger.#Secret
}
actions: test: {
// Configuration common to all tests
common: {
testSecrets: core.#DecodeSecret & {
input: client.commands.sops.stdout
format: "yaml"
}
token: testSecrets.output.netlifyToken.contents
marker: "hello world"
data: core.#WriteFile & {
input: dagger.#Scratch
path: "index.html"
contents: marker
}
}
// Test: deploy a simple site to Netlify
simple: {
// Deploy to netlify
deploy: netlify.#Deploy & {
team: "blocklayer"
token: common.token
site: "dagger-test"
contents: common.data.output
}
verify: testutils.#AssertURL & {
url: deploy.deployUrl
contents: common.marker
}
}
// Test: deploy to Netlify with a custom image
swapImage: {
// Deploy to netlify
deploy: netlify.#Deploy & {
team: "blocklayer"
token: common.token
site: "dagger-test"
contents: common.data.output
container: input: customImage.output
}
customImage: docker.#Build & {
steps: [
docker.#Pull & {
source: "alpine"
},
docker.#Run & {
command: {
name: "apk"
args: [
"add",
"--no-cache",
"yarn",
"bash",
"rsync",
"curl",
"jq",
]
}
},
docker.#Run & {
command: {
name: "yarn"
args: ["global", "add", "netlify-cli"]
}
},
]
}
verify: testutils.#AssertURL & {
url: deploy.deployUrl
contents: common.marker
}
}
}
}

View File

@@ -1,26 +0,0 @@
package testutils
import (
"universe.dagger.io/bash"
"universe.dagger.io/alpine"
)
// Assert the text contents available at a URL
#AssertURL: {
url: string
contents: string
run: bash.#Run & {
input: image.output
script: "contents": """
test "$(curl \(url))" = "\(contents)"
"""
}
image: alpine.#Build & {
packages: {
bash: {}
curl: {}
}
}
}

View File

@@ -1,26 +0,0 @@
// Run and deploy the Nginx web server
// https://nginx.org
package nginx
import (
"universe.dagger.io/docker"
)
// Build a nginx container image
// FIXME: bootstrapping by wrapping "docker pull nginx"
// Possible ways to improve:
// 1. "docker build" the docker hub image ourselves: https://github.com/nginxinc/docker-nginx
// 2. Reimplement same docker build in pure Cue (no more Dockerfile)
// FIXME: build from source or package distro, instead of docker pull
#Build: {
output: docker.#Image & _pull.image
_pull: docker.#Pull
*{
flavor: "alpine"
_pull: source: "index.docker.io/nginx:stable-alpine"
} | {
flavor: "debian"
_pull: source: "index.docker.io/nginx:stable"
}
}

View File

@@ -1,11 +0,0 @@
{
"license": "Apache-2.0",
"scripts": {
"test": "bats --report-formatter junit --jobs 4 $(find . -type f -name '*.bats' -not -path '*/node_modules/*')"
},
"devDependencies": {
"bats": "^1.5.0",
"bats-assert": "^2.0.0",
"bats-support": "^0.3.0"
}
}

View File

@@ -1,55 +0,0 @@
// Helpers to run PowerShell commands in containers
package powershell
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
)
// Run a PowerShell (pwsh) script in a Docker container
// This does not suppore Windows containers or Windows PowerShell.
// Since this is a thin wrapper over docker.#Run, we embed it.
// Whether to embed or wrap is a case-by-case decision, like in Go.
#Run: {
// The script to execute
script: {
// A directory containing one or more PowerShell scripts
directory: dagger.#FS
// Name of the file to execute
filename: string
_directory: directory
_filename: filename
} | {
// Script contents
contents: string
_filename: "run.ps1"
_write: core.#WriteFile & {
input: dagger.#Scratch
path: _filename
"contents": contents
}
_directory: _write.output
}
// Arguments to the script
args: [...string]
// Where in the container to mount the scripts directory
_mountpoint: "/powershell/scripts"
docker.#Run & {
command: {
name: "pwsh"
"args": args
flags: "-File": "\(_mountpoint)/\(script._filename)"
}
mounts: "Pwsh scripts": {
contents: script._directory
dest: _mountpoint
}
}
}

View File

@@ -1 +0,0 @@
Set-Content -Value "Hello world!" -Path "/out.txt"

View File

@@ -1,10 +0,0 @@
setup() {
load '../../bats_helpers'
common_setup
}
@test "powershell" {
dagger "do" -p ./test.cue test
}

View File

@@ -1,72 +0,0 @@
package powershell
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
"universe.dagger.io/powershell"
)
dagger.#Plan & {
actions: test: {
_pull: docker.#Pull & {
source: "mcr.microsoft.com/powershell"
}
_image: _pull.output
// Run a script from source directory + filename
runFile: {
dir: _load.output
_load: core.#Source & {
path: "./data"
include: ["*.ps1"]
}
run: powershell.#Run & {
input: _image
export: files: "/out.txt": _
script: {
directory: dir
filename: "hello.ps1"
}
}
output: run.export.files."/out.txt" & "Hello world!\n"
}
// Run a script from string
runString: {
run: powershell.#Run & {
input: _image
export: files: "/output.txt": _
script: contents: "Set-Content -Value 'Hello inline world!' -Path '/output.txt'"
}
output: run.export.files."/output.txt" & "Hello inline world!\n"
}
// Test args from string
runStringArg: {
run: powershell.#Run & {
input: _image
export: files: "/output.txt": _
script: contents: "Set-Content -Value 'Hello arg world!' -Path $($args[0])"
args: ["/output.txt"]
}
output: run.export.files."/output.txt" & "Hello arg world!\n"
}
// Test 2 args from string
runString2Arg: {
run: powershell.#Run & {
input: _image
export: files: "/output.txt": _
script: contents: "Set-Content -Value \"Hello args $($args[0])\" -Path $($args[1])"
args: ["world!", "/output.txt"]
}
output: run.export.files."/output.txt" & "Hello args world!\n"
}
}
}

View File

@@ -1,29 +0,0 @@
// Helpers to run python programs
package python
import (
"universe.dagger.io/docker"
"universe.dagger.io/alpine"
)
// Run a python script in a container
#Run: {
// Contents of the python script
script: string
// FIXME: don't pass the script as argument: write to filesystme instead
docker.#Run & {
command: {
name: "python"
flags: "-c": script
}
// As a convenience, image defaults to a ready-to-use python environment
image: docker.#Image | *_defaultImage
_defaultImage: alpine.#Image & {
packages: python: version: "3"
}
}
}

View File

@@ -1,21 +0,0 @@
netlifyToken: ENC[AES256_GCM,data:DeTBgf73iiIDVJZ3i1Rd6Cn9KvJGwh7n8/u/zWKdpaMvU7R1X43JqMbZMg==,iv:0HmdJr7BHKQk+RrCWAzZCkU7BkJ5N5//otgwAgJnQ6w=,tag:DoVYsCnO6HMHXpakX4uBlA==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age1gxwmtwahzwdmrskhf90ppwlnze30lgpm056kuesrxzeuyclrwvpsupwtpk
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBnUEhWbjV3M29oUUJyWk81
Wk1WQ1E0cmtuVlhNSGxkWUM3WmJXdUYvbzAwCjlFWW9IVmtmTjY1aU1LR2lxWFlT
am9RemNqSDRWK2FDYk1xeGNiTFlWMFUKLS0tIFVrSzBCMERQbnhYb09ReVpFK00v
TG5YUDlFVzlRRFBCdEhsNVlVK1dMRTgKx1TPZWWQiaU8iMni03/ekG+m4rFCcaa4
JI+ED2d+8411BgZtlss/ukQtwskidvYTvetyWw2jes6o1lhfDv5q2A==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2022-01-20T00:42:44Z"
mac: ENC[AES256_GCM,data:N4dbowNmz34Hn/o1Ofv4g9Z5I7EzcYyrGpXSu9fkczd69zkTpv87uFamEdV/kQM2bbIEm9gS8d0oTi41qsC0iax368YUJmjG6xMptwrrA/mcjRzwXjlPrCZN9454srJw4NXWm0F5/aJQa4XlO65OCLZw+4WCz0wyAWwKzuQNAb0=,iv:EIG55jdEIbVp390uCVJ/rCjJO+s+CsAblH0/CIMNgIc=,tag:dcZDoMsBToikTQ83R0azag==,type:str]
pgp: []
unencrypted_suffix: _unencrypted
version: 3.7.1

View File

@@ -1,18 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
bats-assert@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/bats-assert/-/bats-assert-2.0.0.tgz#ba1b4eeee2c7848f1a25948b623790dd41a2b94b"
integrity sha512-qO3kNilWxW8iCONu9NDUfvsCiC6JzL6DPOc/DGq9z3bZ9/A7wURJ+FnFMxGbofOmWbCoy7pVhofn0o47A95qkQ==
bats-support@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/bats-support/-/bats-support-0.3.0.tgz#a1f6b8878d2a51837911fdffa0750036f60701ef"
integrity sha512-z+2WzXbI4OZgLnynydqH8GpI3+DcOtepO66PlK47SfEzTkiuV9hxn9eIQX+uLVFbt2Oqoc7Ky3TJ/N83lqD+cg==
bats@^1.5.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/bats/-/bats-1.5.0.tgz#683f522e89df7d8fc99bf3631d35501f35445166"
integrity sha512-83YgQw24Yi2c1ctB0Vd7WCsACUMSWuEtOboxQZyFQYfiv9hDMW7nk7bdloqGLg3vK5pOODCBGBQjhvRmHKsJuA==

View File

@@ -1,12 +0,0 @@
{
"name": "test",
"main": "index.js",
"license": {
"type": "Apache-2.0",
"url": "https://opensource.org/licenses/apache2.0.php"
},
"scripts": {
"build": "mkdir -p ./build && cp /.env ./build/env"
}
}

View File

@@ -1,11 +0,0 @@
{
"name": "test",
"main": "index.js",
"license": {
"type": "Apache-2.0",
"url": "https://opensource.org/licenses/apache2.0.php"
},
"scripts": {
"build": "mkdir -p ./build && echo output > ./build/test && touch .env && cp .env ./build/"
}
}

View File

@@ -1,9 +0,0 @@
setup() {
load '../../bats_helpers'
common_setup
}
@test "yarn" {
dagger "do" test
}

View File

@@ -1,91 +0,0 @@
package yarn
import (
"dagger.io/dagger"
"dagger.io/dagger/core"
"universe.dagger.io/docker"
"universe.dagger.io/yarn"
)
dagger.#Plan & {
client: filesystem: {
"./data/foo": read: contents: dagger.#FS
"./data/bar": read: contents: dagger.#FS
}
actions: test: {
// Configuration for all tests
common: {
data: client.filesystem."./data/foo".read.contents
}
// Run yarn.#Build
simple: {
build: yarn.#Build & {
source: common.data
}
verify: #AssertFile & {
input: build.output
path: "test"
contents: "output\n"
}
}
// Run yarn.#Build with a custom name
customName: {
build: yarn.#Build & {
name: "My Build"
source: common.data
}
verify: #AssertFile & {
input: build.output
path: "test"
contents: "output\n"
}
}
// Run yarn.#Build with a custom docker image
customImage: {
buildImage: docker.#Build & {
steps: [
docker.#Pull & {
source: "alpine"
},
docker.#Run & {
command: {
name: "apk"
args: ["add", "yarn", "bash"]
}
},
]
}
image: build.output
build: yarn.#Build & {
source: common.data
container: #input: buildImage.output
}
}
}
}
// Make an assertion on the contents of a file
#AssertFile: {
input: dagger.#FS
path: string
contents: string
_read: core.#ReadFile & {
"input": input
"path": path
}
actual: _read.contents
// Assertion
contents: actual
}

Some files were not shown because too many files have changed in this diff Show More