Cookbook
Filesystem
List host directory contents
The following code listing obtains a reference to the host working directory and lists the directory's contents.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
entries, err := client.Host().Directory(".").Entries(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(entries)
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
const entries = await client.host().directory(".").entries()
console.log(entries)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
entries = await client.host().directory(".").entries()
print(entries)
anyio.run(main)
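A closely related operation reads a single file from the host rather than listing a directory. The following Go sketch reuses the client setup from the Go listing above and assumes a file named config.yaml exists in the host working directory (the file name is purely illustrative):
// read the contents of a single host file
// (config.yaml is an illustrative name; substitute any existing file)
contents, err := client.Host().File("config.yaml").Contents(ctx)
if err != nil {
    log.Println(err)
    return
}
fmt.Println(contents)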
Get host directory with filters
The following code listing obtains a reference to the host working directory containing all files except *.txt files.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
os.WriteFile("foo.txt", []byte("1"), 0600)
os.WriteFile("bar.txt", []byte("2"), 0600)
os.WriteFile("baz.rar", []byte("3"), 0600)
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
entries, err := client.Host().Directory(".", dagger.HostDirectoryOpts{
Exclude: []string{"*.txt"},
}).Entries(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(entries)
}
import { connect, Client } from "@dagger.io/dagger"
import * as fs from "fs"
const files = ["foo.txt", "bar.txt", "baz.rar"]
let count = 1
for (const file of files) {
fs.writeFileSync(file, count.toString())
count = count + 1
}
connect(
async (client: Client) => {
const entries = await client
.host()
.directory(".", { exclude: ["*.txt"] })
.entries()
console.log(entries)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
for i, file in enumerate(["foo.txt", "bar.txt", "baz.rar"]):
await (anyio.Path(".") / file).write_text(str(i + 1))
cfg = dagger.Config(log_output=sys.stderr)
async with dagger.Connection(cfg) as client:
entries = await client.host().directory(".", exclude=["*.txt"]).entries()
print(entries)
anyio.run(main)
The following code listing obtains a reference to the host working directory containing only *.rar files.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
os.WriteFile("foo.txt", []byte("1"), 0600)
os.WriteFile("bar.txt", []byte("2"), 0600)
os.WriteFile("baz.rar", []byte("3"), 0600)
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
entries, err := client.Host().Directory(".", dagger.HostDirectoryOpts{
Include: []string{"*.rar"},
}).Entries(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(entries)
}
import { connect, Client } from "@dagger.io/dagger"
import * as fs from "fs"
const files = ["foo.txt", "bar.txt", "baz.rar"]
let count = 1
for (const file of files) {
fs.writeFileSync(file, count.toString())
count = count + 1
}
connect(
async (client: Client) => {
const entries = await client
.host()
.directory(".", { include: ["*.rar"] })
.entries()
console.log(entries)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
for i, file in enumerate(["foo.txt", "bar.txt", "baz.rar"]):
await (anyio.Path(".") / file).write_text(str(i + 1))
cfg = dagger.Config(log_output=sys.stderr)
async with dagger.Connection(cfg) as client:
entries = await client.host().directory(".", include=["*.rar"]).entries()
print(entries)
anyio.run(main)
The following code listing obtains a reference to the host working directory containing all files except *.rar files.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
os.WriteFile("foo.txt", []byte("1"), 0600)
os.WriteFile("bar.txt", []byte("2"), 0600)
os.WriteFile("baz.rar", []byte("3"), 0600)
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
entries, err := client.Host().Directory(".", dagger.HostDirectoryOpts{
Include: []string{"*.*"},
Exclude: []string{"*.rar"},
}).Entries(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(entries)
}
import { connect, Client } from "@dagger.io/dagger"
import * as fs from "fs"
const files = ["foo.txt", "bar.txt", "baz.rar"]
let count = 1
for (const file of files) {
fs.writeFileSync(file, count.toString())
count = count + 1
}
connect(
async (client: Client) => {
const entries = await client
.host()
.directory(".", { include: ["*.*"], exclude: ["*.rar"] })
.entries()
console.log(entries)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
for i, file in enumerate(["foo.txt", "bar.txt", "baz.rar"]):
await (anyio.Path(".") / file).write_text(str(i + 1))
cfg = dagger.Config(log_output=sys.stderr)
async with dagger.Connection(cfg) as client:
entries = (
await client.host()
.directory(".", exclude=["*.rar"], include=["*.*"])
.entries()
)
print(entries)
anyio.run(main)
Transfer and read host directory in container
The following code listing writes a host directory to a container at the /host container path and then reads the contents of the directory.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
contents, err := client.Container().
From("alpine:latest").
WithDirectory("/host", client.Host().Directory(".")).
WithExec([]string{"ls", "/host"}).
Stdout(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(contents)
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
const contents = await client
.container()
.from("alpine:latest")
.withDirectory("/host", client.host().directory("."))
.withExec(["ls", "/host"])
.stdout()
console.log(contents)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
out = await (
client.container()
.from_("alpine:latest")
.with_directory("/host", client.host().directory("."))
.with_exec(["ls", "/host"])
.stdout()
)
print(out)
anyio.run(main)
Transfer and write to host directory from container
The following code listing writes a host directory to a container at the /host container path, adds a file to it, and then exports the modified directory back to the host:
Modifications made to a host directory written to a container filesystem path do not appear on the host. Data flows only one way between Dagger operations, because they are connected in a DAG. To write modifications back to the host directory, you must explicitly export the directory back to the host filesystem.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
contents, err := client.Container().
From("alpine:latest").
WithDirectory("/host", client.Host().Directory("/tmp/sandbox")).
WithExec([]string{"/bin/sh", "-c", `echo foo > /host/bar`}).
Directory("/host").
Export(ctx, "/tmp/sandbox")
if err != nil {
log.Println(err)
return
}
fmt.Println(contents)
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
const contents = await client
.container()
.from("alpine:latest")
.withDirectory("/host", client.host().directory("/tmp/sandbox"))
.withExec(["/bin/sh", "-c", `echo foo > /host/bar`])
.directory("/host")
.export("/tmp/sandbox")
console.log(contents)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
out = await (
client.container()
.from_("alpine:latest")
.with_directory("/host", client.host().directory("/tmp/sandbox"))
.with_exec(["/bin/sh", "-c", "`echo foo > /host/bar`"])
.directory("/host")
.export("/tmp/sandbox")
)
print(out)
anyio.run(main)
Add Git repository as directory to container
The following code listing adds a remote Git repository branch to a container as a directory at the /src container path and then executes a command in the container to list the directory contents.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// get repository at specified branch
project := client.
Git("https://github.com/dagger/dagger").
Branch("main").
Tree()
// return container with repository
// at /src path
contents, err := client.Container().
From("alpine:latest").
WithDirectory("/src", project).
WithWorkdir("/src").
WithExec([]string{"ls", "/src"}).
Stdout(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(contents)
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// get repository at specified branch
const project = client
.git("https://github.com/dagger/dagger")
.branch("main")
.tree()
// return container with repository
// at /src path
const contents = await client
.container()
.from("alpine:latest")
.withDirectory("/src", project)
.withWorkdir("/src")
.withExec(["ls", "/src"])
.stdout()
console.log(contents)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get repository at specified branch
project = client.git("https://github.com/dagger/dagger").branch("main").tree()
# return container with repository
# at /src path
out = await (
client.container()
.from_("alpine:latest")
.with_directory("/src", project)
.with_workdir("/src")
.with_exec(["ls", "/src"])
.stdout()
)
print(out)
anyio.run(main)
Add Git repository as directory to container with filters
The following code listing adds a remote Git repository branch as a directory at the /src container path, excluding *.md files.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// get repository at specified branch
project := client.
Git("https://github.com/dagger/dagger").
Branch("main").
Tree()
// return container with repository
// at /src path
// excluding *.md files
contents, err := client.Container().
From("alpine:latest").
WithDirectory("/src", project, dagger.ContainerWithDirectoryOpts{
Exclude: []string{"*.md"},
}).
WithWorkdir("/src").
WithExec([]string{"ls", "/src"}).
Stdout(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(contents)
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// get repository at specified branch
const project = client
.git("https://github.com/dagger/dagger")
.branch("main")
.tree()
// return container with repository
// at /src path
// excluding *.md files
const contents = await client
.container()
.from("alpine:latest")
.withDirectory("/src", project, { exclude: ["*.md"] })
.withWorkdir("/src")
.withExec(["ls", "/src"])
.stdout()
console.log(contents)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get repository at specified branch
project = client.git("https://github.com/dagger/dagger").branch("main").tree()
# return container with repository
# at /src path
# excluding *.md files
out = await (
client.container()
.from_("alpine:latest")
.with_directory("/src", project, exclude=["*.md"])
.with_workdir("/src")
.with_exec(["ls", "/src"])
.stdout()
)
print(out)
anyio.run(main)
The following code listing adds a remote Git repository branch as a directory at the /src container path, including only *.md files.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// get repository at specified branch
project := client.
Git("https://github.com/dagger/dagger").
Branch("main").
Tree()
// return container with repository
// at /src path
// including only *.md files
contents, err := client.Container().
From("alpine:latest").
WithDirectory("/src", project, dagger.ContainerWithDirectoryOpts{
Include: []string{"*.md"},
}).
WithWorkdir("/src").
WithExec([]string{"ls", "/src"}).
Stdout(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(contents)
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// get repository at specified branch
const project = client
.git("https://github.com/dagger/dagger")
.branch("main")
.tree()
// return container with repository
// at /src path
// including only *.md files
const contents = await client
.container()
.from("alpine:latest")
.withDirectory("/src", project, { include: ["*.md"] })
.withWorkdir("/src")
.withExec(["ls", "/src"])
.stdout()
console.log(contents)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get repository at specified branch
project = client.git("https://github.com/dagger/dagger").branch("main").tree()
# return container with repository
# at /src path
# including only *.md files
out = await (
client.container()
.from_("alpine:latest")
.with_directory("/src", project, include=["*.md"])
.with_workdir("/src")
.with_exec(["ls", "/src"])
.stdout()
)
print(out)
anyio.run(main)
The following code listing adds a remote Git repository branch as a directory at the /src container path, including all *.md files except README.md.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"log"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// get repository at specified branch
project := client.
Git("https://github.com/dagger/dagger").
Branch("main").
Tree()
// return container with repository
// at /src path
// include all *.md files except README.md
contents, err := client.Container().
From("alpine:latest").
WithDirectory("/src", project, dagger.ContainerWithDirectoryOpts{
Include: []string{"*.md"},
Exclude: []string{"README.md"},
}).
WithWorkdir("/src").
WithExec([]string{"ls", "/src"}).
Stdout(ctx)
if err != nil {
log.Println(err)
return
}
fmt.Println(contents)
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// get repository at specified branch
const project = client
.git("https://github.com/dagger/dagger")
.branch("main")
.tree()
// return container with repository
// at /src path
// include all *.md files except README.md
const contents = await client
.container()
.from("alpine:latest")
.withDirectory("/src", project, {
include: ["*.md"],
exclude: ["README.md"],
})
.withWorkdir("/src")
.withExec(["ls", "/src"])
.stdout()
console.log(contents)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get repository at specified branch
project = client.git("https://github.com/dagger/dagger").branch("main").tree()
# return container with repository
# at /src path
# include all *.md files except README.md
out = await (
client.container()
.from_("alpine:latest")
.with_directory("/src", project, include=["*.md"], exclude=["README.md"])
.with_workdir("/src")
.with_exec(["ls", "/src"])
.stdout()
)
print(out)
anyio.run(main)
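The same pattern applies when pinning the repository to a tag or a specific commit instead of a branch; the Git type also exposes Tag() and Commit() methods. A minimal Go sketch (the tag name is illustrative):
// pin the repository to a tag rather than a branch
project := client.
    Git("https://github.com/dagger/dagger").
    Tag("v0.9.0").
    Tree()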
Builds
Perform multi-stage build
The following code listing performs a multi-stage build.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// create dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// get host directory
project := client.Host().Directory(".")
// build app
builder := client.Container().
From("golang:latest").
WithDirectory("/src", project).
WithWorkdir("/src").
WithEnvVariable("CGO_ENABLED", "0").
WithExec([]string{"go", "build", "-o", "myapp"})
// publish binary on alpine base
prodImage := client.Container().
From("alpine").
WithFile("/bin/myapp", builder.File("/src/myapp")).
WithEntrypoint([]string{"/bin/myapp"})
addr, err := prodImage.Publish(ctx, "localhost:5000/multistage")
if err != nil {
panic(err)
}
fmt.Println(addr)
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
// get host directory
const project = client.host().directory(".")
// build app
const builder = client
.container()
.from("golang:latest")
.withDirectory("/src", project)
.withWorkdir("/src")
.withEnvVariable("CGO_ENABLED", "0")
.withExec(["go", "build", "-o", "myapp"])
// publish binary on alpine base
const prod = client
.container()
.from("alpine")
.withFile("/bin/myapp", builder.file("/src/myapp"))
.withEntrypoint(["/bin/myapp"])
const addr = await prod.publish("localhost:5000/multistage")
console.log(addr)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get host directory
project = client.host().directory(".")
# build app
builder = (
client.container()
.from_("golang:latest")
.with_directory("/src", project)
.with_workdir("/src")
.with_env_variable("CGO_ENABLED", "0")
.with_exec(["go", "build", "-o", "myapp"])
)
# publish binary on alpine base
prod = (
client.container()
.from_("alpine")
.with_file("/bin/myapp", builder.file("/src/myapp"))
.with_entrypoint(["/bin/myapp"])
)
addr = await prod.publish("localhost:5000/multistage")
print(addr)
anyio.run(main)
Perform matrix build
The following code listing builds separate images for multiple OS and CPU architecture combinations.
// Create a multi-build pipeline for a Go application.
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
println("Building with Dagger")
// define build matrix
geese := []string{"linux", "darwin"}
goarches := []string{"amd64", "arm64"}
ctx := context.Background()
// initialize dagger client
c, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
// get reference to the local project
src := c.Host().Directory(".")
// create empty directory to put build outputs
outputs := c.Directory()
golang := c.Container().
// get golang image
From("golang:latest").
// mount source code into golang image
WithDirectory("/src", src).
WithWorkdir("/src")
for _, goos := range geese {
for _, goarch := range goarches {
// create a directory for each OS and architecture
path := fmt.Sprintf("build/%s/%s/", goos, goarch)
build := golang.
// set GOARCH and GOOS in the build environment
WithEnvVariable("GOOS", goos).
WithEnvVariable("GOARCH", goarch).
WithExec([]string{"go", "build", "-o", path})
// add build to outputs
outputs = outputs.WithDirectory(path, build.Directory(path))
}
}
// write build artifacts to host
ok, err := outputs.Export(ctx, ".")
if err != nil {
panic(err)
}
if !ok {
panic("did not export files")
}
}
import { connect } from "@dagger.io/dagger"
// Create a multi-build pipeline for a Go application.
// define build matrix
const oses = ["linux", "darwin"]
const arches = ["amd64", "arm64"]
// initialize dagger client
connect(
async (client) => {
console.log("Building with Dagger")
// get reference to the local project
const src = client.host().directory(".")
// create empty directory to put build outputs
var outputs = client.directory()
const golang = client
.container()
// get golang image
.from("golang:latest")
// mount source code into golang image
.withDirectory("/src", src)
.withWorkdir("/src")
for (const os of oses) {
for (const arch of arches) {
// create a directory for each OS and architecture
const path = `build/${os}/${arch}/`
const build = golang
// set GOARCH and GOOS in the build environment
.withEnvVariable("GOOS", os)
.withEnvVariable("GOARCH", arch)
.withExec(["go", "build", "-o", path])
// add build to outputs
outputs = outputs.withDirectory(path, build.directory(path))
}
}
// write build artifacts to host
await outputs.export(".")
},
{ LogOutput: process.stderr },
)
"""Create a multi-build pipeline for a Go application."""
import itertools
import sys
import anyio
import dagger
async def main():
print("Building with Dagger")
# define build matrix
oses = ["linux", "darwin"]
arches = ["amd64", "arm64"]
# initialize dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get reference to the local project
src = client.host().directory(".")
# create empty directory to put build outputs
outputs = client.directory()
golang = (
# get `golang` image
client.container()
.from_("golang:latest")
# mount source code into `golang` image
.with_directory("/src", src)
.with_workdir("/src")
)
for goos, goarch in itertools.product(oses, arches):
# create a directory for each OS and architecture
path = f"build/{goos}/{goarch}/"
build = (
golang
# set GOARCH and GOOS in the build environment
.with_env_variable("GOOS", goos)
.with_env_variable("GOARCH", goarch)
.with_exec(["go", "build", "-o", path])
)
# add build to outputs
outputs = outputs.with_directory(path, build.directory(path))
# write build artifacts to host
await outputs.export(".")
anyio.run(main)
Build multi-arch image
The following code listing builds a single image for different CPU architectures using native emulation.
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
// the platforms to build for and push in a multi-platform image
var platforms = []dagger.Platform{
"linux/amd64", // a.k.a. x86_64
"linux/arm64", // a.k.a. aarch64
"linux/s390x", // a.k.a. IBM S/390
}
// the container registry for the multi-platform image
const imageRepo = "localhost/testrepo:latest"
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// the git repository containing code for the binary to be built
gitRepo := client.Git("https://github.com/dagger/dagger.git").
Branch("086862926433e19e1f24cd709e6165c36bdb2633").
Tree()
platformVariants := make([]*dagger.Container, 0, len(platforms))
for _, platform := range platforms {
// pull the golang image for this platform
ctr := client.Container(dagger.ContainerOpts{Platform: platform})
ctr = ctr.From("golang:1.20-alpine")
// mount in source code
ctr = ctr.WithDirectory("/src", gitRepo)
// mount in an empty dir where the built binary will live
ctr = ctr.WithDirectory("/output", client.Directory())
// ensure the binary will be statically linked and thus executable
// in the final image
ctr = ctr.WithEnvVariable("CGO_ENABLED", "0")
// build the binary and put the result at the mounted output
// directory
ctr = ctr.WithWorkdir("/src")
ctr = ctr.WithExec([]string{
"go", "build",
"-o", "/output/dagger",
"/src/cmd/dagger",
})
// select the output directory
outputDir := ctr.Directory("/output")
// wrap the output directory in a new empty container marked
// with the same platform
binaryCtr := client.
Container(dagger.ContainerOpts{Platform: platform}).
WithRootfs(outputDir)
platformVariants = append(platformVariants, binaryCtr)
}
// publishing the final image uses the same API as single-platform
// images, but now additionally specify the `PlatformVariants`
// option with the containers built before.
imageDigest, err := client.
Container().
Publish(ctx, imageRepo, dagger.ContainerPublishOpts{
PlatformVariants: platformVariants,
// Some registries may require explicit use of docker mediatypes
// rather than the default OCI mediatypes
// MediaTypes: dagger.Dockermediatypes,
})
if err != nil {
panic(err)
}
fmt.Println("Pushed multi-platform image w/ digest: ", imageDigest)
}
Build multi-arch image with cross-compilation
The following code listing builds a single image for different CPU architectures using cross-compilation.
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
platformFormat "github.com/containerd/containerd/platforms"
)
var platforms = []dagger.Platform{
"linux/amd64", // a.k.a. x86_64
"linux/arm64", // a.k.a. aarch64
"linux/s390x", // a.k.a. IBM S/390
}
// the container registry for the multi-platform image
const imageRepo = "localhost/testrepo:latest"
// util that returns the architecture of the provided platform
func architectureOf(platform dagger.Platform) string {
return platformFormat.MustParse(string(platform)).Architecture
}
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
gitRepo := client.Git("https://github.com/dagger/dagger.git").
Branch("086862926433e19e1f24cd709e6165c36bdb2633").
Tree()
platformVariants := make([]*dagger.Container, 0, len(platforms))
for _, platform := range platforms {
// pull the golang image for the *host platform*. This is
// accomplished by just not specifying a platform; the default
// is that of the host.
ctr := client.Container()
ctr = ctr.From("golang:1.20-alpine")
// mount in our source code
ctr = ctr.WithDirectory("/src", gitRepo)
// mount in an empty dir to put the built binary
ctr = ctr.WithDirectory("/output", client.Directory())
// ensure the binary will be statically linked and thus executable
// in the final image
ctr = ctr.WithEnvVariable("CGO_ENABLED", "0")
// configure the go compiler to use cross-compilation targeting the
// desired platform
ctr = ctr.WithEnvVariable("GOOS", "linux")
ctr = ctr.WithEnvVariable("GOARCH", architectureOf(platform))
// build the binary and put the result at the mounted output
// directory
ctr = ctr.WithWorkdir("/src")
ctr = ctr.WithExec([]string{
"go", "build",
"-o", "/output/dagger",
"/src/cmd/dagger",
})
// select the output directory
outputDir := ctr.Directory("/output")
// wrap the output directory in a new empty container marked
// with the platform
binaryCtr := client.
Container(dagger.ContainerOpts{Platform: platform}).
WithRootfs(outputDir)
platformVariants = append(platformVariants, binaryCtr)
}
// publishing the final image uses the same API as single-platform
// images, but now additionally specify the `PlatformVariants`
// option with the containers built before.
imageDigest, err := client.
Container().
Publish(ctx, imageRepo, dagger.ContainerPublishOpts{
PlatformVariants: platformVariants,
// Some registries may require explicit use of docker mediatypes
// rather than the default OCI mediatypes
// MediaTypes: dagger.Dockermediatypes,
})
if err != nil {
panic(err)
}
fmt.Println("published multi-platform image with digest", imageDigest)
}
Build image from Dockerfile
The following code listing builds an image from a Dockerfile in the current working directory on the host.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"math"
"math/rand"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
contextDir := client.Host().Directory(".")
ref, err := contextDir.
DockerBuild().
Publish(ctx, fmt.Sprintf("ttl.sh/hello-dagger-%.0f", math.Floor(rand.Float64()*10000000))) //#nosec
if err != nil {
panic(err)
}
fmt.Printf("Published image to :%s\n", ref)
}
import { connect } from "@dagger.io/dagger"
connect(
async (client) => {
// set build context
const contextDir = client.host().directory(".")
// build using Dockerfile
// publish the resulting container to a registry
const imageRef = await contextDir
.dockerBuild()
.publish("ttl.sh/hello-dagger-" + Math.floor(Math.random() * 10000000))
console.log(`Published image to: ${imageRef}`)
},
{ LogOutput: process.stderr },
)
import random
import sys
import anyio
import dagger
async def main():
config = dagger.Config(log_output=sys.stdout)
async with dagger.Connection(config) as client:
# set build context
context_dir = client.host().directory(".")
# build using Dockerfile
# publish the resulting container to a registry
image_ref = await context_dir.docker_build().publish(
f"ttl.sh/hello-dagger-{random.randrange(10 ** 8)}"
)
print(f"Published image to: {image_ref}")
anyio.run(main)
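Dockerfile build arguments can be supplied as well. The following Go sketch is hedged: it assumes the Dockerfile declares an ARG named APP_VERSION (an illustrative name) and otherwise reuses the variables from the Go listing above:
// build with a Dockerfile build argument
// (APP_VERSION is an illustrative name; it must be declared as an ARG in the Dockerfile)
ref, err := contextDir.
    DockerBuild(dagger.DirectoryDockerBuildOpts{
        BuildArgs: []dagger.BuildArg{
            {Name: "APP_VERSION", Value: "1.2.3"},
        },
    }).
    Publish(ctx, fmt.Sprintf("ttl.sh/hello-dagger-%.0f", math.Floor(rand.Float64()*10000000)))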
Build image from Dockerfile using different build context
The following code listing builds an image from a Dockerfile using a build context directory in a different location than the current working directory.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"math"
"math/rand"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// get build context directory
contextDir := client.Host().Directory("/projects/myapp")
// get Dockerfile in different filesystem location
dockerfilePath := "/data/myapp/custom.Dockerfile"
dockerfile := client.Host().File(dockerfilePath)
// add Dockerfile to build context directory
workspace := contextDir.WithFile("custom.Dockerfile", dockerfile)
// build using Dockerfile
// publish the resulting container to a registry
ref, err := client.
Container().
Build(workspace, dagger.ContainerBuildOpts{
Dockerfile: "custom.Dockerfile",
}).
Publish(ctx, fmt.Sprintf("ttl.sh/hello-dagger-%.0f", math.Floor(rand.Float64()*10000000))) //#nosec
if err != nil {
panic(err)
}
fmt.Printf("Published image to :%s\n", ref)
}
import { connect } from "@dagger.io/dagger"
connect(
async (client) => {
// get build context directory
const contextDir = client.host().directory("/projects/myapp")
// get Dockerfile in different filesystem location
const dockerfilePath = "/data/myapp/custom.Dockerfile"
const dockerfile = client.host().file(dockerfilePath)
// add Dockerfile to build context directory
const workspace = contextDir.withFile("custom.Dockerfile", dockerfile)
// build using Dockerfile
// publish the resulting container to a registry
const imageRef = await client
.container()
.build(workspace, { dockerfile: "custom.Dockerfile" })
.publish("ttl.sh/hello-dagger-" + Math.floor(Math.random() * 10000000))
console.log(`Published image to: ${imageRef}`)
},
{ LogOutput: process.stderr },
)
import random
import sys
import anyio
import dagger
async def main():
config = dagger.Config(log_output=sys.stdout)
async with dagger.Connection(config) as client:
# get build context directory
context_dir = client.host().directory("/projects/myapp")
# get Dockerfile in different filesystem location
dockerfile_path = "/data/myapp/custom.Dockerfile"
dockerfile = client.host().file(dockerfile_path)
# add Dockerfile to build context directory
workspace = context_dir.with_file("custom.Dockerfile", dockerfile)
# build using Dockerfile
# publish the resulting container to a registry
image_ref = await (
client.container()
.build(context=workspace, dockerfile="custom.Dockerfile")
.publish(f"ttl.sh/hello-dagger-{random.SystemRandom().randint(1,10000000)}")
)
print(f"Published image to: {image_ref}")
anyio.run(main)
Add OCI annotations to image
The following code listing adds Open Container Initiative (OCI) annotations to an image.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"time"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// create and publish image with annotations
ctr := client.Container().
From("alpine").
WithLabel("org.opencontainers.image.title", "my-alpine").
WithLabel("org.opencontainers.image.version", "1.0").
WithLabel("org.opencontainers.image.created", time.Now().String()).
WithLabel("org.opencontainers.image.source", "https://github.com/alpinelinux/docker-alpine").
WithLabel("org.opencontainers.image.licenses", "MIT")
addr, err := ctr.Publish(ctx, "ttl.sh/my-alpine")
// note: some registries (e.g. ghcr.io) may require explicit use
// of Docker mediatypes rather than the default OCI mediatypes
// addr, err := ctr.Publish(ctx, "ttl.sh/my-alpine", dagger.ContainerPublishOpts{
// MediaTypes: dagger.Dockermediatypes,
// })
if err != nil {
panic(err)
}
fmt.Println(addr)
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// create and publish image with annotations
const container = client
.container()
.from("alpine")
.withLabel("org.opencontainers.image.title", "my-alpine")
.withLabel("org.opencontainers.image.version", "1.0")
.withLabel("org.opencontainers.image.created", new Date())
.WithLabel(
"org.opencontainers.image.source",
"https://github.com/alpinelinux/docker-alpine",
)
.WithLabel("org.opencontainers.image.licenses", "MIT")
const addr = await container.publish("ttl.sh/my-alpine")
// note: some registries (e.g. ghcr.io) may require explicit use
// of Docker mediatypes rather than the default OCI mediatypes
// const addr = await container.publish("ttl.sh/my-alpine", {
// mediaTypes: "Dockermediatypes",
// })
console.log(addr)
},
{ LogOutput: process.stderr },
)
import sys
from datetime import datetime, timezone
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# publish app on alpine base
ctr = (
client.container()
.from_("alpine")
.with_label("org.opencontainers.image.title", "my-alpine")
.with_label("org.opencontainers.image.version", "1.0")
.with_label(
"org.opencontainers.image.created",
datetime.now(timezone.utc).isoformat(),
)
.with_label(
"org.opencontainers.image.source",
"https://github.com/alpinelinux/docker-alpine",
)
.with_label("org.opencontainers.image.licenses", "MIT")
)
addr = await ctr.publish("ttl.sh/my-alpine")
# note: some registries (e.g. ghcr.io) may require explicit use
# of Docker mediatypes rather than the default OCI mediatypes
# addr = await ctr.publish("ttl.sh/my-alpine", media_types="DockerMediaTypes")
print(addr)
anyio.run(main)
Define build-time variables
The following code listing defines various environment variables for build purposes.
// Create a multi-build pipeline for a Go application.
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
println("Building with Dagger")
// define build matrix
geese := []string{"linux", "darwin"}
goarches := []string{"amd64", "arm64"}
ctx := context.Background()
// initialize dagger client
c, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
// get reference to the local project
src := c.Host().Directory(".")
// create empty directory to put build outputs
outputs := c.Directory()
golang := c.Container().
// get golang image
From("golang:latest").
// mount source code into golang image
WithDirectory("/src", src).
WithWorkdir("/src")
for _, goos := range geese {
for _, goarch := range goarches {
// create a directory for each OS and architecture
path := fmt.Sprintf("build/%s/%s/", goos, goarch)
build := golang.
// set GOARCH and GOOS in the build environment
WithEnvVariable("GOOS", goos).
WithEnvVariable("GOARCH", goarch).
WithExec([]string{"go", "build", "-o", path})
// add build to outputs
outputs = outputs.WithDirectory(path, build.Directory(path))
}
}
// write build artifacts to host
ok, err := outputs.Export(ctx, ".")
if err != nil {
panic(err)
}
if !ok {
panic("did not export files")
}
}
import { connect } from "@dagger.io/dagger"
// Create a multi-build pipeline for a Go application.
// define build matrix
const oses = ["linux", "darwin"]
const arches = ["amd64", "arm64"]
// initialize dagger client
connect(
async (client) => {
console.log("Building with Dagger")
// get reference to the local project
const src = client.host().directory(".")
// create empty directory to put build outputs
var outputs = client.directory()
const golang = client
.container()
// get golang image
.from("golang:latest")
// mount source code into golang image
.withDirectory("/src", src)
.withWorkdir("/src")
for (const os of oses) {
for (const arch of arches) {
// create a directory for each OS and architecture
const path = `build/${os}/${arch}/`
const build = golang
// set GOARCH and GOOS in the build environment
.withEnvVariable("GOOS", os)
.withEnvVariable("GOARCH", arch)
.withExec(["go", "build", "-o", path])
// add build to outputs
outputs = outputs.withDirectory(path, build.directory(path))
}
}
// write build artifacts to host
await outputs.export(".")
},
{ LogOutput: process.stderr },
)
"""Create a multi-build pipeline for a Go application."""
import itertools
import sys
import anyio
import dagger
async def main():
print("Building with Dagger")
# define build matrix
oses = ["linux", "darwin"]
arches = ["amd64", "arm64"]
# initialize dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get reference to the local project
src = client.host().directory(".")
# create empty directory to put build outputs
outputs = client.directory()
golang = (
# get `golang` image
client.container()
.from_("golang:latest")
# mount source code into `golang` image
.with_directory("/src", src)
.with_workdir("/src")
)
for goos, goarch in itertools.product(oses, arches):
# create a directory for each OS and architecture
path = f"build/{goos}/{goarch}/"
build = (
golang
# set GOARCH and GOOS in the build environment
.with_env_variable("GOOS", goos)
.with_env_variable("GOARCH", goarch)
.with_exec(["go", "build", "-o", path])
)
# add build to outputs
outputs = outputs.with_directory(path, build.directory(path))
# write build artifacts to host
await outputs.export(".")
anyio.run(main)
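The WithEnvVariable()/WithExec() pattern above is not limited to GOOS and GOARCH. As a hedged Go sketch, the loop body could also inject a version string at build time via -ldflags; this assumes the application declares a package-level version variable in package main, which is not part of the listing above:
// set GOOS/GOARCH and additionally inject a build-time version string
// (main.version is an assumed variable in the application being built)
build := golang.
    WithEnvVariable("GOOS", goos).
    WithEnvVariable("GOARCH", goarch).
    WithExec([]string{
        "go", "build",
        "-ldflags", "-X main.version=1.2.3",
        "-o", path,
    })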
Access private Git repository
The following code listing demonstrates how to access a private Git repository using SSH.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx)
if err != nil {
panic(err)
}
defer client.Close()
// Retrieve path of authentication agent socket from host
sshAgentPath := os.Getenv("SSH_AUTH_SOCK")
// Private repository with a README.md file at the root.
readme, err := client.
Git("git@private-repository.git", dagger.GitOpts{
SSHAuthSocket: client.Host().UnixSocket(sshAgentPath),
}).
Branch("main").
Tree().
File("README.md").
Contents(ctx)
if err != nil {
panic(err)
}
fmt.Println("readme", readme)
}
import { connect, Client } from "@dagger.io/dagger"
import process from "process"
// initialize Dagger client
connect(async (client: Client) => {
// Collect value of SSH_AUTH_SOCK env var, to retrieve authentication socket path
const sshAuthSockPath = process.env.SSH_AUTH_SOCK?.toString() || ""
// Retrieve authentication socket ID from host
const sshAgentSocketID = await client.host().unixSocket(sshAuthSockPath).id()
const repo = client
// Retrieve the repository
.git("git@private-repository.git")
// Select the main branch, and the filesystem tree associated
.branch("main")
.tree({
sshAuthSocket: sshAgentSocketID,
})
// Select the README.md file
.file("README.md")
// Retrieve the content of the README file
const file = await repo.contents()
console.log(file)
})
"""Clone a Private Git Repository and print the content of the README.md file."""
import os
import sys
import anyio
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# Collect value of SSH_AUTH_SOCK env var, to retrieve auth socket path
ssh_auth_path = os.environ.get("SSH_AUTH_SOCK", "")
# Retrieve authentication socket from host
ssh_agent_socket = client.host().unix_socket(ssh_auth_path)
repo = (
client
# Retrieve the repository
.git("git@private-repository.git")
# Select the main branch, and the filesystem tree associated
.branch("main").tree(ssh_auth_socket=ssh_agent_socket)
# Select the README.md file
.file("README.md")
)
# Retrieve the content of the README file
file = await repo.contents()
print(file)
anyio.run(main)
Invalidate cache
The following code listing demonstrates how to invalidate the Dagger pipeline operations cache and thereby force execution of subsequent pipeline steps, by introducing a volatile time variable at a specific point in the Dagger pipeline.
This is a temporary workaround until cache invalidation support is officially added to Dagger.
Changes in mounted cache volumes or secrets do not invalidate the Dagger pipeline operations cache.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"time"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// invalidate cache to force execution
// of second WithExec() operation
output, err := client.Pipeline("test").
Container().
From("alpine").
WithExec([]string{"apk", "add", "curl"}).
WithEnvVariable("CACHEBUSTER", time.Now().String()).
WithExec([]string{"apk", "add", "zip"}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(output)
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// invalidate cache to force execution
// of second withExec() operation
const output = await client
.pipeline("test")
.container()
.from("alpine")
.withExec(["apk", "add", "curl"])
.withEnvVariable("CACHEBUSTER", Date.now().toString())
.withExec(["apk", "add", "zip"])
.stdout()
console.log(output)
},
{ LogOutput: process.stderr },
)
import sys
from datetime import datetime
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# invalidate cache to force execution
# of second with_exec() operation
output = await (
client.pipeline("test")
.container()
.from_("alpine")
.with_exec(["apk", "add", "curl"])
.with_env_variable("CACHEBUSTER", str(datetime.now()))
.with_exec(["apk", "add", "zip"])
.stdout()
)
print(output)
anyio.run(main)
Services
Expose service containers to host
The following code listing makes HTTP requests from the host to an HTTP service running in a Dagger pipeline.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"io"
"net/http"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// create Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// create HTTP service container with exposed port 8080
httpSrv := client.Container().
From("python").
WithDirectory("/srv", client.Directory().WithNewFile("index.html", "Hello, world!")).
WithWorkdir("/srv").
WithExec([]string{"python", "-m", "http.server", "8080"}).
WithExposedPort(8080).
AsService()
// expose HTTP service to host
tunnel, err := client.Host().Tunnel(httpSrv).Start(ctx)
if err != nil {
panic(err)
}
defer tunnel.Stop(ctx)
// get HTTP service address
srvAddr, err := tunnel.Endpoint(ctx)
if err != nil {
panic(err)
}
// access HTTP service from host
res, err := http.Get("http://" + srvAddr)
if err != nil {
panic(err)
}
defer res.Body.Close()
// print response
body, err := io.ReadAll(res.Body)
if err != nil {
panic(err)
}
fmt.Println(string(body))
}
import { connect, Client } from "@dagger.io/dagger"
import fetch from "node-fetch"
connect(
async (client: Client) => {
// create HTTP service container with exposed port 8080
const httpSrv = client
.container()
.from("python")
.withDirectory(
"/srv",
client.directory().withNewFile("index.html", "Hello, world!"),
)
.withWorkdir("/srv")
.withExec(["python", "-m", "http.server", "8080"])
.withExposedPort(8080)
.asService()
// expose HTTP service to host
const tunnel = await client.host().tunnel(httpSrv).start()
// get HTTP service address
const srvAddr = await tunnel.endpoint()
// access HTTP service from host
// print response
await fetch("http://" + srvAddr)
.then((res) => res.text())
.then((body) => console.log(body))
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import httpx
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# create HTTP service container with exposed port 8080
http_srv = (
client.container()
.from_("python")
.with_directory(
"/srv",
client.directory().with_new_file("index.html", "Hello, world!"),
)
.with_workdir("/srv")
.with_exec(["python", "-m", "http.server", "8080"])
.with_exposed_port(8080)
.as_service()
)
# expose HTTP service to host
tunnel = await client.host().tunnel(http_srv).start()
# get HTTP service address
endpoint = await tunnel.endpoint()
# access HTTP service from host
async with httpx.AsyncClient() as http:
r = await http.get(f"http://{endpoint}")
print(r.status_code)
print(r.text)
anyio.run(main)
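By default the tunnel listens on a random available host port. A hedged Go sketch of binding it to a fixed host port instead, using the tunnel's ports option (the port numbers are illustrative):
// forward host port 8080 to the service's exposed port 8080
tunnel, err := client.Host().Tunnel(httpSrv, dagger.HostTunnelOpts{
    Ports: []dagger.PortForward{
        {Frontend: 8080, Backend: 8080},
    },
}).Start(ctx)
if err != nil {
    panic(err)
}
defer tunnel.Stop(ctx)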
Expose host services to containers
The following code listing shows how a database client in a Dagger pipeline can access a database service running on the host.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// create Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// expose host service on port 3306
hostSrv := client.Host().Service([]dagger.PortForward{
{Frontend: 3306, Backend: 3306},
})
// create MariaDB container
// with host service binding
// execute SQL query on host service
out, err := client.Container().
From("mariadb:10.11.2").
WithServiceBinding("db", hostSrv).
WithExec([]string{"/bin/sh", "-c", "/usr/bin/mysql --user=root --password=secret --host=db -e 'SELECT * FROM mysql.user'"}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
// expose host service on port 3306
const hostSrv = client.host().service([{ frontend: 3306, backend: 3306 }])
// create MariaDB container
// with host service binding
// execute SQL query on host service
const out = await client
.container()
.from("mariadb:10.11.2")
.withServiceBinding("db", hostSrv)
.withExec([
"/bin/sh",
"-c",
"/usr/bin/mysql --user=root --password=secret --host=db -e 'SELECT * FROM mysql.user'",
])
.stdout()
console.log(out)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# expose host service on port 3306
host_srv = client.host().service(
[
dagger.PortForward(
backend=3306, frontend=3306, protocol=dagger.NetworkProtocol.TCP
)
]
)
# create MariaDB container
# with host service binding
# execute SQL query on host service
out = await (
client.container()
.from_("mariadb:10.11.2")
.with_service_binding("db", host_srv)
.with_exec(
[
"/bin/sh",
"-c",
"/usr/bin/mysql --user=root --password=secret --host=db -e 'SELECT * FROM mysql.user'",
]
)
.stdout()
)
print(out)
anyio.run(main)
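Host services that listen on a Unix socket rather than a TCP port can be exposed by mounting the socket into the container. A hedged Go sketch, assuming the Docker daemon's socket is available at /var/run/docker.sock on the host:
// mount the host's Docker socket into a container and query the daemon
dockerSock := client.Host().UnixSocket("/var/run/docker.sock")
out, err := client.Container().
    From("docker:cli").
    WithUnixSocket("/var/run/docker.sock", dockerSock).
    WithExec([]string{"docker", "info"}).
    Stdout(ctx)
if err != nil {
    panic(err)
}
fmt.Println(out)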
Use transient database service for application tests
The following code listing creates a temporary MariaDB database service and binds it to an application container for unit/integration testing.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// create Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// get MariaDB base image
mariadb := client.Container().
From("mariadb:10.11.2").
WithEnvVariable("MARIADB_USER", "user").
WithEnvVariable("MARIADB_PASSWORD", "password").
WithEnvVariable("MARIADB_DATABASE", "drupal").
WithEnvVariable("MARIADB_ROOT_PASSWORD", "root").
WithExposedPort(3306).
AsService()
// get Drupal base image
// install additional dependencies
drupal := client.Container().
From("drupal:10.0.7-php8.2-fpm").
WithExec([]string{"composer", "require", "drupal/core-dev", "--dev", "--update-with-all-dependencies"})
// add service binding for MariaDB
// run kernel tests using PHPUnit
test, err := drupal.
WithServiceBinding("db", mariadb).
WithEnvVariable("SIMPLETEST_DB", "mysql://user:password@db/drupal").
WithEnvVariable("SYMFONY_DEPRECATIONS_HELPER", "disabled").
WithWorkdir("/opt/drupal/web/core").
WithExec([]string{"../../vendor/bin/phpunit", "-v", "--group", "KernelTests"}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(test)
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
// get MariaDB base image
const mariadb = client
.container()
.from("mariadb:10.11.2")
.withEnvVariable("MARIADB_USER", "user")
.withEnvVariable("MARIADB_PASSWORD", "password")
.withEnvVariable("MARIADB_DATABASE", "drupal")
.withEnvVariable("MARIADB_ROOT_PASSWORD", "root")
.withExposedPort(3306)
.asService()
// get Drupal base image
// install additional dependencies
const drupal = client
.container()
.from("drupal:10.0.7-php8.2-fpm")
.withExec([
"composer",
"require",
"drupal/core-dev",
"--dev",
"--update-with-all-dependencies",
])
// add service binding for MariaDB
// run unit tests using PHPUnit
const test = await drupal
.withServiceBinding("db", mariadb)
.withEnvVariable("SIMPLETEST_DB", "mysql://user:password@db/drupal")
.withEnvVariable("SYMFONY_DEPRECATIONS_HELPER", "disabled")
.withWorkdir("/opt/drupal/web/core")
.withExec(["../../vendor/bin/phpunit", "-v", "--group", "KernelTests"])
.stdout()
// print ref
console.log(test)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get MariaDB base image
mariadb = (
client.container()
.from_("mariadb:10.11.2")
.with_env_variable("MARIADB_USER", "user")
.with_env_variable("MARIADB_PASSWORD", "password")
.with_env_variable("MARIADB_DATABASE", "drupal")
.with_env_variable("MARIADB_ROOT_PASSWORD", "root")
.with_exposed_port(3306)
.as_service()
)
# get Drupal base image
# install additional dependencies
drupal = (
client.container()
.from_("drupal:10.0.7-php8.2-fpm")
.with_exec(
[
"composer",
"require",
"drupal/core-dev",
"--dev",
"--update-with-all-dependencies",
]
)
)
# add service binding for MariaDB
# run unit tests using PHPUnit
test = await (
drupal.with_service_binding("db", mariadb)
.with_env_variable("SIMPLETEST_DB", "mysql://user:password@db/drupal")
.with_env_variable("SYMFONY_DEPRECATIONS_HELPER", "disabled")
.with_workdir("/opt/drupal/web/core")
.with_exec(["../../vendor/bin/phpunit", "-v", "--group", "KernelTests"])
.stdout()
)
print(test)
anyio.run(main)
Start and stop services
The following code listing demonstrates explicitly starting a Docker daemon for use in a test suite.
- Go
- Node.js
- Python
package main_test
import (
"context"
"testing"
"dagger.io/dagger"
"github.com/stretchr/testify/require"
)
func TestFoo(t *testing.T) {
ctx := context.Background()
c, err := dagger.Connect(ctx)
require.NoError(t, err)
dockerd, err := c.Container().From("docker:dind").AsService().Start(ctx)
require.NoError(t, err)
// dockerd is now running, and will stay running
// so you don't have to worry about it restarting after a 10 second gap
// then in all of your tests, continue to use an explicit binding:
_, err = c.Container().From("golang").
WithServiceBinding("docker", dockerd).
WithEnvVariable("DOCKER_HOST", "tcp://docker:2375").
WithExec([]string{"go", "test", "./..."}).
Sync(ctx)
require.NoError(t, err)
// or, if you prefer
// trust `Endpoint()` to construct the address
//
// note that this has the exact same non-cache-busting semantics as WithServiceBinding,
// since hostnames are stable and content-addressed
//
// this could be part of the global test suite setup.
dockerHost, err := dockerd.Endpoint(ctx, dagger.ServiceEndpointOpts{
Scheme: "tcp",
})
require.NoError(t, err)
_, err = c.Container().From("golang").
WithEnvVariable("DOCKER_HOST", dockerHost).
WithExec([]string{"go", "test", "./..."}).
Sync(ctx)
require.NoError(t, err)
// Service.Stop() is available to explicitly stop the service if needed
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
const dockerd = await client
.container()
.from("docker:dind")
.asService()
.start()
// dockerd is now running, and will stay running
// so you don't have to worry about it restarting after a 10 second gap
// then in all of your tests, continue to use an explicit binding:
const test = await client
.container()
.from("golang")
.withServiceBinding("docker", dockerd)
.withEnvVariable("DOCKER_HOST", "tcp://docker:2375")
.withExec(["go", "test", "./..."])
.sync()
console.log("test: ", test)
// or, if you prefer
// trust `endpoint()` to construct the address
//
// note that this has the exact same non-cache-busting semantics as withServiceBinding,
// since hostnames are stable and content-addressed
//
// this could be part of the global test suite setup.
const dockerHost = await dockerd.endpoint({ scheme: "tcp" })
const testWithEndpoint = await client
.container()
.from("golang")
.withEnvVariable("DOCKER_HOST", dockerHost)
.withExec(["go", "test", "./..."])
.sync()
console.log("testWithEndpoint: ", testWithEndpoint)
// service.stop() is available to explicitly stop the service if needed
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
dockerd = await client.container().from_("docker:dind").as_service().start()
# dockerd is now running, and will stay running
# so you don't have to worry about it restarting after a 10 second gap
test = await (
client.container()
.from_("golang")
.with_service_binding("docker", dockerd)
.with_env_variable("DOCKER_HOST", "tcp://docker:2375")
.with_exec(["go", "test", "./..."])
.sync()
)
print("test: " + test)
# or, if you prefer
# trust `endpoint()` to construct the address
#
# note that this has the exact same non-cache-busting semantics as with_service_binding,
# since hostnames are stable and content-addressed
#
# this could be part of the global test suite setup.
docker_host = await dockerd.endpoint(scheme="tcp")
test_with_endpoint = await (
client.container()
.from_("golang")
.with_env_variable("DOCKER_HOST", docker_host)
.with_exec(["go", "test", "./..."])
.sync()
)
print("test_with_endpoint: " + test_with_endpoint)
# service.stop() is available to explicitly stop the service if needed
anyio.run(main)
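As the comments above note, a started service can also be stopped explicitly. A minimal Go sketch, reusing the dockerd value returned by Start() in the Go test above:
// explicitly stop the Docker daemon once the tests are finished
_, err = dockerd.Stop(ctx)
require.NoError(t, err)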
Outputs
Publish image to registry
The following code listing publishes a container image to a remote registry (Docker Hub). Replace the DOCKER-HUB-USERNAME and DOCKER-HUB-PASSWORD placeholders with your Docker Hub username and password respectively.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// initialize Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// set secret as string value
secret := client.SetSecret("password", "DOCKER-HUB-PASSWORD")
// create container
c := client.Container(dagger.ContainerOpts{Platform: "linux/amd64"}).
From("nginx:1.23-alpine").
WithNewFile("/usr/share/nginx/html/index.html", dagger.ContainerWithNewFileOpts{
Contents: "Hello from Dagger!",
Permissions: 0o400,
})
// use secret for registry authentication
addr, err := c.
WithRegistryAuth("docker.io", "DOCKER-HUB-USERNAME", secret).
Publish(ctx, "DOCKER-HUB-USERNAME/my-nginx")
if err != nil {
panic(err)
}
// print result
fmt.Println("Published at:", addr)
}
import { connect } from "@dagger.io/dagger"
// initialize Dagger client
connect(
async (client) => {
// set secret as string value
const secret = client.setSecret("password", "DOCKER-HUB-PASSWORD")
// create container
const c = client
.container()
.from("nginx:1.23-alpine")
.withNewFile("/usr/share/nginx/html/index.html", {
contents: "Hello from Dagger!",
permissions: 0o400,
})
// use secret for registry authentication
const addr = await c
.withRegistryAuth("docker.io", "DOCKER-HUB-USERNAME", secret)
.publish("DOCKER-HUB-USERNAME/my-nginx")
// print result
console.log(`Published at: ${addr}`)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# set secret as string value
secret = client.set_secret("password", "DOCKER-HUB-PASSWORD")
# create container
ctr = (
client.container(platform=dagger.Platform("linux/amd64"))
.from_("nginx:1.23-alpine")
.with_new_file(
"/usr/share/nginx/html/index.html",
contents="Hello from Dagger!",
permissions=0o400,
)
)
# use secret for registry authentication
addr = await ctr.with_registry_auth(
"docker.io", "DOCKER-HUB-USERNAME", secret
).publish("DOCKER-HUB-USERNAME/my-nginx")
print(f"Published at: {addr}")
anyio.run(main)
Export image to host
The following code listing exports a container image from a Dagger pipeline to the host.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// initialize Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// use NGINX container
// add new webserver index page
c := client.Container(dagger.ContainerOpts{Platform: "linux/amd64"}).
From("nginx:1.23-alpine").
WithNewFile("/usr/share/nginx/html/index.html", dagger.ContainerWithNewFileOpts{
Contents: "Hello from Dagger!",
Permissions: 0o400,
})
// export to host filesystem
val, err := c.Export(ctx, "/tmp/my-nginx.tar")
if err != nil {
panic(err)
}
// print result
fmt.Println("Exported image: ", val)
}
import { connect } from "@dagger.io/dagger"
// initialize Dagger client
connect(
async (client) => {
// use NGINX container
// add new webserver index page
const ctr = client
.container({ platform: "linux/amd64" })
.from("nginx:1.23-alpine")
.withNewFile("/usr/share/nginx/html/index.html", {
contents: "Hello from Dagger!",
permissions: 0o400,
})
// export to host filesystem
const result = await ctr.export("/tmp/my-nginx.tar")
// print result
console.log(`Exported image: ${result}`)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# initialize Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# use NGINX container
# add new webserver index page
ctr = (
client.container(platform=dagger.Platform("linux/amd64"))
.from_("nginx:1.23-alpine")
.with_new_file(
"/usr/share/nginx/html/index.html",
contents="Hello from Dagger!",
permissions=0o400,
)
)
# export to host filesystem
val = await ctr.export("/tmp/my-nginx.tar")
print(f"Exported image: {val}")
anyio.run(main)
Export container directory to host
The following code listing exports the contents of a container directory to the host's temporary directory.
- Go
package main
import (
"context"
"fmt"
"log"
"os"
"path/filepath"
"dagger.io/dagger"
)
func main() {
hostdir := os.TempDir()
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
_, err = client.Container().From("alpine:latest").
WithWorkdir("/tmp").
WithExec([]string{"wget", "https://dagger.io"}).
Directory(".").
Export(ctx, hostdir)
if err != nil {
log.Println(err)
return
}
contents, err := os.ReadFile(filepath.Join(hostdir, "index.html"))
if err != nil {
log.Println(err)
return
}
fmt.Println(string(contents))
}
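This recipe is shown in Go only. As a rough Python equivalent, here is a sketch assembled from the same API calls used in the other Python listings in this cookbook; the temporary-directory handling is illustrative:
import sys
import tempfile
from pathlib import Path
import anyio
import dagger
async def main():
    # create a temporary directory on the host to receive the export
    hostdir = tempfile.mkdtemp()
    async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
        await (
            client.container()
            .from_("alpine:latest")
            .with_workdir("/tmp")
            .with_exec(["wget", "https://dagger.io"])
            .directory(".")
            .export(hostdir)
        )
    # read the downloaded page back from the host, as in the Go listing
    print((Path(hostdir) / "index.html").read_text())
anyio.run(main)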
Publish image to registry with multiple tags
The following code listing tags a container image multiple times and publishes it to a remote registry (Docker Hub). Set the Docker Hub username and password as host environment variables named DOCKERHUB_USERNAME
and DOCKERHUB_PASSWORD
respectively.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// load registry credentials from environment variables
username := os.Getenv("DOCKERHUB_USERNAME")
if username == "" {
panic("DOCKERHUB_USERNAME env var must be set")
}
passwordPlaintext := os.Getenv("DOCKERHUB_PASSWORD")
if passwordPlaintext == "" {
panic("DOCKERHUB_PASSWORD env var must be set")
}
password := client.SetSecret("password", passwordPlaintext)
// define multiple image tags
tags := [4]string{"latest", "1.0-alpine", "1.0", "1.0.0"}
// create and publish image with multiple tags
ctr := client.Container().
From("alpine").
WithRegistryAuth("docker.io", username, password)
for _, tag := range tags {
addr, err := ctr.Publish(ctx, fmt.Sprintf("%s/alpine:%s", username, tag))
if err != nil {
panic(err)
}
fmt.Println("Published: ", addr)
}
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// define tags
const tags = ["latest", "1.0-alpine", "1.0", "1.0.0"]
if (!process.env.DOCKERHUB_USERNAME) {
console.log("DOCKERHUB_USERNAME environment variable must be set")
process.exit()
}
if (!process.env.DOCKERHUB_PASSWORD) {
console.log("DOCKERHUB_PASSWORD environment variable must be set")
process.exit()
}
const username = process.env.DOCKERHUB_USERNAME
const password = process.env.DOCKERHUB_PASSWORD
// set secret as string value
const secret = client.setSecret("password", password)
// create and publish image with multiple tags
const container = client.container().from("alpine")
for (const tag of tags) {
const addr = await container
.withRegistryAuth("docker.io", username, secret)
.publish(`${username}/my-alpine:${tag}`)
console.log(`Published at: ${addr}`)
}
},
{ LogOutput: process.stderr },
)
import os
import sys
import anyio
import dagger
async def main():
# define tags
tags = ["latest", "1.0-alpine", "1.0", "1.0.0"]
if "DOCKERHUB_USERNAME" not in os.environ:
msg = "DOCKERHUB_USERNAME environment variable must be set"
raise OSError(msg)
if "DOCKERHUB_PASSWORD" not in os.environ:
msg = "DOCKERHUB_PASSWORD environment variable must be set"
raise OSError(msg)
username = os.environ["DOCKERHUB_USERNAME"]
password = os.environ["DOCKERHUB_PASSWORD"]
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# set secret as string value
secret = client.set_secret("password", password)
# create and publish image with multiple tags
container = client.container().from_("alpine")
for tag in tags:
addr = await container.with_registry_auth(
"docker.io", username, secret
).publish(f"{username}/my-alpine:{tag}")
print(f"Published at: {addr}")
anyio.run(main)
Secrets
Expose secret via environment variable
The following code listing demonstrates how to inject an environment variable in a container as a secret.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// initialize Dagger client
ctx := context.Background()
if os.Getenv("GH_SECRET") == "" {
panic("Environment variable GH_SECRET is not set")
}
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// read secret from host variable
secret := client.SetSecret("gh-secret", os.Getenv("GH_SECRET"))
// use secret in container environment
out, err := client.
Container().
From("alpine:3.17").
WithSecretVariable("GITHUB_API_TOKEN", secret).
WithExec([]string{"apk", "add", "curl"}).
WithExec([]string{"sh", "-c", `curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN"`}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}
import { connect } from "@dagger.io/dagger"
// check for required environment variable
if (!process.env["GH_SECRET"]) {
console.log(`GH_SECRET variable must be set`)
process.exit()
}
// initialize Dagger client
connect(
async (client) => {
// read secret from host variable
const secret = client.setSecret("gh-secret", process.env["GH_SECRET"])
// use secret in container environment
const out = await client
.container()
.from("alpine:3.17")
.withSecretVariable("GITHUB_API_TOKEN", secret)
.withExec(["apk", "add", "curl"])
.withExec([
"sh",
"-c",
`curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN"`,
])
.stdout()
// print result
console.log(out)
},
{ LogOutput: process.stderr },
)
import os
import sys
import anyio
import dagger
async def main():
if "GH_SECRET" not in os.environ:
msg = "GH_SECRET environment variable must be set"
raise OSError(msg)
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# read secret from host variable
secret = client.set_secret("gh-secret", os.environ["GH_SECRET"])
# use secret in container environment
out = await (
client.container(platform=dagger.Platform("linux/amd64"))
.from_("alpine:3.17")
.with_secret_variable("GITHUB_API_TOKEN", secret)
.with_exec(["apk", "add", "curl"])
.with_exec(
[
"sh",
"-c",
"""curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN" """,
]
)
.stdout()
)
print(out)
anyio.run(main)
Expose secret via file
The following code listing demonstrates how to inject a file in a container as a secret.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// initialize Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// read file
config, err := os.ReadFile("/home/USER/.config/gh/hosts.yml")
if err != nil {
panic(err)
}
// set secret to file contents
secret := client.SetSecret("ghConfig", string(config))
// mount secret as file in container
out, err := client.
Container().
From("alpine:3.17").
WithExec([]string{"apk", "add", "github-cli"}).
WithMountedSecret("/root/.config/gh/hosts.yml", secret).
WithWorkdir("/root").
WithExec([]string{"gh", "auth", "status"}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}
import { connect } from "@dagger.io/dagger"
import { readFile } from "fs/promises"
// initialize Dagger client
connect(
async (client) => {
// read file
const config = await readFile("/home/USER/.config/gh/hosts.yml")
// set secret to file contents
const secret = client.setSecret("ghConfig", config.toString())
// mount secret as file in container
const out = await client
.container()
.from("alpine:3.17")
.withExec(["apk", "add", "github-cli"])
.withMountedSecret("/root/.config/gh/hosts.yml", secret)
.withWorkdir("/root")
.withExec(["gh", "auth", "status"])
.stdout()
// print result
console.log(out)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# read file
config = await anyio.Path("/home/USER/.config/gh/hosts.yml").read_text()
# set secret to file contents
secret = client.set_secret("ghConfig", config)
# mount secret as file in container
out = await (
client.container(platform=dagger.Platform("linux/amd64"))
.from_("alpine:3.17")
.with_exec(["apk", "add", "github-cli"])
.with_mounted_secret("/root/.config/gh/hosts.yml", secret)
.with_workdir("/root")
.with_exec(["gh", "auth", "status"])
.stdout()
)
print(out)
anyio.run(main)
Use secret in Dockerfile build
The following code listing demonstrates how to inject a secret into a Dockerfile build. The secret is automatically mounted in the build container at /run/secrets/SECRET-ID
.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
if os.Getenv("GH_SECRET") == "" {
panic("Environment variable GH_SECRET is not set")
}
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// read secret from host variable
secret := client.SetSecret("gh-secret", os.Getenv("GH_SECRET"))
// set context directory for Dockerfile build
contextDir := client.Host().Directory(".")
// build using Dockerfile
// specify secrets for Dockerfile build
// secrets will be mounted at /run/secrets/[secret-name]
out, err := contextDir.
DockerBuild(dagger.DirectoryDockerBuildOpts{
Dockerfile: "Dockerfile",
Secrets: []*dagger.Secret{secret},
}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}
import { connect } from "@dagger.io/dagger"
// check for required environment variable
if (!process.env["GH_SECRET"]) {
console.log(`GH_SECRET variable must be set`)
process.exit()
}
// initialize Dagger client
connect(
async (client) => {
// read secret from host variable
const secret = client.setSecret("gh-secret", process.env["GH_SECRET"])
// set context directory for Dockerfile build
const contextDir = client.host().directory(".")
// build using Dockerfile
// specify secrets for Dockerfile build
// secrets will be mounted at /run/secrets/[secret-name]
const out = await contextDir
.dockerBuild({
dockerfile: "Dockerfile",
secrets: [secret],
})
.stdout()
// print result
console.log(out)
},
{ LogOutput: process.stderr },
)
import os
import sys
import anyio
import dagger
async def main():
if "GH_SECRET" not in os.environ:
msg = "GH_SECRET environment variable must be set"
raise OSError(msg)
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# read secret from host variable
secret = client.set_secret("gh-secret", os.environ["GH_SECRET"])
# set context directory for Dockerfile build
context_dir = client.host().directory(".")
# build using Dockerfile
# specify secrets for Dockerfile build
# secrets will be mounted at /run/secrets/[secret-name]
out = await context_dir.docker_build(
dockerfile="Dockerfile",
secrets=[secret],
).stdout()
print(out)
anyio.run(main)
The sample Dockerfile below demonstrates the process of mounting the secret using a secret
filesystem mount type and using it in the Dockerfile build process:
FROM alpine:3.17
RUN apk add curl
RUN --mount=type=secret,id=gh-secret curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $(cat /run/secrets/gh-secret)"
Load secret from Google Cloud Secret Manager
The following code listing reads a secret (a GitHub API token) from Google Cloud Secret Manager and uses it in a Dagger pipeline to interact with the GitHub API.
Set up Application Default Credentials (ADC) and replace the PROJECT-ID
and SECRET-ID
placeholders with your Google Cloud project and secret identifiers respectively.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
secretmanager "cloud.google.com/go/secretmanager/apiv1"
"cloud.google.com/go/secretmanager/apiv1/secretmanagerpb"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// get secret from Google Cloud Secret Manager
secretPlaintext, err := gcpGetSecretPlaintext(ctx, "PROJECT-ID", "SECRET-ID")
if err != nil {
panic(err)
}
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// load secret into Dagger
secret := client.SetSecret("ghApiToken", string(secretPlaintext))
// use secret in container environment
out, err := client.
Container().
From("alpine:3.17").
WithSecretVariable("GITHUB_API_TOKEN", secret).
WithExec([]string{"apk", "add", "curl"}).
WithExec([]string{"sh", "-c", `curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN"`}).
Stdout(ctx)
if err != nil {
panic(err)
}
// print result
fmt.Println(out)
}
func gcpGetSecretPlaintext(ctx context.Context, projectID, secretID string) (string, error) {
secretUri := fmt.Sprintf("projects/%s/secrets/%s/versions/latest", projectID, secretID)
// initialize Google Cloud API client
gcpClient, err := secretmanager.NewClient(ctx)
if err != nil {
panic(err)
}
defer gcpClient.Close()
// retrieve secret
secReq := &secretmanagerpb.AccessSecretVersionRequest{
Name: secretUri,
}
res, err := gcpClient.AccessSecretVersion(ctx, secReq)
if err != nil {
panic(err)
}
secretPlaintext := res.Payload.Data
return string(secretPlaintext), nil
}
import { connect } from "@dagger.io/dagger"
import { SecretManagerServiceClient } from "@google-cloud/secret-manager"
// initialize Dagger client
connect(
async (client) => {
// get secret from Google Cloud Secret Manager
const secretPlaintext = await gcpGetSecretPlaintext(
"PROJECT-ID",
"SECRET-ID",
)
// load secret into Dagger
const secret = client.setSecret("ghApiToken", secretPlaintext)
// use secret in container environment
const out = await client
.container()
.from("alpine:3.17")
.withSecretVariable("GITHUB_API_TOKEN", secret)
.withExec(["apk", "add", "curl"])
.withExec([
"sh",
"-c",
`curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN"`,
])
.stdout()
// print result
console.log(out)
},
{ LogOutput: process.stderr },
)
async function gcpGetSecretPlaintext(projectID, secretID) {
// initialize Google Cloud API client
const client = new SecretManagerServiceClient()
const secretUri = `projects/${projectID}/secrets/${secretID}/versions/latest`
// retrieve secret
const [accessResponse] = await client.accessSecretVersion({
name: secretUri,
})
const secretPlaintext = accessResponse.payload.data.toString("utf8")
return secretPlaintext
}
import sys
import anyio
from google.cloud import secretmanager
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get secret from Google Cloud Secret Manager
secret_plaintext = await gcp_get_secret_plaintext("PROJECT-ID", "SECRET-ID")
# read secret from host variable
secret = client.set_secret("ghApiToken", secret_plaintext)
# use secret in container environment
out = await (
client.container(platform=dagger.Platform("linux/amd64"))
.from_("alpine:3.17")
.with_secret_variable("GITHUB_API_TOKEN", secret)
.with_exec(["apk", "add", "curl"])
.with_exec(
[
"sh",
"-c",
"""curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN" """,
]
)
.stdout()
)
print(out)
async def gcp_get_secret_plaintext(project_id, secret_id):
secret_uri = f"projects/{project_id}/secrets/{secret_id}/versions/latest"
# initialize Google Cloud API client
client = secretmanager.SecretManagerServiceAsyncClient()
# retrieve secret
response = await client.access_secret_version(request={"name": secret_uri})
return response.payload.data.decode("UTF-8")
anyio.run(main)
Load secret from Hashicorp Vault
The following code listing reads a secret (a GitHub API token) from a Hashicorp Vault Key/Value v2 engine and uses it in a Dagger pipeline to interact with the GitHub API.
Set the Hashicorp Vault URI, namespace, role and secret identifiers as host environment variables named VAULT_ADDRESS
, VAULT_NAMESPACE
, VAULT_ROLE_ID
and VAULT_SECRET_ID
respectively. Replace the MOUNT-PATH
, SECRET-ID
and SECRET-KEY
placeholders with your Hashicorp Vault mount point, secret identifier and key respectively.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
"github.com/hashicorp/vault-client-go"
"github.com/hashicorp/vault-client-go/schema"
)
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stdout))
if err != nil {
panic(err)
}
defer client.Close()
// get secret from Vault
secretPlaintext, err := getVaultSecret("MOUNT-PATH", "SECRET-ID", "SECRET-KEY")
if err != nil {
panic(err)
}
// load secret into Dagger
secret := client.SetSecret("ghApiToken", secretPlaintext)
// use secret in container environment
out, err := client.Container().
From("alpine:3.17").
WithSecretVariable("GITHUB_API_TOKEN", secret).
WithExec([]string{"apk", "add", "curl"}).
WithExec([]string{"sh", "-c", "curl \"https://api.github.com/repos/dagger/dagger/issues\" --header \"Accept: application/vnd.github+json\" --header \"Authorization: Bearer $GITHUB_API_TOKEN\""}).
Stdout(ctx)
if err != nil {
panic(err)
}
// print result
fmt.Println(out)
}
func getVaultSecret(mountPath, secretID, secretKey string) (string, error) {
ctx := context.Background()
// check for required variables in host environment
address := os.Getenv("VAULT_ADDRESS")
role_id := os.Getenv("VAULT_ROLE_ID")
secret_id := os.Getenv("VAULT_SECRET_ID")
// create Vault client
client, err := vault.New(
vault.WithAddress(address),
)
if err != nil {
return "", err
}
// log in to Vault
resp, err := client.Auth.AppRoleLogin(
ctx,
schema.AppRoleLoginRequest{
RoleId: role_id,
SecretId: secret_id,
},
vault.WithMountPath(mountPath),
)
if err != nil {
return "", err
}
if err := client.SetToken(resp.Auth.ClientToken); err != nil {
return "", err
}
// read and return secret
secret, err := client.Secrets.KvV2Read(
ctx,
secretID,
vault.WithMountPath(mountPath),
)
if err != nil {
return "", err
}
return fmt.Sprintf("%s", secret.Data.Data[secretKey]), nil
}
import { connect } from "@dagger.io/dagger"
import fetch from "node-fetch"
// initialize Dagger client
connect(
async (client) => {
// get secret from Vault
const secretPlaintext = await getVaultSecret(
"MOUNT-PATH",
"SECRET-ID",
"SECRET-KEY",
)
// load secret into Dagger
const secret = client.setSecret("ghApiToken", secretPlaintext)
// use secret in container environment
const out = await client
.container()
.from("alpine:3.17")
.withSecretVariable("GITHUB_API_TOKEN", secret)
.withExec(["apk", "add", "curl"])
.withExec([
"sh",
"-c",
`curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN"`,
])
.stdout()
// print result
console.log(out)
},
{ LogOutput: process.stderr },
)
async function getVaultSecret(mountPath, secretID, secretKey) {
// check for required variables in host environment
const vars = [
"VAULT_ADDRESS",
"VAULT_NAMESPACE",
"VAULT_ROLE_ID",
"VAULT_SECRET_ID",
]
vars.forEach((v) => {
if (!process.env[v]) {
console.log(`${v} variable must be set`)
process.exit()
}
})
const address = process.env.VAULT_ADDRESS
const namespace = process.env.VAULT_NAMESPACE
const role = process.env.VAULT_ROLE_ID
const secret = process.env.VAULT_SECRET_ID
// request client token
let url = `${address}/v1/auth/approle/login`
let body = { role_id: role, secret_id: secret }
let options = {
method: "POST",
headers: {
Accept: "application/json",
"X-Vault-Namespace": `${namespace}`,
},
body: JSON.stringify(body),
}
// read client token
let tokenResponse = await fetch(url, options)
.then((res) => res.json())
.catch((err) => console.error("Error: " + err))
const token = tokenResponse.auth.client_token
// request secret
url = `${address}/v1/${mountPath}/data/${secretID}`
options = {
method: "GET",
headers: {
Accept: "application/json",
"X-Vault-Namespace": `${namespace}`,
"X-Vault-Token": `${token}`,
},
}
// return secret
let secretResponse = await fetch(url, options)
.then((res) => res.json())
.catch((err) => console.error("Error: " + err))
return secretResponse.data.data[secretKey]
}
import os
import sys
import anyio
import hvac
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get secret from Vault
secret_plaintext = await get_vault_secret(
"MOUNT-PATH",
"SECRET-ID",
"SECRET-KEY",
)
# load secret into Dagger
secret = client.set_secret("ghApiToken", secret_plaintext)
# use secret in container environment
out = await (
client.container(platform=dagger.Platform("linux/amd64"))
.from_("alpine:3.17")
.with_secret_variable("GITHUB_API_TOKEN", secret)
.with_exec(["apk", "add", "curl"])
.with_exec(
[
"sh",
"-c",
"""curl "https://api.github.com/repos/dagger/dagger/issues" --header "Accept: application/vnd.github+json" --header "Authorization: Bearer $GITHUB_API_TOKEN" """,
]
)
.stdout()
)
print(out)
async def get_vault_secret(mount_path, secret_id, secret_key):
# check for required variables in host environment
for var in ["VAULT_ADDRESS", "VAULT_NAMESPACE", "VAULT_ROLE_ID", "VAULT_SECRET_ID"]:
if var not in os.environ:
msg = f"{var} environment variable must be set"
raise OSError(msg)
# create Vault client
client = hvac.Client(
url=os.environ.get("VAULT_ADDRESS"),
namespace=os.environ.get("VAULT_NAMESPACE"),
)
# log in to Vault
client.auth.approle.login(
role_id=os.environ.get("VAULT_ROLE_ID"),
secret_id=os.environ.get("VAULT_SECRET_ID"),
use_token=True,
)
# read and return secret
read_response = client.secrets.kv.read_secret_version(
path=secret_id,
mount_point=mount_path,
raise_on_deleted_version=True,
)
return read_response["data"]["data"][secret_key]
anyio.run(main)
Mount directories as secrets in a container
The following code listing demonstrates how to securely mount directories as secrets in a container. The directory structure and file names remain visible, but the contents of the secret files are scrubbed:
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"path/filepath"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
gpgKey := os.Getenv("GPG_KEY")
if gpgKey == "" {
gpgKey = "public"
}
// Export file signature
ok, err := client.Container().
From("alpine:3.17").
WithExec([]string{"apk", "add", "--no-cache", "gnupg"}).
With(mountedSecretDirectory(client, "/root/.gnupg", "~/.gnupg")).
WithWorkdir("/root").
WithMountedFile("myapp", client.Host().File("myapp")).
WithExec([]string{"gpg", "--detach-sign", "--armor", "-u", gpgKey, "myapp"}).
File("myapp.asc").
Export(ctx, "myapp.asc")
if !ok || err != nil {
panic(err)
}
fmt.Println("Signature exported successfully")
}
func mountedSecretDirectory(client *dagger.Client, targetPath, sourcePath string) func(*dagger.Container) *dagger.Container {
return func(c *dagger.Container) *dagger.Container {
sourceDir := filepath.Join(os.Getenv("HOME"), sourcePath[2:])
filepath.Walk(sourceDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.Mode().IsRegular() {
relativePath, _ := filepath.Rel(sourceDir, path)
target := filepath.Join(targetPath, relativePath)
secret := client.Host().SetSecretFile(filepath.Base(path), path)
c = c.WithMountedSecret(target, secret)
}
return nil
})
// Fix directory permissions
c = c.WithExec([]string{"sh", "-c", fmt.Sprintf("find %s -type d -exec chmod 700 {} \\;", targetPath)})
return c
}
}
import { Client, connect, Container } from "@dagger.io/dagger"
import * as fs from "fs"
import * as glob from "glob"
import * as path from "path"
const GPG_KEY = process.env.GPG_KEY || "public"
connect(
async (client: Client) => {
await client
.container()
.from("alpine:3.17")
.withExec(["apk", "add", "--no-cache", "gnupg"])
.with(await mountedSecretDirectory(client, "/root/.gnupg", "~/.gnupg"))
.withWorkdir("/root")
.withMountedFile("myapp", client.host().file("myapp"))
.withExec(["gpg", "--detach-sign", "--armor", "-u", GPG_KEY, "myapp"])
.file("myapp.asc")
.export("myapp.asc")
},
{ LogOutput: process.stderr },
)
async function mountedSecretDirectory(
client: Client,
targetPath: string,
sourcePath: string,
): Promise<(c: Container) => Container> {
sourcePath = path.resolve(
process.env.HOME || process.env.USERPROFILE || "",
sourcePath.substring(2),
)
const globFiles = glob.sync(`${sourcePath}/**/*`, { nodir: true })
const files = globFiles.filter((file) => fs.statSync(file).isFile())
function _mountedSecretDirectory(container: Container) {
for (const file of files) {
const relative = path.relative(sourcePath, file)
const secret = client.host().setSecretFile(relative, file)
container = container.withMountedSecret(
path.join(targetPath, relative),
secret,
)
}
// Fix directory permissions
return container.withExec([
"sh",
"-c",
`find ${targetPath} -type d -exec chmod 700 {} \\;`,
])
}
return _mountedSecretDirectory
}
import os
import sys
import anyio
import dagger
GPG_KEY = os.environ.get("GPG_KEY", "public")
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
await (
client.container()
.from_("alpine:3.17")
.with_exec(["apk", "add", "--no-cache", "gnupg"])
.with_(await mounted_secret_directory(client, "/root/.gnupg", "~/.gnupg"))
.with_workdir("/root")
.with_mounted_file("myapp", client.host().file("myapp"))
.with_exec(["gpg", "--detach-sign", "--armor", "-u", GPG_KEY, "myapp"])
.file("myapp.asc")
.export("myapp.asc")
)
async def mounted_secret_directory(
client: dagger.Client,
target_path: str,
source_path: str,
):
target = anyio.Path(target_path)
base = await anyio.Path(source_path).expanduser()
files = [path async for path in base.rglob("*") if await path.is_file()]
def _mounted_secret_directory(ctr: dagger.Container) -> dagger.Container:
for path in files:
relative = path.relative_to(base)
secret = client.host().set_secret_file(str(relative), str(path))
ctr = ctr.with_mounted_secret(str(target / relative), secret)
# Fix directory permissions.
return ctr.with_exec(
["sh", "-c", f"find {target} -type d -exec chmod 700 {{}} \\;"]
)
return _mounted_secret_directory
anyio.run(main)
Error handling
Terminate gracefully
The following code listing demonstrates how to handle errors gracefully, without crashing the program or script running Dagger pipelines.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
if err := run(); err != nil {
// Don't panic
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
func run() error {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
return fmt.Errorf("dagger connect: %w", err)
}
defer client.Close()
err = Test(ctx, client)
if err != nil {
return fmt.Errorf("test pipeline: %w", err)
}
fmt.Println("Test passed!")
return nil
}
func Test(ctx context.Context, client *dagger.Client) error {
_, err := client.
Container().
From("alpine").
// ERROR: cat: read error: Is a directory
WithExec([]string{"cat", "/"}).
Sync(ctx)
return err
}
import { connect, GraphQLRequestError } from "@dagger.io/dagger"
connect(
async (client) => {
try {
await test(client)
} catch (e) {
if (e instanceof GraphQLRequestError) {
// If it's an API error, just show the error message.
console.error(e.toString())
} else {
// Otherwise, show the full stack trace for debugging.
console.error(e)
}
// Abort script with non-zero exit code.
process.exit(1)
}
console.log("Test passed!")
},
{ LogOutput: process.stderr },
)
async function test(client) {
await client
.container()
.from("alpines")
// ERROR: cat: read error: Is a directory
.withExec(["cat", "/"])
.sync()
}
import logging
import sys
import anyio
import dagger
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
try:
await test(client)
except dagger.QueryError as e:
# QueryError is for valid GraphQL responses that return errors.
print(e, file=sys.stderr)
# Abort script with non-zero exit code.
sys.exit(1)
print("Test passed!")
async def test(client: dagger.Client):
await (
client.container()
.from_("alpine")
# ERROR: cat: read error: Is a directory
.with_exec(["cat", "/"])
.sync()
)
if __name__ == "__main__":
try:
anyio.run(main)
except dagger.DaggerError:
# DaggerError is the base class for all errors raised by dagger.
logging.exception("Unexpected dagger error")
sys.exit(1)
Handle exit code and unexpected errors
The following code listing demonstrates how to handle a non-zero exit code (an error from running a command) in a container, with several use cases:
- Difference between “test failed” and “failed to test”
- Handle a specific exit code value
- Handle a failure from a command executed in a container, without checking for the exit code
- Catching and handling a failure from a command executed in a container, without propagating it
- Get the standard output of a command, irrespective of whether or not it failed
- Go
- Node.js
- Python
package main
import (
"context"
"errors"
"fmt"
"os"
"dagger.io/dagger"
)
// WarningExit is the exit code for warnings.
const WarningExit = 5
var reportCmd = `
echo "QA Checks"
echo "========="
echo "Check 1: PASS"
echo "Check 2: FAIL"
echo "Check 3: PASS"
exit 1
`
func main() {
if err := run(); err != nil {
// Don't panic
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
func run() error {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
return fmt.Errorf("dagger connect: %w", err)
}
defer client.Close()
err = Test(ctx, client)
if err != nil {
// Unexpected error (not from WithExec).
return fmt.Errorf("test pipeline: %w", err)
}
result, err := Report(ctx, client)
if err != nil {
// Unexpected error (not from WithExec).
return fmt.Errorf("report pipeline: %w", err)
}
fmt.Println(result)
return nil
}
func Test(ctx context.Context, client *dagger.Client) error {
_, err := client.
Container().
From("alpine").
WithExec([]string{"sh", "-c", "echo Skipped! >&2; exit 5"}).
Sync(ctx)
// Handle error from WithExec error here, but let other errors bubble up.
var e *dagger.ExecError
if errors.As(err, &e) {
// Don't do anything when skipped.
// Print message to stderr otherwise.
if e.ExitCode != WarningExit {
fmt.Fprintf(os.Stderr, "Test failed: %s", e.Stderr)
}
return nil
}
return err
}
func Report(ctx context.Context, client *dagger.Client) (string, error) {
output, err := client.
Container().
From("alpines"). // ⚠️ typo! non-exec failure
WithExec([]string{"sh", "-c", reportCmd}).
Stdout(ctx)
// Get stdout even on non-zero exit.
var e *dagger.ExecError
if errors.As(err, &e) {
// Not necessary to check for `e.ExitCode != 0`.
return e.Stdout, nil
}
return output, err
}
import { connect, GraphQLRequestError, ExecError } from "@dagger.io/dagger"
// Exit code for warnings.
const WARNING_EXIT = 5
const REPORT_CMD = `
echo "QA Checks"
echo "========="
echo "Check 1: PASS"
echo "Check 2: FAIL"
echo "Check 3: PASS"
exit 1
`
connect(
async (client) => {
try {
// Will only abort if there's an unexpected error,
// in which case the next pipeline won't execute.
await test(client)
console.log(await report(client))
} catch (e) {
if (e instanceof GraphQLRequestError) {
// If it's an API error, just show the error message.
console.error(e.toString())
} else {
// Otherwise, show the full stack trace for debugging.
console.error(e)
}
// Abort script with non-zero exit code.
process.exit(1)
}
},
{ LogOutput: process.stderr },
)
async function test(client) {
try {
await client
.container()
.from("alpine")
// ERROR: cat: read error: Is a directory
.withExec(["sh", "-c", "echo Skipped! >&2; exit 5"])
.sync()
} catch (e) {
// Handle error from withExec here, but let other errors bubble up.
if (e instanceof ExecError) {
// Don't do anything when skipped.
// Print message to stderr otherwise.
if (e.exitCode !== WARNING_EXIT) {
console.error("Test failed: %s", e.stderr)
}
return
}
// Rethrow other errors.
throw e
}
}
async function report(client) {
// Get stdout even on non-zero exit code.
try {
return await client
.container()
.from("alpines") // ⚠️ typo! non-exec failure
.withExec(["sh", "-c", REPORT_CMD])
.stdout()
} catch (e) {
if (e instanceof ExecError) {
// Not necessary to check for `e.exitCode != 0`.
return e.stdout
}
// Rethrow other errors.
throw e
}
}
import logging
import sys
import anyio
import dagger
WARNING_EXIT = 5
"""Exit code for warnings."""
REPORT_CMD = """
echo "QA Checks"
echo "========="
echo "Check 1: PASS"
echo "Check 2: FAIL"
echo "Check 3: PASS"
exit 1
"""
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# Will only abort if there's an unexpected error,
# in which case the next pipeline won't execute.
await test(client)
print(await report(client))
async def test(client: dagger.Client):
try:
await (
client.container()
.from_("alpine")
.with_exec(["sh", "-c", "echo Skipped! >&2; exit 5"])
.sync()
)
except dagger.ExecError as e:
# Handle error from with_exec here, but let other errors bubble up.
# Don't do anything when skipped.
# Print message to stderr otherwise.
if e.exit_code != WARNING_EXIT:
print("Test failed:", e.stderr, file=sys.stderr)
async def report(client: dagger.Client) -> str:
# Get stdout even on non-zero exit code.
try:
return await (
client.container()
.from_("alpines") # ⚠️ typo! non-exec failure
.with_exec(["sh", "-c", REPORT_CMD])
.stdout()
)
except dagger.ExecError as e:
# Not necessary to check for `e.exit_code != 0`.
return e.stdout
if __name__ == "__main__":
try:
anyio.run(main)
except dagger.DaggerError:
# DaggerError is the base class for all errors raised by dagger.
logging.exception("Unexpected dagger error")
sys.exit(1)
Continue using container after command execution fails
This code listing demonstrates how to continue using a container after a command executed within it fails. A common use case for this is to export a report that a test suite tool generates.
The caveat with this approach is that forcing a zero exit code on a failure also caches the failure, so subsequent runs may reuse the failed result. This may not be desired depending on the use case; a cache-busting sketch follows the listings below.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
var script = `#!/bin/sh
echo "Test Suite"
echo "=========="
echo "Test 1: PASS" >> report.txt
echo "Test 2: FAIL" >> report.txt
echo "Test 3: PASS" >> report.txt
exit 1
`
func main() {
if err := run(); err != nil {
// Don't panic
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
func run() error {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
return err
}
defer client.Close()
return Test(ctx, client)
}
func Test(ctx context.Context, client *dagger.Client) error {
// The result of `Sync` is the container, which allows continued chaining.
ctr, err := client.
Container().
From("alpine").
// Add script with execution permission to simulate a testing tool.
WithNewFile("run-tests", dagger.ContainerWithNewFileOpts{
Contents: script,
Permissions: 0o750,
}).
// If the exit code isn't needed: "run-tests; true"
WithExec([]string{"sh", "-c", "/run-tests; echo -n $? > /exit_code"}).
Sync(ctx)
if err != nil {
// Unexpected error, could be network failure.
return fmt.Errorf("run tests: %w", err)
}
// Save report locally for inspection.
_, err = ctr.
File("report.txt").
Export(ctx, "report.txt")
if err != nil {
// Test suite ran but there's no report file.
return fmt.Errorf("get report: %w", err)
}
// Use the saved exit code to determine if the tests passed.
exitCode, err := ctr.File("/exit_code").Contents(ctx)
if err != nil {
return fmt.Errorf("get exit code: %w", err)
}
if exitCode != "0" {
fmt.Fprintln(os.Stderr, "Tests failed!")
} else {
fmt.Println("Tests passed!")
}
return nil
}
import { connect, GraphQLRequestError } from "@dagger.io/dagger"
const SCRIPT = `#!/bin/sh
echo "Test Suite"
echo "=========="
echo "Test 1: PASS" >> report.txt
echo "Test 2: FAIL" >> report.txt
echo "Test 3: PASS" >> report.txt
exit 1
`
connect(
async (client) => {
try {
await test(client)
} catch (e) {
if (e instanceof GraphQLRequestError) {
// If it's an API error, just show the error message.
console.error(e.toString())
} else {
// Otherwise, show the full stack trace for debugging.
console.error(e)
}
// Abort script with non-zero exit code.
process.exit(1)
}
},
{ LogOutput: process.stderr },
)
async function test(client) {
// If any one of these steps fails, it's an unexpected error so we don't
// need to handle anything here.
// The result of `sync` is the container, which allows continued chaining.
const ctr = await client
.container()
.from("alpine")
// Add script with execution permission to simulate a testing tool.
.withNewFile("run-tests", { contents: SCRIPT, permissions: 0o750 })
// If the exit code isn't needed: "run-tests; true"
.withExec(["sh", "-c", "/run-tests; echo -n $? > /exit_code"])
.sync()
// Save report locally for inspection.
await ctr.file("report.txt").export("report.txt")
// Use the saved exit code to determine if the tests passed.
const exitCode = await ctr.file("exit_code").contents()
if (exitCode !== "0") {
console.error("Tests failed!")
} else {
console.log("Tests passed!")
}
}
import logging
import sys
import anyio
import dagger
SCRIPT = """#!/bin/sh
echo "Test Suite"
echo "=========="
echo "Test 1: PASS" >> report.txt
echo "Test 2: FAIL" >> report.txt
echo "Test 3: PASS" >> report.txt
exit 1
"""
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
await test(client)
async def test(client: dagger.Client):
# If any one of these steps fails, it's an unexpected error so we don't
# need to handle anything here.
# The result of `sync` is the container, which allows continued chaining.
ctr = (
await (
client.container()
.from_("alpine")
# Add script with execution permission to simulate a testing tool.
.with_new_file("run-tests", contents=SCRIPT, permissions=0o750)
# If the exit code isn't needed: "run-tests; true"
.with_exec(["sh", "-c", "/run-tests; echo -n $? > /exit_code"])
.sync()
)
)
# Save report locally for inspection.
await ctr.file("report.txt").export("report.txt")
# Use the saved exit code to determine if the tests passed.
exit_code = await ctr.file("exit_code").contents()
if exit_code != "0":
print("Tests failed!", file=sys.stderr)
else:
print("Tests passed!")
if __name__ == "__main__":
try:
anyio.run(main)
except dagger.DaggerError:
# DaggerError is the base class for all errors raised by dagger.
logging.exception("Unexpected dagger error")
sys.exit(1)
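If caching the failed run is not acceptable, one common workaround is to inject a value that changes on every invocation so that the test step always re-executes. The following is a minimal Python sketch of that idea; the CACHEBUSTER variable name is purely illustrative and not part of the Dagger API:
import sys
import uuid
import anyio
import dagger
async def main():
    async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
        await (
            client.container()
            .from_("alpine")
            # a value that changes on every run invalidates the cache
            # for all following steps, so the command below always re-runs
            .with_env_variable("CACHEBUSTER", str(uuid.uuid4()))
            .with_exec(["sh", "-c", "echo this step is never cached"])
            .sync()
        )
anyio.run(main)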
Optimizations
Cache dependencies
The following code listing uses a cache volume for application dependencies. This enables Dagger to reuse the contents of the cache every time the pipeline runs, and thereby speed up pipeline operations.
- Go
- Node.js
- Python
package main
import (
"context"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// use a golang:1.21 container
// mount the source code directory on the host
// at /src in the container
// mount the cache volume to persist dependencies
source := client.Container().
From("golang:1.21").
WithDirectory("/src", client.Host().Directory(".")).
WithWorkdir("/src").
WithMountedCache("/go/pkg/mod", client.CacheVolume("go-mod-121")).
WithEnvVariable("GOMODCACHE", "/go/pkg/mod").
WithMountedCache("/go/build-cache", client.CacheVolume("go-build-121")).
WithEnvVariable("GOCACHE", "/go/build-cache")
// set the working directory in the container
// install application dependencies
_, err = source.
WithExec([]string{"go", "build"}).
Sync(ctx)
if err != nil {
panic(err)
}
}
import { connect } from "@dagger.io/dagger"
connect(
async (client) => {
// use a node:18 container
// mount the source code directory on the host
// at /src in the container
// mount the cache volume to persist dependencies
const source = client
.container()
.from("node:18")
.withDirectory("/src", client.host().directory("."))
.withWorkdir("/src")
.withMountedCache(
"/src/node_modules",
client.cacheVolume("node-18-myapp-myenv"),
)
.withMountedCache("/root/.npm", client.cacheVolume("node-18"))
// set the working directory in the container
// install application dependencies
await source.withExec(["npm", "install"]).sync()
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
config = dagger.Config(log_output=sys.stderr)
async with dagger.Connection(config) as client:
# use a python:3.11 container
# mount the source code directory on the host
# at /src in the container
# mount the cache volumes to persist dependencies
source = (
client.container()
.from_("python:3.11")
.with_directory("/src", client.host().directory("."))
.with_workdir("/src")
.with_mounted_cache("/root/.cache/pip", client.cache_volume("python-311"))
)
# set the working directory in the container
# install application dependencies
await source.with_exec(["pip", "install", "-r", "requirements.txt"]).sync()
anyio.run(main)
Persist service state between runs
The following code listing uses a cache volume to persist a service's data across pipeline runs.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
// create Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// create Redis service container
redisSrv := client.Container().
From("redis").
WithExposedPort(6379).
WithMountedCache("/data", client.CacheVolume("my-redis")).
WithWorkdir("/data").
AsService()
// create Redis client container
redisCLI := client.Container().
From("redis").
WithServiceBinding("redis-srv", redisSrv).
WithEntrypoint([]string{"redis-cli", "-h", "redis-srv"})
// set and save value
redisCLI.
WithExec([]string{"set", "foo", "abc"}).
WithExec([]string{"save"}).
Stdout(ctx)
// get value
val, err := redisCLI.
WithExec([]string{"get", "foo"}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(val)
}
import { connect, Client } from "@dagger.io/dagger"
connect(
async (client: Client) => {
const redisSrv = client
.container()
.from("redis")
.withExposedPort(6379)
.withMountedCache("/data", client.cacheVolume("my-redis"))
.withWorkdir("/data")
.asService()
// create Redis client container
const redisCLI = client
.container()
.from("redis")
.withServiceBinding("redis-srv", redisSrv)
.withEntrypoint(["redis-cli", "-h", "redis-srv"])
// set and save value
await redisCLI.withExec(["set", "foo", "abc"]).withExec(["save"]).stdout()
// get value
const val = await redisCLI.withExec(["get", "foo"]).stdout()
console.log(val)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# create Redis service container
redis_srv = (
client.container()
.from_("redis")
.with_exposed_port(6379)
.with_mounted_cache("/data", client.cache_volume("my-redis"))
.with_workdir("/data")
.as_service()
)
# create Redis client container
redis_cli = (
client.container()
.from_("redis")
.with_service_binding("redis-srv", redis_srv)
.with_entrypoint(["redis-cli", "-h", "redis-srv"])
)
# set and save value
await redis_cli.with_exec(["set", "foo", "abc"]).with_exec(["save"]).stdout()
# get value
val = await redis_cli.with_exec(["get", "foo"]).stdout()
print(val)
anyio.run(main)
Add multiple environment variables to a container
The following code listing demonstrates how to add multiple environment variables to a container.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// setup container and
// define environment variables
ctr := client.
Container().
From("alpine").
With(EnvVariables(map[string]string{
"ENV_VAR_1": "VALUE 1",
"ENV_VAR_2": "VALUE 2",
"ENV_VAR_3": "VALUE 3",
})).
WithExec([]string{"env"})
// print environment variables
out, err := ctr.Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}
func EnvVariables(envs map[string]string) dagger.WithContainerFunc {
return func(c *dagger.Container) *dagger.Container {
for key, value := range envs {
c = c.WithEnvVariable(key, value)
}
return c
}
}
import { connect, Client, Container } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client: Client) => {
// setup container and
// define environment variables
const ctr = client
.container()
.from("alpine")
.with(
envVariables({
ENV_VAR_1: "VALUE 1",
ENV_VAR_2: "VALUE 2",
ENV_VAR_3: "VALUE 3",
}),
)
.withExec(["env"])
// print environment variables
console.log(await ctr.stdout())
},
{ LogOutput: process.stderr },
)
function envVariables(envs: Record<string, string>) {
return (c: Container): Container => {
Object.entries(envs).forEach(([key, value]) => {
c = c.withEnvVariable(key, value)
})
return c
}
}
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# setup container and
# define environment variables
ctr = (
client.container()
.from_("alpine")
.with_(
env_variables(
{
"ENV_VAR_1": "VALUE 1",
"ENV_VAR_2": "VALUE 2",
"ENV_VAR_3": "VALUE 3",
}
)
)
.with_exec(["env"])
)
# print environment variables
print(await ctr.stdout())
def env_variables(envs: dict[str, str]):
def env_variables_inner(ctr: dagger.Container):
for key, value in envs.items():
ctr = ctr.with_env_variable(key, value)
return ctr
return env_variables_inner
anyio.run(main)
Organize pipeline code into modules & classes
The following code listing demonstrates how to organize Dagger pipeline code into independent modules (or functions/packages, depending on your programming language) to improve code reusability and organization. It also demonstrates how to reuse the Dagger client and, therefore, share the Dagger session between modules.
The same Dagger client can safely be used in concurrent threads/routines. Therefore, it is recommended to reuse the Dagger client wherever possible, instead of creating a new client for each use. Initializing and using multiple Dagger clients in the same pipeline can result in unexpected behavior.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
"main/alpine"
)
func main() {
ctx := context.Background()
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// pass client to method imported from another module
fmt.Println(alpine.Version(client))
}
package alpine
import (
"context"
"dagger.io/dagger"
)
// create base image
func base(client *dagger.Client) *dagger.Container {
return client.
Container().
From("alpine:latest")
}
// run command in base image
func Version(client *dagger.Client) string {
ctx := context.Background()
out, err := base(client).
WithExec([]string{"cat", "/etc/alpine-release"}).
Stdout(ctx)
if err != nil {
panic(err)
}
return out
}
import { connect, Client } from "@dagger.io/dagger"
import * as alpine from "./alpine.mts"
connect(
// initialize Dagger client
// pass client to method imported from another module
async (client: Client) => {
console.log(await alpine.version(client))
},
{ LogOutput: process.stderr },
)
import { Client, Container } from "@dagger.io/dagger"
// get base image
function base(client: Client): Container {
return client.container().from("alpine:latest")
}
// run command in base image
export async function version(client: Client): Promise<string> {
return base(client).withExec(["cat", "/etc/alpine-release"]).stdout()
}
import sys
import anyio
import dagger
from .alpine import version
# initialize Dagger client
# pass client to method imported from another module
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
print(await version(client))
if __name__ == "__main__":
anyio.run(main)
import dagger
# get base image
def base(client: dagger.Client):
return client.container().from_("alpine:latest")
# run command in base image
async def version(client: dagger.Client):
return await base(client).with_exec(["cat", "/etc/alpine-release"]).stdout()
Another possible approach is to use independent classes (or interfaces, depending on the programming language) with public methods as functions. With this, it is no longer necessary to pass the client to all imported functions. The following code listing demonstrates this approach.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
"main/alpine"
)
func main() {
ctx := context.Background()
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// Create pipeline structure imported from another module passing the client
pipeline := alpine.New(client)
// Call version function
fmt.Println(pipeline.Version(ctx))
}
package alpine
import (
"context"
"dagger.io/dagger"
)
type Alpine struct {
client *dagger.Client
}
// Create a Alpine structure
func New(client *dagger.Client) *Alpine {
return &Alpine{client: client}
}
// create base image
func (a *Alpine) base() *dagger.Container {
return a.client.
Container().
From("alpine:latest")
}
// run command in base image
func (a *Alpine) Version(ctx context.Context) string {
out, err := a.
base().
WithExec([]string{"cat", "/etc/alpine-release"}).
Stdout(ctx)
if err != nil {
panic(err)
}
return out
}
import { connect, Client } from "@dagger.io/dagger"
import { Alpine } from "./alpine.mts"
connect(
// initialize Dagger client
// pass client to method imported from another module
async (client: Client) => {
// create pipeline object passing the client
const pipeline = new Alpine(client)
// call pipeline method
console.log(await pipeline.version())
},
{ LogOutput: process.stderr },
)
import { Client, Container } from "@dagger.io/dagger"
export class Alpine {
private client: Client
// initialize pipeline class
constructor(client: Client) {
this.client = client
}
// get base image
private base(): Container {
return this.client.container().from("alpine:latest")
}
// run command in base image
public async version(): Promise<string> {
return this.base().withExec(["cat", "/etc/alpine-release"]).stdout()
}
}
import sys
import anyio
import dagger
from .alpine import Alpine
# initialize Dagger client
# pass client to method imported from another module
async def main():
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
pipeline = Alpine(client)
print(await pipeline.version())
if __name__ == "__main__":
anyio.run(main)
import dagger
class Alpine:
__client: dagger.Client
# initialize pipeline class
def __init__(self, client: dagger.Client):
self.__client = client
# get base image
def __base(self):
return self.__client.container().from_("alpine:latest")
# run command in base image
async def version(self):
return await self.__base().with_exec(["cat", "/etc/alpine-release"]).stdout()
Execute pipeline operations concurrently
The following code listing demonstrates how to use native-language concurrency features (goroutines in Go, promises in TypeScript, and task groups in Python) to execute pipeline operations in parallel.
- Go
- Node.js
- Python
package main
import (
"context"
"crypto/rand"
"log"
"math/big"
"os"
"golang.org/x/sync/errgroup"
"dagger.io/dagger"
)
func longTimeTask(ctx context.Context, c *dagger.Client) error {
sleepTime, err := rand.Int(rand.Reader, big.NewInt(10))
if err != nil {
return err
}
_, err = c.Container().From("alpine").
WithExec([]string{"sleep", sleepTime.String()}).
WithExec([]string{"echo", "task done"}).
Sync(ctx)
return err
}
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
log.Println(err)
return
}
defer client.Close()
// Create err-group to handle error
eg, gctx := errgroup.WithContext(ctx)
// Launch task 1
eg.Go(func() error {
return longTimeTask(gctx, client)
})
// Launch task 2
eg.Go(func() error {
return longTimeTask(gctx, client)
})
// Launch task 3
eg.Go(func() error {
return longTimeTask(gctx, client)
})
// Wait for each task to be completed
err = eg.Wait()
if err != nil {
panic(err)
}
}
import { Client, connect } from "@dagger.io/dagger"
import { randomInt } from "crypto"
async function longTimeTask(c: Client): Promise<void> {
await c
.container()
.from("alpine")
.withExec(["sleep", randomInt(0, 10).toString()])
.withExec(["echo", "task done"])
.sync()
}
connect(
async (client) => {
await Promise.all([
longTimeTask(client),
longTimeTask(client),
longTimeTask(client),
])
},
{ LogOutput: process.stderr },
)
import secrets
import sys
import anyio
import dagger
async def long_time_task(c: dagger.Client):
"""
a task that can take a long time.
:param c: dagger client.
"""
await c.container().from_("alpine").with_exec(
["sleep", str(secrets.randbelow(10))]
).with_exec(["echo", "task done"]).sync()
async def main():
"""Execute multiple tasks in concurrency."""
async with dagger.Connection(
dagger.Config(log_output=sys.stderr)
) as client, anyio.create_task_group() as tg:
tg.start_soon(long_time_task, client)
tg.start_soon(long_time_task, client)
tg.start_soon(long_time_task, client)
anyio.run(main)
Integrations
Docker Engine
The following code listing shows how to connect to a Docker Engine on the host machine by mounting the Docker UNIX socket into a container and running the docker CLI.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// setup container with docker socket
ctr := client.
Container().
From("docker").
WithUnixSocket("/var/run/docker.sock", client.Host().UnixSocket("/var/run/docker.sock")).
WithExec([]string{"docker", "run", "--rm", "alpine", "uname", "-a"})
// print docker run
out, err := ctr.Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}
import { connect } from "@dagger.io/dagger"
// create Dagger client
connect(
async (client) => {
// setup container with docker socket
const ctr = client
.container()
.from("docker")
.withUnixSocket(
"/var/run/docker.sock",
client.host().unixSocket("/var/run/docker.sock"),
)
.withExec(["docker", "run", "--rm", "alpine", "uname", "-a"])
// print docker run
console.log(await ctr.stdout())
},
{ LogOutput: process.stderr },
)
import sys
import anyio
import dagger
async def main():
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# setup container with docker socket
ctr = (
client.container()
.from_("docker")
.with_unix_socket(
"/var/run/docker.sock",
client.host().unix_socket("/var/run/docker.sock"),
)
.with_exec(["docker", "run", "--rm", "alpine", "uname", "-a"])
)
# print docker run
print(await ctr.stdout())
anyio.run(main)
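The same socket mount works for any docker CLI invocation. As a hedged variation (not part of the listings above; the image tag myapp:dev and the presence of a Dockerfile in the host working directory are illustrative assumptions), the Go sketch below also mounts the host working directory and runs docker build against the host daemon, so the resulting image ends up in the host's local image store.
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// mount the host working directory and the Docker socket,
// then run `docker build` against the host daemon
out, err := client.Container().
From("docker").
WithUnixSocket("/var/run/docker.sock", client.Host().UnixSocket("/var/run/docker.sock")).
WithDirectory("/src", client.Host().Directory(".")).
WithWorkdir("/src").
WithExec([]string{"docker", "build", "-t", "myapp:dev", "."}).
Stdout(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}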
Tailscale
The following code listing shows how a container running in a Dagger pipeline can access a Tailscale network using Tailscale's userspace networking.
Set the TAILSCALE_AUTHKEY host environment variable to a Tailscale authentication key and the TAILSCALE_SERVICE_URL host environment variable to a URL accessible only on the Tailscale network.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
func main() {
// create Dagger client
ctx := context.Background()
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
tailscaleAuthKey := os.Getenv("TAILSCALE_AUTHKEY")
if tailscaleAuthKey == "" {
panic("TAILSCALE_AUTHKEY env var must be set")
}
// set secret
authKeySecret := client.SetSecret("tailscaleAuthKey", tailscaleAuthKey)
tailscaleServiceURL := os.Getenv("TAILSCALE_SERVICE_URL")
if tailscaleServiceURL == "" {
panic("TAILSCALE_SERVICE_URL env var must be set")
}
// create Tailscale service container
tailscale := client.Container().
From("tailscale/tailscale:stable").
WithSecretVariable("TAILSCALE_AUTHKEY", authKeySecret).
WithExec([]string{"/bin/sh", "-c", "tailscaled --tun=userspace-networking --socks5-server=0.0.0.0:1055 --outbound-http-proxy-listen=0.0.0.0:1055 & tailscale up --authkey $TAILSCALE_AUTHKEY &"}).
WithExposedPort(1055)
// access Tailscale network
out, err := client.Container().
From("alpine:3.17").
WithExec([]string{"apk", "add", "curl"}).
WithServiceBinding("tailscale", tailscale).
WithEnvVariable("ALL_PROXY", "socks5://tailscale:1055/").
WithExec([]string{"curl", "--silent", "--verbose", tailscaleServiceURL}).
Sync(ctx)
if err != nil {
panic(err)
}
fmt.Println(out)
}
import { connect } from "@dagger.io/dagger"
// check for required variables in host environment
const vars = ["TAILSCALE_AUTHKEY", "TAILSCALE_SERVICE_URL"]
vars.forEach((v) => {
if (!process.env[v]) {
console.log(`${v} variable must be set`)
process.exit()
}
})
// create Dagger client
connect(
async (client) => {
// create Tailscale authentication key as secret
const authKeySecret = client.setSecret(
"tailscaleAuthkey",
process.env.TAILSCALE_AUTHKEY,
)
const tailscaleServiceURL = process.env.TAILSCALE_SERVICE_URL
// create Tailscale service container
const tailscale = client
.container()
.from("tailscale/tailscale:stable")
.withSecretVariable("TAILSCALE_AUTHKEY", authKeySecret)
.withExec([
"/bin/sh",
"-c",
"tailscaled --tun=userspace-networking --socks5-server=0.0.0.0:1055 --outbound-http-proxy-listen=0.0.0.0:1055 & tailscale up --authkey $TAILSCALE_AUTHKEY &",
])
.withExposedPort(1055)
// access Tailscale network
const out = await client
.container()
.from("alpine:3.17")
.withExec(["apk", "add", "curl"])
.withServiceBinding("tailscale", tailscale)
.withEnvVariable("ALL_PROXY", "socks5://tailscale:1055/")
.withExec(["curl", "--silent", "--verbose", tailscaleServiceURL])
.sync()
console.log(out)
},
{ LogOutput: process.stderr },
)
import os
import sys
import anyio
import dagger
async def main():
# check for required variables in host environment
for var in ["TAILSCALE_AUTHKEY", "TAILSCALE_SERVICE_URL"]:
if var not in os.environ:
msg = f'"{var}" environment variable must be set'
raise OSError(msg)
# create Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# create Tailscale authentication key as secret
auth_key_secret = client.set_secret(
"tailscaleAuthkey", os.environ["TAILSCALE_AUTHKEY"]
)
tailscale_service_url = os.environ["TAILSCALE_SERVICE_URL"]
# create Tailscale service container
tailscale = (
client.container()
.from_("tailscale/tailscale:stable")
.with_secret_variable(name="TAILSCALE_AUTHKEY", secret=auth_key_secret)
.with_exec(
[
"/bin/sh",
"-c",
(
"tailscaled --tun=userspace-networking"
" --socks5-server=0.0.0.0:1055"
" --outbound-http-proxy-listen=0.0.0.0:1055 & tailscale up"
" --authkey $TAILSCALE_AUTHKEY &"
),
]
)
.with_exposed_port(1055)
)
# access Tailscale network
out = await (
client.container()
.from_("alpine:3.17")
.with_exec(["apk", "add", "curl"])
.with_service_binding("tailscale", tailscale)
.with_env_variable("ALL_PROXY", "socks5://tailscale:1055/")
.with_exec(["curl", "--silent", "--verbose", tailscale_service_url])
.sync()
)
print(out)
anyio.run(main)
AWS Cloud Development Kit
The following code listing builds, publishes and deploys a container using the Amazon Web Services (AWS) Cloud Development Kit (CDK).
- Go
package main
import (
"context"
"fmt"
"os"
"dagger.io/dagger"
)
// build() reads the source code, runs the tests, builds the app and publishes it to a container registry
func build(ctx context.Context, client *dagger.Client, registry *RegistryInfo) (string, error) {
nodeCache := client.CacheVolume("node")
// Read the source code from local directory
// sourceDir := client.Host().Directory("./app", dagger.HostDirectoryOpts{
// Exclude: []string{"node_modules/"},
// })
// Read the source code from a remote git repository
sourceDir := client.Git("https://github.com/dagger/hello-dagger.git").
Commit("5343dfee12cfc59013a51886388a7cacee3f16b9").
Tree()
source := client.Container().
From("node:16").
WithDirectory("/src", sourceDir).
WithMountedCache("/src/node_modules", nodeCache)
runner := source.
WithWorkdir("/src").
WithExec([]string{"npm", "install"})
test := runner.
WithExec([]string{"npm", "test", "--", "--watchAll=false"})
buildDir := test.
WithExec([]string{"npm", "run", "build"}).
Directory("./build")
// Explicitly build for "linux/amd64" to match the target (container on Fargate)
return client.Container(dagger.ContainerOpts{Platform: "linux/amd64"}).
From("nginx").
WithDirectory("/usr/share/nginx/html", buildDir).
WithRegistryAuth(
"125635003186.dkr.ecr.us-west-1.amazonaws.com",
registry.username,
client.SetSecret("registryPassword", registry.password),
).
Publish(ctx, registry.uri)
}
// deployToECS deploys a container image to the ECS cluster
func deployToECS(ctx context.Context, client *dagger.Client, awsClient *AWSClient, containerImage string) string {
stackParameters := map[string]string{
"ContainerImage": containerImage,
}
outputs, err := awsClient.cdkDeployStack(ctx, client, "DaggerDemoECSStack", stackParameters)
if err != nil {
panic(err)
}
return outputs["LoadBalancerDNS"]
}
func main() {
ctx := context.Background()
// initialize Dagger client
client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer client.Close()
// initialize AWS client
awsClient, err := NewAWSClient(ctx, "us-west-1")
if err != nil {
panic(err)
}
// init the ECR Registry using the AWS CDK
registry := initRegistry(ctx, client, awsClient)
imageRef, err := build(ctx, client, registry)
if err != nil {
panic(err)
}
fmt.Println("Published image to", imageRef)
// init and deploy to ECS using the AWS CDK
publicDNS := deployToECS(ctx, client, awsClient, imageRef)
fmt.Printf("Deployed to http://%s/\n", publicDNS)
}
package main
import (
"context"
"encoding/base64"
"errors"
"fmt"
"log"
"os"
"strings"
"dagger.io/dagger"
"github.com/aws/aws-sdk-go-v2/config"
"github.com/aws/aws-sdk-go-v2/service/cloudformation"
"github.com/aws/aws-sdk-go-v2/service/cloudformation/types"
"github.com/aws/aws-sdk-go-v2/service/ecr"
"github.com/aws/jsii-runtime-go"
)
type AWSClient struct {
region string
cCfn *cloudformation.Client
cEcr *ecr.Client
}
func NewAWSClient(ctx context.Context, region string) (*AWSClient, error) {
cfg, err := config.LoadDefaultConfig(ctx)
if err != nil {
return nil, err
}
cfg.Region = region
client := &AWSClient{
region: region,
cCfn: cloudformation.NewFromConfig(cfg),
cEcr: ecr.NewFromConfig(cfg),
}
return client, nil
}
func (c *AWSClient) GetCfnStackOutputs(ctx context.Context, stackName string) (map[string]string, error) {
out, err := c.cCfn.DescribeStacks(ctx, &cloudformation.DescribeStacksInput{
StackName: jsii.String(stackName),
})
if err != nil {
return nil, err
}
if len(out.Stacks) < 1 {
return nil, fmt.Errorf("cannot DescribeStack name %q", stackName)
}
stack := out.Stacks[0]
// status := string(stack.StackStatus)
return FormatStackOutputs(stack.Outputs), nil
}
func (c *AWSClient) GetECRAuthorizationToken(ctx context.Context) (string, error) {
log.Printf("ECR GetAuthorizationToken for region %q", c.region)
out, err := c.cEcr.GetAuthorizationToken(ctx, &ecr.GetAuthorizationTokenInput{})
if err != nil {
return "", err
}
if len(out.AuthorizationData) < 1 {
return "", fmt.Errorf("GetECRAuthorizationToken returned empty AuthorizationData")
}
authToken := *out.AuthorizationData[0].AuthorizationToken
return authToken, nil
}
// GetECRUsernamePassword fetches ECR auth token and converts it to username / password
func (c *AWSClient) GetECRUsernamePassword(ctx context.Context) (string, string, error) {
token, err := c.GetECRAuthorizationToken(ctx)
if err != nil {
return "", "", err
}
decoded, err := base64.StdEncoding.DecodeString(token)
if err != nil {
return "", "", err
}
split := strings.SplitN(string(decoded), ":", 2)
if len(split) < 2 {
return "", "", fmt.Errorf("invalid base64 decoded data")
}
return split[0], split[1], nil
}
// FormatStackOutputs converts stack outputs into a map of string for easy printing
func FormatStackOutputs(outputs []types.Output) map[string]string {
outs := map[string]string{}
for _, o := range outputs {
outs[*o.OutputKey] = *o.OutputValue
}
return outs
}
// cdkDeployStack deploys a CloudFormation stack via the CDK cli
func (c *AWSClient) cdkDeployStack(ctx context.Context, client *dagger.Client, stackName string, stackParameters map[string]string) (map[string]string, error) {
cdkCode := client.Host().Directory("./infra", dagger.HostDirectoryOpts{
Exclude: []string{"cdk.out/", "infra"},
})
awsConfig := client.Host().Directory(os.ExpandEnv("${HOME}/.aws"))
cdkCommand := []string{"cdk", "deploy", "--require-approval=never", stackName}
// Append the stack parameters to the CLI, if there are any
for k, v := range stackParameters {
cdkCommand = append(cdkCommand, "--parameters", fmt.Sprintf("%s=%s", k, v))
}
_, err := client.Container().From("samalba/aws-cdk:2.65.0").
WithEnvVariable("AWS_REGION", c.region).
WithEnvVariable("AWS_DEFAULT_REGION", c.region).
WithDirectory("/opt/app", cdkCode).
WithDirectory("/root/.aws", awsConfig).
WithExec(cdkCommand).
Sync(ctx)
if err != nil {
var exErr *dagger.ExecError
if errors.As(err, &exErr) {
return nil, fmt.Errorf("cdk deploy exited with code %d", exErr.ExitCode)
}
return nil, err
}
outputs, err := c.GetCfnStackOutputs(ctx, stackName)
if err != nil {
return nil, err
}
return outputs, nil
}
package main
import (
"context"
"dagger.io/dagger"
)
type RegistryInfo struct {
uri string
username string
password string
}
// initRegistry creates and/or authenticates with an ECR repository
func initRegistry(ctx context.Context, client *dagger.Client, awsClient *AWSClient) *RegistryInfo {
outputs, err := awsClient.cdkDeployStack(ctx, client, "DaggerDemoECRStack", nil)
if err != nil {
panic(err)
}
repoURI := outputs["RepositoryUri"]
username, password, err := awsClient.GetECRUsernamePassword(ctx)
if err != nil {
panic(err)
}
return &RegistryInfo{repoURI, username, password}
}
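cdkDeployStack authenticates by mounting the host's ~/.aws directory into the CDK container. Where that directory is not available (for example on an ephemeral CI runner), a hedged alternative is to inject static credentials from the host environment as Dagger secrets; the helper below is a hypothetical sketch, assumes AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are set on the host, and would live in the same package main as the files above.
package main
import (
"os"
"dagger.io/dagger"
)
// passAWSCredentials injects static AWS credentials into a container as
// Dagger secrets instead of mounting the host's ~/.aws directory; the
// values are supplied at runtime rather than stored in the container's
// configuration
func passAWSCredentials(client *dagger.Client, ctr *dagger.Container) *dagger.Container {
accessKey := client.SetSecret("awsAccessKeyId", os.Getenv("AWS_ACCESS_KEY_ID"))
secretKey := client.SetSecret("awsSecretAccessKey", os.Getenv("AWS_SECRET_ACCESS_KEY"))
return ctr.
WithSecretVariable("AWS_ACCESS_KEY_ID", accessKey).
WithSecretVariable("AWS_SECRET_ACCESS_KEY", secretKey)
}
In cdkDeployStack, such a helper could replace the WithDirectory("/root/.aws", awsConfig) call, since the CDK CLI, like the rest of the AWS tooling, reads these environment variables automatically.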
Google Cloud Run
The following code listing builds, publishes and deploys a container using Google Container Registry and Google Cloud Run.
- Go
- Node.js
- Python
package main
import (
"context"
"fmt"
"os"
run "cloud.google.com/go/run/apiv2"
runpb "cloud.google.com/go/run/apiv2/runpb"
"dagger.io/dagger"
)
const GCR_SERVICE_URL = "projects/PROJECT/locations/us-central1/services/myapp"
const GCR_PUBLISH_ADDRESS = "gcr.io/PROJECT/myapp"
func main() {
// create Dagger client
ctx := context.Background()
daggerClient, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
if err != nil {
panic(err)
}
defer daggerClient.Close()
// get working directory on host
source := daggerClient.Host().Directory(".", dagger.HostDirectoryOpts{
Exclude: []string{"ci", "node_modules"},
})
// build application
node := daggerClient.Container(dagger.ContainerOpts{Platform: "linux/amd64"}).
From("node:16")
c := node.
WithDirectory("/src", source).
WithWorkdir("/src").
WithExec([]string{"cp", "-R", ".", "/home/node"}).
WithWorkdir("/home/node").
WithExec([]string{"npm", "install"}).
WithEntrypoint([]string{"npm", "start"})
// publish container to Google Container Registry
addr, err := c.Publish(ctx, GCR_PUBLISH_ADDRESS)
if err != nil {
panic(err)
}
// print ref
fmt.Println("Published at:", addr)
// create Google Cloud Run client
gcrClient, err := run.NewServicesClient(ctx)
if err != nil {
panic(err)
}
defer gcrClient.Close()
// define service request
gcrRequest := &runpb.UpdateServiceRequest{
Service: &runpb.Service{
Name: GCR_SERVICE_URL,
Template: &runpb.RevisionTemplate{
Containers: []*runpb.Container{
{
Image: addr,
Ports: []*runpb.ContainerPort{
{
Name: "http1",
ContainerPort: 1323,
},
},
},
},
},
},
}
// update service
gcrOperation, err := gcrClient.UpdateService(ctx, gcrRequest)
if err != nil {
panic(err)
}
// wait for service request completion
gcrResponse, err := gcrOperation.Wait(ctx)
if err != nil {
panic(err)
}
// print deployment URL
fmt.Println("Deployment for image", addr, "now available at", gcrResponse.Uri)
}
import { connect } from "@dagger.io/dagger"
import { ServicesClient } from "@google-cloud/run"
const GCR_SERVICE_URL = "projects/PROJECT/locations/us-central1/services/myapp"
const GCR_PUBLISH_ADDRESS = "gcr.io/PROJECT/myapp"
// initialize Dagger client
connect(
async (daggerClient) => {
// get reference to the project directory
const source = daggerClient
.host()
.directory(".", { exclude: ["node_modules/", "ci/"] })
// get Node image
const node = daggerClient
.container({ platform: "linux/amd64" })
.from("node:16")
// mount source code directory into Node image
// install dependencies
const c = node
.withDirectory("/src", source)
.withWorkdir("/src")
.withExec(["cp", "-R", ".", "/home/node"])
.withWorkdir("/home/node")
.withExec(["npm", "install"])
.withEntrypoint(["npm", "start"])
// publish container to Google Container Registry
const gcrContainerPublishResponse = await c.publish(GCR_PUBLISH_ADDRESS)
// print ref
console.log(`Published at: ${gcrContainerPublishResponse}`)
// initialize Google Cloud Run client
const gcrClient = new ServicesClient()
// define service request
const gcrServiceUpdateRequest = {
service: {
name: GCR_SERVICE_URL,
template: {
containers: [
{
image: gcrContainerPublishResponse,
ports: [
{
name: "http1",
containerPort: 3000,
},
],
},
],
},
},
}
// update service
const [gcrServiceUpdateOperation] = await gcrClient.updateService(
gcrServiceUpdateRequest,
)
const [gcrServiceUpdateResponse] = await gcrServiceUpdateOperation.promise()
// print deployment URL
console.log(
`Deployment for image ${gcrContainerPublishResponse} now available at ${gcrServiceUpdateResponse.uri}`,
)
},
{ LogOutput: process.stderr },
)
import sys
import anyio
from google.cloud import run_v2
import dagger
GCR_SERVICE_URL = "projects/PROJECT/locations/us-central1/services/myapp"
GCR_PUBLISH_ADDRESS = "gcr.io/PROJECT/myapp"
async def main():
# initialize Dagger client
async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
# get reference to the project directory
source = client.host().directory(".", exclude=["node_modules", "ci"])
# get Node image
node = client.container(platform=dagger.Platform("linux/amd64")).from_(
"node:16"
)
# mount source code directory into Node image
# install dependencies
# set entrypoint
c = (
node.with_directory("/src", source)
.with_workdir("/src")
.with_exec(["cp", "-R", ".", "/home/node"])
.with_workdir("/home/node")
.with_exec(["npm", "install"])
.with_entrypoint(["npm", "start"])
)
# publish container to Google Container Registry
addr = await c.publish(GCR_PUBLISH_ADDRESS)
print(f"Published at: {addr}")
# create Google Cloud Run client
gcr_client = run_v2.ServicesAsyncClient()
# define a service request
gcr_request = run_v2.UpdateServiceRequest(
service=run_v2.Service(
name=GCR_SERVICE_URL,
template=run_v2.RevisionTemplate(
containers=[
run_v2.Container(
image=addr,
ports=[
run_v2.ContainerPort(
name="http1",
container_port=1323,
),
],
),
],
),
)
)
# update service
gcr_operation = await gcr_client.update_service(request=gcr_request)
# wait for service request completion
response = await gcr_operation.result()
print(f"Deployment for image {addr} now available at {response.uri}.")
anyio.run(main)
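Publish pushes to gcr.io with whatever registry credentials are available where the pipeline runs (for example after gcloud auth configure-docker). If no ambient credentials exist, a hedged option is to pass an OAuth2 access token explicitly via WithRegistryAuth; the hypothetical helper below would be added to the Go listing's package main and expects a token such as the output of gcloud auth print-access-token.
// publishWithToken authenticates against Google Container Registry with an
// OAuth2 access token before publishing, instead of relying on registry
// credentials configured on the host
func publishWithToken(ctx context.Context, client *dagger.Client, ctr *dagger.Container, token string) (string, error) {
secret := client.SetSecret("gcrAccessToken", token)
return ctr.
WithRegistryAuth("gcr.io", "oauth2accesstoken", secret).
Publish(ctx, GCR_PUBLISH_ADDRESS)
}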
GitHub Actions
The following code listing shows how to integrate Dagger with GitHub Actions.
- Go
- Node.js
- Python
name: dagger
on:
push:
branches: [main]
jobs:
build:
name: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: '1.20'
- name: Install Dagger CLI
run: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
- name: Run Dagger pipeline
run: dagger run go run main.go
name: dagger
on:
push:
branches: [main]
jobs:
build:
name: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
- name: Install deps
run: npm ci
- name: Install Dagger CLI
run: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
- name: Run Dagger pipeline
run: dagger run node index.mjs
name: dagger
on:
push:
branches: [main]
jobs:
build:
name: build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Install deps
run: pip install .
- name: Install Dagger CLI
run: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
- name: Run Dagger pipeline
run: dagger run python main.py
GitLab CI
The following code listing shows how to integrate Dagger with GitLab CI.
- Go
- Node.js
- Python
.docker:
image: golang:1.20-alpine
services:
- docker:${DOCKER_VERSION}-dind
variables:
DOCKER_HOST: tcp://docker:2376
DOCKER_TLS_VERIFY: '1'
DOCKER_TLS_CERTDIR: '/certs'
DOCKER_CERT_PATH: '/certs/client'
DOCKER_DRIVER: overlay2
DOCKER_VERSION: '20.10.16'
.dagger:
extends: [.docker]
before_script:
- apk add docker-cli curl
- cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
build:
extends: [.dagger]
script:
- dagger run go run main.go
.docker:
image: node:18-alpine
services:
- docker:${DOCKER_VERSION}-dind
variables:
DOCKER_HOST: tcp://docker:2376
DOCKER_TLS_VERIFY: '1'
DOCKER_TLS_CERTDIR: '/certs'
DOCKER_CERT_PATH: '/certs/client'
DOCKER_DRIVER: overlay2
DOCKER_VERSION: '20.10.16'
.dagger:
extends: [.docker]
before_script:
- apk add docker-cli curl
- cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
build:
extends: [.dagger]
script:
- npm ci
- dagger run node index.mjs
.docker:
image: python:3.11-alpine
services:
- docker:${DOCKER_VERSION}-dind
variables:
DOCKER_HOST: tcp://docker:2376
DOCKER_TLS_VERIFY: '1'
DOCKER_TLS_CERTDIR: '/certs'
DOCKER_CERT_PATH: '/certs/client'
DOCKER_DRIVER: overlay2
DOCKER_VERSION: '20.10.16'
.dagger:
extends: [.docker]
before_script:
- apk add docker-cli curl
- cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
build:
extends: [.dagger]
script:
- pip install .
- dagger run python main.py
CircleCI
The following code listing shows how to integrate Dagger with CircleCI.
- Go
- Node.js
- Python
version: 2.1
jobs:
build:
docker:
- image: cimg/go:1.20
steps:
- checkout
- setup_remote_docker:
docker_layer_caching: true
- run:
name: Install Dagger CLI
command: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sudo sh; cd -; }
- run:
name: Run Dagger pipeline
command: dagger run --progress plain go run main.go
workflows:
dagger:
jobs:
- build
version: 2.1
jobs:
build:
docker:
- image: cimg/node:lts
steps:
- checkout
- setup_remote_docker:
docker_layer_caching: true
- run:
name: Install deps
command: npm ci
- run:
name: Install Dagger CLI
command: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sudo sh; cd -; }
- run:
name: Run Dagger pipeline
command: dagger run --progress plain node index.mjs
workflows:
dagger:
jobs:
- build
version: 2.1
jobs:
build:
docker:
- image: cimg/python:3.11
steps:
- checkout
- setup_remote_docker:
docker_layer_caching: true
- run:
name: Install deps
command: pip install .
- run:
name: Install Dagger CLI
command: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sudo sh; cd -; }
- run:
name: Run Dagger pipeline
command: dagger run --progress plain python main.py
workflows:
dagger:
jobs:
- build
Jenkins
The following code listing shows how to integrate Dagger with Jenkins.
- Go
- Node.js
- Python
pipeline {
agent { label 'dagger' }
stages {
stage("dagger") {
steps {
sh '''
cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
dagger run go run main.go
'''
}
}
}
}
pipeline {
agent { label 'dagger' }
stages {
stage("dagger") {
steps {
sh '''
npm ci
cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
dagger run node index.mjs
'''
}
}
}
}
pipeline {
agent { label 'dagger' }
stages {
stage("dagger") {
steps {
sh '''
pip install .
cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
dagger run python main.py
'''
}
}
}
}
Requires docker client and go installed on your Jenkins agent, a Docker host available (can be docker:dind), and agents labeled in Jenkins with dagger.
Azure Pipelines
The following code listing shows how to integrate Dagger with Azure Pipelines.
- Go
- Node.js
- Python
trigger:
- master
pool:
name: 'Default'
vmImage: ubuntu-latest
steps:
- task: GoTool@0
inputs:
version: '1.20'
- script: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
displayName: 'Install Dagger CLI'
- script: dagger run go run main.go
displayName: 'Run Dagger'
trigger:
- master
pool:
name: 'Default'
vmImage: ubuntu-latest
steps:
- task: NodeTool@0
inputs:
versionSpec: '18.x'
- script: npm ci
displayName: 'Install dependencies'
- script: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
displayName: 'Install Dagger CLI'
- script: dagger run node index.mjs
displayName: 'Run Dagger'
trigger:
- master
pool:
name: 'Default'
vmImage: ubuntu-latest
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.11'
- script: pip install .
displayName: 'Install dependencies'
- script: cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
displayName: 'Install Dagger CLI'
- script: dagger run python main.py
displayName: 'Run Dagger'
AWS CodePipeline
The following code listing shows how to integrate Dagger with AWS CodePipeline.
- Go
- Node.js
- Python
version: 0.2
phases:
pre_build:
commands:
- echo "Installing Dagger CLI"
- cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
build:
commands:
- echo "Running Dagger pipeline"
- dagger run go run main.go
version: 0.2
phases:
pre_build:
commands:
- echo "Installing dependencies"
- npm ci
- echo "Installing Dagger CLI"
- cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
build:
commands:
- echo "Running Dagger pipeline"
- dagger run node index.mjs
version: 0.2
phases:
pre_build:
commands:
- echo "Installing dependencies"
- pip install .
- echo "Installing Dagger CLI"
- cd /usr/local && { curl -L https://dl.dagger.io/dagger/install.sh | sh; cd -; }
build:
commands:
- echo "Running Dagger pipeline"
- dagger run python main.py