Quickstart
Perform a multi-stage build
Now that you have a working Dagger pipeline, let's refine and optimize it.
You may have noticed that the previous listing exported the build artifacts to a directory on the host, and then copied them to a directory in the destination container. While this works, a more efficient approach is to use a multi-stage build — something that Dagger, by virtue of its design, excels at.
This is because Dagger SDK objects like Container
and Directory
can be thought of as collections of state. You can save this state and reference it elsewhere (even in a different Dagger pipeline or engine). You can also update the state from the point you left off, or use it as an input to another operation.
In the context of a multi-stage build, this means that you can use Dagger to:
- Perform a build in a container.
- Obtain and save a Directory object referencing the filesystem state of that container (including the build artifacts) after the build.
- Pass the saved Directory object as a parameter to a different container or pipeline, thereby transferring the saved filesystem state (and build artifacts) to that container or pipeline.
- Perform further container or pipeline operations as needed.
Let's now update our pipeline to use a multi-stage build, as described above.
package main
import (
"context"
"fmt"
"math"
"math/rand"
"os"
"dagger.io/dagger"
)
func main() {
	ctx := context.Background()

	// initialize Dagger client, streaming engine logs to stderr
	client, err := dagger.Connect(ctx, dagger.WithLogOutput(os.Stderr))
	if err != nil {
		panic(err)
	}
	defer client.Close()

	// host source tree, minus dependency/CI/build artifacts
	hostDir := client.Host().Directory(".", dagger.HostDirectoryOpts{
		Exclude: []string{"node_modules/", "ci/", "build/"},
	})

	// use a node:16-slim container and mount the source
	// code directory from the host at /src in the container
	src := client.Container().
		From("node:16-slim").
		WithDirectory("/src", hostDir)

	// set the working directory in the container and
	// install application dependencies
	withDeps := src.
		WithWorkdir("/src").
		WithExec([]string{"npm", "install"})

	// run application tests
	tested := withDeps.WithExec([]string{"npm", "test", "--", "--watchAll=false"})

	// first stage: build the application and capture the
	// build output as a Directory object
	built := tested.
		WithExec([]string{"npm", "run", "build"}).
		Directory("./build")

	// second stage: copy the build output from the first stage
	// into an nginx container and publish it to a registry
	addr, err := client.Container().
		From("nginx:1.23-alpine").
		WithDirectory("/usr/share/nginx/html", built).
		Publish(ctx, fmt.Sprintf("ttl.sh/hello-dagger-%.0f", math.Floor(rand.Float64()*10000000))) //#nosec
	if err != nil {
		panic(err)
	}

	fmt.Printf("Published image to: %s\n", addr)
}
Run the pipeline by executing the command below from the application directory:
dagger run go run ci/main.go
import { connect } from "@dagger.io/dagger"

connect(
  async (client) => {
    // host source tree, minus dependency/CI/build artifacts
    const hostDir = client
      .host()
      .directory(".", { exclude: ["node_modules/", "ci/", "build/"] })

    // use a node:16-slim container and mount the source
    // code directory from the host at /src in the container
    const src = client
      .container()
      .from("node:16-slim")
      .withDirectory("/src", hostDir)

    // set the working directory and install application dependencies
    const withDeps = src.withWorkdir("/src").withExec(["npm", "install"])

    // run application tests
    const tested = withDeps.withExec(["npm", "test", "--", "--watchAll=false"])

    // first stage: build the application and capture the
    // build output as a Directory object
    const built = tested.withExec(["npm", "run", "build"]).directory("./build")

    // second stage: copy the build output from the first stage
    // into an nginx container and publish it to a registry
    const imageRef = await client
      .container()
      .from("nginx:1.23-alpine")
      .withDirectory("/usr/share/nginx/html", built)
      .publish("ttl.sh/hello-dagger-" + Math.floor(Math.random() * 10000000))

    console.log(`Published image to: ${imageRef}`)
  },
  { LogOutput: process.stderr },
)
Run the pipeline by executing the command below from the application directory:
dagger run node ci/index.mjs
import random
import sys

import anyio

import dagger


async def main():
    """Run a multi-stage build: compile the app in a node container,
    then publish the static output from an nginx container."""
    config = dagger.Config(log_output=sys.stdout)
    async with dagger.Connection(config) as client:
        # host source tree, minus dependency/CI/build artifacts
        host_dir = client.host().directory(
            ".", exclude=["node_modules/", "ci/", "build/"]
        )

        # use a node:16-slim container and mount the source
        # code directory from the host at /src in the container
        src = (
            client.container()
            .from_("node:16-slim")
            .with_directory("/src", host_dir)
        )

        # set the working directory and install application dependencies
        deps = src.with_workdir("/src").with_exec(["npm", "install"])

        # run application tests
        tested = deps.with_exec(["npm", "test", "--", "--watchAll=false"])

        # first stage: build the application and capture the
        # build output as a Directory object
        built = tested.with_exec(["npm", "run", "build"]).directory("./build")

        # second stage: copy the build output from the first stage
        # into an nginx container and publish it to a registry
        image_ref = await (
            client.container()
            .from_("nginx:1.23-alpine")
            .with_directory("/usr/share/nginx/html", built)
            .publish(f"ttl.sh/hello-dagger-{random.randrange(10 ** 8)}")
        )

        print(f"Published image to: {image_ref}")


anyio.run(main)
Run the pipeline by executing the command below from the application directory:
dagger run python ci/main.py
This revised pipeline produces the same result as before, but using a two-stage process:
- In the first stage, the pipeline installs dependencies, runs tests and builds the application in the node:16-slim container. However, instead of exporting the build/ directory to the host, it saves the corresponding Directory object as a constant. This object represents the filesystem state of the build/ directory in the container after the build, and is portable to other Dagger pipelines.
- In the second stage, the pipeline uses the saved Directory object as input, thereby transferring the filesystem state (the built React application) to the nginx:alpine container. It then publishes the result to a registry as previously described.