Second attempt at Dockerfile hashing

This commit is contained in:
Radon Rosborough 2020-12-31 22:11:24 -08:00
parent 0b60ad12d1
commit 87793e9778
10 changed files with 399 additions and 200 deletions

View File

@ -39,6 +39,10 @@ script:
node tools/generate-build-script.js --lang $(L) --type $(T) > $(BUILD)/build.bash
chmod +x $(BUILD)/build.bash
.PHONY: script-all
script-all:
node tools/make-foreach.js script
.PHONY: pkg
pkg:
@: $${L} $${T}
@ -134,6 +138,10 @@ download:
### Publish artifacts to registries
.PHONY: plan
plan:
node tools/plan-publish.js
.PHONY: push
push:
@: $${I} $${DOCKER_REPO}

View File

@ -7,11 +7,16 @@ tee -a /etc/hosts >/dev/null <<< "127.0.0.1 admin"
groupadd -g "$(stat -c %g "$PWD")" -o -p '!' -r riju
useradd -u "$(stat -c %u "$PWD")" -g "$(stat -c %g "$PWD")" -o -p '!' -m -N -l -s /usr/bin/bash -G sudo riju
runuser -u riju -- touch /home/riju/.sudo_as_admin_successful
runuser -u riju -- ln -sT /var/riju/.aws /home/riju/.aws
runuser -u riju -- ln -sT /var/riju/.docker /home/riju/.docker
runuser -u riju -- ln -sT /var/riju/.ssh /home/riju/.ssh
runuser -u riju -- ln -sT /var/riju/.terraform.d /home/riju/.terraform.d
runuser -u riju -- touch /home/riju/.sudo_as_admin_successful
runuser -u riju -- tee -a /home/riju/.bashrc >/dev/null <<"EOF"
PS1="\$? $PS1"
EOF
runuser -u riju -- yarn install
exec runuser -u riju "$@"

View File

@ -3,8 +3,9 @@ import http from "http";
import express from "express";
import { getLangs } from "./config.js";
import { hashCompositeImage } from "./hash-composite-image.js";
import { getLangs, getPackages } from "./config.js";
import { getLocalImageDigest } from "./docker-util.js";
import { hashDockerfile } from "./hash-dockerfile.js";
import { runCommand } from "./util.js";
// Get a Node.js http server object that will serve information and
@ -21,13 +22,31 @@ function getServer(langs) {
// Parse command-line arguments, run main functionality, and exit.
async function main() {
const hash = await hashDockerfile(
"composite",
{
"riju:runtime": await getLocalImageDigest("riju:runtime"),
},
{
salt: {
packageHashes: await Promise.all(
(await getPackages()).map(async ({ debPath }) => {
return (
await runCommand(`dpkg-deb -f ${debPath} Riju-Script-Hash`, {
getStdout: true,
})
).stdout.trim();
})
),
},
}
);
const server = getServer(await getLangs());
await new Promise((resolve) => server.listen(8487, "localhost", resolve));
try {
const hash = await hashCompositeImage("debs");
await runCommand(
`docker build . -f docker/composite/Dockerfile -t riju:composite` +
` --network host --no-cache --label riju-composite-hash=${hash}`
` --network host --no-cache --label riju.image-hash=${hash}`
);
} finally {
await server.close();

View File

@ -20,6 +20,25 @@ export async function getLangs() {
.map((lang) => path.parse(lang).name);
}
// Return a list of objects representing the packages to be built. See
// the function implementation for the full list of keys.
export async function getPackages() {
  // Two packages (lang + config) per language, in a stable order.
  const langs = await getLangs();
  return langs.flatMap((lang) =>
    ["lang", "config"].map((type) => {
      const name = `riju-${type}-${lang}`;
      return {
        lang,
        type,
        name,
        buildScriptPath: `build/${type}/${lang}/build.bash`,
        debPath: `build/${type}/${lang}/${name}.deb`,
      };
    })
  );
}
// Read the YAML config file for the language with the given string ID
// and return it as an object.
export async function readLangConfig(lang) {

82
tools/docker-util.js Normal file
View File

@ -0,0 +1,82 @@
import process from "process";
import { runCommand } from "./util.js";
// Return the digest of a local image. This is the actual image
// digest, not any of its associated registry digests. If the image
// doesn't exist locally, return null.
export async function getLocalImageDigest(image) {
  const result = await runCommand(`docker images --no-trunc --quiet "${image}"`, {
    getStdout: true,
  });
  // An empty stdout means Docker has no record of the image.
  const digest = result.stdout.trim();
  return digest || null;
}
// Return the value of a label on a local Docker image. If the image
// or label doesn't exist, return null.
export async function getLocalImageLabel(image, label) {
  let output;
  try {
    output = (
      await runCommand(`docker inspect "${image}"`, { getStdout: true })
    ).stdout;
  } catch (err) {
    if (
      (await runCommand(`docker images -q "${image}"`, { getStdout: true }))
        .stdout
    ) {
      // The image exists locally, something unexpected must have
      // happened in docker inspect.
      throw err;
    } else {
      // The image doesn't exist locally, that must be why docker
      // inspect didn't work.
      return null;
    }
  }
  // docker inspect prints a JSON array with one object per image.
  // Bug fix: this previously used JSON.stringify, which returns a
  // string, so indexing [0] yielded a character and .Config blew up.
  // Also, Config.Labels is null for images with no labels at all.
  const labels = JSON.parse(output)[0].Config.Labels || {};
  return labels[label] || null;
}
// Return the value of a label on a Docker image that is on a remote
// registry. If the image or label doesn't exist, return null.
export async function getRemoteImageLabel(image, label) {
  const [repo, tag] = image.split(":");
  let output;
  try {
    // Bug fix: keep only stdout here; previously the whole runCommand
    // result object was saved, so JSON.parse(output) below would have
    // parsed "[object Object]" and thrown.
    output = (
      await runCommand(`skopeo inspect docker://${image}`, {
        getStdout: true,
      })
    ).stdout;
  } catch (err) {
    // Bug fix: parse the tag listing with JSON.parse; JSON.stringify
    // returns a string, whose .Tags is undefined.
    const tags = JSON.parse(
      (
        await runCommand(`skopeo list-tags "docker://${repo}"`, {
          getStdout: true,
        })
      ).stdout
    ).Tags;
    if (tags.includes(tag)) {
      // Tag exists, something unexpected must have gone wrong when
      // running skopeo inspect.
      throw err;
    } else {
      // Tag does not exist, that must be why skopeo inspect didn't
      // work.
      return null;
    }
  }
  // Labels may be absent/null when the image carries no labels.
  const labels = JSON.parse(output).Labels || {};
  return labels[label] || null;
}
// Return the value of $DOCKER_REPO, throwing an error if it's not set
// in the environment.
export function getDockerRepo() {
  const repo = process.env.DOCKER_REPO;
  if (repo) {
    return repo;
  }
  throw new Error(`unset environment variable: \$DOCKER_REPO`);
}

View File

@ -1,117 +0,0 @@
import crypto from "crypto";
import { promises as fs } from "fs";
import process from "process";
import url from "url";
import { getLangs } from "./config.js";
import { runCommand } from "./util.js";
// Return the composite image hash as a string. This is designed for
// library usage; main() just calls it and prints the result.
//
// If mode is "scripts" then each build script is run through SHA-1
// and the resulting hashes are hashed together.
//
// If mode is "debs" then the Riju-Script-Hash value written into the
// metadata of each .deb (all of them must exist locally) is
// extracted, and they are all hashed together.
//
// If mode is "s3" then all the published hashes are retrieved from
// S3. The relevant ones are hashed together.
//
// If mode is "registry" then the composite Docker image published to
// ${DOCKER_REPO} is inspected to extract its composite hash from an
// image label.
export async function hashCompositeImage(mode) {
  // getHash maps (lang, type) to a 40-character SHA-1 hex string; how
  // it is obtained depends on the requested mode.
  let getHash;
  switch (mode) {
    case "scripts":
      // Hash the build script text on disk directly.
      getHash = async (lang, type) => {
        const text = await fs.readFile(
          `build/${type}/${lang}/build.bash`,
          "utf-8"
        );
        return crypto.createHash("sha1").update(text).digest("hex");
      };
      break;
    case "debs":
      // Read the script hash recorded in each .deb's control metadata.
      getHash = async (lang, type) => {
        return (
          await runCommand(
            `dpkg-deb -f build/${type}/${lang}/riju-${type}-${lang}.deb Riju-Script-Hash`,
            { getStdout: true }
          )
        ).stdout.trim();
      };
      break;
    case "s3":
      // Fetch every published hash once up front, then look packages
      // up locally. Listing lines look like "hashes/<pkg>/<hash>".
      const remoteHashes = Object.fromEntries(
        (
          await runCommand("tools/list-s3-hashes.bash", { getStdout: true })
        ).stdout
          .trim()
          .split("\n")
          .map((path) => {
            const [_, pkg, hash] = path.split("/");
            return [pkg, hash];
          })
      );
      getHash = async (lang, type) => remoteHashes[`riju-${type}-${lang}`];
      break;
    case "registry":
      // NOTE: this mode returns directly instead of setting getHash,
      // because the registry stores the final composite hash as an
      // image label; no per-package hashing is needed.
      // The \${DOCKER_REPO} escapes leave expansion to the shell.
      const tags = (
        await runCommand(
          `skopeo list-tags "docker://\${DOCKER_REPO}" | jq -r '.Tags[]'`,
          { getStdout: true }
        )
      ).stdout
        .trim()
        .split("\n");
      if (!tags.includes("composite")) {
        return "not yet published";
      }
      return (
        await runCommand(
          `skopeo inspect docker://\${DOCKER_REPO}:composite | jq -r '.Labels["riju-composite-hash"]'`,
          { getStdout: true }
        )
      ).stdout.trim();
    default:
      console.error(`hash-composite-image.js: unsupported mode: ${mode}`);
      process.exit(1);
  }
  // Collect one hash per (lang, type) pair, validating that each
  // looks like a SHA-1 digest, then hash them together in sorted
  // order so the result is independent of iteration order.
  const langs = await getLangs();
  const hashes = {};
  for (const lang of langs) {
    for (const type of ["config", "lang"]) {
      const hash = await getHash(lang, type);
      if (hash.length !== 40) {
        throw new Error(`malformed hash: ${hash}`);
      }
      hashes[`riju-${type}-${lang}`] = hash;
    }
  }
  const allHashes = Object.values(hashes).sort().join(",");
  return crypto.createHash("sha1").update(allHashes).digest("hex");
}
// Parse command-line arguments, run main functionality, and exit.
async function main() {
  const [mode, ...extra] = process.argv.slice(2);
  // Exactly one positional argument is required.
  if (mode === undefined || extra.length > 0) {
    console.error(
      "usage: node hash-composite-image.js (scripts | debs | s3 | registry)"
    );
    process.exit(1);
  }
  const hash = await hashCompositeImage(mode);
  console.log(hash);
  process.exit(0);
}

// Run main only when this module is executed directly as a script,
// not when it is imported.
if (process.argv[1] === url.fileURLToPath(import.meta.url)) {
  main().catch((err) => {
    console.error(err);
    process.exit(1);
  });
}

View File

@ -70,15 +70,19 @@ async function listFiles(path) {
// change in this object, and when irrelevant things change, this
// object does not change.
//
// Options:
// dependentHashes should be an object which contains as keys all base
// images used in the Dockerfile. The value for each base image is
// included into the encoding of the Dockerfile, so that its hash will
// change when one of the base images changes.
//
// * remote: fetch Riju image digests from registry instead of local
// index
async function encodeDockerfile(name, opts) {
const { remote } = opts || {};
// opts is an optional config object. Keys:
// * salt: additional arbitrary object which will be included verbatim
// into the returned encoding object
async function encodeDockerfile(name, dependentHashes, opts) {
const { salt } = opts || {};
const dockerfile = await parseDockerfile(name);
const ignore = await parseDockerignore();
return await Promise.all(
const steps = await Promise.all(
dockerfile.map(async ({ name, args, error }) => {
if (error) {
throw error;
@ -124,79 +128,35 @@ async function encodeDockerfile(name, opts) {
throw new Error("got unexpected non-string for FROM args");
}
let image = args.split(" ")[0];
let [repo, tag] = image.split(":");
if (repo === "riju" && remote) {
repo = process.env.DOCKER_REPO;
if (!repo) {
throw new Error("$DOCKER_REPO not set");
}
}
image = `${repo}:${tag}`;
if (remote) {
const tags = (
await runCommand(
`skopeo list-tags "docker://${repo}" | jq -r '.Tags[]'`,
{ getStdout: true }
)
).stdout
.trim()
.split("\n");
if (tags.includes(tag)) {
step.digest = (
await runCommand(
`skopeo inspect docker://${image} | jq -r .Digest`,
{ getStdout: true }
)
).stdout.trim();
} else {
step.digest = "none";
}
} else {
step.digest =
(
await runCommand(
`docker images --no-trunc --quiet "${image}"`,
{ getStdout: true }
)
).stdout.trim() || "none";
step.hash = dependentHashes[image];
if (!step.hash) {
throw new Error(`no hash given for base image: ${image}`);
}
break;
}
return step;
})
);
if (salt) {
steps.push({ name: "SALT", args: salt });
}
return steps;
}
// Parse command-line arguments, run main functionality, and exit.
async function main() {
const program = new Command();
program
.arguments("<name>")
.storeOptionsAsProperties(false)
.option("--debug", "output Dockerfile internal representation, unhashed")
.option("--remote", "fetch image digests from remote registry");
program.parse(process.argv);
if (program.args.length !== 1) {
program.help();
}
const [name] = program.args;
const { debug, remote } = program.opts();
const encoding = await encodeDockerfile(name, { remote });
if (debug) {
console.log(JSON.stringify(encoding, null, 2));
} else {
const hash = crypto
.createHash("sha1")
.update(JSON.stringify(encoding))
.digest("hex");
console.log(hash);
}
process.exit(0);
}
if (process.argv[1] === url.fileURLToPath(import.meta.url)) {
main().catch((err) => {
console.error(err);
process.exit(1);
});
// Given the name of a Dockerfile like "app", compute its hash. This
// is a string that will change whenever the Dockerfile or any of its
// build contexts changes meaningfully. dependentHashes should be an
// object containing hashes for any base images used in the
// Dockerfile (keys are base image names, values are strings).
//
// opts is an optional config object. Keys:
// * salt: additional arbitrary object which will factor into the
// generated hash, so the hash will change whenever the salt
// changes
export async function hashDockerfile(name, dependentHashes, opts) {
  const encoding = await encodeDockerfile(name, dependentHashes, opts);
  // Hash the canonical JSON serialization of the encoding.
  const serialized = JSON.stringify(encoding);
  return crypto.createHash("sha1").update(serialized).digest("hex");
}

View File

@ -1,5 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
aws s3api list-objects-v2 --bucket riju-debs --prefix hashes | jq -r '.Contents[].Key'

25
tools/make-foreach.js Normal file
View File

@ -0,0 +1,25 @@
import process from "process";
import url from "url";
import { getPackages } from "./config.js";
import { runCommand } from "./util.js";
// Parse command-line arguments, run main functionality, and exit.
async function main() {
  const targets = process.argv.slice(2);
  if (targets.length === 0) {
    console.error("usage: make-foreach.js TARGET...");
    process.exit(1);
  }
  for (const { lang, type } of await getPackages()) {
    // Bug fix: join with spaces. Interpolating the array directly
    // stringifies it comma-separated ("make a,b"), which make would
    // treat as a single bogus target.
    await runCommand(`make ${targets.join(" ")} L=${lang} T=${type}`);
  }
  process.exit(0);
}

// Run main only when this module is executed directly as a script.
if (process.argv[1] === url.fileURLToPath(import.meta.url)) {
  main().catch((err) => {
    console.error(err);
    process.exit(1);
  });
}

203
tools/plan-publish.js Normal file
View File

@ -0,0 +1,203 @@
import crypto from "crypto";
import { promises as fs } from "fs";
import process from "process";
import url from "url";
import { Command } from "commander";
import { getPackages } from "./config.js";
import {
getLocalImageDigest,
getLocalImageLabel,
getRemoteImageLabel,
getDockerRepo,
} from "./docker-util.js";
import { hashDockerfile } from "./hash-dockerfile.js";
import { runCommand } from "./util.js";
// Compute the publish plan for one Docker image. Compares the desired
// hash (from the Dockerfile) with the riju.image-hash labels on the
// local and remote copies, and records the desired hash into
// dependentHashes keyed by the remote image name so that images built
// on top of this one can factor it into their own hashes.
async function planDockerImage(name, dependentHashes, opts) {
  const dockerRepo = getDockerRepo();
  const remoteImage = `${dockerRepo}:${name}`;
  const desired = await hashDockerfile(name, dependentHashes, opts);
  const local = await getLocalImageLabel(`riju:${name}`, "riju.image-hash");
  const remote = await getRemoteImageLabel(remoteImage, "riju.image-hash");
  dependentHashes[remoteImage] = desired;
  return {
    artifact: "Docker image",
    name,
    desired,
    local,
    remote,
    download: async () => {
      await runCommand(`make pull I=${name}`);
    },
    build: async () => {
      await runCommand(`make image I=${name}`);
    },
    upload: async () => {
      await runCommand(`make push I=${name}`);
    },
  };
}
// Compute publish plans for all Debian packages. For each package the
// "desired" hash is the SHA-1 of its build script, "local" is the
// hash embedded in the locally built .deb (if any), and "remote" is
// the hash published to S3 (if any).
async function planDebianPackages() {
  // Map of package name => published hash. S3 keys under the hashes/
  // prefix look like "hashes/<name>/<hash>".
  const remoteHashes = Object.fromEntries(
    JSON.parse(
      (
        await runCommand(
          `aws s3api list-objects-v2 --bucket riju-debs --prefix hashes`,
          { getStdout: true }
        )
      ).stdout
    ).Contents.map(({ Key: key }) => {
      const [_, remoteName, remoteHash] = key.split("/");
      return [remoteName, remoteHash];
    })
  );
  return await Promise.all(
    (await getPackages()).map(
      async ({ lang, type, name, buildScriptPath, debPath }) => {
        const desired = crypto
          .createHash("sha1")
          .update(await fs.readFile(buildScriptPath, "utf-8"))
          .digest("hex");
        let debExists = true;
        try {
          await fs.access(debPath);
        } catch (err) {
          debExists = false;
        }
        let local = null;
        if (debExists) {
          // Bug fix: pass getStdout so runCommand captures output;
          // without it .stdout is undefined and .trim() throws.
          local =
            (
              await runCommand(`dpkg-deb -f ${debPath} Riju-Script-Hash`, {
                getStdout: true,
              })
            ).stdout.trim() || null;
        }
        const remote = remoteHashes[name] || null;
        return {
          artifact: "Debian package",
          name,
          desired,
          local,
          remote,
          download: async () => {
            await runCommand(`make download L=${lang} T=${type}`);
          },
          build: async () => {
            await runCommand(`make pkg L=${lang} T=${type}`);
          },
          upload: async () => {
            await runCommand(`make upload L=${lang} T=${type}`);
          },
        };
      }
    )
  );
}
// Compute the full, ordered list of publish plans. Docker images are
// planned sequentially in dependency order, because each call to
// planDockerImage registers its image hash for use by images built on
// top of it.
async function computePlan() {
  const baseImageHashes = {};
  const packaging = await planDockerImage("packaging", baseImageHashes);
  const runtime = await planDockerImage("runtime", baseImageHashes);
  const packages = await planDebianPackages();
  // Salt the composite image with every package hash (sorted for
  // determinism) so its hash changes whenever any package does.
  const packageHashes = packages.map(({ desired }) => desired).sort();
  const composite = await planDockerImage("composite", baseImageHashes, {
    salt: { packageHashes },
  });
  const compile = await planDockerImage("compile", baseImageHashes);
  const app = await planDockerImage("app", baseImageHashes);
  return [packaging, runtime, ...packages, composite, compile, app];
}
// Print an aligned table to stdout. data is an array of row objects;
// headers is an array of { key, title } column descriptors. Every
// column is padded to the width of its widest cell or title.
function printTable(data, headers) {
  const widths = headers.map(({ key, title }) => {
    let width = title.length;
    for (const datum of data) {
      width = Math.max(width, datum[key].length);
    }
    return width;
  });
  const formatRow = (values) =>
    values.map((value, idx) => value.padEnd(widths[idx])).join(" ");
  console.log(formatRow(headers.map(({ title }) => title.toUpperCase())));
  console.log(formatRow(widths.map((width) => "-".repeat(width))));
  for (const datum of data) {
    console.log(formatRow(headers.map(({ key }) => datum[key])));
  }
}
// Parse command-line arguments, run main functionality, and exit.
async function main() {
  const program = new Command();
  program.option("--publish", "deploy newly built artifacts");
  program.option("--all", "show also unchanged artifacts");
  program.parse(process.argv);
  // Bug fix: declared with let, since plan is reassigned below when
  // nothing changed and --all was not given; with const that
  // assignment threw a TypeError at runtime.
  let plan = await computePlan();
  const filteredPlan = plan.filter(({ desired, remote }) => desired !== remote);
  console.log();
  if (filteredPlan.length === 0) {
    console.log(`*** NO CHANGES REQUIRED TO ${plan.length} ARTIFACTS ***`);
    if (!program.all) {
      plan = filteredPlan;
    }
  } else {
    console.log(
      `*** CHANGES REQUIRED TO ${filteredPlan.length} of ${plan.length} ARTIFACTS ***`
    );
  }
  console.log();
  if (plan.length === 0) {
    process.exit(0);
  }
  // Decide, per artifact, what action reconciles local and remote
  // state with the desired hash.
  const tableData = plan.map(
    ({ artifact, name, desired, local, remote, download, build, upload }) => {
      let action, details, func;
      if (remote === desired && local === desired) {
        action = "(no action)";
        details = desired;
        func = () => {};
      } else if (remote === desired && local !== desired) {
        action = "download remote";
        details = `${local} (local) => ${desired}`;
        func = download;
      } else if (local === desired && remote !== desired) {
        action = "publish local";
        details = `${remote} (remote) => ${desired}`;
        func = upload;
      } else {
        action = "rebuild and publish";
        if (local === remote) {
          details = `${local} (local) => ${desired}`;
        } else {
          details = `${local} (local), ${remote} (remote) => ${desired}`;
        }
        func = async () => {
          await build();
          await upload();
        };
      }
      return { artifact, name, action, details, func };
    }
  );
  printTable(tableData, [
    { key: "artifact", title: "Type" },
    { key: "name", title: "Name" },
    { key: "action", title: "Action" },
    { key: "details", title: "Details" },
  ]);
  console.log();
  if (program.publish) {
    // Bug fix: declare the loop binding. The original assigned to an
    // undeclared destructuring target, which is a ReferenceError in
    // strict (ES module) mode.
    for (const { func } of tableData) {
      await func();
    }
  }
  process.exit(0);
}

// Run main only when this module is executed directly as a script.
if (process.argv[1] === url.fileURLToPath(import.meta.url)) {
  main().catch((err) => {
    console.error(err);
    process.exit(1);
  });
}