refactor(security): unify hardened install and fs write flows

This commit is contained in:
Peter Steinberger
2026-03-02 17:23:24 +00:00
parent d3e8b17aa6
commit d4bf07d075
9 changed files with 435 additions and 441 deletions

View File

@@ -26,6 +26,11 @@ type PathSafetyOptions = {
allowedType?: SafeOpenSyncAllowedType;
};
// Pairs a resolved sandbox path with the safety options it must be validated
// against before any command is allowed to touch that path.
type PathSafetyCheck = {
target: SandboxResolvedFsPath;
options: PathSafetyOptions;
};
export type SandboxResolvedPath = {
hostPath: string;
relativePath: string;
@@ -97,8 +102,9 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
signal?: AbortSignal;
}): Promise<Buffer> {
const target = this.resolveResolvedPath(params);
await this.assertPathSafety(target, { action: "read files" });
const result = await this.runCommand('set -eu; cat -- "$1"', {
const result = await this.runCheckedCommand({
checks: [{ target, options: { action: "read files" } }],
script: 'set -eu; cat -- "$1"',
args: [target.containerPath],
signal: params.signal,
});
@@ -127,8 +133,10 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
});
try {
await this.assertPathSafety(target, { action: "write files", requireWritable: true });
await this.runCommand('set -eu; mv -f -- "$1" "$2"', {
await this.runCheckedCommand({
checks: [{ target, options: { action: "write files", requireWritable: true } }],
recheckBeforeCommand: true,
script: 'set -eu; mv -f -- "$1" "$2"',
args: [tempPath, target.containerPath],
signal: params.signal,
});
@@ -141,12 +149,18 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
async mkdirp(params: { filePath: string; cwd?: string; signal?: AbortSignal }): Promise<void> {
const target = this.resolveResolvedPath(params);
this.ensureWriteAccess(target, "create directories");
await this.assertPathSafety(target, {
action: "create directories",
requireWritable: true,
allowedType: "directory",
});
await this.runCommand('set -eu; mkdir -p -- "$1"', {
await this.runCheckedCommand({
checks: [
{
target,
options: {
action: "create directories",
requireWritable: true,
allowedType: "directory",
},
},
],
script: 'set -eu; mkdir -p -- "$1"',
args: [target.containerPath],
signal: params.signal,
});
@@ -161,21 +175,23 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
}): Promise<void> {
const target = this.resolveResolvedPath(params);
this.ensureWriteAccess(target, "remove files");
await this.assertPathSafety(target, {
action: "remove files",
requireWritable: true,
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
});
const flags = [params.force === false ? "" : "-f", params.recursive ? "-r" : ""].filter(
Boolean,
);
const rmCommand = flags.length > 0 ? `rm ${flags.join(" ")}` : "rm";
await this.assertPathSafety(target, {
action: "remove files",
requireWritable: true,
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
});
await this.runCommand(`set -eu; ${rmCommand} -- "$1"`, {
await this.runCheckedCommand({
checks: [
{
target,
options: {
action: "remove files",
requireWritable: true,
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
},
},
],
recheckBeforeCommand: true,
script: `set -eu; ${rmCommand} -- "$1"`,
args: [target.containerPath],
signal: params.signal,
});
@@ -191,31 +207,30 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
const to = this.resolveResolvedPath({ filePath: params.to, cwd: params.cwd });
this.ensureWriteAccess(from, "rename files");
this.ensureWriteAccess(to, "rename files");
await this.assertPathSafety(from, {
action: "rename files",
requireWritable: true,
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
await this.runCheckedCommand({
checks: [
{
target: from,
options: {
action: "rename files",
requireWritable: true,
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
},
},
{
target: to,
options: {
action: "rename files",
requireWritable: true,
},
},
],
recheckBeforeCommand: true,
script:
'set -eu; dir=$(dirname -- "$2"); if [ "$dir" != "." ]; then mkdir -p -- "$dir"; fi; mv -- "$1" "$2"',
args: [from.containerPath, to.containerPath],
signal: params.signal,
});
await this.assertPathSafety(to, {
action: "rename files",
requireWritable: true,
});
await this.assertPathSafety(from, {
action: "rename files",
requireWritable: true,
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
});
await this.assertPathSafety(to, {
action: "rename files",
requireWritable: true,
});
await this.runCommand(
'set -eu; dir=$(dirname -- "$2"); if [ "$dir" != "." ]; then mkdir -p -- "$dir"; fi; mv -- "$1" "$2"',
{
args: [from.containerPath, to.containerPath],
signal: params.signal,
},
);
}
async stat(params: {
@@ -224,8 +239,9 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
signal?: AbortSignal;
}): Promise<SandboxFsStat | null> {
const target = this.resolveResolvedPath(params);
await this.assertPathSafety(target, { action: "stat files" });
const result = await this.runCommand('set -eu; stat -c "%F|%s|%Y" -- "$1"', {
const result = await this.runCheckedCommand({
checks: [{ target, options: { action: "stat files" } }],
script: 'set -eu; stat -c "%F|%s|%Y" -- "$1"',
args: [target.containerPath],
signal: params.signal,
allowFailure: true,
@@ -272,6 +288,33 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
});
}
/**
 * Validates every involved path against the sandbox safety policy, then runs
 * `script` (with optional args/stdin) via `runCommand`.
 *
 * When `recheckBeforeCommand` is set the same checks are evaluated a second
 * time before execution.
 * NOTE(review): both passes run back-to-back with no intervening work here,
 * so the recheck narrows a TOCTOU window only marginally — confirm whether a
 * single check would suffice, or whether the recheck was meant to happen
 * after some other operation.
 */
private async runCheckedCommand(params: {
checks: PathSafetyCheck[];
script: string;
args?: string[];
stdin?: Buffer | string;
allowFailure?: boolean;
signal?: AbortSignal;
recheckBeforeCommand?: boolean;
}): Promise<ExecDockerRawResult> {
await this.assertPathChecks(params.checks);
if (params.recheckBeforeCommand) {
await this.assertPathChecks(params.checks);
}
// allowFailure is forwarded so callers like stat() can treat a non-zero
// exit as "not found" instead of an error.
return await this.runCommand(params.script, {
args: params.args,
stdin: params.stdin,
allowFailure: params.allowFailure,
signal: params.signal,
});
}
// Runs each (target, options) safety check sequentially; the first failing
// check throws and aborts the remaining ones.
private async assertPathChecks(checks: PathSafetyCheck[]): Promise<void> {
for (const check of checks) {
await this.assertPathSafety(check.target, check.options);
}
}
private async assertPathSafety(target: SandboxResolvedFsPath, options: PathSafetyOptions) {
const lexicalMount = this.resolveMountByContainerPath(target.containerPath);
if (!lexicalMount) {

View File

@@ -1,24 +1,19 @@
import { createHash, randomUUID } from "node:crypto";
import { randomUUID } from "node:crypto";
import fs from "node:fs";
import path from "node:path";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";
import type { ReadableStream as NodeReadableStream } from "node:stream/web";
import { isWindowsDrivePath } from "../infra/archive-path.js";
import {
createTarEntrySafetyChecker,
extractArchive as extractArchiveSafe,
} from "../infra/archive.js";
import { writeFileFromPathWithinRoot } from "../infra/fs-safe.js";
import { assertCanonicalPathWithinBase } from "../infra/install-safe-path.js";
import { fetchWithSsrFGuard } from "../infra/net/fetch-guard.js";
import { isWithinDir } from "../infra/path-safety.js";
import { runCommandWithTimeout } from "../process/exec.js";
import { ensureDir, resolveUserPath } from "../utils.js";
import { extractArchive } from "./skills-install-extract.js";
import { formatInstallFailureMessage } from "./skills-install-output.js";
import type { SkillInstallResult } from "./skills-install.js";
import type { SkillEntry, SkillInstallSpec } from "./skills.js";
import { hasBinary } from "./skills.js";
import { resolveSkillToolsRootDir } from "./skills/tools-dir.js";
function isNodeReadableStream(value: unknown): value is NodeJS.ReadableStream {
@@ -64,101 +59,6 @@ function resolveArchiveType(spec: SkillInstallSpec, filename: string): string |
return undefined;
}
const TAR_VERBOSE_MONTHS = new Set([
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
]);
const ISO_DATE_PATTERN = /^\d{4}-\d{2}-\d{2}$/;
function mapTarVerboseTypeChar(typeChar: string): string {
switch (typeChar) {
case "l":
return "SymbolicLink";
case "h":
return "Link";
case "b":
return "BlockDevice";
case "c":
return "CharacterDevice";
case "p":
return "FIFO";
case "s":
return "Socket";
case "d":
return "Directory";
default:
return "File";
}
}
function parseTarVerboseSize(line: string): number {
const tokens = line.trim().split(/\s+/).filter(Boolean);
if (tokens.length < 6) {
throw new Error(`unable to parse tar verbose metadata: ${line}`);
}
let dateIndex = tokens.findIndex((token) => TAR_VERBOSE_MONTHS.has(token));
if (dateIndex > 0) {
const size = Number.parseInt(tokens[dateIndex - 1] ?? "", 10);
if (!Number.isFinite(size) || size < 0) {
throw new Error(`unable to parse tar entry size: ${line}`);
}
return size;
}
dateIndex = tokens.findIndex((token) => ISO_DATE_PATTERN.test(token));
if (dateIndex > 0) {
const size = Number.parseInt(tokens[dateIndex - 1] ?? "", 10);
if (!Number.isFinite(size) || size < 0) {
throw new Error(`unable to parse tar entry size: ${line}`);
}
return size;
}
throw new Error(`unable to parse tar verbose metadata: ${line}`);
}
function parseTarVerboseMetadata(stdout: string): Array<{ type: string; size: number }> {
const lines = stdout
.split("\n")
.map((line) => line.trim())
.filter(Boolean);
return lines.map((line) => {
const typeChar = line[0] ?? "";
if (!typeChar) {
throw new Error("unable to parse tar entry type");
}
return {
type: mapTarVerboseTypeChar(typeChar),
size: parseTarVerboseSize(line),
};
});
}
async function hashFileSha256(filePath: string): Promise<string> {
const hash = createHash("sha256");
const stream = fs.createReadStream(filePath);
return await new Promise<string>((resolve, reject) => {
stream.on("data", (chunk) => {
hash.update(chunk as Buffer);
});
stream.on("error", reject);
stream.on("end", () => {
resolve(hash.digest("hex"));
});
});
}
async function downloadFile(params: {
url: string;
rootDir: string;
@@ -201,125 +101,6 @@ async function downloadFile(params: {
}
}
async function extractArchive(params: {
archivePath: string;
archiveType: string;
targetDir: string;
stripComponents?: number;
timeoutMs: number;
}): Promise<{ stdout: string; stderr: string; code: number | null }> {
const { archivePath, archiveType, targetDir, stripComponents, timeoutMs } = params;
const strip =
typeof stripComponents === "number" && Number.isFinite(stripComponents)
? Math.max(0, Math.floor(stripComponents))
: 0;
try {
if (archiveType === "zip") {
await extractArchiveSafe({
archivePath,
destDir: targetDir,
timeoutMs,
kind: "zip",
stripComponents: strip,
});
return { stdout: "", stderr: "", code: 0 };
}
if (archiveType === "tar.gz") {
await extractArchiveSafe({
archivePath,
destDir: targetDir,
timeoutMs,
kind: "tar",
stripComponents: strip,
tarGzip: true,
});
return { stdout: "", stderr: "", code: 0 };
}
if (archiveType === "tar.bz2") {
if (!hasBinary("tar")) {
return { stdout: "", stderr: "tar not found on PATH", code: null };
}
const preflightHash = await hashFileSha256(archivePath);
// Preflight list to prevent zip-slip style traversal before extraction.
const listResult = await runCommandWithTimeout(["tar", "tf", archivePath], { timeoutMs });
if (listResult.code !== 0) {
return {
stdout: listResult.stdout,
stderr: listResult.stderr || "tar list failed",
code: listResult.code,
};
}
const entries = listResult.stdout
.split("\n")
.map((line) => line.trim())
.filter(Boolean);
const verboseResult = await runCommandWithTimeout(["tar", "tvf", archivePath], { timeoutMs });
if (verboseResult.code !== 0) {
return {
stdout: verboseResult.stdout,
stderr: verboseResult.stderr || "tar verbose list failed",
code: verboseResult.code,
};
}
const metadata = parseTarVerboseMetadata(verboseResult.stdout);
if (metadata.length !== entries.length) {
return {
stdout: verboseResult.stdout,
stderr: `tar verbose/list entry count mismatch (${metadata.length} vs ${entries.length})`,
code: 1,
};
}
const checkTarEntrySafety = createTarEntrySafetyChecker({
rootDir: targetDir,
stripComponents: strip,
escapeLabel: "targetDir",
});
for (let i = 0; i < entries.length; i += 1) {
const entryPath = entries[i];
const entryMeta = metadata[i];
if (!entryPath || !entryMeta) {
return {
stdout: verboseResult.stdout,
stderr: "tar metadata parse failure",
code: 1,
};
}
checkTarEntrySafety({
path: entryPath,
type: entryMeta.type,
size: entryMeta.size,
});
}
const postPreflightHash = await hashFileSha256(archivePath);
if (postPreflightHash !== preflightHash) {
return {
stdout: "",
stderr: "tar archive changed during safety preflight; refusing to extract",
code: 1,
};
}
const argv = ["tar", "xf", archivePath, "-C", targetDir];
if (strip > 0) {
argv.push("--strip-components", String(strip));
}
return await runCommandWithTimeout(argv, { timeoutMs });
}
return { stdout: "", stderr: `unsupported archive type: ${archiveType}`, code: null };
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
return { stdout: "", stderr: message, code: 1 };
}
}
export async function installDownloadSpec(params: {
entry: SkillEntry;
spec: SkillInstallSpec;

View File

@@ -0,0 +1,144 @@
import { createHash } from "node:crypto";
import fs from "node:fs";
import {
createTarEntrySafetyChecker,
extractArchive as extractArchiveSafe,
} from "../infra/archive.js";
import { runCommandWithTimeout } from "../process/exec.js";
import { parseTarVerboseMetadata } from "./skills-install-tar-verbose.js";
import { hasBinary } from "./skills.js";
export type ArchiveExtractResult = { stdout: string; stderr: string; code: number | null };
/**
 * Computes the hex-encoded SHA-256 digest of a file by streaming its
 * contents, so large archives are never loaded into memory at once.
 * Read errors reject the returned promise.
 */
async function hashFileSha256(filePath: string): Promise<string> {
  const digest = createHash("sha256");
  for await (const chunk of fs.createReadStream(filePath)) {
    digest.update(chunk as Buffer);
  }
  return digest.digest("hex");
}
/**
 * Extracts a downloaded skill archive into `targetDir`, with safety
 * preflights to block zip-slip style path traversal.
 *
 * zip and tar.gz go through the hardened `extractArchiveSafe` helper;
 * tar.bz2 shells out to the system `tar`, but only after listing the archive,
 * checking every entry with `createTarEntrySafetyChecker`, and verifying via
 * SHA-256 that the archive file did not change during the preflight.
 *
 * Failures are returned as a result object (never thrown): `code` is the
 * tool's exit code, 1 for safety/parse failures, or null when the operation
 * could not even be attempted (missing binary, unsupported type).
 */
export async function extractArchive(params: {
archivePath: string;
archiveType: string;
targetDir: string;
stripComponents?: number;
timeoutMs: number;
}): Promise<ArchiveExtractResult> {
const { archivePath, archiveType, targetDir, stripComponents, timeoutMs } = params;
// Normalize stripComponents to a non-negative integer; anything else means 0.
const strip =
typeof stripComponents === "number" && Number.isFinite(stripComponents)
? Math.max(0, Math.floor(stripComponents))
: 0;
try {
if (archiveType === "zip") {
await extractArchiveSafe({
archivePath,
destDir: targetDir,
timeoutMs,
kind: "zip",
stripComponents: strip,
});
return { stdout: "", stderr: "", code: 0 };
}
if (archiveType === "tar.gz") {
await extractArchiveSafe({
archivePath,
destDir: targetDir,
timeoutMs,
kind: "tar",
stripComponents: strip,
tarGzip: true,
});
return { stdout: "", stderr: "", code: 0 };
}
if (archiveType === "tar.bz2") {
if (!hasBinary("tar")) {
return { stdout: "", stderr: "tar not found on PATH", code: null };
}
// Hash before the preflight so we can detect the archive being swapped
// out while we are inspecting it.
const preflightHash = await hashFileSha256(archivePath);
// Preflight list to prevent zip-slip style traversal before extraction.
const listResult = await runCommandWithTimeout(["tar", "tf", archivePath], { timeoutMs });
if (listResult.code !== 0) {
return {
stdout: listResult.stdout,
stderr: listResult.stderr || "tar list failed",
code: listResult.code,
};
}
const entries = listResult.stdout
.split("\n")
.map((line) => line.trim())
.filter(Boolean);
// Verbose listing supplies per-entry type and size for the safety checker.
const verboseResult = await runCommandWithTimeout(["tar", "tvf", archivePath], { timeoutMs });
if (verboseResult.code !== 0) {
return {
stdout: verboseResult.stdout,
stderr: verboseResult.stderr || "tar verbose list failed",
code: verboseResult.code,
};
}
const metadata = parseTarVerboseMetadata(verboseResult.stdout);
// The plain and verbose listings must describe the same entries; a
// mismatch suggests parsing problems or a hostile archive.
if (metadata.length !== entries.length) {
return {
stdout: verboseResult.stdout,
stderr: `tar verbose/list entry count mismatch (${metadata.length} vs ${entries.length})`,
code: 1,
};
}
const checkTarEntrySafety = createTarEntrySafetyChecker({
rootDir: targetDir,
stripComponents: strip,
escapeLabel: "targetDir",
});
for (let i = 0; i < entries.length; i += 1) {
const entryPath = entries[i];
const entryMeta = metadata[i];
if (!entryPath || !entryMeta) {
return {
stdout: verboseResult.stdout,
stderr: "tar metadata parse failure",
code: 1,
};
}
// Throws on unsafe entries (escape paths, disallowed types/sizes).
checkTarEntrySafety({
path: entryPath,
type: entryMeta.type,
size: entryMeta.size,
});
}
// Refuse to extract if the file changed between the two hashes.
// NOTE(review): a TOCTOU window remains between this hash and the actual
// `tar xf` below — confirm that is an accepted residual risk.
const postPreflightHash = await hashFileSha256(archivePath);
if (postPreflightHash !== preflightHash) {
return {
stdout: "",
stderr: "tar archive changed during safety preflight; refusing to extract",
code: 1,
};
}
const argv = ["tar", "xf", archivePath, "-C", targetDir];
if (strip > 0) {
argv.push("--strip-components", String(strip));
}
return await runCommandWithTimeout(argv, { timeoutMs });
}
return { stdout: "", stderr: `unsupported archive type: ${archiveType}`, code: null };
} catch (err) {
// All helper throws (including safety-checker rejections) are converted
// into a failed result rather than propagated.
const message = err instanceof Error ? err.message : String(err);
return { stdout: "", stderr: message, code: 1 };
}
}

View File

@@ -0,0 +1,80 @@
// Month abbreviations as printed in the date column of `tar -tv` output
// (BSD/GNU styles); used to locate the size token, which immediately
// precedes the date.
const TAR_VERBOSE_MONTHS = new Set([
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
]);
// Some tar variants print an ISO `YYYY-MM-DD` date instead of a month name.
const ISO_DATE_PATTERN = /^\d{4}-\d{2}-\d{2}$/;
/**
 * Translates the leading type character of a `tar -tv` listing line into the
 * entry-type label used by the tar safety checker. Any unrecognized character
 * (including `-` for regular files) maps to "File".
 */
function mapTarVerboseTypeChar(typeChar: string): string {
  const labelByChar: Record<string, string> = {
    l: "SymbolicLink",
    h: "Link",
    b: "BlockDevice",
    c: "CharacterDevice",
    p: "FIFO",
    s: "Socket",
    d: "Directory",
  };
  return labelByChar[typeChar] ?? "File";
}
/**
 * Extracts the size column from one `tar -tv` line. The size token sits
 * directly before the date column, which is located either by a month
 * abbreviation (BSD/GNU style) or an ISO `YYYY-MM-DD` date. Throws when the
 * line cannot be interpreted.
 */
function parseTarVerboseSize(line: string): number {
  const tokens = line.trim().split(/\s+/).filter(Boolean);
  if (tokens.length < 6) {
    throw new Error(`unable to parse tar verbose metadata: ${line}`);
  }
  // Find the date column; a match at index 0 cannot have a size before it.
  let dateIndex = tokens.findIndex((token) => TAR_VERBOSE_MONTHS.has(token));
  if (dateIndex <= 0) {
    dateIndex = tokens.findIndex((token) => ISO_DATE_PATTERN.test(token));
  }
  if (dateIndex <= 0) {
    throw new Error(`unable to parse tar verbose metadata: ${line}`);
  }
  const size = Number.parseInt(tokens[dateIndex - 1] ?? "", 10);
  if (!Number.isFinite(size) || size < 0) {
    throw new Error(`unable to parse tar entry size: ${line}`);
  }
  return size;
}
/**
 * Parses full `tar -tv` output into per-entry `{ type, size }` records, one
 * per non-empty line. Throws when a line's type or size cannot be parsed.
 */
export function parseTarVerboseMetadata(stdout: string): Array<{ type: string; size: number }> {
  const records: Array<{ type: string; size: number }> = [];
  for (const rawLine of stdout.split("\n")) {
    const line = rawLine.trim();
    if (!line) {
      continue;
    }
    const typeChar = line[0] ?? "";
    if (!typeChar) {
      throw new Error("unable to parse tar entry type");
    }
    records.push({
      type: mapTarVerboseTypeChar(typeChar),
      size: parseTarVerboseSize(line),
    });
  }
  return records;
}