From 2a922ef562fb1a07a44d1ea3df8f811cbdb661ad Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Tue, 27 Jan 2026 15:43:19 -0800 Subject: [PATCH] chore: copy and adapt rivet release scripts for sandbox-agent --- scripts/release/artifacts.ts | 117 --- scripts/release/build-artifacts.ts | 36 + scripts/release/docker.ts | 49 + scripts/release/git.ts | 32 +- scripts/release/main.ts | 1321 ++++++++++++-------------- scripts/release/package.json | 38 +- scripts/release/promote-artifacts.ts | 78 ++ scripts/release/publish.ts | 166 ---- scripts/release/sdk.ts | 203 ++++ scripts/release/static/install.ps1 | 51 + scripts/release/static/install.sh | 76 +- scripts/release/tsconfig.json | 19 +- scripts/release/update_version.ts | 52 +- scripts/release/utils.ts | 309 +++--- 14 files changed, 1284 insertions(+), 1263 deletions(-) delete mode 100644 scripts/release/artifacts.ts create mode 100644 scripts/release/build-artifacts.ts create mode 100644 scripts/release/docker.ts create mode 100644 scripts/release/promote-artifacts.ts delete mode 100644 scripts/release/publish.ts create mode 100644 scripts/release/sdk.ts create mode 100644 scripts/release/static/install.ps1 diff --git a/scripts/release/artifacts.ts b/scripts/release/artifacts.ts deleted file mode 100644 index 7e0f150..0000000 --- a/scripts/release/artifacts.ts +++ /dev/null @@ -1,117 +0,0 @@ -import * as fs from "node:fs/promises"; -import * as path from "node:path"; -import { $ } from "execa"; -import type { ReleaseOpts } from "./main.js"; -import { - assertDirExists, - copyReleasesPath, - deleteReleasesPath, - listReleasesObjects, - uploadContentToReleases, - uploadDirToReleases, - uploadFileToReleases, -} from "./utils.js"; - -const PREFIX = "sandbox-agent"; - -const BINARY_FILES = [ - "sandbox-agent-x86_64-unknown-linux-musl", - "sandbox-agent-x86_64-pc-windows-gnu.exe", - "sandbox-agent-x86_64-apple-darwin", - "sandbox-agent-aarch64-apple-darwin", -]; - -/** - * Build TypeScript SDK and upload to commit directory. 
- * This is called during setup-ci phase. - */ -export async function buildAndUploadArtifacts(opts: ReleaseOpts) { - console.log("==> Building TypeScript SDK"); - const sdkDir = path.join(opts.root, "sdks", "typescript"); - await $({ stdio: "inherit", cwd: sdkDir })`pnpm install`; - await $({ stdio: "inherit", cwd: sdkDir })`pnpm run build`; - - const distPath = path.join(sdkDir, "dist"); - await assertDirExists(distPath); - - console.log(`==> Uploading TypeScript SDK to ${PREFIX}/${opts.commit}/typescript/`); - await uploadDirToReleases(distPath, `${PREFIX}/${opts.commit}/typescript/`); - - console.log("✅ TypeScript SDK artifacts uploaded"); -} - -/** - * Promote artifacts from commit directory to version directory. - * This is called during complete-ci phase. - */ -export async function promoteArtifacts(opts: ReleaseOpts) { - // Promote TypeScript SDK - await promotePath(opts, "typescript"); -} - -async function promotePath(opts: ReleaseOpts, name: string) { - console.log(`==> Promoting ${name} artifacts`); - - const sourcePrefix = `${PREFIX}/${opts.commit}/${name}/`; - const commitFiles = await listReleasesObjects(sourcePrefix); - if (!Array.isArray(commitFiles?.Contents) || commitFiles.Contents.length === 0) { - throw new Error(`No files found under ${sourcePrefix}`); - } - - await copyPath(sourcePrefix, `${PREFIX}/${opts.version}/${name}/`); - if (opts.latest) { - await copyPath(sourcePrefix, `${PREFIX}/latest/${name}/`); - } -} - -async function copyPath(sourcePrefix: string, targetPrefix: string) { - console.log(`Copying ${sourcePrefix} -> ${targetPrefix}`); - await deleteReleasesPath(targetPrefix); - await copyReleasesPath(sourcePrefix, targetPrefix); -} - -/** - * Upload install script with version substitution. 
- */ -export async function uploadInstallScripts(opts: ReleaseOpts) { - const installPath = path.join(opts.root, "scripts", "release", "static", "install.sh"); - let installContent = await fs.readFile(installPath, "utf8"); - - const uploadForVersion = async (versionValue: string, remoteVersion: string) => { - const content = installContent.replace(/__VERSION__/g, versionValue); - const uploadKey = `${PREFIX}/${remoteVersion}/install.sh`; - console.log(`Uploading install script: ${uploadKey}`); - await uploadContentToReleases(content, uploadKey); - }; - - await uploadForVersion(opts.version, opts.version); - if (opts.latest) { - await uploadForVersion("latest", "latest"); - } -} - -/** - * Upload compiled binaries from dist/ directory. - */ -export async function uploadBinaries(opts: ReleaseOpts) { - const distDir = path.join(opts.root, "dist"); - await assertDirExists(distDir); - - for (const fileName of BINARY_FILES) { - const localPath = path.join(distDir, fileName); - - try { - await fs.access(localPath); - } catch { - throw new Error(`Missing binary: ${localPath}`); - } - - console.log(`Uploading binary: ${fileName}`); - await uploadFileToReleases(localPath, `${PREFIX}/${opts.version}/${fileName}`); - if (opts.latest) { - await uploadFileToReleases(localPath, `${PREFIX}/latest/${fileName}`); - } - } - - console.log("✅ Binaries uploaded"); -} diff --git a/scripts/release/build-artifacts.ts b/scripts/release/build-artifacts.ts new file mode 100644 index 0000000..736a9c5 --- /dev/null +++ b/scripts/release/build-artifacts.ts @@ -0,0 +1,36 @@ +import * as path from "node:path"; +import { $ } from "execa"; +import type { ReleaseOpts } from "./main"; +import { assertDirExists, PREFIX, uploadDirToReleases } from "./utils"; + +export async function buildJsArtifacts(opts: ReleaseOpts) { + await buildAndUploadTypescriptSdk(opts); +} + +async function buildAndUploadTypescriptSdk(opts: ReleaseOpts) { + console.log(`==> Building TypeScript SDK`); + + // Build TypeScript SDK 
+ await $({ + stdio: "inherit", + cwd: opts.root, + })`pnpm --filter sandbox-agent build`; + + console.log(`✅ TypeScript SDK built successfully`); + + // Upload TypeScript SDK to R2 + console.log(`==> Uploading TypeScript SDK Artifacts`); + + const sdkDistPath = path.resolve( + opts.root, + "sdks/typescript/dist", + ); + + await assertDirExists(sdkDistPath); + + // Upload to commit directory + console.log(`Uploading TypeScript SDK to ${PREFIX}/${opts.commit}/typescript/`); + await uploadDirToReleases(sdkDistPath, `${PREFIX}/${opts.commit}/typescript/`); + + console.log(`✅ TypeScript SDK artifacts uploaded successfully`); +} diff --git a/scripts/release/docker.ts b/scripts/release/docker.ts new file mode 100644 index 0000000..5a0db41 --- /dev/null +++ b/scripts/release/docker.ts @@ -0,0 +1,49 @@ +import { $ } from "execa"; +import type { ReleaseOpts } from "./main"; +import { fetchGitRef, versionOrCommitToRef } from "./utils"; + +const IMAGE = "rivetdev/sandbox-agent"; + +export async function tagDocker(opts: ReleaseOpts) { + // Determine which commit to use for source images + let sourceCommit = opts.commit; + if (opts.reuseEngineVersion) { + console.log(`==> Reusing Docker images from ${opts.reuseEngineVersion}`); + const ref = versionOrCommitToRef(opts.reuseEngineVersion); + await fetchGitRef(ref); + const result = await $`git rev-parse ${ref}`; + sourceCommit = result.stdout.trim().slice(0, 7); + console.log(`==> Source commit: ${sourceCommit}`); + } + + // Check both architecture images exist using manifest inspect + console.log(`==> Checking images exist: ${IMAGE}:${sourceCommit}-{amd64,arm64}`); + try { + console.log(`==> Inspecting ${IMAGE}:${sourceCommit}-amd64`); + await $({ stdio: "inherit" })`docker manifest inspect ${IMAGE}:${sourceCommit}-amd64`; + console.log(`==> Inspecting ${IMAGE}:${sourceCommit}-arm64`); + await $({ stdio: "inherit" })`docker manifest inspect ${IMAGE}:${sourceCommit}-arm64`; + console.log(`==> Both images exist`); + } catch 
(error) { + console.error(`==> Error inspecting images:`, error); + throw new Error( + `Images ${IMAGE}:${sourceCommit}-{amd64,arm64} do not exist on Docker Hub. Error: ${error}`, + ); + } + + // Create and push manifest with version + await createManifest(sourceCommit, opts.version); + + // Create and push manifest with latest + if (opts.latest) { + await createManifest(sourceCommit, "latest"); + } +} + +async function createManifest(from: string, to: string) { + console.log(`==> Creating manifest: ${IMAGE}:${to} from ${IMAGE}:${from}-{amd64,arm64}`); + + // Use buildx imagetools to create and push multi-arch manifest + // This works with manifest lists as inputs (unlike docker manifest create) + await $({ stdio: "inherit" })`docker buildx imagetools create --tag ${IMAGE}:${to} ${IMAGE}:${from}-amd64 ${IMAGE}:${from}-arm64`; +} diff --git a/scripts/release/git.ts b/scripts/release/git.ts index 3d5c1a1..6f7cf7b 100644 --- a/scripts/release/git.ts +++ b/scripts/release/git.ts @@ -1,8 +1,8 @@ import { $ } from "execa"; -import * as semver from "semver"; -import type { ReleaseOpts } from "./main.js"; +import type { ReleaseOpts } from "./main"; export async function validateGit(_opts: ReleaseOpts) { + // Validate there's no uncommitted changes const result = await $`git status --porcelain`; const status = result.stdout; if (status.trim().length > 0) { @@ -15,8 +15,12 @@ export async function validateGit(_opts: ReleaseOpts) { export async function createAndPushTag(opts: ReleaseOpts) { console.log(`Creating tag v${opts.version}...`); try { + // Create tag and force update if it exists await $({ stdio: "inherit", cwd: opts.root })`git tag -f v${opts.version}`; + + // Push tag with force to ensure it's updated await $({ stdio: "inherit", cwd: opts.root })`git push origin v${opts.version} -f`; + console.log(`✅ Tag v${opts.version} created and pushed`); } catch (err) { console.error("❌ Failed to create or push tag"); @@ -28,40 +32,46 @@ export async function 
createGitHubRelease(opts: ReleaseOpts) { console.log("Creating GitHub release..."); try { + // Get the current tag name (should be the tag created during the release process) + const { stdout: currentTag } = await $({ + cwd: opts.root, + })`git describe --tags --exact-match`; + const tagName = currentTag.trim(); + console.log(`Looking for existing release for ${opts.version}`); + // Check if a release with this version name already exists const { stdout: releaseJson } = await $({ cwd: opts.root, })`gh release list --json name,tagName`; const releases = JSON.parse(releaseJson); const existingRelease = releases.find( - (r: { name: string }) => r.name === opts.version, + (r: any) => r.name === opts.version, ); if (existingRelease) { console.log( - `Updating release ${opts.version} to point to tag v${opts.version}`, + `Updating release ${opts.version} to point to new tag ${tagName}`, ); await $({ stdio: "inherit", cwd: opts.root, - })`gh release edit ${existingRelease.tagName} --tag v${opts.version}`; + })`gh release edit ${existingRelease.tagName} --tag ${tagName}`; } else { console.log( - `Creating new release ${opts.version} pointing to tag v${opts.version}`, + `Creating new release ${opts.version} pointing to tag ${tagName}`, ); await $({ stdio: "inherit", cwd: opts.root, - })`gh release create v${opts.version} --title ${opts.version} --generate-notes`; + })`gh release create ${tagName} --title ${opts.version} --generate-notes`; - // Mark as prerelease if needed - const parsed = semver.parse(opts.version); - if (parsed && parsed.prerelease.length > 0) { + // Check if this is a pre-release (contains -rc. 
or similar) + if (opts.version.includes("-")) { await $({ stdio: "inherit", cwd: opts.root, - })`gh release edit v${opts.version} --prerelease`; + })`gh release edit ${tagName} --prerelease`; } } diff --git a/scripts/release/main.ts b/scripts/release/main.ts index b744b3b..e04c0aa 100755 --- a/scripts/release/main.ts +++ b/scripts/release/main.ts @@ -1,783 +1,632 @@ #!/usr/bin/env tsx -import fs from "node:fs"; -import path from "node:path"; -import { execFileSync, spawnSync } from "node:child_process"; -import readline from "node:readline"; +import * as path from "node:path"; +import * as url from "node:url"; +import { $ } from "execa"; +import { program } from "commander"; +import * as semver from "semver"; +import { buildJsArtifacts } from "./build-artifacts"; +import { promoteArtifacts } from "./promote-artifacts"; +import { tagDocker } from "./docker"; +import { + createAndPushTag, + createGitHubRelease, + validateGit, +} from "./git"; +import { publishCrates, publishNpmCli, publishNpmSdk } from "./sdk"; +import { updateVersion } from "./update_version"; +import { assert, assertEquals, fetchGitRef, versionOrCommitToRef } from "./utils"; -const ENDPOINT_URL = - "https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com"; -const BUCKET = "rivet-releases"; -const PREFIX = "sandbox-agent"; +const __dirname = path.dirname(url.fileURLToPath(import.meta.url)); +const ROOT_DIR = path.resolve(__dirname, "..", ".."); -const BINARY_FILES = [ - "sandbox-agent-x86_64-unknown-linux-musl", - "sandbox-agent-x86_64-pc-windows-gnu.exe", - "sandbox-agent-x86_64-apple-darwin", - "sandbox-agent-aarch64-apple-darwin", -]; +export interface ReleaseOpts { + root: string; + version: string; + latest: boolean; + /** Commit to publish release for. */ + commit: string; + /** Optional version to reuse artifacts and Docker images from instead of building. 
*/ + reuseEngineVersion?: string; +} -const CRATE_ORDER = [ - "error", - "agent-credentials", - "agent-schema", - "universal-agent-schema", - "agent-management", - "sandbox-agent", -]; +async function getAllGitVersions(): Promise { + try { + // Fetch tags to ensure we have the latest + // Use --force to overwrite local tags that conflict with remote + try { + await $`git fetch --tags --force --quiet`; + } catch (fetchError) { + console.warn("Warning: Could not fetch remote tags, using local tags only"); + } -const PLATFORM_MAP: Record = { - "x86_64-unknown-linux-musl": { pkg: "linux-x64", os: "linux", cpu: "x64", ext: "" }, - "x86_64-pc-windows-gnu": { pkg: "win32-x64", os: "win32", cpu: "x64", ext: ".exe" }, - "x86_64-apple-darwin": { pkg: "darwin-x64", os: "darwin", cpu: "x64", ext: "" }, - "aarch64-apple-darwin": { pkg: "darwin-arm64", os: "darwin", cpu: "arm64", ext: "" }, -}; + // Get all version tags + const result = await $`git tag -l v*`; + const tags = result.stdout.trim().split("\n").filter(Boolean); + if (tags.length === 0) { + return []; + } + + // Parse and sort all versions (newest first) + const versions = tags + .map(tag => tag.replace(/^v/, "")) + .filter(v => semver.valid(v)) + .sort((a, b) => semver.rcompare(a, b)); + + return versions; + } catch (error) { + console.warn("Warning: Could not get git tags:", error); + return []; + } +} + +async function getLatestGitVersion(): Promise { + const versions = await getAllGitVersions(); + + if (versions.length === 0) { + return null; + } + + // Find the latest version (excluding prereleases) + const stableVersions = versions.filter(v => { + const parsed = semver.parse(v); + return parsed && parsed.prerelease.length === 0; + }); + + return stableVersions[0] || null; +} + +async function shouldTagAsLatest(newVersion: string): Promise { + // Check if version has prerelease identifier (e.g., 1.0.0-rc.1) + const parsedVersion = semver.parse(newVersion); + if (!parsedVersion) { + throw new Error(`Invalid 
semantic version: ${newVersion}`); + } + + // If it has a prerelease identifier, it's not latest + if (parsedVersion.prerelease.length > 0) { + return false; + } + + // Get the latest version from git tags + const latestGitVersion = await getLatestGitVersion(); + + // If no previous versions exist, this is the latest + if (!latestGitVersion) { + return true; + } + + // Check if new version is greater than the latest git version + return semver.gt(newVersion, latestGitVersion); +} + +async function validateReuseVersion(version: string): Promise { + console.log(`Validating that ${version} exists...`); + + const ref = versionOrCommitToRef(version); + await fetchGitRef(ref); + + // Get short commit from ref + let shortCommit: string; + try { + const result = await $`git rev-parse ${ref}`; + const fullCommit = result.stdout.trim(); + shortCommit = fullCommit.slice(0, 7); + console.log(`✅ Found ${ref} (commit ${shortCommit})`); + } catch (error) { + throw new Error( + `${version} does not exist in git. Make sure ${ref} exists in the repository.`, + ); + } + + // Check Docker images exist + console.log(`Checking Docker images for ${shortCommit}...`); + try { + await $({ stdio: "inherit" })`docker manifest inspect rivetdev/sandbox-agent:${shortCommit}-amd64`; + await $({ stdio: "inherit" })`docker manifest inspect rivetdev/sandbox-agent:${shortCommit}-arm64`; + console.log("✅ Docker images exist"); + } catch (error) { + throw new Error( + `Docker images for version ${version} (commit ${shortCommit}) do not exist. 
Error: ${error}`, + ); + } + + // Check S3 artifacts exist + console.log(`Checking S3 artifacts for ${shortCommit}...`); + const endpointUrl = + "https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com"; + + // Get credentials + let awsAccessKeyId = process.env.R2_RELEASES_ACCESS_KEY_ID; + if (!awsAccessKeyId) { + const result = + await $`op read "op://Engineering/rivet-releases R2 Upload/username"`; + awsAccessKeyId = result.stdout.trim(); + } + let awsSecretAccessKey = process.env.R2_RELEASES_SECRET_ACCESS_KEY; + if (!awsSecretAccessKey) { + const result = + await $`op read "op://Engineering/rivet-releases R2 Upload/password"`; + awsSecretAccessKey = result.stdout.trim(); + } + + const awsEnv = { + AWS_ACCESS_KEY_ID: awsAccessKeyId, + AWS_SECRET_ACCESS_KEY: awsSecretAccessKey, + AWS_DEFAULT_REGION: "auto", + }; + + const commitPrefix = `sandbox-agent/${shortCommit}/`; + const listResult = await $({ + env: awsEnv, + shell: true, + stdio: ["pipe", "pipe", "inherit"], + })`aws s3api list-objects --bucket rivet-releases --prefix ${commitPrefix} --endpoint-url ${endpointUrl}`; + const files = JSON.parse(listResult.stdout); + + if (!Array.isArray(files?.Contents) || files.Contents.length === 0) { + throw new Error( + `No S3 artifacts found for version ${version} (commit ${shortCommit}) under ${commitPrefix}`, + ); + } + + console.log(`✅ S3 artifacts exist (${files.Contents.length} files found)`); +} + +async function runLocalChecks(opts: ReleaseOpts) { + console.log("Running local checks..."); + + // Cargo check + console.log("Running cargo check..."); + try { + await $({ stdio: "inherit", cwd: opts.root })`cargo check`; + console.log("✅ Cargo check passed"); + } catch (err) { + console.error("❌ Cargo check failed"); + throw err; + } + + // Cargo fmt check + console.log("Running cargo fmt --check..."); + try { + await $({ stdio: "inherit", cwd: opts.root })`cargo fmt --check`; + console.log("✅ Cargo fmt check passed"); + } catch (err) { + console.error("❌ 
Cargo fmt check failed"); + throw err; + } + + // TypeScript type check + console.log("Running TypeScript type check..."); + try { + await $({ stdio: "inherit", cwd: opts.root })`pnpm check-types`; + console.log("✅ TypeScript type check passed"); + } catch (err) { + console.error("❌ TypeScript type check failed"); + throw err; + } + + console.log("✅ All local checks passed"); +} + +async function runCiChecks(opts: ReleaseOpts) { + console.log("Running CI checks..."); + + // TypeScript type check + console.log("Running TypeScript type check..."); + try { + await $({ stdio: "inherit", cwd: opts.root })`pnpm check-types`; + console.log("✅ TypeScript type check passed"); + } catch (err) { + console.error("❌ TypeScript type check failed"); + throw err; + } + + console.log("✅ All CI checks passed"); +} + +async function getVersionFromArgs(opts: { + version?: string; + major?: boolean; + minor?: boolean; + patch?: boolean; +}): Promise { + // Check if explicit version is provided via --version flag + if (opts.version) { + return opts.version; + } + + // Check for version bump flags + if (!opts.major && !opts.minor && !opts.patch) { + throw new Error( + "Must provide either --version, --major, --minor, or --patch", + ); + } + + // Get latest version from git tags and calculate new one + const latestVersion = await getLatestGitVersion(); + if (!latestVersion) { + throw new Error( + "No existing version tags found. 
Use --version to set an explicit version.", + ); + } + console.log(`Latest git version: ${latestVersion}`); + + let newVersion: string | null = null; + + if (opts.major) { + newVersion = semver.inc(latestVersion, "major"); + } else if (opts.minor) { + newVersion = semver.inc(latestVersion, "minor"); + } else if (opts.patch) { + newVersion = semver.inc(latestVersion, "patch"); + } + + if (!newVersion) { + throw new Error("Failed to calculate new version"); + } + + return newVersion; +} + +// Available steps const STEPS = [ - "confirm-release", - "update-version", - "generate-artifacts", - "git-commit", - "git-push", - "trigger-workflow", - "run-checks", - "publish-crates", - "publish-npm-sdk", - "publish-npm-cli", - "upload-typescript", - "upload-install", - "upload-binaries", - "push-tag", - "create-github-release", + "confirm-release", + "update-version", + "run-local-checks", + "git-commit", + "git-push", + "trigger-workflow", + "validate-reuse-version", + "run-ci-checks", + "build-js-artifacts", + "publish-crates", + "publish-npm-sdk", + "publish-npm-cli", + "tag-docker", + "promote-artifacts", + "push-tag", + "create-github-release", ] as const; -const PHASES = ["setup-local", "setup-ci", "complete-ci"] as const; +const PHASES = [ + "setup-local", + "setup-ci", + "complete-ci", +] as const; type Step = (typeof STEPS)[number]; type Phase = (typeof PHASES)[number]; +// Map phases to individual steps const PHASE_MAP: Record = { - "setup-local": [ - "confirm-release", - "update-version", - "generate-artifacts", - "git-commit", - "git-push", - "trigger-workflow", - ], - "setup-ci": ["run-checks"], - "complete-ci": [ - "publish-crates", - "publish-npm-sdk", - "publish-npm-cli", - "upload-typescript", - "upload-install", - "upload-binaries", - "push-tag", - "create-github-release", - ], + // These steps modify the source code, so they need to be ran & committed + // locally. CI cannot push commits. 
+ // + // run-local-checks runs cargo check, cargo fmt, and type checks to fail + // fast before committing/pushing. + "setup-local": [ + "confirm-release", + "update-version", + "run-local-checks", + "git-commit", + "git-push", + "trigger-workflow", + ], + // These steps validate the repository and build JS artifacts before + // triggering release. + "setup-ci": ["validate-reuse-version", "run-ci-checks", "build-js-artifacts"], + // These steps run after the required artifacts have been successfully built. + "complete-ci": [ + "publish-crates", + "publish-npm-sdk", + "publish-npm-cli", + "tag-docker", + "promote-artifacts", + "push-tag", + "create-github-release", + ], }; -function parseArgs(argv: string[]) { - const args = new Map(); - const flags = new Set(); - for (let i = 0; i < argv.length; i += 1) { - const arg = argv[i]; - if (!arg.startsWith("--")) continue; - if (arg.includes("=")) { - const [key, value] = arg.split("="); - args.set(key, value ?? ""); - continue; - } - const next = argv[i + 1]; - if (next && !next.startsWith("--")) { - args.set(arg, next); - i += 1; - } else { - flags.add(arg); - } - } - return { args, flags }; -} - -function run(cmd: string, cmdArgs: string[], options: Record = {}) { - const result = spawnSync(cmd, cmdArgs, { stdio: "inherit", ...options }); - if (result.status !== 0) { - process.exit(result.status ?? 1); - } -} - -function runCapture(cmd: string, cmdArgs: string[], options: Record = {}) { - const result = spawnSync(cmd, cmdArgs, { - stdio: ["ignore", "pipe", "pipe"], - encoding: "utf8", - ...options, - }); - if (result.status !== 0) { - const stderr = result.stderr ? 
String(result.stderr).trim() : ""; - throw new Error(`${cmd} failed: ${stderr}`); - } - return (result.stdout || "").toString().trim(); -} - -interface ParsedSemver { - major: number; - minor: number; - patch: number; - prerelease: string[]; -} - -function parseSemver(version: string): ParsedSemver { - const match = version.match( - /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-([0-9A-Za-z.-]+))?(?:\+([0-9A-Za-z.-]+))?$/, - ); - if (!match) { - throw new Error(`Invalid semantic version: ${version}`); - } - return { - major: Number(match[1]), - minor: Number(match[2]), - patch: Number(match[3]), - prerelease: match[4] ? match[4].split(".") : [], - }; -} - -function compareSemver(a: ParsedSemver, b: ParsedSemver) { - if (a.major !== b.major) return a.major - b.major; - if (a.minor !== b.minor) return a.minor - b.minor; - return a.patch - b.patch; -} - -function isStable(version: string) { - return parseSemver(version).prerelease.length === 0; -} - -function getNpmTag(version: string, latest: boolean) { - if (latest) return null; - const prerelease = parseSemver(version).prerelease; - if (prerelease.length === 0) { - return "next"; - } - const hasRc = prerelease.some((part) => part.toLowerCase().startsWith("rc")); - if (hasRc) { - return "rc"; - } - throw new Error(`Prerelease versions must use rc tag when not latest: ${version}`); -} - -function getAllGitVersions() { - try { - execFileSync("git", ["fetch", "--tags", "--force", "--quiet"], { - stdio: "ignore", - }); - } catch { - // best-effort - } - - const output = runCapture("git", ["tag", "-l", "v*"]); - if (!output) return []; - - return output - .split("\n") - .map((tag) => tag.replace(/^v/, "")) - .filter((tag) => { - try { - parseSemver(tag); - return true; - } catch { - return false; - } - }) - .sort((a, b) => compareSemver(parseSemver(b), parseSemver(a))); -} - -function getLatestStableVersion() { - const versions = getAllGitVersions(); - const stable = versions.filter((version) => isStable(version)); - 
return stable[0] || null; -} - -function shouldTagAsLatest(version: string) { - const parsed = parseSemver(version); - if (parsed.prerelease.length > 0) { - return false; - } - - const latestStable = getLatestStableVersion(); - if (!latestStable) { - return true; - } - - return compareSemver(parsed, parseSemver(latestStable)) > 0; -} - -function npmVersionExists(packageName: string, version: string): boolean { - const result = spawnSync("npm", ["view", `${packageName}@${version}`, "version"], { - stdio: ["ignore", "pipe", "pipe"], - encoding: "utf8", - }); - if (result.status === 0) { - return true; - } - const stderr = result.stderr || ""; - if ( - stderr.includes(`No match found for version ${version}`) || - stderr.includes(`'${packageName}@${version}' is not in this registry`) - ) { - return false; - } - // If it's an unexpected error, assume version doesn't exist to allow publish attempt - return false; -} - -function crateVersionExists(crateName: string, version: string): boolean { - const result = spawnSync("cargo", ["search", crateName, "--limit", "1"], { - stdio: ["ignore", "pipe", "pipe"], - encoding: "utf8", - }); - if (result.status !== 0) { - return false; - } - // Output format: "crate_name = \"version\"" - const output = result.stdout || ""; - const match = output.match(new RegExp(`^${crateName}\\s*=\\s*"([^"]+)"`)); - if (match && match[1] === version) { - return true; - } - return false; -} - -function createAndPushTag(rootDir: string, version: string) { - console.log(`==> Creating tag v${version}`); - run("git", ["tag", "-f", `v${version}`], { cwd: rootDir }); - run("git", ["push", "origin", `v${version}`, "-f"], { cwd: rootDir }); - console.log(`Tag v${version} created and pushed`); -} - -function createGitHubRelease(rootDir: string, version: string) { - console.log(`==> Creating GitHub release for v${version}`); - - // Check if release already exists - const listResult = spawnSync("gh", ["release", "list", "--json", "tagName"], { - cwd: rootDir, 
- stdio: ["ignore", "pipe", "pipe"], - encoding: "utf8", - }); - - if (listResult.status === 0) { - const releases = JSON.parse(listResult.stdout || "[]"); - const exists = releases.some((r: { tagName: string }) => r.tagName === `v${version}`); - if (exists) { - console.log(`Release v${version} already exists, updating...`); - run("gh", ["release", "edit", `v${version}`, "--tag", `v${version}`], { cwd: rootDir }); - return; - } - } - - // Create new release - const isPrerelease = parseSemver(version).prerelease.length > 0; - const releaseArgs = ["release", "create", `v${version}`, "--title", version, "--generate-notes"]; - if (isPrerelease) { - releaseArgs.push("--prerelease"); - } - run("gh", releaseArgs, { cwd: rootDir }); - console.log(`GitHub release v${version} created`); -} - -function getAwsEnv() { - const accessKey = - process.env.AWS_ACCESS_KEY_ID || process.env.R2_RELEASES_ACCESS_KEY_ID; - const secretKey = - process.env.AWS_SECRET_ACCESS_KEY || - process.env.R2_RELEASES_SECRET_ACCESS_KEY; - - if (!accessKey || !secretKey) { - throw new Error("Missing AWS credentials for releases bucket"); - } - - return { - AWS_ACCESS_KEY_ID: accessKey, - AWS_SECRET_ACCESS_KEY: secretKey, - AWS_DEFAULT_REGION: "auto", - }; -} - -function uploadDir(localPath: string, remotePath: string) { - const env = { ...process.env, ...getAwsEnv() }; - run( - "aws", - [ - "s3", - "cp", - localPath, - `s3://${BUCKET}/${remotePath}`, - "--recursive", - "--checksum-algorithm", - "CRC32", - "--endpoint-url", - ENDPOINT_URL, - ], - { env }, - ); -} - -function uploadFile(localPath: string, remotePath: string) { - const env = { ...process.env, ...getAwsEnv() }; - run( - "aws", - [ - "s3", - "cp", - localPath, - `s3://${BUCKET}/${remotePath}`, - "--checksum-algorithm", - "CRC32", - "--endpoint-url", - ENDPOINT_URL, - ], - { env }, - ); -} - -function uploadContent(content: string, remotePath: string) { - const env = { ...process.env, ...getAwsEnv() }; - const result = spawnSync( - "aws", - [ 
- "s3", - "cp", - "-", - `s3://${BUCKET}/${remotePath}`, - "--endpoint-url", - ENDPOINT_URL, - ], - { - env, - input: content, - stdio: ["pipe", "inherit", "inherit"], - }, - ); - if (result.status !== 0) { - process.exit(result.status ?? 1); - } -} - -function updatePackageJson(filePath: string, version: string, updateOptionalDeps = false) { - const pkg = JSON.parse(fs.readFileSync(filePath, "utf8")); - pkg.version = version; - if (updateOptionalDeps && pkg.optionalDependencies) { - for (const dep of Object.keys(pkg.optionalDependencies)) { - pkg.optionalDependencies[dep] = version; - } - } - fs.writeFileSync(filePath, JSON.stringify(pkg, null, 2) + "\n"); -} - -function updateVersion(rootDir: string, version: string) { - const cargoPath = path.join(rootDir, "Cargo.toml"); - let cargoContent = fs.readFileSync(cargoPath, "utf8"); - cargoContent = cargoContent.replace(/^version = ".*"/m, `version = "${version}"`); - fs.writeFileSync(cargoPath, cargoContent); - - updatePackageJson(path.join(rootDir, "sdks", "typescript", "package.json"), version, true); - updatePackageJson(path.join(rootDir, "sdks", "cli", "package.json"), version, true); - - const platformsDir = path.join(rootDir, "sdks", "cli", "platforms"); - for (const entry of fs.readdirSync(platformsDir, { withFileTypes: true })) { - if (!entry.isDirectory()) continue; - const pkgPath = path.join(platformsDir, entry.name, "package.json"); - if (fs.existsSync(pkgPath)) { - updatePackageJson(pkgPath, version, false); - } - } -} - -function buildTypescript(rootDir: string) { - const sdkDir = path.join(rootDir, "sdks", "typescript"); - if (!fs.existsSync(sdkDir)) { - throw new Error(`TypeScript SDK not found at ${sdkDir}`); - } - run("pnpm", ["install"], { cwd: sdkDir }); - run("pnpm", ["run", "build"], { cwd: sdkDir }); - return path.join(sdkDir, "dist"); -} - -function generateArtifacts(rootDir: string) { - run("pnpm", ["install"], { cwd: rootDir }); - run("pnpm", ["--filter", "@sandbox-agent/inspector", 
"build"], { - cwd: rootDir, - env: { ...process.env, SANDBOX_AGENT_SKIP_INSPECTOR: "1" }, - }); - const sdkDir = path.join(rootDir, "sdks", "typescript"); - run("pnpm", ["run", "generate"], { cwd: sdkDir }); - run("cargo", ["check", "-p", "sandbox-agent-universal-schema-gen"], { cwd: rootDir }); - run("cargo", ["run", "-p", "sandbox-agent-openapi-gen", "--", "--out", "docs/openapi.json"], { - cwd: rootDir, - }); -} - -function uploadTypescriptArtifacts(rootDir: string, version: string, latest: boolean) { - console.log("==> Building TypeScript SDK"); - const distPath = buildTypescript(rootDir); - - console.log("==> Uploading TypeScript artifacts"); - uploadDir(distPath, `${PREFIX}/${version}/typescript/`); - if (latest) { - uploadDir(distPath, `${PREFIX}/latest/typescript/`); - } -} - -function uploadInstallScript(rootDir: string, version: string, latest: boolean) { - const installPath = path.join(rootDir, "scripts", "release", "static", "install.sh"); - let installContent = fs.readFileSync(installPath, "utf8"); - - const uploadForVersion = (versionValue: string, remoteVersion: string) => { - const content = installContent.replace(/__VERSION__/g, versionValue); - uploadContent(content, `${PREFIX}/${remoteVersion}/install.sh`); - }; - - uploadForVersion(version, version); - if (latest) { - uploadForVersion("latest", "latest"); - } -} - -function uploadBinaries(rootDir: string, version: string, latest: boolean) { - const distDir = path.join(rootDir, "dist"); - if (!fs.existsSync(distDir)) { - throw new Error(`dist directory not found at ${distDir}`); - } - - for (const fileName of BINARY_FILES) { - const localPath = path.join(distDir, fileName); - if (!fs.existsSync(localPath)) { - throw new Error(`Missing binary: ${localPath}`); - } - - uploadFile(localPath, `${PREFIX}/${version}/${fileName}`); - if (latest) { - uploadFile(localPath, `${PREFIX}/latest/${fileName}`); - } - } -} - -function runChecks(rootDir: string) { - console.log("==> Installing Node dependencies"); 
- run("pnpm", ["install"], { cwd: rootDir }); - - console.log("==> Building inspector frontend"); - run("pnpm", ["--filter", "@sandbox-agent/inspector", "build"], { - cwd: rootDir, - env: { ...process.env, SANDBOX_AGENT_SKIP_INSPECTOR: "1" }, - }); - - console.log("==> Running Rust checks"); - run("cargo", ["fmt", "--all", "--", "--check"], { cwd: rootDir }); - run("cargo", ["clippy", "--all-targets", "--", "-D", "warnings"], { cwd: rootDir }); - run("cargo", ["test", "--all-targets"], { cwd: rootDir }); - - console.log("==> Running TypeScript checks"); - run("pnpm", ["run", "build"], { cwd: rootDir }); - - console.log("==> Running TypeScript SDK tests"); - run("pnpm", ["--filter", "sandbox-agent", "test"], { cwd: rootDir }); - - console.log("==> Running CLI SDK tests"); - run("pnpm", ["--filter", "@sandbox-agent/cli", "test"], { cwd: rootDir }); - - console.log("==> Validating OpenAPI spec for Mintlify"); - run("pnpm", ["dlx", "mint", "openapi-check", "docs/openapi.json"], { cwd: rootDir }); -} - -function publishCrates(rootDir: string, version: string) { - updateVersion(rootDir, version); - - for (const crate of CRATE_ORDER) { - const crateName = `sandbox-agent-${crate}`; - if (crateVersionExists(crateName, version)) { - console.log(`==> Skipping ${crateName}@${version} (already published)`); - continue; - } - console.log(`==> Publishing ${crateName}`); - const crateDir = path.join(rootDir, "server", "packages", crate); - run("cargo", ["publish", "--allow-dirty"], { cwd: crateDir }); - console.log("Waiting 30s for index..."); - Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, 30000); - } -} - -function publishNpmSdk(rootDir: string, version: string, latest: boolean) { - const sdkDir = path.join(rootDir, "sdks", "typescript"); - const packageName = "sandbox-agent"; - - if (npmVersionExists(packageName, version)) { - console.log(`==> Skipping ${packageName}@${version} (already published)`); - return; - } - - console.log(`==> Publishing 
${packageName}@${version} to npm`); - const npmTag = getNpmTag(version, latest); - run("npm", ["version", version, "--no-git-tag-version", "--allow-same-version"], { cwd: sdkDir }); - run("pnpm", ["install"], { cwd: sdkDir }); - run("pnpm", ["run", "build"], { cwd: sdkDir }); - const publishArgs = ["publish", "--access", "public"]; - if (npmTag) publishArgs.push("--tag", npmTag); - run("npm", publishArgs, { cwd: sdkDir }); -} - -function publishNpmCli(rootDir: string, version: string, latest: boolean) { - const cliDir = path.join(rootDir, "sdks", "cli"); - const distDir = path.join(rootDir, "dist"); - const npmTag = getNpmTag(version, latest); - - for (const [target, info] of Object.entries(PLATFORM_MAP)) { - const packageName = `@sandbox-agent/cli-${info.pkg}`; - - if (npmVersionExists(packageName, version)) { - console.log(`==> Skipping ${packageName}@${version} (already published)`); - continue; - } - - const platformDir = path.join(cliDir, "platforms", info.pkg); - const binDir = path.join(platformDir, "bin"); - fs.mkdirSync(binDir, { recursive: true }); - - const srcBinary = path.join(distDir, `sandbox-agent-${target}${info.ext}`); - const dstBinary = path.join(binDir, `sandbox-agent${info.ext}`); - fs.copyFileSync(srcBinary, dstBinary); - if (info.ext !== ".exe") fs.chmodSync(dstBinary, 0o755); - - console.log(`==> Publishing ${packageName}@${version}`); - run("npm", ["version", version, "--no-git-tag-version", "--allow-same-version"], { cwd: platformDir }); - const publishArgs = ["publish", "--access", "public"]; - if (npmTag) publishArgs.push("--tag", npmTag); - run("npm", publishArgs, { cwd: platformDir }); - } - - const mainPackageName = "@sandbox-agent/cli"; - if (npmVersionExists(mainPackageName, version)) { - console.log(`==> Skipping ${mainPackageName}@${version} (already published)`); - return; - } - - console.log(`==> Publishing ${mainPackageName}@${version}`); - const pkgPath = path.join(cliDir, "package.json"); - const pkg = 
JSON.parse(fs.readFileSync(pkgPath, "utf8")); - pkg.version = version; - for (const dep of Object.keys(pkg.optionalDependencies || {})) { - pkg.optionalDependencies[dep] = version; - } - fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n"); - const publishArgs = ["publish", "--access", "public"]; - if (npmTag) publishArgs.push("--tag", npmTag); - run("npm", publishArgs, { cwd: cliDir }); -} - -function validateGit(rootDir: string) { - const status = runCapture("git", ["status", "--porcelain"], { cwd: rootDir }); - if (status.trim()) { - throw new Error("Working tree is dirty; commit or stash changes before release."); - } -} - -async function confirmRelease(version: string, latest: boolean) { - const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); - const answer = await new Promise((resolve) => { - rl.question(`Release ${version} (latest=${latest})? (yes/no): `, resolve); - }); - rl.close(); - if (answer.toLowerCase() !== "yes" && answer.toLowerCase() !== "y") { - console.log("Release cancelled"); - process.exit(0); - } -} - async function main() { - const { args, flags } = parseArgs(process.argv.slice(2)); - const versionArg = args.get("--version"); - if (!versionArg) { - console.error("--version is required"); - process.exit(1); - } + // Setup commander + program + .name("release") + .description("Release a new version of sandbox-agent") + .option("--major", "Bump major version") + .option("--minor", "Bump minor version") + .option("--patch", "Bump patch version") + .option("--version ", "Set specific version") + .option( + "--override-commit ", + "Override the commit to pull artifacts from (defaults to current commit)", + ) + .option( + "--reuse-engine-version ", + "Reuse artifacts and Docker images from a previous version (e.g., 0.1.0) or git revision (e.g., bb7f292)", + ) + .option("--latest", "Tag version as the latest version", true) + .option("--no-latest", "Do not tag version as the latest version") + 
.option("--no-validate-git", "Skip git validation (for testing)") + .option( + "--only-steps ", + `Run specific steps (comma-separated). Available: ${STEPS.join(", ")}`, + ) + .option( + "--phase ", + `Run a release phase (comma-separated). Available: ${PHASES.join(", ")}`, + ) + .parse(); - const version = versionArg.replace(/^v/, ""); - parseSemver(version); + const opts = program.opts(); - let latest: boolean; - if (flags.has("--latest")) { - latest = true; - } else if (flags.has("--no-latest")) { - latest = false; - } else { - latest = shouldTagAsLatest(version); - } + // Parse requested steps + if (!opts.phase && !opts.onlySteps) { + throw new Error( + "Must provide either --phase or --only-steps. Run with --help for more information.", + ); + } - const outputPath = args.get("--output"); - if (flags.has("--print-latest")) { - if (outputPath) { - fs.appendFileSync(outputPath, `latest=${latest}\n`); - } else { - process.stdout.write(latest ? "true" : "false"); - } - } + if (opts.phase && opts.onlySteps) { + throw new Error("Cannot use both --phase and --only-steps together"); + } - const phaseArg = args.get("--phase"); - const stepsArg = args.get("--only-steps"); - const requestedSteps = new Set(); + const requestedSteps = new Set(); + if (opts.onlySteps) { + const steps = opts.onlySteps.split(",").map((s: string) => s.trim()); + for (const step of steps) { + if (!STEPS.includes(step as Step)) { + throw new Error( + `Invalid step: ${step}. Available steps: ${STEPS.join(", ")}`, + ); + } + requestedSteps.add(step as Step); + } + } else if (opts.phase) { + const phases = opts.phase.split(",").map((s: string) => s.trim()); + for (const phase of phases) { + if (!PHASES.includes(phase as Phase)) { + throw new Error( + `Invalid phase: ${phase}. 
Available phases: ${PHASES.join(", ")}`, + ); + } + const steps = PHASE_MAP[phase as Phase]; + for (const step of steps) { + requestedSteps.add(step); + } + } + } - if (phaseArg || stepsArg) { - if (phaseArg && stepsArg) { - throw new Error("Cannot use both --phase and --only-steps"); - } + // Helper function to check if a step should run + const shouldRunStep = (step: Step): boolean => { + return requestedSteps.has(step); + }; - if (phaseArg) { - const phases = phaseArg.split(",").map((value) => value.trim()); - for (const phase of phases) { - if (!PHASES.includes(phase as Phase)) { - throw new Error(`Invalid phase: ${phase}`); - } - for (const step of PHASE_MAP[phase as Phase]) { - requestedSteps.add(step); - } - } - } + // Get version from arguments or calculate based on flags + const version = await getVersionFromArgs({ + version: opts.version, + major: opts.major, + minor: opts.minor, + patch: opts.patch, + }); - if (stepsArg) { - const steps = stepsArg.split(",").map((value) => value.trim()); - for (const step of steps) { - if (!STEPS.includes(step as Step)) { - throw new Error(`Invalid step: ${step}`); - } - requestedSteps.add(step as Step); - } - } - } + assert( + /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/.test( + version, + ), + "version must be a valid semantic version", + ); - const rootDir = process.cwd(); - const shouldRun = (step: Step) => requestedSteps.has(step); - const hasPhases = requestedSteps.size > 0; + // Automatically determine if this should be tagged as latest + // Can be overridden by --latest or --no-latest flags + let isLatest: boolean; + if (opts.latest !== undefined) { + // User explicitly set the flag + isLatest = opts.latest; + } else { + // Auto-determine based on version + isLatest = await shouldTagAsLatest(version); + console.log(`Auto-determined latest flag: ${isLatest} (version: 
${version})`); + } - if (!hasPhases) { - if (flags.has("--check")) { - runChecks(rootDir); - } - if (flags.has("--publish-crates")) { - publishCrates(rootDir, version); - } - if (flags.has("--publish-npm-sdk")) { - publishNpmSdk(rootDir, version, latest); - } - if (flags.has("--publish-npm-cli")) { - publishNpmCli(rootDir, version, latest); - } - if (flags.has("--upload-typescript")) { - uploadTypescriptArtifacts(rootDir, version, latest); - } - if (flags.has("--upload-install")) { - uploadInstallScript(rootDir, version, latest); - } - if (flags.has("--upload-binaries")) { - uploadBinaries(rootDir, version, latest); - } - return; - } + // Setup opts + let commit: string; + if (opts.overrideCommit) { + // Manually override commit + commit = opts.overrideCommit; + } else { + // Read commit + const result = await $`git rev-parse HEAD`; + commit = result.stdout.trim(); + } - if (shouldRun("confirm-release") && !flags.has("--no-confirm")) { - await confirmRelease(version, latest); - } + const releaseOpts: ReleaseOpts = { + root: ROOT_DIR, + version: version, + latest: isLatest, + commit, + reuseEngineVersion: opts.reuseEngineVersion, + }; - const validateGitEnabled = !flags.has("--no-validate-git"); - if ((shouldRun("git-commit") || shouldRun("git-push")) && validateGitEnabled) { - validateGit(rootDir); - } + if (releaseOpts.commit.length == 40) { + releaseOpts.commit = releaseOpts.commit.slice(0, 7); + } - if (shouldRun("update-version")) { - console.log("==> Updating versions"); - updateVersion(rootDir, version); - } + assertEquals(releaseOpts.commit.length, 7, "must use 7 char short commit"); - if (shouldRun("generate-artifacts")) { - console.log("==> Generating OpenAPI and universal schemas"); - generateArtifacts(rootDir); - } + if (opts.validateGit && !shouldRunStep("run-ci-checks")) { + // HACK: Skip setup-ci because for some reason there's changes in the setup step but only in GitHub Actions + await validateGit(releaseOpts); + } - if (shouldRun("git-commit")) { - 
console.log("==> Committing changes"); - run("git", ["add", "."], { cwd: rootDir }); - run("git", ["commit", "--allow-empty", "-m", `chore(release): update version to ${version}`], { - cwd: rootDir, - }); - } + if (shouldRunStep("confirm-release")) { + console.log("==> Release Confirmation"); + console.log(`\nRelease Details:`); + console.log(` Version: ${releaseOpts.version}`); + console.log(` Latest: ${releaseOpts.latest}`); + console.log(` Commit: ${releaseOpts.commit}`); + if (releaseOpts.reuseEngineVersion) { + console.log(` Reusing engine version: ${releaseOpts.reuseEngineVersion}`); + } - if (shouldRun("git-push")) { - console.log("==> Pushing changes"); - const branch = runCapture("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: rootDir }); - if (branch === "main") { - run("git", ["push"], { cwd: rootDir }); - } else { - run("git", ["push", "-u", "origin", "HEAD"], { cwd: rootDir }); - } - } + // Get current branch + const branchResult = await $`git rev-parse --abbrev-ref HEAD`; + const branch = branchResult.stdout.trim(); + console.log(` Branch: ${branch}`); - // if (shouldRun("trigger-workflow")) { - // console.log("==> Triggering release workflow"); - // const branch = runCapture("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: rootDir }); - // const latestFlag = latest ? 
"true" : "false"; - // run( - // "gh", - // [ - // "workflow", - // "run", - // ".github/workflows/release.yaml", - // "-f", - // `version=${version}`, - // "-f", - // `latest=${latestFlag}`, - // "--ref", - // branch, - // ], - // { cwd: rootDir }, - // ); - // } + // Get and display recent versions + const allVersions = await getAllGitVersions(); - if (shouldRun("run-checks")) { - runChecks(rootDir); - } + if (allVersions.length > 0) { + // Find the latest stable version (excluding prereleases) + const stableVersions = allVersions.filter(v => { + const parsed = semver.parse(v); + return parsed && parsed.prerelease.length === 0; + }); + const latestStableVersion = stableVersions[0] || null; - if (shouldRun("publish-crates")) { - publishCrates(rootDir, version); - } + console.log(`\nRecent versions:`); + const recentVersions = allVersions.slice(0, 10); + for (const version of recentVersions) { + const isLatest = version === latestStableVersion; + const marker = isLatest ? " (latest)" : ""; + console.log(` - ${version}${marker}`); + } + } - if (shouldRun("publish-npm-sdk")) { - publishNpmSdk(rootDir, version, latest); - } + // Prompt for confirmation + const readline = await import("node:readline"); + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }); - if (shouldRun("publish-npm-cli")) { - publishNpmCli(rootDir, version, latest); - } + const answer = await new Promise((resolve) => { + rl.question("\nProceed with release? 
(yes/no): ", resolve); + }); + rl.close(); - if (shouldRun("upload-typescript")) { - uploadTypescriptArtifacts(rootDir, version, latest); - } + if (answer.toLowerCase() !== "yes" && answer.toLowerCase() !== "y") { + console.log("Release cancelled"); + process.exit(0); + } - if (shouldRun("upload-install")) { - uploadInstallScript(rootDir, version, latest); - } + console.log("✅ Release confirmed"); + } - if (shouldRun("upload-binaries")) { - uploadBinaries(rootDir, version, latest); - } + if (shouldRunStep("update-version")) { + console.log("==> Updating Version"); + await updateVersion(releaseOpts); + } - if (shouldRun("push-tag")) { - createAndPushTag(rootDir, version); - } + if (shouldRunStep("run-local-checks")) { + console.log("==> Running Local Checks"); + await runLocalChecks(releaseOpts); + } - if (shouldRun("create-github-release")) { - createGitHubRelease(rootDir, version); - } + if (shouldRunStep("git-commit")) { + assert(opts.validateGit, "cannot commit without git validation"); + console.log("==> Committing Changes"); + await $({ stdio: "inherit" })`git add .`; + await $({ + stdio: "inherit", + shell: true, + })`git commit --allow-empty -m "chore(release): update version to ${releaseOpts.version}"`; + } + + if (shouldRunStep("git-push")) { + assert(opts.validateGit, "cannot push without git validation"); + console.log("==> Pushing Commits"); + const branchResult = await $`git rev-parse --abbrev-ref HEAD`; + const branch = branchResult.stdout.trim(); + if (branch === "main") { + // Push on main + await $({ stdio: "inherit" })`git push`; + } else { + // Modify current branch + await $({ stdio: "inherit" })`gt submit --force --no-edit --publish`; + } + } + + if (shouldRunStep("trigger-workflow")) { + console.log("==> Triggering Workflow"); + const branchResult = await $`git rev-parse --abbrev-ref HEAD`; + const branch = branchResult.stdout.trim(); + const latestFlag = releaseOpts.latest ? 
"true" : "false"; + + // Build workflow command + let workflowCmd = `gh workflow run .github/workflows/release.yaml -f version=${releaseOpts.version} -f latest=${latestFlag}`; + if (releaseOpts.reuseEngineVersion) { + workflowCmd += ` -f reuse_engine_version=${releaseOpts.reuseEngineVersion}`; + } + workflowCmd += ` --ref ${branch}`; + + await $({ stdio: "inherit", shell: true })`${workflowCmd}`; + + // Get repository info and print workflow link + const repoResult = await $`gh repo view --json nameWithOwner -q .nameWithOwner`; + const repo = repoResult.stdout.trim(); + console.log(`\nWorkflow triggered: https://github.com/${repo}/actions/workflows/release.yaml`); + console.log(`View all runs: https://github.com/${repo}/actions`); + } + + if (shouldRunStep("validate-reuse-version")) { + if (releaseOpts.reuseEngineVersion) { + console.log("==> Validating Reuse Version"); + await validateReuseVersion(releaseOpts.reuseEngineVersion); + } + } + + if (shouldRunStep("run-ci-checks")) { + console.log("==> Running CI Checks"); + await runCiChecks(releaseOpts); + } + + if (shouldRunStep("build-js-artifacts")) { + console.log("==> Building JS Artifacts"); + await buildJsArtifacts(releaseOpts); + } + + if (shouldRunStep("publish-crates")) { + console.log("==> Publishing Crates"); + await publishCrates(releaseOpts); + } + + if (shouldRunStep("publish-npm-sdk")) { + console.log("==> Publishing NPM SDK"); + await publishNpmSdk(releaseOpts); + } + + if (shouldRunStep("publish-npm-cli")) { + console.log("==> Publishing NPM CLI"); + await publishNpmCli(releaseOpts); + } + + if (shouldRunStep("tag-docker")) { + console.log("==> Tagging Docker"); + await tagDocker(releaseOpts); + } + + if (shouldRunStep("promote-artifacts")) { + console.log("==> Promoting Artifacts"); + await promoteArtifacts(releaseOpts); + } + + if (shouldRunStep("push-tag")) { + console.log("==> Pushing Tag"); + await createAndPushTag(releaseOpts); + } + + if (shouldRunStep("create-github-release")) { + 
console.log("==> Creating GitHub Release"); + await createGitHubRelease(releaseOpts); + } + + console.log("==> Complete"); } main().catch((err) => { - console.error(err); - process.exit(1); + console.error(err); + process.exit(1); }); diff --git a/scripts/release/package.json b/scripts/release/package.json index dfa483b..445684d 100644 --- a/scripts/release/package.json +++ b/scripts/release/package.json @@ -1,19 +1,23 @@ { - "name": "release", - "version": "0.1.0", - "private": true, - "type": "module", - "scripts": { - "check-types": "tsc --noEmit" - }, - "devDependencies": { - "@types/node": "^22.0.0", - "@types/semver": "^7.5.8" - }, - "dependencies": { - "commander": "^12.1.0", - "execa": "^9.5.0", - "glob": "^10.3.10", - "semver": "^7.6.0" - } + "name": "release", + "version": "2.0.21", + "description": "", + "main": "index.js", + "scripts": { + "check-types": "tsc --noEmit" + }, + "keywords": [], + "author": "", + "license": "ISC", + "packageManager": "pnpm@10.13.1", + "devDependencies": { + "@types/node": "^24.3.0", + "@types/semver": "^7.5.8" + }, + "dependencies": { + "commander": "^12.1.0", + "execa": "^8.0.1", + "glob": "^10.3.10", + "semver": "^7.6.0" + } } diff --git a/scripts/release/promote-artifacts.ts b/scripts/release/promote-artifacts.ts new file mode 100644 index 0000000..9cfdff2 --- /dev/null +++ b/scripts/release/promote-artifacts.ts @@ -0,0 +1,78 @@ +import * as fs from "node:fs/promises"; +import * as path from "node:path"; +import { $ } from "execa"; +import type { ReleaseOpts } from "./main"; +import { + copyReleasesPath, + deleteReleasesPath, + fetchGitRef, + listReleasesObjects, + PREFIX, + uploadContentToReleases, + versionOrCommitToRef, +} from "./utils"; + +export async function promoteArtifacts(opts: ReleaseOpts) { + // Determine which commit to use for source artifacts + let sourceCommit = opts.commit; + if (opts.reuseEngineVersion) { + console.log(`==> Reusing artifacts from ${opts.reuseEngineVersion}`); + const ref = 
versionOrCommitToRef(opts.reuseEngineVersion); + await fetchGitRef(ref); + const result = await $`git rev-parse ${ref}`; + sourceCommit = result.stdout.trim().slice(0, 7); + console.log(`==> Source commit: ${sourceCommit}`); + } + + // Promote TypeScript SDK artifacts (uploaded by build-artifacts.ts to sandbox-agent/{commit}/typescript/) + await promotePath(opts, sourceCommit, "typescript"); + + // Promote binary artifacts (uploaded by CI in release.yaml to sandbox-agent/{commit}/binaries/) + await promotePath(opts, sourceCommit, "binaries"); + + // Upload install scripts + await uploadInstallScripts(opts, opts.version); + if (opts.latest) { + await uploadInstallScripts(opts, "latest"); + } +} + + +async function uploadInstallScripts(opts: ReleaseOpts, version: string) { + const installScriptPaths = [ + path.resolve(opts.root, "scripts/release/static/install.sh"), + path.resolve(opts.root, "scripts/release/static/install.ps1"), + ]; + + for (const scriptPath of installScriptPaths) { + let scriptContent = await fs.readFile(scriptPath, "utf-8"); + scriptContent = scriptContent.replace(/__VERSION__/g, version); + + const uploadKey = `${PREFIX}/${version}/${scriptPath.split("/").pop() ?? 
""}`; + + console.log(`Uploading install script: ${uploadKey}`); + await uploadContentToReleases(scriptContent, uploadKey); + } +} + +async function copyPath(sourcePrefix: string, targetPrefix: string) { + console.log(`Copying ${sourcePrefix} -> ${targetPrefix}`); + await deleteReleasesPath(targetPrefix); + await copyReleasesPath(sourcePrefix, targetPrefix); +} + +/** S3-to-S3 copy from sandbox-agent/{commit}/{name}/ to sandbox-agent/{version}/{name}/ */ +async function promotePath(opts: ReleaseOpts, sourceCommit: string, name: string) { + console.log(`==> Promoting ${name} artifacts`); + + const sourcePrefix = `${PREFIX}/${sourceCommit}/${name}/`; + const commitFiles = await listReleasesObjects(sourcePrefix); + if (!Array.isArray(commitFiles?.Contents) || commitFiles.Contents.length === 0) { + throw new Error(`No files found under ${sourcePrefix}`); + } + + await copyPath(sourcePrefix, `${PREFIX}/${opts.version}/${name}/`); + if (opts.latest) { + await copyPath(sourcePrefix, `${PREFIX}/latest/${name}/`); + } +} diff --git a/scripts/release/publish.ts b/scripts/release/publish.ts deleted file mode 100644 index 476c3ec..0000000 --- a/scripts/release/publish.ts +++ /dev/null @@ -1,166 +0,0 @@ -import * as fs from "node:fs/promises"; -import * as path from "node:path"; -import { $ } from "execa"; -import * as semver from "semver"; -import type { ReleaseOpts } from "./main.js"; - -const CRATE_ORDER = [ - "error", - "agent-credentials", - "agent-schema", - "universal-agent-schema", - "agent-management", - "sandbox-agent", -]; - -const PLATFORM_MAP: Record = { - "x86_64-unknown-linux-musl": { pkg: "linux-x64", os: "linux", cpu: "x64", ext: "" }, - "x86_64-pc-windows-gnu": { pkg: "win32-x64", os: "win32", cpu: "x64", ext: ".exe" }, - "x86_64-apple-darwin": { pkg: "darwin-x64", os: "darwin", cpu: "x64", ext: "" }, - "aarch64-apple-darwin": { pkg: "darwin-arm64", os: "darwin", cpu: "arm64", ext: "" }, -}; - -async function npmVersionExists(packageName: string, version: 
string): Promise { - console.log(`Checking if ${packageName}@${version} exists on npm...`); - try { - await $({ - stdout: "ignore", - stderr: "pipe", - })`npm view ${packageName}@${version} version`; - return true; - } catch (error: unknown) { - const stderr = error && typeof error === "object" && "stderr" in error - ? String(error.stderr) - : ""; - if ( - stderr.includes(`No match found for version ${version}`) || - stderr.includes(`'${packageName}@${version}' is not in this registry`) - ) { - return false; - } - // Unexpected error, assume not exists to allow publish attempt - return false; - } -} - -async function crateVersionExists(crateName: string, version: string): Promise { - console.log(`Checking if ${crateName}@${version} exists on crates.io...`); - try { - const result = await $`cargo search ${crateName} --limit 1`; - const output = result.stdout || ""; - const match = output.match(new RegExp(`^${crateName}\\s*=\\s*"([^"]+)"`)); - return !!(match && match[1] === version); - } catch { - return false; - } -} - -function getNpmTag(version: string, latest: boolean): string | null { - if (latest) return null; - const parsed = semver.parse(version); - if (!parsed) throw new Error(`Invalid version: ${version}`); - - if (parsed.prerelease.length === 0) { - return "next"; - } - const hasRc = parsed.prerelease.some((part) => - String(part).toLowerCase().startsWith("rc") - ); - if (hasRc) { - return "rc"; - } - throw new Error(`Prerelease versions must use rc tag when not latest: ${version}`); -} - -export async function publishCrates(opts: ReleaseOpts) { - for (const crate of CRATE_ORDER) { - const crateName = `sandbox-agent-${crate}`; - - if (await crateVersionExists(crateName, opts.version)) { - console.log(`==> Skipping ${crateName}@${opts.version} (already published)`); - continue; - } - - console.log(`==> Publishing ${crateName}@${opts.version}`); - const crateDir = path.join(opts.root, "server", "packages", crate); - await $({ stdio: "inherit", cwd: crateDir 
})`cargo publish --allow-dirty`; - - console.log("Waiting 30s for crates.io index..."); - await new Promise(resolve => setTimeout(resolve, 30000)); - } -} - -export async function publishNpmSdk(opts: ReleaseOpts) { - const sdkDir = path.join(opts.root, "sdks", "typescript"); - const packageName = "sandbox-agent"; - - if (await npmVersionExists(packageName, opts.version)) { - console.log(`==> Skipping ${packageName}@${opts.version} (already published)`); - return; - } - - console.log(`==> Publishing ${packageName}@${opts.version}`); - const npmTag = getNpmTag(opts.version, opts.latest); - - await $({ stdio: "inherit", cwd: sdkDir })`npm version ${opts.version} --no-git-tag-version --allow-same-version`; - await $({ stdio: "inherit", cwd: sdkDir })`pnpm install`; - await $({ stdio: "inherit", cwd: sdkDir })`pnpm run build`; - - const publishArgs = ["publish", "--access", "public"]; - if (npmTag) publishArgs.push("--tag", npmTag); - await $({ stdio: "inherit", cwd: sdkDir })`npm ${publishArgs}`; -} - -export async function publishNpmCli(opts: ReleaseOpts) { - const cliDir = path.join(opts.root, "sdks", "cli"); - const distDir = path.join(opts.root, "dist"); - const npmTag = getNpmTag(opts.version, opts.latest); - - // Publish platform-specific packages - for (const [target, info] of Object.entries(PLATFORM_MAP)) { - const packageName = `@sandbox-agent/cli-${info.pkg}`; - - if (await npmVersionExists(packageName, opts.version)) { - console.log(`==> Skipping ${packageName}@${opts.version} (already published)`); - continue; - } - - const platformDir = path.join(cliDir, "platforms", info.pkg); - const binDir = path.join(platformDir, "bin"); - await fs.mkdir(binDir, { recursive: true }); - - const srcBinary = path.join(distDir, `sandbox-agent-${target}${info.ext}`); - const dstBinary = path.join(binDir, `sandbox-agent${info.ext}`); - await fs.copyFile(srcBinary, dstBinary); - if (info.ext !== ".exe") { - await fs.chmod(dstBinary, 0o755); - } - - console.log(`==> Publishing 
${packageName}@${opts.version}`); - await $({ stdio: "inherit", cwd: platformDir })`npm version ${opts.version} --no-git-tag-version --allow-same-version`; - - const publishArgs = ["publish", "--access", "public"]; - if (npmTag) publishArgs.push("--tag", npmTag); - await $({ stdio: "inherit", cwd: platformDir })`npm ${publishArgs}`; - } - - // Publish main CLI package - const mainPackageName = "@sandbox-agent/cli"; - if (await npmVersionExists(mainPackageName, opts.version)) { - console.log(`==> Skipping ${mainPackageName}@${opts.version} (already published)`); - return; - } - - console.log(`==> Publishing ${mainPackageName}@${opts.version}`); - const pkgPath = path.join(cliDir, "package.json"); - const pkg = JSON.parse(await fs.readFile(pkgPath, "utf8")); - pkg.version = opts.version; - for (const dep of Object.keys(pkg.optionalDependencies || {})) { - pkg.optionalDependencies[dep] = opts.version; - } - await fs.writeFile(pkgPath, JSON.stringify(pkg, null, 2) + "\n"); - - const publishArgs = ["publish", "--access", "public"]; - if (npmTag) publishArgs.push("--tag", npmTag); - await $({ stdio: "inherit", cwd: cliDir })`npm ${publishArgs}`; -} diff --git a/scripts/release/sdk.ts b/scripts/release/sdk.ts new file mode 100644 index 0000000..9302d04 --- /dev/null +++ b/scripts/release/sdk.ts @@ -0,0 +1,203 @@ +import { $ } from "execa"; +import { readFile } from "node:fs/promises"; +import { join } from "node:path"; +import type { ReleaseOpts } from "./main"; + +// Crates to publish in dependency order +const CRATES = [ + "error", + "agent-credentials", + "extracted-agent-schemas", + "universal-agent-schema", + "agent-management", + "sandbox-agent", +] as const; + +// NPM CLI packages +const CLI_PACKAGES = [ + "@sandbox-agent/cli", + "@sandbox-agent/cli-linux-x64", + "@sandbox-agent/cli-win32-x64", + "@sandbox-agent/cli-darwin-x64", + "@sandbox-agent/cli-darwin-arm64", +] as const; + +async function npmVersionExists( + packageName: string, + version: string, +): 
Promise { + console.log( + `==> Checking if NPM version exists: ${packageName}@${version}`, + ); + try { + await $({ + stdout: "ignore", + stderr: "pipe", + })`npm view ${packageName}@${version} version`; + return true; + } catch (error: any) { + if (error.stderr) { + if ( + !error.stderr.includes( + `No match found for version ${version}`, + ) && + !error.stderr.includes( + `'${packageName}@${version}' is not in this registry.`, + ) + ) { + throw new Error( + `unexpected npm view version output: ${error.stderr}`, + ); + } + } + return false; + } +} + +async function crateVersionExists( + crateName: string, + version: string, +): Promise { + console.log(`==> Checking if crate version exists: ${crateName}@${version}`); + try { + const result = await $({ + stdout: "pipe", + stderr: "pipe", + })`cargo search ${crateName} --limit 1`; + // cargo search output format: "cratename = \"version\" # description" + const output = result.stdout; + const match = output.match(new RegExp(`^${crateName}\\s*=\\s*"([^"]+)"`)); + if (match && match[1] === version) { + return true; + } + return false; + } catch (error: any) { + // If cargo search fails, assume crate doesn't exist + return false; + } +} + +export async function publishCrates(opts: ReleaseOpts) { + console.log("==> Publishing crates to crates.io"); + + for (const crate of CRATES) { + const cratePath = join(opts.root, "server/packages", crate); + + // Read Cargo.toml to get the actual crate name + const cargoTomlPath = join(cratePath, "Cargo.toml"); + const cargoToml = await readFile(cargoTomlPath, "utf-8"); + const nameMatch = cargoToml.match(/^name\s*=\s*"([^"]+)"/m); + const crateName = nameMatch ? nameMatch[1] : `sandbox-agent-${crate}`; + + // Check if version already exists + const versionExists = await crateVersionExists(crateName, opts.version); + if (versionExists) { + console.log( + `Version ${opts.version} of ${crateName} already exists on crates.io. 
Skipping...`, + ); + continue; + } + + // Publish + console.log(`==> Publishing to crates.io: ${crateName}@${opts.version}`); + + try { + await $({ + stdio: "inherit", + cwd: cratePath, + })`cargo publish --allow-dirty`; + console.log(`✅ Published ${crateName}@${opts.version}`); + } catch (err) { + console.error(`❌ Failed to publish ${crateName}`); + throw err; + } + + // Wait a bit for crates.io to index the new version (needed for dependency resolution) + console.log("Waiting for crates.io to index..."); + await new Promise((resolve) => setTimeout(resolve, 30000)); + } + + console.log("✅ All crates published"); +} + +export async function publishNpmSdk(opts: ReleaseOpts) { + const sdkPath = join(opts.root, "sdks/typescript"); + const packageJsonPath = join(sdkPath, "package.json"); + const packageJson = JSON.parse(await readFile(packageJsonPath, "utf-8")); + const name = packageJson.name; + + // Check if version already exists + const versionExists = await npmVersionExists(name, opts.version); + if (versionExists) { + console.log( + `Version ${opts.version} of ${name} already exists. Skipping...`, + ); + return; + } + + // Build the SDK + console.log(`==> Building TypeScript SDK`); + await $({ + stdio: "inherit", + cwd: opts.root, + })`pnpm --filter sandbox-agent build`; + + // Publish + console.log(`==> Publishing to NPM: ${name}@${opts.version}`); + + // Add --tag flag for release candidates + const isReleaseCandidate = opts.version.includes("-rc."); + const tag = isReleaseCandidate ? 
"rc" : "latest"; + + await $({ + stdio: "inherit", + cwd: sdkPath, + })`pnpm publish --access public --tag ${tag} --no-git-checks`; + + console.log(`✅ Published ${name}@${opts.version}`); +} + +export async function publishNpmCli(opts: ReleaseOpts) { + console.log("==> Publishing CLI packages to NPM"); + + for (const packageName of CLI_PACKAGES) { + // Check if version already exists + const versionExists = await npmVersionExists(packageName, opts.version); + if (versionExists) { + console.log( + `Version ${opts.version} of ${packageName} already exists. Skipping...`, + ); + continue; + } + + // Determine package path + let packagePath: string; + if (packageName === "@sandbox-agent/cli") { + packagePath = join(opts.root, "sdks/cli"); + } else { + // Platform-specific packages: @sandbox-agent/cli-linux-x64 -> sdks/cli/platforms/linux-x64 + const platform = packageName.replace("@sandbox-agent/cli-", ""); + packagePath = join(opts.root, "sdks/cli/platforms", platform); + } + + // Publish + console.log(`==> Publishing to NPM: ${packageName}@${opts.version}`); + + // Add --tag flag for release candidates + const isReleaseCandidate = opts.version.includes("-rc."); + const tag = isReleaseCandidate ? 
"rc" : "latest"; + + try { + await $({ + stdio: "inherit", + cwd: packagePath, + })`pnpm publish --access public --tag ${tag} --no-git-checks`; + console.log(`✅ Published ${packageName}@${opts.version}`); + } catch (err) { + console.error(`❌ Failed to publish ${packageName}`); + throw err; + } + } + + console.log("✅ All CLI packages published"); +} diff --git a/scripts/release/static/install.ps1 b/scripts/release/static/install.ps1 new file mode 100644 index 0000000..0bb9436 --- /dev/null +++ b/scripts/release/static/install.ps1 @@ -0,0 +1,51 @@ +#!/usr/bin/env pwsh + +$ErrorActionPreference = 'Stop' + +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + +# Create bin directory for sandbox-agent +$BinDir = $env:BIN_DIR +$SandboxAgentInstall = if ($BinDir) { + $BinDir +} else { + "${Home}\.sandbox-agent\bin" +} + +if (!(Test-Path $SandboxAgentInstall)) { + New-Item $SandboxAgentInstall -ItemType Directory | Out-Null +} + +$SandboxAgentExe = "$SandboxAgentInstall\sandbox-agent.exe" +$Version = '__VERSION__' +$FileName = 'sandbox-agent-x86_64-pc-windows-gnu.exe' + +Write-Host +Write-Host "> Installing sandbox-agent ${Version}" + +# Download binary +$DownloadUrl = "https://releases.rivet.dev/sandbox-agent/${Version}/binaries/${FileName}" +Write-Host +Write-Host "> Downloading ${DownloadUrl}" +Invoke-WebRequest $DownloadUrl -OutFile $SandboxAgentExe -UseBasicParsing + +# Install to PATH +Write-Host +Write-Host "> Installing sandbox-agent" +$User = [System.EnvironmentVariableTarget]::User +$Path = [System.Environment]::GetEnvironmentVariable('Path', $User) +if (!(";${Path};".ToLower() -like "*;${SandboxAgentInstall};*".ToLower())) { + [System.Environment]::SetEnvironmentVariable('Path', "${Path};${SandboxAgentInstall}", $User) + $Env:Path += ";${SandboxAgentInstall}" + Write-Host "Please restart your PowerShell session or run the following command to refresh the environment variables:" + Write-Host 
"[System.Environment]::SetEnvironmentVariable('Path', '${Path};${SandboxAgentInstall}', [System.EnvironmentVariableTarget]::Process)" +} + +Write-Host +Write-Host "> Checking installation" +sandbox-agent.exe --version + +Write-Host +Write-Host "sandbox-agent was installed successfully to ${SandboxAgentExe}." +Write-Host "Run 'sandbox-agent --help' to get started." +Write-Host diff --git a/scripts/release/static/install.sh b/scripts/release/static/install.sh index fe5e806..5ccf931 100755 --- a/scripts/release/static/install.sh +++ b/scripts/release/static/install.sh @@ -7,17 +7,19 @@ # shellcheck enable=require-variable-braces set -eu -WORK_DIR="/tmp/sandbox_agent_install" -rm -rf "$WORK_DIR" -mkdir -p "$WORK_DIR" -cd "$WORK_DIR" +rm -rf /tmp/sandbox_agent_install +mkdir /tmp/sandbox_agent_install +cd /tmp/sandbox_agent_install SANDBOX_AGENT_VERSION="${SANDBOX_AGENT_VERSION:-__VERSION__}" -SANDBOX_AGENT_BASE_URL="${SANDBOX_AGENT_BASE_URL:-https://releases.rivet.dev}" UNAME="$(uname -s)" ARCH="$(uname -m)" +# Find asset suffix if [ "$(printf '%s' "$UNAME" | cut -c 1-6)" = "Darwin" ]; then + echo + echo "> Detected macOS" + if [ "$ARCH" = "x86_64" ]; then FILE_NAME="sandbox-agent-x86_64-apple-darwin" elif [ "$ARCH" = "arm64" ]; then @@ -27,54 +29,62 @@ if [ "$(printf '%s' "$UNAME" | cut -c 1-6)" = "Darwin" ]; then exit 1 fi elif [ "$(printf '%s' "$UNAME" | cut -c 1-5)" = "Linux" ]; then - if [ "$ARCH" = "x86_64" ]; then - FILE_NAME="sandbox-agent-x86_64-unknown-linux-musl" - else - echo "Unsupported Linux arch $ARCH" 1>&2 - exit 1 - fi + echo + echo "> Detected Linux ($(getconf LONG_BIT) bit)" + + FILE_NAME="sandbox-agent-x86_64-unknown-linux-musl" else echo "Unable to determine platform" 1>&2 exit 1 fi +# Determine install location set +u if [ -z "$BIN_DIR" ]; then BIN_DIR="/usr/local/bin" fi set -u - INSTALL_PATH="$BIN_DIR/sandbox-agent" if [ ! -d "$BIN_DIR" ]; then - CHECK_DIR="$BIN_DIR" - while [ ! 
-d "$CHECK_DIR" ] && [ "$CHECK_DIR" != "/" ]; do - CHECK_DIR=$(dirname "$CHECK_DIR") - done + # Find the base parent directory. We're using mkdir -p, which recursively creates directories, so we can't rely on `dirname`. + CHECK_DIR="$BIN_DIR" + while [ ! -d "$CHECK_DIR" ] && [ "$CHECK_DIR" != "/" ]; do + CHECK_DIR=$(dirname "$CHECK_DIR") + done + + # Check if the directory is writable + if [ ! -w "$CHECK_DIR" ]; then + echo + echo "> Creating directory $BIN_DIR (requires sudo)" + sudo mkdir -p "$BIN_DIR" + else + echo + echo "> Creating directory $BIN_DIR" + mkdir -p "$BIN_DIR" + fi - if [ ! -w "$CHECK_DIR" ]; then - echo "> Creating directory $BIN_DIR (requires sudo)" - sudo mkdir -p "$BIN_DIR" - else - echo "> Creating directory $BIN_DIR" - mkdir -p "$BIN_DIR" - fi fi -URL="$SANDBOX_AGENT_BASE_URL/sandbox-agent/${SANDBOX_AGENT_VERSION}/${FILE_NAME}" +# Download binary +URL="https://releases.rivet.dev/sandbox-agent/${SANDBOX_AGENT_VERSION}/binaries/${FILE_NAME}" +echo echo "> Downloading $URL" - curl -fsSL "$URL" -o sandbox-agent chmod +x sandbox-agent +# Move binary if [ ! -w "$BIN_DIR" ]; then - echo "> Installing sandbox-agent to $INSTALL_PATH (requires sudo)" - sudo mv ./sandbox-agent "$INSTALL_PATH" + echo + echo "> Installing sandbox-agent to $INSTALL_PATH (requires sudo)" + sudo mv ./sandbox-agent "$INSTALL_PATH" else - echo "> Installing sandbox-agent to $INSTALL_PATH" - mv ./sandbox-agent "$INSTALL_PATH" + echo + echo "> Installing sandbox-agent to $INSTALL_PATH" + mv ./sandbox-agent "$INSTALL_PATH" fi +# Check if path may be incorrect case ":$PATH:" in *:$BIN_DIR:*) ;; *) @@ -84,4 +94,10 @@ case ":$PATH:" in ;; esac -echo "sandbox-agent installed successfully." +echo +echo "> Checking installation" +"$BIN_DIR/sandbox-agent" --version + +echo +echo "sandbox-agent was installed successfully." +echo "Run 'sandbox-agent --help' to get started." 
diff --git a/scripts/release/tsconfig.json b/scripts/release/tsconfig.json index 1bfcab3..27ce8a7 100644 --- a/scripts/release/tsconfig.json +++ b/scripts/release/tsconfig.json @@ -1,12 +1,11 @@ { - "compilerOptions": { - "target": "ES2022", - "module": "NodeNext", - "moduleResolution": "NodeNext", - "esModuleInterop": true, - "strict": true, - "skipLibCheck": true, - "noEmit": true - }, - "include": ["*.ts"] + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "types": ["node"], + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["**/*.ts"], + "exclude": ["node_modules"] } diff --git a/scripts/release/update_version.ts b/scripts/release/update_version.ts index c991007..cfd86bf 100644 --- a/scripts/release/update_version.ts +++ b/scripts/release/update_version.ts @@ -1,15 +1,21 @@ import * as fs from "node:fs/promises"; -import { glob } from "glob"; import { $ } from "execa"; -import type { ReleaseOpts } from "./main.js"; -import { assert } from "./utils.js"; +import { glob } from "glob"; +import type { ReleaseOpts } from "./main"; + +function assert(condition: any, message?: string): asserts condition { + if (!condition) { + throw new Error(message || "Assertion failed"); + } +} export async function updateVersion(opts: ReleaseOpts) { + // Define substitutions const findReplace = [ { path: "Cargo.toml", - find: /^version = ".*"/m, - replace: `version = "${opts.version}"`, + find: /\[workspace\.package\]\nversion = ".*"/, + replace: `[workspace.package]\nversion = "${opts.version}"`, }, { path: "sdks/typescript/package.json", @@ -28,41 +34,17 @@ export async function updateVersion(opts: ReleaseOpts) { }, ]; + // Substitute all files for (const { path: globPath, find, replace } of findReplace) { const paths = await glob(globPath, { cwd: opts.root }); assert(paths.length > 0, `no paths matched: ${globPath}`); - - for (const filePath of paths) { - const fullPath = `${opts.root}/${filePath}`; - const file = await fs.readFile(fullPath, "utf-8"); - 
assert(find.test(file), `file does not match ${find}: ${filePath}`);
-
+		for (const path of paths) {
+			const file = await fs.readFile(`${opts.root}/${path}`, "utf-8");
+			assert(find.test(file), `file does not match ${find}: ${path}`);
 			const newFile = file.replace(find, replace);
-			await fs.writeFile(fullPath, newFile);
+			await fs.writeFile(`${opts.root}/${path}`, newFile);
 
-			await $({ cwd: opts.root })`git add ${filePath}`;
+			await $({ cwd: opts.root })`git add ${path}`;
 		}
 	}
-
-	// Update optionalDependencies in CLI package.json
-	const cliPkgPath = `${opts.root}/sdks/cli/package.json`;
-	const cliPkg = JSON.parse(await fs.readFile(cliPkgPath, "utf-8"));
-	if (cliPkg.optionalDependencies) {
-		for (const dep of Object.keys(cliPkg.optionalDependencies)) {
-			cliPkg.optionalDependencies[dep] = opts.version;
-		}
-		await fs.writeFile(cliPkgPath, JSON.stringify(cliPkg, null, 2) + "\n");
-		await $({ cwd: opts.root })`git add sdks/cli/package.json`;
-	}
-
-	// Update optionalDependencies in TypeScript SDK package.json
-	const sdkPkgPath = `${opts.root}/sdks/typescript/package.json`;
-	const sdkPkg = JSON.parse(await fs.readFile(sdkPkgPath, "utf-8"));
-	if (sdkPkg.optionalDependencies) {
-		for (const dep of Object.keys(sdkPkg.optionalDependencies)) {
-			sdkPkg.optionalDependencies[dep] = opts.version;
-		}
-		await fs.writeFile(sdkPkgPath, JSON.stringify(sdkPkg, null, 2) + "\n");
-		await $({ cwd: opts.root })`git add sdks/typescript/package.json`;
-	}
 }
diff --git a/scripts/release/utils.ts b/scripts/release/utils.ts
index 5d9b59c..3cbfd9c 100644
--- a/scripts/release/utils.ts
+++ b/scripts/release/utils.ts
@@ -1,12 +1,177 @@
 import * as fs from "node:fs/promises";
 import { $ } from "execa";
 
-export function assert(condition: unknown, message?: string): asserts condition {
+export const PREFIX = "sandbox-agent";
+
+export function assert(condition: any, message?: string): asserts condition {
 	if (!condition) {
 		throw new Error(message || "Assertion failed");
 	}
 }
 
+/**
+ * Converts a version string or commit hash to 
a git ref. + * If the input contains a dot, it's treated as a version (e.g., "0.1.0" -> "v0.1.0"). + * Otherwise, it's treated as a git revision and returned as-is (e.g., "bb7f292"). + */ +export function versionOrCommitToRef(versionOrCommit: string): string { + if (versionOrCommit.includes(".")) { + assert( + !versionOrCommit.startsWith("v"), + `Version should not start with "v" (got "${versionOrCommit}", use "${versionOrCommit.slice(1)}" instead)`, + ); + return `v${versionOrCommit}`; + } + return versionOrCommit; +} + +/** + * Fetches a git ref from the remote. For tags, fetches all tags. For commits, unshallows the repo. + */ +export async function fetchGitRef(ref: string): Promise { + if (ref.startsWith("v")) { + console.log(`Fetching tags...`); + await $({ stdio: "inherit" })`git fetch --tags --force`; + } else { + // Git doesn't allow fetching commits directly by SHA, and CI often uses + // shallow clones. Unshallow the repo to ensure the commit is available. + console.log(`Unshallowing repo to find commit ${ref}...`); + try { + await $({ stdio: "inherit" })`git fetch --unshallow origin`; + } catch { + // Already unshallowed, just fetch + await $({ stdio: "inherit" })`git fetch origin`; + } + } +} + +interface ReleasesS3Config { + awsEnv: Record; + endpointUrl: string; +} + +let cachedConfig: ReleasesS3Config | null = null; + +async function getReleasesS3Config(): Promise { + if (cachedConfig) { + return cachedConfig; + } + + let awsAccessKeyId = process.env.R2_RELEASES_ACCESS_KEY_ID; + if (!awsAccessKeyId) { + const result = + await $`op read ${"op://Engineering/rivet-releases R2 Upload/username"}`; + awsAccessKeyId = result.stdout.trim(); + } + let awsSecretAccessKey = process.env.R2_RELEASES_SECRET_ACCESS_KEY; + if (!awsSecretAccessKey) { + const result = + await $`op read ${"op://Engineering/rivet-releases R2 Upload/password"}`; + awsSecretAccessKey = result.stdout.trim(); + } + + assert(awsAccessKeyId, "AWS_ACCESS_KEY_ID is required"); + 
assert(awsSecretAccessKey, "AWS_SECRET_ACCESS_KEY is required"); + + cachedConfig = { + awsEnv: { + AWS_ACCESS_KEY_ID: awsAccessKeyId, + AWS_SECRET_ACCESS_KEY: awsSecretAccessKey, + AWS_DEFAULT_REGION: "auto", + }, + endpointUrl: + "https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com", + }; + + return cachedConfig; +} + +export async function uploadDirToReleases( + localPath: string, + remotePath: string, +): Promise { + const { awsEnv, endpointUrl } = await getReleasesS3Config(); + // Use --checksum-algorithm CRC32 for R2 compatibility (matches CI upload in release.yaml) + await $({ + env: awsEnv, + shell: true, + stdio: "inherit", + })`aws s3 cp ${localPath} s3://rivet-releases/${remotePath} --recursive --checksum-algorithm CRC32 --endpoint-url ${endpointUrl}`; +} + +export async function uploadContentToReleases( + content: string, + remotePath: string, +): Promise { + const { awsEnv, endpointUrl } = await getReleasesS3Config(); + await $({ + env: awsEnv, + input: content, + shell: true, + stdio: ["pipe", "inherit", "inherit"], + })`aws s3 cp - s3://rivet-releases/${remotePath} --endpoint-url ${endpointUrl}`; +} + +export interface ListReleasesResult { + Contents?: { Key: string; Size: number }[]; +} + +export async function listReleasesObjects( + prefix: string, +): Promise { + const { awsEnv, endpointUrl } = await getReleasesS3Config(); + const result = await $({ + env: awsEnv, + shell: true, + stdio: ["pipe", "pipe", "inherit"], + })`aws s3api list-objects --bucket rivet-releases --prefix ${prefix} --endpoint-url ${endpointUrl}`; + return JSON.parse(result.stdout); +} + +export async function deleteReleasesPath(remotePath: string): Promise { + const { awsEnv, endpointUrl } = await getReleasesS3Config(); + await $({ + env: awsEnv, + shell: true, + stdio: "inherit", + })`aws s3 rm s3://rivet-releases/${remotePath} --recursive --endpoint-url ${endpointUrl}`; +} + +/** + * Copies objects from one S3 path to another within the releases bucket. 
+ * + * NOTE: We implement our own recursive copy instead of using `aws s3 cp --recursive` + * because of a Cloudflare R2 bug. R2 doesn't support the `x-amz-tagging-directive` + * header, which the AWS CLI sends even with `--copy-props none` for small files. + * Using `s3api copy-object` directly avoids this header. + * + * See: https://community.cloudflare.com/t/r2-s3-compat-doesnt-support-net-sdk-for-copy-operations-due-to-tagging-header/616867 + */ +export async function copyReleasesPath( + sourcePath: string, + targetPath: string, +): Promise { + const { awsEnv, endpointUrl } = await getReleasesS3Config(); + + const listResult = await $({ + env: awsEnv, + })`aws s3api list-objects --bucket rivet-releases --prefix ${sourcePath} --endpoint-url ${endpointUrl}`; + + const objects = JSON.parse(listResult.stdout); + if (!objects.Contents?.length) { + throw new Error(`No objects found under ${sourcePath}`); + } + + for (const obj of objects.Contents) { + const sourceKey = obj.Key; + const targetKey = sourceKey.replace(sourcePath, targetPath); + console.log(` ${sourceKey} -> ${targetKey}`); + await $({ + env: awsEnv, + })`aws s3api copy-object --bucket rivet-releases --key ${targetKey} --copy-source rivet-releases/${sourceKey} --endpoint-url ${endpointUrl}`; + } +} + export function assertEquals(actual: T, expected: T, message?: string): void { if (actual !== expected) { throw new Error(message || `Expected ${expected}, got ${actual}`); @@ -28,148 +193,10 @@ export async function assertDirExists(dirPath: string): Promise { if (!stat.isDirectory()) { throw new Error(`Path exists but is not a directory: ${dirPath}`); } - } catch (err: unknown) { - if (err && typeof err === "object" && "code" in err && err.code === "ENOENT") { + } catch (err: any) { + if (err.code === "ENOENT") { throw new Error(`Directory not found: ${dirPath}`); } throw err; } } - -// R2 configuration -const ENDPOINT_URL = "https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com"; -const 
BUCKET = "rivet-releases"; - -interface ReleasesS3Config { - awsEnv: Record; - endpointUrl: string; -} - -let cachedConfig: ReleasesS3Config | null = null; - -export async function getReleasesS3Config(): Promise { - if (cachedConfig) { - return cachedConfig; - } - - let awsAccessKeyId = process.env.R2_RELEASES_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID; - let awsSecretAccessKey = process.env.R2_RELEASES_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY; - - // Try 1Password fallback for local development - if (!awsAccessKeyId) { - try { - const result = await $`op read ${"op://Engineering/rivet-releases R2 Upload/username"}`; - awsAccessKeyId = result.stdout.trim(); - } catch { - // 1Password not available - } - } - if (!awsSecretAccessKey) { - try { - const result = await $`op read ${"op://Engineering/rivet-releases R2 Upload/password"}`; - awsSecretAccessKey = result.stdout.trim(); - } catch { - // 1Password not available - } - } - - assert(awsAccessKeyId, "R2_RELEASES_ACCESS_KEY_ID is required"); - assert(awsSecretAccessKey, "R2_RELEASES_SECRET_ACCESS_KEY is required"); - - cachedConfig = { - awsEnv: { - AWS_ACCESS_KEY_ID: awsAccessKeyId, - AWS_SECRET_ACCESS_KEY: awsSecretAccessKey, - AWS_DEFAULT_REGION: "auto", - }, - endpointUrl: ENDPOINT_URL, - }; - - return cachedConfig; -} - -export async function uploadFileToReleases( - localPath: string, - remotePath: string, -): Promise { - const { awsEnv, endpointUrl } = await getReleasesS3Config(); - await $({ - env: awsEnv, - stdio: "inherit", - })`aws s3 cp ${localPath} s3://${BUCKET}/${remotePath} --checksum-algorithm CRC32 --endpoint-url ${endpointUrl}`; -} - -export async function uploadDirToReleases( - localPath: string, - remotePath: string, -): Promise { - const { awsEnv, endpointUrl } = await getReleasesS3Config(); - await $({ - env: awsEnv, - stdio: "inherit", - })`aws s3 cp ${localPath} s3://${BUCKET}/${remotePath} --recursive --checksum-algorithm CRC32 --endpoint-url ${endpointUrl}`; -} - -export 
async function uploadContentToReleases( - content: string, - remotePath: string, -): Promise { - const { awsEnv, endpointUrl } = await getReleasesS3Config(); - await $({ - env: awsEnv, - input: content, - stdio: ["pipe", "inherit", "inherit"], - })`aws s3 cp - s3://${BUCKET}/${remotePath} --endpoint-url ${endpointUrl}`; -} - -export interface ListReleasesResult { - Contents?: { Key: string; Size: number }[]; -} - -export async function listReleasesObjects( - prefix: string, -): Promise { - const { awsEnv, endpointUrl } = await getReleasesS3Config(); - const result = await $({ - env: awsEnv, - stdio: ["pipe", "pipe", "inherit"], - })`aws s3api list-objects --bucket ${BUCKET} --prefix ${prefix} --endpoint-url ${endpointUrl}`; - return JSON.parse(result.stdout); -} - -export async function deleteReleasesPath(remotePath: string): Promise { - const { awsEnv, endpointUrl } = await getReleasesS3Config(); - await $({ - env: awsEnv, - stdio: "inherit", - })`aws s3 rm s3://${BUCKET}/${remotePath} --recursive --endpoint-url ${endpointUrl}`; -} - -/** - * Copies objects from one S3 path to another within the releases bucket. - * Uses s3api copy-object to avoid R2 tagging header issues. 
- */ -export async function copyReleasesPath( - sourcePath: string, - targetPath: string, -): Promise { - const { awsEnv, endpointUrl } = await getReleasesS3Config(); - - const listResult = await $({ - env: awsEnv, - })`aws s3api list-objects --bucket ${BUCKET} --prefix ${sourcePath} --endpoint-url ${endpointUrl}`; - - const objects = JSON.parse(listResult.stdout); - if (!objects.Contents?.length) { - throw new Error(`No objects found under ${sourcePath}`); - } - - for (const obj of objects.Contents) { - const sourceKey = obj.Key; - const targetKey = sourceKey.replace(sourcePath, targetPath); - console.log(` ${sourceKey} -> ${targetKey}`); - await $({ - env: awsEnv, - })`aws s3api copy-object --bucket ${BUCKET} --key ${targetKey} --copy-source ${BUCKET}/${sourceKey} --endpoint-url ${endpointUrl}`; - } -}