forked from wrenn/wrenn
Add USER, COPY, ENV persistence to template build system
Implement three new recipe commands for the admin template builder: - USER <name>: creates the user (adduser + passwordless sudo), switches execution context so subsequent RUN/START commands run as that user via su wrapping. Last USER becomes the template's default_user. - COPY <src> <dst>: copies files from an uploaded build archive (tar/tar.gz/zip) into the sandbox. Source paths validated against traversal. Ownership set to the current USER. - ENV persistence: accumulated env vars stored in templates.default_env (JSONB) and injected via PostInit when sandboxes are created from the template, mirroring Docker's image metadata approach. Supporting changes: - Pre-build creates wrenn-user as default (via USER command) - WORKDIR now creates the directory if it doesn't exist (mkdir -p) - Per-step progress updates (ProgressFunc callback) for live UI - Multipart form support on POST /v1/admin/builds for archive upload - Proto: default_user/default_env fields on Create/ResumeSandboxRequest - Host agent: SetDefaults calls PostInitWithDefaults on envd - Control plane: reads template defaults, passes on sandbox create/resume - Frontend: file upload widget, recipe copy button, keyword colors for USER/COPY, fixed Svelte whitespace stripping in step display - Admin panel defaults to /admin/templates instead of /admin/hosts - Migration adds default_user and default_env to templates and template_builds tables
This commit is contained in:
17
db/migrations/20260411182550_template_defaults.sql
Normal file
17
db/migrations/20260411182550_template_defaults.sql
Normal file
@ -0,0 +1,17 @@
|
||||
-- +goose Up
-- Add per-template default user and env-var metadata so sandboxes created
-- from a template inherit the recipe's last USER and accumulated ENV vars
-- (mirrors Docker's image-config approach described in the commit message).
ALTER TABLE templates
    ADD COLUMN default_user TEXT NOT NULL DEFAULT 'root',
    -- JSON object of env var name -> value; '{}' means "no extra env".
    ADD COLUMN default_env JSONB NOT NULL DEFAULT '{}';

-- template_builds carries the same pair while a build is in progress,
-- then the values are copied onto the final templates row.
ALTER TABLE template_builds
    ADD COLUMN default_user TEXT NOT NULL DEFAULT 'root',
    ADD COLUMN default_env JSONB NOT NULL DEFAULT '{}';

-- +goose Down
ALTER TABLE template_builds
    DROP COLUMN default_env,
    DROP COLUMN default_user;

ALTER TABLE templates
    DROP COLUMN default_env,
    DROP COLUMN default_user;
@ -31,3 +31,8 @@ WHERE id = $1;
|
||||
UPDATE template_builds
|
||||
SET error = $2, status = 'failed', completed_at = NOW()
|
||||
WHERE id = $1;
|
||||
|
||||
-- name: UpdateBuildDefaults :exec
|
||||
UPDATE template_builds
|
||||
SET default_user = $2, default_env = $3
|
||||
WHERE id = $1;
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
-- name: InsertTemplate :one
|
||||
INSERT INTO templates (id, name, type, vcpus, memory_mb, size_bytes, team_id)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
INSERT INTO templates (id, name, type, vcpus, memory_mb, size_bytes, team_id, default_user, default_env)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||
RETURNING *;
|
||||
|
||||
-- name: GetTemplate :one
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import { apiFetch, type ApiResult } from '$lib/api/client';
|
||||
import { apiFetch, apiFetchMultipart, type ApiResult } from '$lib/api/client';
|
||||
|
||||
export type BuildLogEntry = {
|
||||
step: number;
|
||||
@ -26,6 +26,8 @@ export type Build = {
|
||||
error?: string;
|
||||
sandbox_id?: string;
|
||||
host_id?: string;
|
||||
default_user: string;
|
||||
default_env: Record<string, string>;
|
||||
created_at: string;
|
||||
started_at?: string;
|
||||
completed_at?: string;
|
||||
@ -39,9 +41,18 @@ export type CreateBuildParams = {
|
||||
vcpus?: number;
|
||||
memory_mb?: number;
|
||||
skip_pre_post?: boolean;
|
||||
archive?: File;
|
||||
};
|
||||
|
||||
export async function createBuild(params: CreateBuildParams): Promise<ApiResult<Build>> {
|
||||
if (params.archive) {
|
||||
// Use multipart when an archive file is provided.
|
||||
const { archive, ...config } = params;
|
||||
const formData = new FormData();
|
||||
formData.append('config', JSON.stringify(config));
|
||||
formData.append('archive', archive);
|
||||
return apiFetchMultipart('POST', '/api/v1/admin/builds', formData);
|
||||
}
|
||||
return apiFetch('POST', '/api/v1/admin/builds', params);
|
||||
}
|
||||
|
||||
|
||||
@ -22,3 +22,24 @@ export async function apiFetch<T>(method: string, path: string, body?: unknown):
|
||||
return { ok: false, error: 'Unable to connect to the server' };
|
||||
}
|
||||
}
|
||||
|
||||
export async function apiFetchMultipart<T>(method: string, path: string, formData: FormData): Promise<ApiResult<T>> {
|
||||
try {
|
||||
const headers: Record<string, string> = {};
|
||||
if (auth.token) headers['Authorization'] = `Bearer ${auth.token}`;
|
||||
|
||||
const res = await fetch(path, {
|
||||
method,
|
||||
headers,
|
||||
body: formData
|
||||
});
|
||||
|
||||
if (res.status === 204) return { ok: true, data: undefined as T };
|
||||
|
||||
const data = await res.json();
|
||||
if (!res.ok) return { ok: false, error: data?.error?.message ?? 'Something went wrong' };
|
||||
return { ok: true, data: data as T };
|
||||
} catch {
|
||||
return { ok: false, error: 'Unable to connect to the server' };
|
||||
}
|
||||
}
|
||||
|
||||
@ -22,8 +22,8 @@
|
||||
};
|
||||
|
||||
const managementItems: NavItem[] = [
|
||||
{ label: 'Hosts', icon: IconServer, href: '/admin/hosts' },
|
||||
{ label: 'Templates', icon: IconTemplate, href: '/admin/templates' }
|
||||
{ label: 'Templates', icon: IconTemplate, href: '/admin/templates' },
|
||||
{ label: 'Hosts', icon: IconServer, href: '/admin/hosts' }
|
||||
];
|
||||
|
||||
function isActive(href: string): boolean {
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
<script lang="ts">
|
||||
import { goto } from '$app/navigation';
|
||||
import { onMount } from 'svelte';
|
||||
onMount(() => goto('/admin/hosts', { replaceState: true }));
|
||||
onMount(() => goto('/admin/templates', { replaceState: true }));
|
||||
</script>
|
||||
|
||||
@ -56,7 +56,8 @@
|
||||
memory_mb: 512,
|
||||
recipe: '',
|
||||
healthcheck: '',
|
||||
skip_pre_post: false
|
||||
skip_pre_post: false,
|
||||
archive: null as File | null
|
||||
});
|
||||
let creating = $state(false);
|
||||
let createError = $state<string | null>(null);
|
||||
@ -131,12 +132,13 @@
|
||||
healthcheck: createForm.healthcheck.trim() || undefined,
|
||||
vcpus: createForm.vcpus,
|
||||
memory_mb: createForm.memory_mb,
|
||||
skip_pre_post: createForm.skip_pre_post
|
||||
skip_pre_post: createForm.skip_pre_post,
|
||||
archive: createForm.archive || undefined
|
||||
});
|
||||
|
||||
if (result.ok) {
|
||||
showCreate = false;
|
||||
createForm = { name: '', base_template: 'minimal', vcpus: 1, memory_mb: 512, recipe: '', healthcheck: '', skip_pre_post: false };
|
||||
createForm = { name: '', base_template: 'minimal', vcpus: 1, memory_mb: 512, recipe: '', healthcheck: '', skip_pre_post: false, archive: null };
|
||||
builds = [result.data, ...builds];
|
||||
activeTab = 'builds';
|
||||
expandedBuildId = result.data.id;
|
||||
@ -235,6 +237,8 @@
|
||||
case 'RUN': return 'var(--color-blue)';
|
||||
case 'START': return 'var(--color-accent-bright)';
|
||||
case 'ENV': return 'var(--color-amber)';
|
||||
case 'USER': return 'var(--color-accent)';
|
||||
case 'COPY': return 'var(--color-text-bright)';
|
||||
case 'WORKDIR': return 'var(--color-text-tertiary)';
|
||||
default: return 'var(--color-text-muted)';
|
||||
}
|
||||
@ -277,7 +281,7 @@
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onclick={() => { showCreate = true; createError = null; createForm = { name: '', base_template: 'minimal', vcpus: 1, memory_mb: 512, recipe: '', healthcheck: '', skip_pre_post: false }; }}
|
||||
onclick={() => { showCreate = true; createError = null; createForm = { name: '', base_template: 'minimal', vcpus: 1, memory_mb: 512, recipe: '', healthcheck: '', skip_pre_post: false, archive: null }; }}
|
||||
class="flex items-center gap-2 rounded-[var(--radius-button)] bg-[var(--color-accent)] px-4 py-2 text-ui font-semibold text-white shadow-sm transition-all duration-150 hover:brightness-115 hover:-translate-y-px active:translate-y-0"
|
||||
>
|
||||
<svg width="13" height="13" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><line x1="12" y1="5" x2="12" y2="19"/><line x1="5" y1="12" x2="19" y2="12"/></svg>
|
||||
@ -608,7 +612,7 @@
|
||||
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="var(--color-red)" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round" class="shrink-0"><line x1="18" y1="6" x2="6" y2="18"/><line x1="6" y1="6" x2="18" y2="18"/></svg>
|
||||
{/if}
|
||||
<span class="shrink-0 text-label font-semibold text-[var(--color-text-tertiary)]">{phaseLabel}</span>
|
||||
<code class="flex-1 truncate font-mono text-meta"><span style="color: {keywordColor(kw)}">{kw}</span>{#if kwRest}<span class="text-[var(--color-text-secondary)]"> {kwRest}</span>{/if}</code>
|
||||
<code class="flex-1 truncate font-mono text-meta"><span style="color: {keywordColor(kw)}">{kw}</span>{#if kwRest}{' '}<span class="text-[var(--color-text-secondary)]">{kwRest}</span>{/if}</code>
|
||||
<span class="shrink-0 font-mono text-label text-[var(--color-text-muted)]">{log.elapsed_ms}ms</span>
|
||||
{#if log.exit !== 0}
|
||||
<span class="shrink-0 rounded-full bg-[var(--color-red)]/10 px-1.5 py-0.5 font-mono text-label text-[var(--color-red)]">
|
||||
@ -661,13 +665,16 @@
|
||||
<!-- Recipe reference -->
|
||||
{#if build.recipe && build.recipe.length > 0}
|
||||
<div class="mt-4 border-t border-[var(--color-border)] pt-4">
|
||||
<div class="flex items-center gap-1.5">
|
||||
<span class="text-label font-semibold uppercase tracking-[0.06em] text-[var(--color-text-tertiary)]">Recipe</span>
|
||||
<CopyButton value={build.recipe.join('\n')} />
|
||||
</div>
|
||||
<div class="mt-2 rounded-[var(--radius-input)] bg-[var(--color-bg-1)] border border-[var(--color-border)] px-3 py-2">
|
||||
{#each build.recipe as cmd, i}
|
||||
{@const [kw, kwRest] = splitInstruction(cmd)}
|
||||
<div class="flex gap-2 py-0.5">
|
||||
<span class="shrink-0 font-mono text-label text-[var(--color-text-muted)] tabular-nums">{i + 1}.</span>
|
||||
<code class="font-mono text-meta"><span style="color: {keywordColor(kw)}">{kw}</span>{#if kwRest}<span class="text-[var(--color-text-secondary)]"> {kwRest}</span>{/if}</code>
|
||||
<code class="font-mono text-meta"><span style="color: {keywordColor(kw)}">{kw}</span>{#if kwRest}{' '}<span class="text-[var(--color-text-secondary)]">{kwRest}</span>{/if}</code>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
@ -787,10 +794,45 @@
|
||||
class="w-full resize-y rounded-[var(--radius-input)] border border-[var(--color-border)] bg-[var(--color-bg-4)] px-3 py-2 font-mono text-meta leading-relaxed text-[var(--color-text-bright)] outline-none placeholder:text-[var(--color-text-muted)] transition-colors duration-150 focus:border-[var(--color-accent)] disabled:opacity-60"
|
||||
></textarea>
|
||||
<p class="mt-1 text-label text-[var(--color-text-muted)]">
|
||||
Supports <code class="font-mono">RUN</code>, <code class="font-mono">START</code>, <code class="font-mono">WORKDIR</code>, <code class="font-mono">ENV key=value</code>. RUN steps have a 30s timeout; override with <code class="font-mono">RUN --timeout=5m</code>.
|
||||
Supports <code class="font-mono">RUN</code>, <code class="font-mono">START</code>, <code class="font-mono">WORKDIR</code>, <code class="font-mono">ENV key=value</code>, <code class="font-mono">USER name</code>, <code class="font-mono">COPY src dst</code>. RUN steps have a 30s timeout; override with <code class="font-mono">RUN --timeout=5m</code>. COPY references files from the uploaded archive.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label class="mb-1.5 block text-label font-semibold uppercase tracking-[0.05em] text-[var(--color-text-tertiary)]" for="tmpl-archive">
|
||||
Build Archive <span class="normal-case font-normal text-[var(--color-text-muted)]">(optional, for COPY commands)</span>
|
||||
</label>
|
||||
<div class="flex items-center gap-3">
|
||||
<label
|
||||
class="flex cursor-pointer items-center gap-2 rounded-[var(--radius-button)] border border-[var(--color-border)] bg-[var(--color-bg-4)] px-3 py-2 text-ui text-[var(--color-text-secondary)] transition-colors duration-150 hover:border-[var(--color-border-mid)] hover:text-[var(--color-text-primary)]"
|
||||
>
|
||||
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"/><polyline points="17 8 12 3 7 8"/><line x1="12" y1="3" x2="12" y2="15"/></svg>
|
||||
Choose file
|
||||
<input
|
||||
id="tmpl-archive"
|
||||
type="file"
|
||||
accept=".tar,.tar.gz,.tgz,.zip"
|
||||
disabled={creating}
|
||||
onchange={(e) => { const f = (e.target as HTMLInputElement).files?.[0]; createForm.archive = f ?? null; }}
|
||||
class="hidden"
|
||||
/>
|
||||
</label>
|
||||
{#if createForm.archive}
|
||||
<span class="flex items-center gap-1.5 text-meta text-[var(--color-text-secondary)]">
|
||||
<span class="font-mono">{createForm.archive.name}</span>
|
||||
<button
|
||||
onclick={() => { createForm.archive = null; }}
|
||||
class="text-[var(--color-text-muted)] hover:text-[var(--color-red)] transition-colors"
|
||||
>
|
||||
<svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><line x1="18" y1="6" x2="6" y2="18"/><line x1="6" y1="6" x2="18" y2="18"/></svg>
|
||||
</button>
|
||||
</span>
|
||||
{:else}
|
||||
<span class="text-meta text-[var(--color-text-muted)]">tar, tar.gz, or zip</span>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label class="mb-1.5 block text-label font-semibold uppercase tracking-[0.05em] text-[var(--color-text-tertiary)]" for="tmpl-healthcheck">
|
||||
Healthcheck <span class="normal-case font-normal text-[var(--color-text-muted)]">(optional)</span>
|
||||
|
||||
@ -3,8 +3,10 @@ package api
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"connectrpc.com/connect"
|
||||
@ -54,6 +56,8 @@ type buildResponse struct {
|
||||
Error *string `json:"error,omitempty"`
|
||||
SandboxID *string `json:"sandbox_id,omitempty"`
|
||||
HostID *string `json:"host_id,omitempty"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv json.RawMessage `json:"default_env"`
|
||||
CreatedAt string `json:"created_at"`
|
||||
StartedAt *string `json:"started_at,omitempty"`
|
||||
CompletedAt *string `json:"completed_at,omitempty"`
|
||||
@ -71,6 +75,8 @@ func buildToResponse(b db.TemplateBuild) buildResponse {
|
||||
CurrentStep: b.CurrentStep,
|
||||
TotalSteps: b.TotalSteps,
|
||||
Logs: b.Logs,
|
||||
DefaultUser: b.DefaultUser,
|
||||
DefaultEnv: b.DefaultEnv,
|
||||
}
|
||||
if b.Healthcheck != "" {
|
||||
resp.Healthcheck = &b.Healthcheck
|
||||
@ -101,12 +107,55 @@ func buildToResponse(b db.TemplateBuild) buildResponse {
|
||||
}
|
||||
|
||||
// Create handles POST /v1/admin/builds.
|
||||
// Accepts either JSON body or multipart/form-data with a "config" JSON part
|
||||
// and an optional "archive" file part (tar/tar.gz/zip for COPY commands).
|
||||
func (h *buildHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
var req createBuildRequest
|
||||
var archive []byte
|
||||
var archiveName string
|
||||
|
||||
ct := r.Header.Get("Content-Type")
|
||||
if strings.HasPrefix(ct, "multipart/") {
|
||||
// 100 MB max for multipart (archive + JSON config).
|
||||
if err := r.ParseMultipartForm(100 << 20); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "failed to parse multipart form")
|
||||
return
|
||||
}
|
||||
|
||||
// Parse JSON config from "config" field.
|
||||
configStr := r.FormValue("config")
|
||||
if configStr == "" {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "multipart form requires a 'config' JSON field")
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal([]byte(configStr), &req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "invalid config JSON in multipart form")
|
||||
return
|
||||
}
|
||||
|
||||
// Read optional archive file (max 100 MB).
|
||||
file, header, err := r.FormFile("archive")
|
||||
if err == nil {
|
||||
defer file.Close()
|
||||
const maxArchiveSize = 100 << 20 // 100 MB
|
||||
lr := io.LimitReader(file, maxArchiveSize+1)
|
||||
archive, err = io.ReadAll(lr)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "failed to read archive file")
|
||||
return
|
||||
}
|
||||
if int64(len(archive)) > maxArchiveSize {
|
||||
writeError(w, http.StatusRequestEntityTooLarge, "invalid_request", "archive exceeds 100 MB limit")
|
||||
return
|
||||
}
|
||||
archiveName = header.Filename
|
||||
}
|
||||
} else {
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "invalid JSON body")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if req.Name == "" {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "name is required")
|
||||
@ -129,6 +178,8 @@ func (h *buildHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
VCPUs: req.VCPUs,
|
||||
MemoryMB: req.MemoryMB,
|
||||
SkipPrePost: req.SkipPrePost,
|
||||
Archive: archive,
|
||||
ArchiveName: archiveName,
|
||||
})
|
||||
if err != nil {
|
||||
slog.Error("failed to create build", "error", err)
|
||||
|
||||
@ -217,6 +217,8 @@ func (h *snapshotHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
MemoryMb: sb.MemoryMb,
|
||||
SizeBytes: resp.Msg.SizeBytes,
|
||||
TeamID: ac.TeamID,
|
||||
DefaultUser: "root",
|
||||
DefaultEnv: []byte("{}"),
|
||||
})
|
||||
if err != nil {
|
||||
slog.Error("failed to insert template record", "name", req.Name, "error", err)
|
||||
|
||||
@ -160,6 +160,8 @@ type Template struct {
|
||||
CreatedAt pgtype.Timestamptz `json:"created_at"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
ID pgtype.UUID `json:"id"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv []byte `json:"default_env"`
|
||||
}
|
||||
|
||||
type TemplateBuild struct {
|
||||
@ -183,6 +185,8 @@ type TemplateBuild struct {
|
||||
TemplateID pgtype.UUID `json:"template_id"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
SkipPrePost bool `json:"skip_pre_post"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv []byte `json:"default_env"`
|
||||
}
|
||||
|
||||
type User struct {
|
||||
|
||||
@ -12,7 +12,7 @@ import (
|
||||
)
|
||||
|
||||
const getTemplateBuild = `-- name: GetTemplateBuild :one
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post FROM template_builds WHERE id = $1
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env FROM template_builds WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetTemplateBuild(ctx context.Context, id pgtype.UUID) (TemplateBuild, error) {
|
||||
@ -39,6 +39,8 @@ func (q *Queries) GetTemplateBuild(ctx context.Context, id pgtype.UUID) (Templat
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
@ -46,7 +48,7 @@ func (q *Queries) GetTemplateBuild(ctx context.Context, id pgtype.UUID) (Templat
|
||||
const insertTemplateBuild = `-- name: InsertTemplateBuild :one
|
||||
INSERT INTO template_builds (id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, total_steps, template_id, team_id, skip_pre_post)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, 'pending', $8, $9, $10, $11)
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env
|
||||
`
|
||||
|
||||
type InsertTemplateBuildParams struct {
|
||||
@ -99,12 +101,14 @@ func (q *Queries) InsertTemplateBuild(ctx context.Context, arg InsertTemplateBui
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const listTemplateBuilds = `-- name: ListTemplateBuilds :many
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post FROM template_builds ORDER BY created_at DESC
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env FROM template_builds ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) ListTemplateBuilds(ctx context.Context) ([]TemplateBuild, error) {
|
||||
@ -137,6 +141,8 @@ func (q *Queries) ListTemplateBuilds(ctx context.Context) ([]TemplateBuild, erro
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -148,6 +154,23 @@ func (q *Queries) ListTemplateBuilds(ctx context.Context) ([]TemplateBuild, erro
|
||||
return items, nil
|
||||
}
|
||||
|
||||
// updateBuildDefaults persists the default-user/default-env pair onto a
// template_builds row.
// NOTE(review): this file appears to be sqlc-generated output — prefer
// editing the .sql query file and regenerating over hand edits.
const updateBuildDefaults = `-- name: UpdateBuildDefaults :exec
UPDATE template_builds
SET default_user = $2, default_env = $3
WHERE id = $1
`

type UpdateBuildDefaultsParams struct {
	ID          pgtype.UUID `json:"id"`
	DefaultUser string      `json:"default_user"`
	// DefaultEnv is raw JSON for the JSONB column (env var name -> value).
	DefaultEnv []byte `json:"default_env"`
}

// UpdateBuildDefaults sets default_user and default_env on the build
// identified by arg.ID. It returns any error from the underlying Exec;
// a missing row is not an error (UPDATE simply affects zero rows).
func (q *Queries) UpdateBuildDefaults(ctx context.Context, arg UpdateBuildDefaultsParams) error {
	_, err := q.db.Exec(ctx, updateBuildDefaults, arg.ID, arg.DefaultUser, arg.DefaultEnv)
	return err
}
|
||||
|
||||
const updateBuildError = `-- name: UpdateBuildError :exec
|
||||
UPDATE template_builds
|
||||
SET error = $2, status = 'failed', completed_at = NOW()
|
||||
@ -204,7 +227,7 @@ SET status = $2,
|
||||
started_at = CASE WHEN $2 = 'running' AND started_at IS NULL THEN NOW() ELSE started_at END,
|
||||
completed_at = CASE WHEN $2 IN ('success', 'failed', 'cancelled') THEN NOW() ELSE completed_at END
|
||||
WHERE id = $1
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env
|
||||
`
|
||||
|
||||
type UpdateBuildStatusParams struct {
|
||||
@ -236,6 +259,8 @@ func (q *Queries) UpdateBuildStatus(ctx context.Context, arg UpdateBuildStatusPa
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
@ -45,7 +45,7 @@ func (q *Queries) DeleteTemplatesByTeam(ctx context.Context, teamID pgtype.UUID)
|
||||
}
|
||||
|
||||
const getPlatformTemplateByName = `-- name: GetPlatformTemplateByName :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE team_id = '00000000-0000-0000-0000-000000000000' AND name = $1
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE team_id = '00000000-0000-0000-0000-000000000000' AND name = $1
|
||||
`
|
||||
|
||||
// Check if a global (platform) template exists with the given name.
|
||||
@ -61,12 +61,14 @@ func (q *Queries) GetPlatformTemplateByName(ctx context.Context, name string) (T
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTemplate = `-- name: GetTemplate :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE id = $1
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetTemplate(ctx context.Context, id pgtype.UUID) (Template, error) {
|
||||
@ -81,12 +83,14 @@ func (q *Queries) GetTemplate(ctx context.Context, id pgtype.UUID) (Template, er
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTemplateByName = `-- name: GetTemplateByName :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE team_id = $1 AND name = $2
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE team_id = $1 AND name = $2
|
||||
`
|
||||
|
||||
type GetTemplateByNameParams struct {
|
||||
@ -107,12 +111,14 @@ func (q *Queries) GetTemplateByName(ctx context.Context, arg GetTemplateByNamePa
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTemplateByTeam = `-- name: GetTemplateByTeam :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE name = $1 AND (team_id = $2 OR team_id = '00000000-0000-0000-0000-000000000000')
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE name = $1 AND (team_id = $2 OR team_id = '00000000-0000-0000-0000-000000000000')
|
||||
`
|
||||
|
||||
type GetTemplateByTeamParams struct {
|
||||
@ -133,14 +139,16 @@ func (q *Queries) GetTemplateByTeam(ctx context.Context, arg GetTemplateByTeamPa
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertTemplate = `-- name: InsertTemplate :one
|
||||
INSERT INTO templates (id, name, type, vcpus, memory_mb, size_bytes, team_id)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
RETURNING name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id
|
||||
INSERT INTO templates (id, name, type, vcpus, memory_mb, size_bytes, team_id, default_user, default_env)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||
RETURNING name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env
|
||||
`
|
||||
|
||||
type InsertTemplateParams struct {
|
||||
@ -151,6 +159,8 @@ type InsertTemplateParams struct {
|
||||
MemoryMb int32 `json:"memory_mb"`
|
||||
SizeBytes int64 `json:"size_bytes"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv []byte `json:"default_env"`
|
||||
}
|
||||
|
||||
func (q *Queries) InsertTemplate(ctx context.Context, arg InsertTemplateParams) (Template, error) {
|
||||
@ -162,6 +172,8 @@ func (q *Queries) InsertTemplate(ctx context.Context, arg InsertTemplateParams)
|
||||
arg.MemoryMb,
|
||||
arg.SizeBytes,
|
||||
arg.TeamID,
|
||||
arg.DefaultUser,
|
||||
arg.DefaultEnv,
|
||||
)
|
||||
var i Template
|
||||
err := row.Scan(
|
||||
@ -173,12 +185,14 @@ func (q *Queries) InsertTemplate(ctx context.Context, arg InsertTemplateParams)
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const listTemplates = `-- name: ListTemplates :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) ListTemplates(ctx context.Context) ([]Template, error) {
|
||||
@ -199,6 +213,8 @@ func (q *Queries) ListTemplates(ctx context.Context) ([]Template, error) {
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -211,7 +227,7 @@ func (q *Queries) ListTemplates(ctx context.Context) ([]Template, error) {
|
||||
}
|
||||
|
||||
const listTemplatesByTeam = `-- name: ListTemplatesByTeam :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
// Platform templates are visible to all teams.
|
||||
@ -233,6 +249,8 @@ func (q *Queries) ListTemplatesByTeam(ctx context.Context, teamID pgtype.UUID) (
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -245,7 +263,7 @@ func (q *Queries) ListTemplatesByTeam(ctx context.Context, teamID pgtype.UUID) (
|
||||
}
|
||||
|
||||
const listTemplatesByTeamAndType = `-- name: ListTemplatesByTeamAndType :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') AND type = $2 ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') AND type = $2 ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
type ListTemplatesByTeamAndTypeParams struct {
|
||||
@ -272,6 +290,8 @@ func (q *Queries) ListTemplatesByTeamAndType(ctx context.Context, arg ListTempla
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -284,7 +304,7 @@ func (q *Queries) ListTemplatesByTeamAndType(ctx context.Context, arg ListTempla
|
||||
}
|
||||
|
||||
const listTemplatesByTeamOnly = `-- name: ListTemplatesByTeamOnly :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE team_id = $1 ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE team_id = $1 ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
// List templates owned by a specific team (NOT including platform templates).
|
||||
@ -306,6 +326,8 @@ func (q *Queries) ListTemplatesByTeamOnly(ctx context.Context, teamID pgtype.UUI
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -318,7 +340,7 @@ func (q *Queries) ListTemplatesByTeamOnly(ctx context.Context, teamID pgtype.UUI
|
||||
}
|
||||
|
||||
const listTemplatesByType = `-- name: ListTemplatesByType :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE type = $1 ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE type = $1 ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) ListTemplatesByType(ctx context.Context, type_ string) ([]Template, error) {
|
||||
@ -339,6 +361,8 @@ func (q *Queries) ListTemplatesByType(ctx context.Context, type_ string) ([]Temp
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@ -3,6 +3,7 @@ package envdclient
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
@ -273,10 +274,36 @@ func (c *Client) ReadFile(ctx context.Context, path string) ([]byte, error) {
|
||||
// env vars and the corresponding files under /run/wrenn/ inside the guest.
|
||||
// Must be called after snapshot restore so envd picks up the new sandbox's metadata.
|
||||
func (c *Client) PostInit(ctx context.Context) error {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.base+"/init", nil)
|
||||
return c.PostInitWithDefaults(ctx, "", nil)
|
||||
}
|
||||
|
||||
// PostInitWithDefaults calls envd's POST /init endpoint with optional default
|
||||
// user and environment variables. These are applied to envd's defaults so all
|
||||
// subsequent process executions use them.
|
||||
func (c *Client) PostInitWithDefaults(ctx context.Context, defaultUser string, envVars map[string]string) error {
|
||||
var body io.Reader
|
||||
if defaultUser != "" || len(envVars) > 0 {
|
||||
payload := make(map[string]any)
|
||||
if defaultUser != "" {
|
||||
payload["defaultUser"] = defaultUser
|
||||
}
|
||||
if len(envVars) > 0 {
|
||||
payload["envVars"] = envVars
|
||||
}
|
||||
data, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshal init body: %w", err)
|
||||
}
|
||||
body = bytes.NewReader(data)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.base+"/init", body)
|
||||
if err != nil {
|
||||
return fmt.Errorf("create request: %w", err)
|
||||
}
|
||||
if body != nil {
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
}
|
||||
|
||||
resp, err := c.httpClient.Do(req)
|
||||
if err != nil {
|
||||
@ -285,8 +312,8 @@ func (c *Client) PostInit(ctx context.Context) error {
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusNoContent {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("post init: status %d: %s", resp.StatusCode, string(body))
|
||||
respBody, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("post init: status %d: %s", resp.StatusCode, string(respBody))
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
@ -69,6 +69,13 @@ func (s *Server) CreateSandbox(
|
||||
return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("create sandbox: %w", err))
|
||||
}
|
||||
|
||||
// Apply template defaults (user, env vars) if provided.
|
||||
if msg.DefaultUser != "" || len(msg.DefaultEnv) > 0 {
|
||||
if err := s.mgr.SetDefaults(ctx, sb.ID, msg.DefaultUser, msg.DefaultEnv); err != nil {
|
||||
slog.Warn("failed to set sandbox defaults", "sandbox", sb.ID, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
return connect.NewResponse(&pb.CreateSandboxResponse{
|
||||
SandboxId: sb.ID,
|
||||
Status: string(sb.Status),
|
||||
@ -100,10 +107,19 @@ func (s *Server) ResumeSandbox(
|
||||
ctx context.Context,
|
||||
req *connect.Request[pb.ResumeSandboxRequest],
|
||||
) (*connect.Response[pb.ResumeSandboxResponse], error) {
|
||||
sb, err := s.mgr.Resume(ctx, req.Msg.SandboxId, int(req.Msg.TimeoutSec))
|
||||
msg := req.Msg
|
||||
sb, err := s.mgr.Resume(ctx, msg.SandboxId, int(msg.TimeoutSec))
|
||||
if err != nil {
|
||||
return nil, connect.NewError(connect.CodeInternal, err)
|
||||
}
|
||||
|
||||
// Apply template defaults (user, env vars) if provided.
|
||||
if msg.DefaultUser != "" || len(msg.DefaultEnv) > 0 {
|
||||
if err := s.mgr.SetDefaults(ctx, sb.ID, msg.DefaultUser, msg.DefaultEnv); err != nil {
|
||||
slog.Warn("failed to set sandbox defaults on resume", "sandbox", sb.ID, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
return connect.NewResponse(&pb.ResumeSandboxResponse{
|
||||
SandboxId: sb.ID,
|
||||
Status: string(sb.Status),
|
||||
|
||||
@ -7,10 +7,11 @@ import (
|
||||
)
|
||||
|
||||
// ExecContext holds mutable state that persists across recipe steps.
|
||||
// It is initialized empty and updated by ENV and WORKDIR steps.
|
||||
// It is initialized empty and updated by ENV, WORKDIR, and USER steps.
|
||||
type ExecContext struct {
|
||||
WorkDir string
|
||||
EnvVars map[string]string
|
||||
User string // Current unix user for command execution.
|
||||
}
|
||||
|
||||
// This regex matches:
|
||||
@ -25,7 +26,20 @@ var envRegex = regexp.MustCompile(`\$\$|\$\{([a-zA-Z0-9_]*)\}|\$([a-zA-Z0-9_]+)`
|
||||
// If WORKDIR and/or ENV are set, they are prepended as a shell preamble:
|
||||
//
|
||||
// cd '/the/dir' && KEY='val' /bin/sh -c 'original command'
|
||||
//
|
||||
// If USER is set to a non-root user, the entire command is wrapped with su:
|
||||
//
|
||||
// su <user> -s /bin/sh -c '<preamble + command>'
|
||||
func (c *ExecContext) WrappedCommand(cmd string) string {
|
||||
inner := c.innerCommand(cmd)
|
||||
if c.User != "" && c.User != "root" {
|
||||
return "su " + shellescape(c.User) + " -s /bin/sh -c " + shellescape(inner)
|
||||
}
|
||||
return inner
|
||||
}
|
||||
|
||||
// innerCommand builds the command with workdir/env preamble but without user wrapping.
|
||||
func (c *ExecContext) innerCommand(cmd string) string {
|
||||
prefix := c.shellPrefix()
|
||||
if prefix == "" {
|
||||
return cmd
|
||||
@ -42,7 +56,11 @@ func (c *ExecContext) WrappedCommand(cmd string) string {
|
||||
// simultaneously before a healthcheck is evaluated.
|
||||
func (c *ExecContext) StartCommand(cmd string) string {
|
||||
prefix := c.shellPrefix()
|
||||
return prefix + "nohup /bin/sh -c " + shellescape(cmd) + " >/dev/null 2>&1 &"
|
||||
inner := prefix + "nohup /bin/sh -c " + shellescape(cmd) + " >/dev/null 2>&1 &"
|
||||
if c.User != "" && c.User != "root" {
|
||||
return "su " + shellescape(c.User) + " -s /bin/sh -c " + shellescape(inner)
|
||||
}
|
||||
return inner
|
||||
}
|
||||
|
||||
// shellPrefix builds the "cd ... && KEY=val " preamble for a shell command.
|
||||
|
||||
@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@ -16,6 +17,10 @@ import (
|
||||
// explicit --timeout flag.
|
||||
const DefaultStepTimeout = 30 * time.Second
|
||||
|
||||
// BuildFilesDir is the directory inside the sandbox where uploaded build
|
||||
// archives are extracted. COPY instructions reference paths relative to this.
|
||||
const BuildFilesDir = "/tmp/build-files"
|
||||
|
||||
// BuildLogEntry is the per-step record stored in template_builds.logs (JSONB).
|
||||
type BuildLogEntry struct {
|
||||
Step int `json:"step"`
|
||||
@ -32,13 +37,18 @@ type BuildLogEntry struct {
|
||||
// the method on the hostagent Connect RPC client.
|
||||
type ExecFunc func(ctx context.Context, req *connect.Request[pb.ExecRequest]) (*connect.Response[pb.ExecResponse], error)
|
||||
|
||||
// ProgressFunc is called after each step with the current step counter and
|
||||
// accumulated log entries. Used for per-step DB progress updates.
|
||||
type ProgressFunc func(step int, entries []BuildLogEntry)
|
||||
|
||||
// Execute runs steps sequentially against sandboxID using execFn.
|
||||
//
|
||||
// - phase labels the log entries (e.g., "pre-build", "recipe", "post-build").
|
||||
// - startStep is the 1-based offset so entries are globally numbered across phases.
|
||||
// - defaultTimeout applies to RUN steps with no per-step --timeout; 0 → 10 minutes.
|
||||
// - bctx is mutated in place as ENV/WORKDIR steps execute, and carries forward
|
||||
// - bctx is mutated in place as ENV/WORKDIR/USER steps execute, and carries forward
|
||||
// into subsequent phases when the caller passes the same pointer.
|
||||
// - onProgress is called after each step for live progress updates (may be nil).
|
||||
//
|
||||
// Returns all log entries appended during this call, the next step counter
|
||||
// value, and whether all steps succeeded. On false the last entry contains
|
||||
@ -53,6 +63,7 @@ func Execute(
|
||||
defaultTimeout time.Duration,
|
||||
bctx *ExecContext,
|
||||
execFn ExecFunc,
|
||||
onProgress ProgressFunc,
|
||||
) (entries []BuildLogEntry, nextStep int, ok bool) {
|
||||
if defaultTimeout <= 0 {
|
||||
defaultTimeout = 10 * time.Minute
|
||||
@ -72,19 +83,30 @@ func Execute(
|
||||
entries = append(entries, BuildLogEntry{Step: step, Phase: phase, Cmd: st.Raw, Ok: true})
|
||||
|
||||
case KindWORKDIR:
|
||||
bctx.WorkDir = st.Path
|
||||
entries = append(entries, BuildLogEntry{Step: step, Phase: phase, Cmd: st.Raw, Ok: true})
|
||||
|
||||
case KindUSER, KindCOPY:
|
||||
verb := strings.ToUpper(strings.Fields(st.Raw)[0])
|
||||
entries = append(entries, BuildLogEntry{
|
||||
Step: step,
|
||||
Phase: phase,
|
||||
Cmd: st.Raw,
|
||||
Stderr: verb + " is not yet supported",
|
||||
Ok: false,
|
||||
})
|
||||
// Create the directory if it doesn't exist.
|
||||
mkdirEntry := execRawShell(ctx, st.Raw, sandboxID, phase, step, 10*time.Second, execFn,
|
||||
"mkdir -p "+shellescape(st.Path))
|
||||
if !mkdirEntry.Ok {
|
||||
entries = append(entries, mkdirEntry)
|
||||
return entries, step, false
|
||||
}
|
||||
bctx.WorkDir = st.Path
|
||||
mkdirEntry.Ok = true
|
||||
entries = append(entries, mkdirEntry)
|
||||
|
||||
case KindUSER:
|
||||
entry, succeeded := execUser(ctx, st, sandboxID, phase, step, bctx, execFn)
|
||||
entries = append(entries, entry)
|
||||
if !succeeded {
|
||||
return entries, step, false
|
||||
}
|
||||
|
||||
case KindCOPY:
|
||||
entry, succeeded := execCopy(ctx, st, sandboxID, phase, step, bctx, execFn)
|
||||
entries = append(entries, entry)
|
||||
if !succeeded {
|
||||
return entries, step, false
|
||||
}
|
||||
|
||||
case KindSTART:
|
||||
entry, succeeded := execStart(ctx, st, sandboxID, phase, step, bctx, execFn)
|
||||
@ -104,6 +126,10 @@ func Execute(
|
||||
return entries, step, false
|
||||
}
|
||||
}
|
||||
|
||||
if onProgress != nil {
|
||||
onProgress(step, entries)
|
||||
}
|
||||
}
|
||||
return entries, step, true
|
||||
}
|
||||
@ -145,6 +171,106 @@ func execRun(
|
||||
return entry, entry.Ok
|
||||
}
|
||||
|
||||
// execUser creates a unix user (if not exists), grants passwordless sudo,
|
||||
// and updates bctx.User for subsequent steps.
|
||||
func execUser(
|
||||
ctx context.Context,
|
||||
st Step,
|
||||
sandboxID, phase string,
|
||||
step int,
|
||||
bctx *ExecContext,
|
||||
execFn ExecFunc,
|
||||
) (BuildLogEntry, bool) {
|
||||
username := st.Key
|
||||
// Create user if not exists, with home directory and bash shell.
|
||||
// Grant passwordless sudo access (E2B convention).
|
||||
// Uses printf %s to avoid shell injection in the sudoers line.
|
||||
script := fmt.Sprintf(
|
||||
"id %s >/dev/null 2>&1 || (adduser --disabled-password --gecos '' --shell /bin/bash %s && printf '%%s ALL=(ALL) NOPASSWD:ALL\\n' %s >> /etc/sudoers)",
|
||||
shellescape(username), shellescape(username), shellescape(username),
|
||||
)
|
||||
|
||||
entry := execRawShell(ctx, st.Raw, sandboxID, phase, step, 30*time.Second, execFn, script)
|
||||
if entry.Ok {
|
||||
bctx.User = username
|
||||
}
|
||||
return entry, entry.Ok
|
||||
}
|
||||
|
||||
// execCopy copies a file or directory from the build archive (extracted at
|
||||
// BuildFilesDir) to the destination path inside the sandbox. Ownership is
|
||||
// set to the current user from bctx.
|
||||
func execCopy(
|
||||
ctx context.Context,
|
||||
st Step,
|
||||
sandboxID, phase string,
|
||||
step int,
|
||||
bctx *ExecContext,
|
||||
execFn ExecFunc,
|
||||
) (BuildLogEntry, bool) {
|
||||
// Validate source path: must be relative and not escape the archive directory.
|
||||
cleaned := path.Clean(st.Src)
|
||||
if strings.HasPrefix(cleaned, "..") || strings.HasPrefix(cleaned, "/") {
|
||||
return BuildLogEntry{
|
||||
Step: step,
|
||||
Phase: phase,
|
||||
Cmd: st.Raw,
|
||||
Stderr: "COPY source must be a relative path within the archive",
|
||||
}, false
|
||||
}
|
||||
src := BuildFilesDir + "/" + cleaned
|
||||
dst := st.Dst
|
||||
owner := "root"
|
||||
if bctx.User != "" {
|
||||
owner = bctx.User
|
||||
}
|
||||
script := fmt.Sprintf(
|
||||
"cp -r %s %s && chown -R %s:%s %s",
|
||||
shellescape(src), shellescape(dst), shellescape(owner), shellescape(owner), shellescape(dst),
|
||||
)
|
||||
|
||||
entry := execRawShell(ctx, st.Raw, sandboxID, phase, step, 60*time.Second, execFn, script)
|
||||
return entry, entry.Ok
|
||||
}
|
||||
|
||||
// execRawShell runs a shell command directly (as root) without ExecContext
|
||||
// wrapping. Used for internal operations like user creation and file copy.
|
||||
func execRawShell(
|
||||
ctx context.Context,
|
||||
raw, sandboxID, phase string,
|
||||
step int,
|
||||
timeout time.Duration,
|
||||
execFn ExecFunc,
|
||||
shellCmd string,
|
||||
) BuildLogEntry {
|
||||
execCtx, cancel := context.WithTimeout(ctx, timeout)
|
||||
defer cancel()
|
||||
|
||||
start := time.Now()
|
||||
resp, err := execFn(execCtx, connect.NewRequest(&pb.ExecRequest{
|
||||
SandboxId: sandboxID,
|
||||
Cmd: "/bin/sh",
|
||||
Args: []string{"-c", shellCmd},
|
||||
TimeoutSec: int32(timeout.Seconds()),
|
||||
}))
|
||||
|
||||
entry := BuildLogEntry{
|
||||
Step: step,
|
||||
Phase: phase,
|
||||
Cmd: raw,
|
||||
Elapsed: time.Since(start).Milliseconds(),
|
||||
}
|
||||
if err != nil {
|
||||
entry.Stderr = fmt.Sprintf("exec error: %v", err)
|
||||
return entry
|
||||
}
|
||||
entry.Stdout = string(resp.Msg.Stdout)
|
||||
entry.Stderr = string(resp.Msg.Stderr)
|
||||
entry.Exit = resp.Msg.ExitCode
|
||||
entry.Ok = resp.Msg.ExitCode == 0
|
||||
return entry
|
||||
}
|
||||
|
||||
func execStart(
|
||||
ctx context.Context,
|
||||
st Step,
|
||||
|
||||
@ -24,9 +24,11 @@ type Step struct {
|
||||
Raw string // original string, preserved for logging
|
||||
Shell string // KindRUN, KindSTART: the shell command text
|
||||
Timeout time.Duration // KindRUN: 0 means use caller's default
|
||||
Key string // KindENV: variable name
|
||||
Key string // KindENV: variable name; KindUSER: username
|
||||
Value string // KindENV: variable value
|
||||
Path string // KindWORKDIR: directory path
|
||||
Src string // KindCOPY: source path (relative to build archive)
|
||||
Dst string // KindCOPY: destination path inside sandbox
|
||||
}
|
||||
|
||||
// ParseStep parses a single recipe instruction string into a Step.
|
||||
@ -61,9 +63,9 @@ func ParseStep(s string) (Step, error) {
|
||||
case "WORKDIR":
|
||||
return parseWORKDIR(s, rest)
|
||||
case "USER":
|
||||
return Step{Kind: KindUSER, Raw: s}, nil
|
||||
return parseUSER(s, rest)
|
||||
case "COPY":
|
||||
return Step{Kind: KindCOPY, Raw: s}, nil
|
||||
return parseCOPY(s, rest)
|
||||
default:
|
||||
return Step{}, fmt.Errorf("unknown instruction %q (expected RUN, START, ENV, WORKDIR, USER, or COPY)", keyword)
|
||||
}
|
||||
@ -127,3 +129,31 @@ func parseWORKDIR(raw, path string) (Step, error) {
|
||||
}
|
||||
return Step{Kind: KindWORKDIR, Raw: raw, Path: path}, nil
|
||||
}
|
||||
|
||||
func parseUSER(raw, username string) (Step, error) {
|
||||
if username == "" {
|
||||
return Step{}, fmt.Errorf("USER requires a username: %q", raw)
|
||||
}
|
||||
// Validate: alphanumeric, hyphens, underscores only; must start with a letter or underscore.
|
||||
for i, c := range username {
|
||||
if i == 0 && !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_') {
|
||||
return Step{}, fmt.Errorf("USER username must start with a letter or underscore: %q", raw)
|
||||
}
|
||||
if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '-') {
|
||||
return Step{}, fmt.Errorf("USER username contains invalid character %q: %q", string(c), raw)
|
||||
}
|
||||
}
|
||||
return Step{Kind: KindUSER, Raw: raw, Key: username}, nil
|
||||
}
|
||||
|
||||
func parseCOPY(raw, rest string) (Step, error) {
|
||||
if rest == "" {
|
||||
return Step{}, fmt.Errorf("COPY requires <src> <dst>: %q", raw)
|
||||
}
|
||||
src, dst, found := strings.Cut(rest, " ")
|
||||
dst = strings.TrimSpace(dst)
|
||||
if !found || dst == "" {
|
||||
return Step{}, fmt.Errorf("COPY requires <src> <dst>: %q", raw)
|
||||
}
|
||||
return Step{Kind: KindCOPY, Raw: raw, Src: src, Dst: dst}, nil
|
||||
}
|
||||
|
||||
@ -111,16 +111,37 @@ func TestParseStep(t *testing.T) {
|
||||
input: "WORKDIR",
|
||||
wantErr: true,
|
||||
},
|
||||
// USER and COPY stubs
|
||||
// USER
|
||||
{
|
||||
name: "USER stub",
|
||||
name: "USER basic",
|
||||
input: "USER www-data",
|
||||
want: Step{Kind: KindUSER, Raw: "USER www-data"},
|
||||
want: Step{Kind: KindUSER, Raw: "USER www-data", Key: "www-data"},
|
||||
},
|
||||
{
|
||||
name: "COPY stub",
|
||||
name: "USER empty",
|
||||
input: "USER",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "USER invalid chars",
|
||||
input: "USER bad user",
|
||||
wantErr: true,
|
||||
},
|
||||
// COPY
|
||||
{
|
||||
name: "COPY basic",
|
||||
input: "COPY config.yaml /etc/app/config.yaml",
|
||||
want: Step{Kind: KindCOPY, Raw: "COPY config.yaml /etc/app/config.yaml"},
|
||||
want: Step{Kind: KindCOPY, Raw: "COPY config.yaml /etc/app/config.yaml", Src: "config.yaml", Dst: "/etc/app/config.yaml"},
|
||||
},
|
||||
{
|
||||
name: "COPY missing dst",
|
||||
input: "COPY config.yaml",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "COPY empty",
|
||||
input: "COPY",
|
||||
wantErr: true,
|
||||
},
|
||||
// Unknown keyword
|
||||
{
|
||||
|
||||
@ -1223,6 +1223,23 @@ func (m *Manager) GetClient(sandboxID string) (*envdclient.Client, error) {
|
||||
return sb.client, nil
|
||||
}
|
||||
|
||||
// SetDefaults calls envd's PostInit to configure the default user and
|
||||
// environment variables for a running sandbox. This is called by the host
|
||||
// agent after sandbox creation or resume when the template specifies defaults.
|
||||
func (m *Manager) SetDefaults(ctx context.Context, sandboxID, defaultUser string, defaultEnv map[string]string) error {
|
||||
if defaultUser == "" && len(defaultEnv) == 0 {
|
||||
return nil
|
||||
}
|
||||
sb, err := m.get(sandboxID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if sb.Status != models.StatusRunning {
|
||||
return fmt.Errorf("sandbox %s is not running (status: %s)", sandboxID, sb.Status)
|
||||
}
|
||||
return sb.client.PostInitWithDefaults(ctx, defaultUser, defaultEnv)
|
||||
}
|
||||
|
||||
// PtyAttach starts a new PTY process or reconnects to an existing one.
|
||||
// If cmd is non-empty, starts a new process. If empty, reconnects using tag.
|
||||
func (m *Manager) PtyAttach(ctx context.Context, sandboxID, tag, cmd string, args []string, cols, rows uint32, envs map[string]string, cwd string) (<-chan envdclient.PtyEvent, error) {
|
||||
|
||||
@ -27,8 +27,10 @@ const (
|
||||
)
|
||||
|
||||
// preBuildCmds run before the user recipe to prepare the build environment.
|
||||
// apt update runs as root first, then USER switches to wrenn-user for the recipe.
|
||||
var preBuildCmds = []string{
|
||||
"RUN apt update",
|
||||
"USER wrenn-user",
|
||||
}
|
||||
|
||||
// postBuildCmds run after the user recipe to clean up caches and reduce image size.
|
||||
@ -36,6 +38,7 @@ var postBuildCmds = []string{
|
||||
"RUN apt clean",
|
||||
"RUN apt autoremove -y",
|
||||
"RUN rm -rf /var/lib/apt/lists/*",
|
||||
"RUN rm -rf /tmp/build-files /tmp/build-files.*",
|
||||
}
|
||||
|
||||
// buildAgentClient is the subset of the host agent client used by the build worker.
|
||||
@ -43,6 +46,7 @@ type buildAgentClient interface {
|
||||
CreateSandbox(ctx context.Context, req *connect.Request[pb.CreateSandboxRequest]) (*connect.Response[pb.CreateSandboxResponse], error)
|
||||
DestroySandbox(ctx context.Context, req *connect.Request[pb.DestroySandboxRequest]) (*connect.Response[pb.DestroySandboxResponse], error)
|
||||
Exec(ctx context.Context, req *connect.Request[pb.ExecRequest]) (*connect.Response[pb.ExecResponse], error)
|
||||
WriteFile(ctx context.Context, req *connect.Request[pb.WriteFileRequest]) (*connect.Response[pb.WriteFileResponse], error)
|
||||
CreateSnapshot(ctx context.Context, req *connect.Request[pb.CreateSnapshotRequest]) (*connect.Response[pb.CreateSnapshotResponse], error)
|
||||
FlattenRootfs(ctx context.Context, req *connect.Request[pb.FlattenRootfsRequest]) (*connect.Response[pb.FlattenRootfsResponse], error)
|
||||
}
|
||||
@ -56,6 +60,7 @@ type BuildService struct {
|
||||
|
||||
mu sync.Mutex
|
||||
cancelMap map[string]context.CancelFunc // buildID → per-build cancel func
|
||||
filesMap map[string][]byte // buildID → uploaded archive bytes
|
||||
}
|
||||
|
||||
// BuildCreateParams holds the parameters for creating a template build.
|
||||
@ -67,6 +72,27 @@ type BuildCreateParams struct {
|
||||
VCPUs int32
|
||||
MemoryMB int32
|
||||
SkipPrePost bool
|
||||
Archive []byte // Optional tar/tar.gz/zip archive for COPY commands.
|
||||
ArchiveName string // Original filename (used to detect format).
|
||||
}
|
||||
|
||||
// storeArchive stores uploaded archive bytes keyed by build ID for the worker.
|
||||
func (s *BuildService) storeArchive(buildID string, data []byte) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.filesMap == nil {
|
||||
s.filesMap = make(map[string][]byte)
|
||||
}
|
||||
s.filesMap[buildID] = data
|
||||
}
|
||||
|
||||
// takeArchive retrieves and removes stored archive bytes for a build.
|
||||
func (s *BuildService) takeArchive(buildID string) []byte {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
data := s.filesMap[buildID]
|
||||
delete(s.filesMap, buildID)
|
||||
return data
|
||||
}
|
||||
|
||||
// Create inserts a new build record and enqueues it to Redis.
|
||||
@ -117,6 +143,11 @@ func (s *BuildService) Create(ctx context.Context, p BuildCreateParams) (db.Temp
|
||||
return db.TemplateBuild{}, fmt.Errorf("enqueue build: %w", err)
|
||||
}
|
||||
|
||||
// Store archive for the worker if provided.
|
||||
if len(p.Archive) > 0 {
|
||||
s.storeArchive(buildIDStr, p.Archive)
|
||||
}
|
||||
|
||||
return build, nil
|
||||
}
|
||||
|
||||
@ -303,6 +334,16 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
HostID: host.ID,
|
||||
})
|
||||
|
||||
// Upload and extract build archive if provided.
|
||||
archive := s.takeArchive(buildIDStr)
|
||||
if len(archive) > 0 {
|
||||
if err := s.uploadAndExtractArchive(buildCtx, agent, sandboxIDStr, archive, buildIDStr); err != nil {
|
||||
s.destroySandbox(buildCtx, agent, sandboxIDStr)
|
||||
s.failBuild(buildCtx, buildID, fmt.Sprintf("archive upload failed: %v", err))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Parse recipe steps. preBuildCmds and postBuildCmds are hardcoded and always
|
||||
// valid; panic on error is appropriate here since it would be a programmer mistake.
|
||||
preBuildSteps, err := recipe.ParseRecipe(preBuildCmds)
|
||||
@ -331,10 +372,18 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
"HOME": "/root",
|
||||
}
|
||||
}
|
||||
bctx := &recipe.ExecContext{EnvVars: envVars}
|
||||
bctx := &recipe.ExecContext{EnvVars: envVars, User: "root"}
|
||||
|
||||
// Per-step progress callback for live UI updates.
|
||||
progressFn := func(currentStep int, allEntries []recipe.BuildLogEntry) {
|
||||
s.updateLogs(buildCtx, buildID, currentStep, allEntries)
|
||||
}
|
||||
|
||||
runPhase := func(phase string, steps []recipe.Step, defaultTimeout time.Duration) bool {
|
||||
newEntries, nextStep, ok := recipe.Execute(buildCtx, phase, steps, sandboxIDStr, step, defaultTimeout, bctx, agent.Exec)
|
||||
newEntries, nextStep, ok := recipe.Execute(buildCtx, phase, steps, sandboxIDStr, step, defaultTimeout, bctx, agent.Exec, func(currentStep int, phaseEntries []recipe.BuildLogEntry) {
|
||||
// Progress callback: combine prior logs with current phase entries.
|
||||
progressFn(currentStep, append(logs, phaseEntries...))
|
||||
})
|
||||
logs = append(logs, newEntries...)
|
||||
step = nextStep
|
||||
s.updateLogs(buildCtx, buildID, step, logs)
|
||||
@ -344,24 +393,38 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
if buildCtx.Err() != nil {
|
||||
return false
|
||||
}
|
||||
reason := "unknown error"
|
||||
if len(newEntries) > 0 {
|
||||
last := newEntries[len(newEntries)-1]
|
||||
reason := last.Stderr
|
||||
reason = last.Stderr
|
||||
if reason == "" {
|
||||
reason = fmt.Sprintf("exit code %d", last.Exit)
|
||||
}
|
||||
}
|
||||
s.failBuild(buildCtx, buildID, fmt.Sprintf("%s step %d failed: %s", phase, step, reason))
|
||||
}
|
||||
return ok
|
||||
}
|
||||
|
||||
// Phase 1: Pre-build (as root) — creates wrenn-user, updates apt.
|
||||
if !build.SkipPrePost {
|
||||
if !runPhase("pre-build", preBuildSteps, 0) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Phase 2: User recipe — starts as wrenn-user (set by USER in pre-build)
|
||||
// or root if skip_pre_post.
|
||||
if !runPhase("recipe", userRecipeSteps, buildCommandTimeout) {
|
||||
return
|
||||
}
|
||||
|
||||
// Capture the final user and env vars as template defaults.
|
||||
templateDefaultUser := bctx.User
|
||||
templateDefaultEnv := bctx.EnvVars
|
||||
|
||||
// Phase 3: Post-build (as root) — cleanup.
|
||||
bctx.User = "root"
|
||||
if !build.SkipPrePost {
|
||||
if !runPhase("post-build", postBuildSteps, 0) {
|
||||
return
|
||||
@ -430,6 +493,12 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
templateType = "snapshot"
|
||||
}
|
||||
|
||||
// Serialize env vars for DB storage.
|
||||
defaultEnvJSON, err := json.Marshal(templateDefaultEnv)
|
||||
if err != nil {
|
||||
defaultEnvJSON = []byte("{}")
|
||||
}
|
||||
|
||||
if _, err := s.DB.InsertTemplate(buildCtx, db.InsertTemplateParams{
|
||||
ID: build.TemplateID,
|
||||
Name: build.Name,
|
||||
@ -438,11 +507,20 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
MemoryMb: build.MemoryMb,
|
||||
SizeBytes: sizeBytes,
|
||||
TeamID: id.PlatformTeamID,
|
||||
DefaultUser: templateDefaultUser,
|
||||
DefaultEnv: defaultEnvJSON,
|
||||
}); err != nil {
|
||||
log.Error("failed to insert template record", "error", err)
|
||||
// Build succeeded on disk, just DB record failed — don't mark as failed.
|
||||
}
|
||||
|
||||
// Record defaults on the build record for inspection.
|
||||
_ = s.DB.UpdateBuildDefaults(buildCtx, db.UpdateBuildDefaultsParams{
|
||||
ID: buildID,
|
||||
DefaultUser: templateDefaultUser,
|
||||
DefaultEnv: defaultEnvJSON,
|
||||
})
|
||||
|
||||
// For CreateSnapshot, the sandbox is already destroyed by the snapshot process.
|
||||
// For FlattenRootfs, the sandbox is already destroyed by the flatten process.
|
||||
// No additional destroy needed.
|
||||
@ -603,3 +681,61 @@ func parseSandboxEnv(raw string) map[string]string {
|
||||
|
||||
return envVars
|
||||
}
|
||||
|
||||
// uploadAndExtractArchive writes the archive to the sandbox and extracts it
|
||||
// to /tmp/build-files/. Detects format from content (tar.gz, tar, zip).
|
||||
func (s *BuildService) uploadAndExtractArchive(
|
||||
ctx context.Context,
|
||||
agent buildAgentClient,
|
||||
sandboxID string,
|
||||
archive []byte,
|
||||
buildID string,
|
||||
) error {
|
||||
// Detect archive type from magic bytes.
|
||||
var archivePath, extractCmd string
|
||||
switch {
|
||||
case len(archive) >= 2 && archive[0] == 0x1f && archive[1] == 0x8b:
|
||||
// gzip (tar.gz)
|
||||
archivePath = "/tmp/build-files.tar.gz"
|
||||
extractCmd = "mkdir -p /tmp/build-files && tar xzf /tmp/build-files.tar.gz -C /tmp/build-files"
|
||||
case len(archive) >= 4 && string(archive[:4]) == "PK\x03\x04":
|
||||
// zip
|
||||
archivePath = "/tmp/build-files.zip"
|
||||
extractCmd = "mkdir -p /tmp/build-files && unzip -o /tmp/build-files.zip -d /tmp/build-files"
|
||||
case len(archive) >= 262 && string(archive[257:262]) == "ustar":
|
||||
// tar (ustar magic at offset 257)
|
||||
archivePath = "/tmp/build-files.tar"
|
||||
extractCmd = "mkdir -p /tmp/build-files && tar xf /tmp/build-files.tar -C /tmp/build-files"
|
||||
default:
|
||||
// Fallback: try tar.gz
|
||||
archivePath = "/tmp/build-files.tar.gz"
|
||||
extractCmd = "mkdir -p /tmp/build-files && tar xzf /tmp/build-files.tar.gz -C /tmp/build-files"
|
||||
}
|
||||
|
||||
slog.Info("uploading build archive", "build_id", buildID, "path", archivePath, "size", len(archive))
|
||||
|
||||
// Write archive to VM.
|
||||
if _, err := agent.WriteFile(ctx, connect.NewRequest(&pb.WriteFileRequest{
|
||||
SandboxId: sandboxID,
|
||||
Path: archivePath,
|
||||
Content: archive,
|
||||
})); err != nil {
|
||||
return fmt.Errorf("write archive: %w", err)
|
||||
}
|
||||
|
||||
// Extract.
|
||||
resp, err := agent.Exec(ctx, connect.NewRequest(&pb.ExecRequest{
|
||||
SandboxId: sandboxID,
|
||||
Cmd: "/bin/sh",
|
||||
Args: []string{"-c", extractCmd},
|
||||
TimeoutSec: 120,
|
||||
}))
|
||||
if err != nil {
|
||||
return fmt.Errorf("extract archive: %w", err)
|
||||
}
|
||||
if resp.Msg.ExitCode != 0 {
|
||||
return fmt.Errorf("extract archive: exit code %d: %s", resp.Msg.ExitCode, string(resp.Msg.Stderr))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"time"
|
||||
@ -85,6 +86,8 @@ func (s *SandboxService) Create(ctx context.Context, p SandboxCreateParams) (db.
|
||||
// Resolve template name → (teamID, templateID).
|
||||
templateTeamID := id.PlatformTeamID
|
||||
templateID := id.MinimalTemplateID
|
||||
var templateDefaultUser string
|
||||
var templateDefaultEnv map[string]string
|
||||
if p.Template != "minimal" {
|
||||
tmpl, err := s.DB.GetTemplateByTeam(ctx, db.GetTemplateByTeamParams{Name: p.Template, TeamID: p.TeamID})
|
||||
if err != nil {
|
||||
@ -92,6 +95,11 @@ func (s *SandboxService) Create(ctx context.Context, p SandboxCreateParams) (db.
|
||||
}
|
||||
templateTeamID = tmpl.TeamID
|
||||
templateID = tmpl.ID
|
||||
templateDefaultUser = tmpl.DefaultUser
|
||||
// Parse default_env JSONB into a map.
|
||||
if len(tmpl.DefaultEnv) > 0 {
|
||||
_ = json.Unmarshal(tmpl.DefaultEnv, &templateDefaultEnv)
|
||||
}
|
||||
// If the template is a snapshot, use its baked-in vcpus/memory.
|
||||
if tmpl.Type == "snapshot" {
|
||||
p.VCPUs = tmpl.Vcpus
|
||||
@ -148,6 +156,8 @@ func (s *SandboxService) Create(ctx context.Context, p SandboxCreateParams) (db.
|
||||
MemoryMb: p.MemoryMB,
|
||||
TimeoutSec: p.TimeoutSec,
|
||||
DiskSizeMb: p.DiskSizeMB,
|
||||
DefaultUser: templateDefaultUser,
|
||||
DefaultEnv: templateDefaultEnv,
|
||||
}))
|
||||
if err != nil {
|
||||
if _, dbErr := s.DB.UpdateSandboxStatus(ctx, db.UpdateSandboxStatusParams{
|
||||
@ -249,9 +259,24 @@ func (s *SandboxService) Resume(ctx context.Context, sandboxID, teamID pgtype.UU
|
||||
|
||||
sandboxIDStr := id.FormatSandboxID(sandboxID)
|
||||
|
||||
// Look up template defaults for resume.
|
||||
var resumeDefaultUser string
|
||||
var resumeDefaultEnv map[string]string
|
||||
if sb.TemplateID.Valid {
|
||||
tmpl, err := s.DB.GetTemplate(ctx, sb.TemplateID)
|
||||
if err == nil {
|
||||
resumeDefaultUser = tmpl.DefaultUser
|
||||
if len(tmpl.DefaultEnv) > 0 {
|
||||
_ = json.Unmarshal(tmpl.DefaultEnv, &resumeDefaultEnv)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resp, err := agent.ResumeSandbox(ctx, connect.NewRequest(&pb.ResumeSandboxRequest{
|
||||
SandboxId: sandboxIDStr,
|
||||
TimeoutSec: sb.TimeoutSec,
|
||||
DefaultUser: resumeDefaultUser,
|
||||
DefaultEnv: resumeDefaultEnv,
|
||||
}))
|
||||
if err != nil {
|
||||
return db.Sandbox{}, fmt.Errorf("agent resume: %w", err)
|
||||
|
||||
@ -41,6 +41,10 @@ type CreateSandboxRequest struct {
|
||||
TeamId string `protobuf:"bytes,7,opt,name=team_id,json=teamId,proto3" json:"team_id,omitempty"`
|
||||
// Template UUID (hex string). Both zeros + team zeros = "minimal" sentinel.
|
||||
TemplateId string `protobuf:"bytes,8,opt,name=template_id,json=templateId,proto3" json:"template_id,omitempty"`
|
||||
// Default unix user for the sandbox (set in envd via PostInit).
|
||||
DefaultUser string `protobuf:"bytes,9,opt,name=default_user,json=defaultUser,proto3" json:"default_user,omitempty"`
|
||||
// Default environment variables (set in envd via PostInit).
|
||||
DefaultEnv map[string]string `protobuf:"bytes,10,rep,name=default_env,json=defaultEnv,proto3" json:"default_env,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
unknownFields protoimpl.UnknownFields
|
||||
sizeCache protoimpl.SizeCache
|
||||
}
|
||||
@ -131,6 +135,20 @@ func (x *CreateSandboxRequest) GetTemplateId() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *CreateSandboxRequest) GetDefaultUser() string {
|
||||
if x != nil {
|
||||
return x.DefaultUser
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *CreateSandboxRequest) GetDefaultEnv() map[string]string {
|
||||
if x != nil {
|
||||
return x.DefaultEnv
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type CreateSandboxResponse struct {
|
||||
state protoimpl.MessageState `protogen:"open.v1"`
|
||||
SandboxId string `protobuf:"bytes,1,opt,name=sandbox_id,json=sandboxId,proto3" json:"sandbox_id,omitempty"`
|
||||
@ -357,6 +375,10 @@ type ResumeSandboxRequest struct {
|
||||
// TTL in seconds restored from the DB so the reaper can auto-pause
|
||||
// the sandbox again after inactivity. 0 means no auto-pause.
|
||||
TimeoutSec int32 `protobuf:"varint,2,opt,name=timeout_sec,json=timeoutSec,proto3" json:"timeout_sec,omitempty"`
|
||||
// Default unix user for the sandbox (set in envd via PostInit on resume).
|
||||
DefaultUser string `protobuf:"bytes,3,opt,name=default_user,json=defaultUser,proto3" json:"default_user,omitempty"`
|
||||
// Default environment variables (set in envd via PostInit on resume).
|
||||
DefaultEnv map[string]string `protobuf:"bytes,4,rep,name=default_env,json=defaultEnv,proto3" json:"default_env,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
unknownFields protoimpl.UnknownFields
|
||||
sizeCache protoimpl.SizeCache
|
||||
}
|
||||
@ -405,6 +427,20 @@ func (x *ResumeSandboxRequest) GetTimeoutSec() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ResumeSandboxRequest) GetDefaultUser() string {
|
||||
if x != nil {
|
||||
return x.DefaultUser
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *ResumeSandboxRequest) GetDefaultEnv() map[string]string {
|
||||
if x != nil {
|
||||
return x.DefaultEnv
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type ResumeSandboxResponse struct {
|
||||
state protoimpl.MessageState `protogen:"open.v1"`
|
||||
SandboxId string `protobuf:"bytes,1,opt,name=sandbox_id,json=sandboxId,proto3" json:"sandbox_id,omitempty"`
|
||||
@ -3429,7 +3465,7 @@ var File_hostagent_proto protoreflect.FileDescriptor
|
||||
|
||||
const file_hostagent_proto_rawDesc = "" +
|
||||
"\n" +
|
||||
"\x0fhostagent.proto\x12\fhostagent.v1\"\x81\x02\n" +
|
||||
"\x0fhostagent.proto\x12\fhostagent.v1\"\xb8\x03\n" +
|
||||
"\x14CreateSandboxRequest\x12\x1d\n" +
|
||||
"\n" +
|
||||
"sandbox_id\x18\x05 \x01(\tR\tsandboxId\x12\x1a\n" +
|
||||
@ -3442,7 +3478,14 @@ const file_hostagent_proto_rawDesc = "" +
|
||||
"diskSizeMb\x12\x17\n" +
|
||||
"\ateam_id\x18\a \x01(\tR\x06teamId\x12\x1f\n" +
|
||||
"\vtemplate_id\x18\b \x01(\tR\n" +
|
||||
"templateId\"g\n" +
|
||||
"templateId\x12!\n" +
|
||||
"\fdefault_user\x18\t \x01(\tR\vdefaultUser\x12S\n" +
|
||||
"\vdefault_env\x18\n" +
|
||||
" \x03(\v22.hostagent.v1.CreateSandboxRequest.DefaultEnvEntryR\n" +
|
||||
"defaultEnv\x1a=\n" +
|
||||
"\x0fDefaultEnvEntry\x12\x10\n" +
|
||||
"\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" +
|
||||
"\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"g\n" +
|
||||
"\x15CreateSandboxResponse\x12\x1d\n" +
|
||||
"\n" +
|
||||
"sandbox_id\x18\x01 \x01(\tR\tsandboxId\x12\x16\n" +
|
||||
@ -3455,12 +3498,18 @@ const file_hostagent_proto_rawDesc = "" +
|
||||
"\x13PauseSandboxRequest\x12\x1d\n" +
|
||||
"\n" +
|
||||
"sandbox_id\x18\x01 \x01(\tR\tsandboxId\"\x16\n" +
|
||||
"\x14PauseSandboxResponse\"V\n" +
|
||||
"\x14PauseSandboxResponse\"\x8d\x02\n" +
|
||||
"\x14ResumeSandboxRequest\x12\x1d\n" +
|
||||
"\n" +
|
||||
"sandbox_id\x18\x01 \x01(\tR\tsandboxId\x12\x1f\n" +
|
||||
"\vtimeout_sec\x18\x02 \x01(\x05R\n" +
|
||||
"timeoutSec\"g\n" +
|
||||
"timeoutSec\x12!\n" +
|
||||
"\fdefault_user\x18\x03 \x01(\tR\vdefaultUser\x12S\n" +
|
||||
"\vdefault_env\x18\x04 \x03(\v22.hostagent.v1.ResumeSandboxRequest.DefaultEnvEntryR\n" +
|
||||
"defaultEnv\x1a=\n" +
|
||||
"\x0fDefaultEnvEntry\x12\x10\n" +
|
||||
"\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" +
|
||||
"\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"g\n" +
|
||||
"\x15ResumeSandboxResponse\x12\x1d\n" +
|
||||
"\n" +
|
||||
"sandbox_id\x18\x01 \x01(\tR\tsandboxId\x12\x16\n" +
|
||||
@ -3719,7 +3768,7 @@ func file_hostagent_proto_rawDescGZIP() []byte {
|
||||
return file_hostagent_proto_rawDescData
|
||||
}
|
||||
|
||||
var file_hostagent_proto_msgTypes = make([]protoimpl.MessageInfo, 61)
|
||||
var file_hostagent_proto_msgTypes = make([]protoimpl.MessageInfo, 63)
|
||||
var file_hostagent_proto_goTypes = []any{
|
||||
(*CreateSandboxRequest)(nil), // 0: hostagent.v1.CreateSandboxRequest
|
||||
(*CreateSandboxResponse)(nil), // 1: hostagent.v1.CreateSandboxResponse
|
||||
@ -3781,79 +3830,83 @@ var file_hostagent_proto_goTypes = []any{
|
||||
(*PtyResizeResponse)(nil), // 57: hostagent.v1.PtyResizeResponse
|
||||
(*PtyKillRequest)(nil), // 58: hostagent.v1.PtyKillRequest
|
||||
(*PtyKillResponse)(nil), // 59: hostagent.v1.PtyKillResponse
|
||||
nil, // 60: hostagent.v1.PtyAttachRequest.EnvsEntry
|
||||
nil, // 60: hostagent.v1.CreateSandboxRequest.DefaultEnvEntry
|
||||
nil, // 61: hostagent.v1.ResumeSandboxRequest.DefaultEnvEntry
|
||||
nil, // 62: hostagent.v1.PtyAttachRequest.EnvsEntry
|
||||
}
|
||||
var file_hostagent_proto_depIdxs = []int32{
|
||||
16, // 0: hostagent.v1.ListSandboxesResponse.sandboxes:type_name -> hostagent.v1.SandboxInfo
|
||||
23, // 1: hostagent.v1.ExecStreamResponse.start:type_name -> hostagent.v1.ExecStreamStart
|
||||
24, // 2: hostagent.v1.ExecStreamResponse.data:type_name -> hostagent.v1.ExecStreamData
|
||||
25, // 3: hostagent.v1.ExecStreamResponse.end:type_name -> hostagent.v1.ExecStreamEnd
|
||||
27, // 4: hostagent.v1.WriteFileStreamRequest.meta:type_name -> hostagent.v1.WriteFileStreamMeta
|
||||
33, // 5: hostagent.v1.ListDirResponse.entries:type_name -> hostagent.v1.FileEntry
|
||||
33, // 6: hostagent.v1.MakeDirResponse.entry:type_name -> hostagent.v1.FileEntry
|
||||
42, // 7: hostagent.v1.GetSandboxMetricsResponse.points:type_name -> hostagent.v1.MetricPoint
|
||||
42, // 8: hostagent.v1.FlushSandboxMetricsResponse.points_10m:type_name -> hostagent.v1.MetricPoint
|
||||
42, // 9: hostagent.v1.FlushSandboxMetricsResponse.points_2h:type_name -> hostagent.v1.MetricPoint
|
||||
42, // 10: hostagent.v1.FlushSandboxMetricsResponse.points_24h:type_name -> hostagent.v1.MetricPoint
|
||||
60, // 11: hostagent.v1.PtyAttachRequest.envs:type_name -> hostagent.v1.PtyAttachRequest.EnvsEntry
|
||||
51, // 12: hostagent.v1.PtyAttachResponse.started:type_name -> hostagent.v1.PtyStarted
|
||||
52, // 13: hostagent.v1.PtyAttachResponse.output:type_name -> hostagent.v1.PtyOutput
|
||||
53, // 14: hostagent.v1.PtyAttachResponse.exited:type_name -> hostagent.v1.PtyExited
|
||||
0, // 15: hostagent.v1.HostAgentService.CreateSandbox:input_type -> hostagent.v1.CreateSandboxRequest
|
||||
2, // 16: hostagent.v1.HostAgentService.DestroySandbox:input_type -> hostagent.v1.DestroySandboxRequest
|
||||
4, // 17: hostagent.v1.HostAgentService.PauseSandbox:input_type -> hostagent.v1.PauseSandboxRequest
|
||||
6, // 18: hostagent.v1.HostAgentService.ResumeSandbox:input_type -> hostagent.v1.ResumeSandboxRequest
|
||||
12, // 19: hostagent.v1.HostAgentService.Exec:input_type -> hostagent.v1.ExecRequest
|
||||
14, // 20: hostagent.v1.HostAgentService.ListSandboxes:input_type -> hostagent.v1.ListSandboxesRequest
|
||||
17, // 21: hostagent.v1.HostAgentService.WriteFile:input_type -> hostagent.v1.WriteFileRequest
|
||||
19, // 22: hostagent.v1.HostAgentService.ReadFile:input_type -> hostagent.v1.ReadFileRequest
|
||||
31, // 23: hostagent.v1.HostAgentService.ListDir:input_type -> hostagent.v1.ListDirRequest
|
||||
34, // 24: hostagent.v1.HostAgentService.MakeDir:input_type -> hostagent.v1.MakeDirRequest
|
||||
36, // 25: hostagent.v1.HostAgentService.RemovePath:input_type -> hostagent.v1.RemovePathRequest
|
||||
8, // 26: hostagent.v1.HostAgentService.CreateSnapshot:input_type -> hostagent.v1.CreateSnapshotRequest
|
||||
10, // 27: hostagent.v1.HostAgentService.DeleteSnapshot:input_type -> hostagent.v1.DeleteSnapshotRequest
|
||||
21, // 28: hostagent.v1.HostAgentService.ExecStream:input_type -> hostagent.v1.ExecStreamRequest
|
||||
26, // 29: hostagent.v1.HostAgentService.WriteFileStream:input_type -> hostagent.v1.WriteFileStreamRequest
|
||||
29, // 30: hostagent.v1.HostAgentService.ReadFileStream:input_type -> hostagent.v1.ReadFileStreamRequest
|
||||
38, // 31: hostagent.v1.HostAgentService.PingSandbox:input_type -> hostagent.v1.PingSandboxRequest
|
||||
40, // 32: hostagent.v1.HostAgentService.Terminate:input_type -> hostagent.v1.TerminateRequest
|
||||
43, // 33: hostagent.v1.HostAgentService.GetSandboxMetrics:input_type -> hostagent.v1.GetSandboxMetricsRequest
|
||||
45, // 34: hostagent.v1.HostAgentService.FlushSandboxMetrics:input_type -> hostagent.v1.FlushSandboxMetricsRequest
|
||||
47, // 35: hostagent.v1.HostAgentService.FlattenRootfs:input_type -> hostagent.v1.FlattenRootfsRequest
|
||||
49, // 36: hostagent.v1.HostAgentService.PtyAttach:input_type -> hostagent.v1.PtyAttachRequest
|
||||
54, // 37: hostagent.v1.HostAgentService.PtySendInput:input_type -> hostagent.v1.PtySendInputRequest
|
||||
56, // 38: hostagent.v1.HostAgentService.PtyResize:input_type -> hostagent.v1.PtyResizeRequest
|
||||
58, // 39: hostagent.v1.HostAgentService.PtyKill:input_type -> hostagent.v1.PtyKillRequest
|
||||
1, // 40: hostagent.v1.HostAgentService.CreateSandbox:output_type -> hostagent.v1.CreateSandboxResponse
|
||||
3, // 41: hostagent.v1.HostAgentService.DestroySandbox:output_type -> hostagent.v1.DestroySandboxResponse
|
||||
5, // 42: hostagent.v1.HostAgentService.PauseSandbox:output_type -> hostagent.v1.PauseSandboxResponse
|
||||
7, // 43: hostagent.v1.HostAgentService.ResumeSandbox:output_type -> hostagent.v1.ResumeSandboxResponse
|
||||
13, // 44: hostagent.v1.HostAgentService.Exec:output_type -> hostagent.v1.ExecResponse
|
||||
15, // 45: hostagent.v1.HostAgentService.ListSandboxes:output_type -> hostagent.v1.ListSandboxesResponse
|
||||
18, // 46: hostagent.v1.HostAgentService.WriteFile:output_type -> hostagent.v1.WriteFileResponse
|
||||
20, // 47: hostagent.v1.HostAgentService.ReadFile:output_type -> hostagent.v1.ReadFileResponse
|
||||
32, // 48: hostagent.v1.HostAgentService.ListDir:output_type -> hostagent.v1.ListDirResponse
|
||||
35, // 49: hostagent.v1.HostAgentService.MakeDir:output_type -> hostagent.v1.MakeDirResponse
|
||||
37, // 50: hostagent.v1.HostAgentService.RemovePath:output_type -> hostagent.v1.RemovePathResponse
|
||||
9, // 51: hostagent.v1.HostAgentService.CreateSnapshot:output_type -> hostagent.v1.CreateSnapshotResponse
|
||||
11, // 52: hostagent.v1.HostAgentService.DeleteSnapshot:output_type -> hostagent.v1.DeleteSnapshotResponse
|
||||
22, // 53: hostagent.v1.HostAgentService.ExecStream:output_type -> hostagent.v1.ExecStreamResponse
|
||||
28, // 54: hostagent.v1.HostAgentService.WriteFileStream:output_type -> hostagent.v1.WriteFileStreamResponse
|
||||
30, // 55: hostagent.v1.HostAgentService.ReadFileStream:output_type -> hostagent.v1.ReadFileStreamResponse
|
||||
39, // 56: hostagent.v1.HostAgentService.PingSandbox:output_type -> hostagent.v1.PingSandboxResponse
|
||||
41, // 57: hostagent.v1.HostAgentService.Terminate:output_type -> hostagent.v1.TerminateResponse
|
||||
44, // 58: hostagent.v1.HostAgentService.GetSandboxMetrics:output_type -> hostagent.v1.GetSandboxMetricsResponse
|
||||
46, // 59: hostagent.v1.HostAgentService.FlushSandboxMetrics:output_type -> hostagent.v1.FlushSandboxMetricsResponse
|
||||
48, // 60: hostagent.v1.HostAgentService.FlattenRootfs:output_type -> hostagent.v1.FlattenRootfsResponse
|
||||
50, // 61: hostagent.v1.HostAgentService.PtyAttach:output_type -> hostagent.v1.PtyAttachResponse
|
||||
55, // 62: hostagent.v1.HostAgentService.PtySendInput:output_type -> hostagent.v1.PtySendInputResponse
|
||||
57, // 63: hostagent.v1.HostAgentService.PtyResize:output_type -> hostagent.v1.PtyResizeResponse
|
||||
59, // 64: hostagent.v1.HostAgentService.PtyKill:output_type -> hostagent.v1.PtyKillResponse
|
||||
40, // [40:65] is the sub-list for method output_type
|
||||
15, // [15:40] is the sub-list for method input_type
|
||||
15, // [15:15] is the sub-list for extension type_name
|
||||
15, // [15:15] is the sub-list for extension extendee
|
||||
0, // [0:15] is the sub-list for field type_name
|
||||
60, // 0: hostagent.v1.CreateSandboxRequest.default_env:type_name -> hostagent.v1.CreateSandboxRequest.DefaultEnvEntry
|
||||
61, // 1: hostagent.v1.ResumeSandboxRequest.default_env:type_name -> hostagent.v1.ResumeSandboxRequest.DefaultEnvEntry
|
||||
16, // 2: hostagent.v1.ListSandboxesResponse.sandboxes:type_name -> hostagent.v1.SandboxInfo
|
||||
23, // 3: hostagent.v1.ExecStreamResponse.start:type_name -> hostagent.v1.ExecStreamStart
|
||||
24, // 4: hostagent.v1.ExecStreamResponse.data:type_name -> hostagent.v1.ExecStreamData
|
||||
25, // 5: hostagent.v1.ExecStreamResponse.end:type_name -> hostagent.v1.ExecStreamEnd
|
||||
27, // 6: hostagent.v1.WriteFileStreamRequest.meta:type_name -> hostagent.v1.WriteFileStreamMeta
|
||||
33, // 7: hostagent.v1.ListDirResponse.entries:type_name -> hostagent.v1.FileEntry
|
||||
33, // 8: hostagent.v1.MakeDirResponse.entry:type_name -> hostagent.v1.FileEntry
|
||||
42, // 9: hostagent.v1.GetSandboxMetricsResponse.points:type_name -> hostagent.v1.MetricPoint
|
||||
42, // 10: hostagent.v1.FlushSandboxMetricsResponse.points_10m:type_name -> hostagent.v1.MetricPoint
|
||||
42, // 11: hostagent.v1.FlushSandboxMetricsResponse.points_2h:type_name -> hostagent.v1.MetricPoint
|
||||
42, // 12: hostagent.v1.FlushSandboxMetricsResponse.points_24h:type_name -> hostagent.v1.MetricPoint
|
||||
62, // 13: hostagent.v1.PtyAttachRequest.envs:type_name -> hostagent.v1.PtyAttachRequest.EnvsEntry
|
||||
51, // 14: hostagent.v1.PtyAttachResponse.started:type_name -> hostagent.v1.PtyStarted
|
||||
52, // 15: hostagent.v1.PtyAttachResponse.output:type_name -> hostagent.v1.PtyOutput
|
||||
53, // 16: hostagent.v1.PtyAttachResponse.exited:type_name -> hostagent.v1.PtyExited
|
||||
0, // 17: hostagent.v1.HostAgentService.CreateSandbox:input_type -> hostagent.v1.CreateSandboxRequest
|
||||
2, // 18: hostagent.v1.HostAgentService.DestroySandbox:input_type -> hostagent.v1.DestroySandboxRequest
|
||||
4, // 19: hostagent.v1.HostAgentService.PauseSandbox:input_type -> hostagent.v1.PauseSandboxRequest
|
||||
6, // 20: hostagent.v1.HostAgentService.ResumeSandbox:input_type -> hostagent.v1.ResumeSandboxRequest
|
||||
12, // 21: hostagent.v1.HostAgentService.Exec:input_type -> hostagent.v1.ExecRequest
|
||||
14, // 22: hostagent.v1.HostAgentService.ListSandboxes:input_type -> hostagent.v1.ListSandboxesRequest
|
||||
17, // 23: hostagent.v1.HostAgentService.WriteFile:input_type -> hostagent.v1.WriteFileRequest
|
||||
19, // 24: hostagent.v1.HostAgentService.ReadFile:input_type -> hostagent.v1.ReadFileRequest
|
||||
31, // 25: hostagent.v1.HostAgentService.ListDir:input_type -> hostagent.v1.ListDirRequest
|
||||
34, // 26: hostagent.v1.HostAgentService.MakeDir:input_type -> hostagent.v1.MakeDirRequest
|
||||
36, // 27: hostagent.v1.HostAgentService.RemovePath:input_type -> hostagent.v1.RemovePathRequest
|
||||
8, // 28: hostagent.v1.HostAgentService.CreateSnapshot:input_type -> hostagent.v1.CreateSnapshotRequest
|
||||
10, // 29: hostagent.v1.HostAgentService.DeleteSnapshot:input_type -> hostagent.v1.DeleteSnapshotRequest
|
||||
21, // 30: hostagent.v1.HostAgentService.ExecStream:input_type -> hostagent.v1.ExecStreamRequest
|
||||
26, // 31: hostagent.v1.HostAgentService.WriteFileStream:input_type -> hostagent.v1.WriteFileStreamRequest
|
||||
29, // 32: hostagent.v1.HostAgentService.ReadFileStream:input_type -> hostagent.v1.ReadFileStreamRequest
|
||||
38, // 33: hostagent.v1.HostAgentService.PingSandbox:input_type -> hostagent.v1.PingSandboxRequest
|
||||
40, // 34: hostagent.v1.HostAgentService.Terminate:input_type -> hostagent.v1.TerminateRequest
|
||||
43, // 35: hostagent.v1.HostAgentService.GetSandboxMetrics:input_type -> hostagent.v1.GetSandboxMetricsRequest
|
||||
45, // 36: hostagent.v1.HostAgentService.FlushSandboxMetrics:input_type -> hostagent.v1.FlushSandboxMetricsRequest
|
||||
47, // 37: hostagent.v1.HostAgentService.FlattenRootfs:input_type -> hostagent.v1.FlattenRootfsRequest
|
||||
49, // 38: hostagent.v1.HostAgentService.PtyAttach:input_type -> hostagent.v1.PtyAttachRequest
|
||||
54, // 39: hostagent.v1.HostAgentService.PtySendInput:input_type -> hostagent.v1.PtySendInputRequest
|
||||
56, // 40: hostagent.v1.HostAgentService.PtyResize:input_type -> hostagent.v1.PtyResizeRequest
|
||||
58, // 41: hostagent.v1.HostAgentService.PtyKill:input_type -> hostagent.v1.PtyKillRequest
|
||||
1, // 42: hostagent.v1.HostAgentService.CreateSandbox:output_type -> hostagent.v1.CreateSandboxResponse
|
||||
3, // 43: hostagent.v1.HostAgentService.DestroySandbox:output_type -> hostagent.v1.DestroySandboxResponse
|
||||
5, // 44: hostagent.v1.HostAgentService.PauseSandbox:output_type -> hostagent.v1.PauseSandboxResponse
|
||||
7, // 45: hostagent.v1.HostAgentService.ResumeSandbox:output_type -> hostagent.v1.ResumeSandboxResponse
|
||||
13, // 46: hostagent.v1.HostAgentService.Exec:output_type -> hostagent.v1.ExecResponse
|
||||
15, // 47: hostagent.v1.HostAgentService.ListSandboxes:output_type -> hostagent.v1.ListSandboxesResponse
|
||||
18, // 48: hostagent.v1.HostAgentService.WriteFile:output_type -> hostagent.v1.WriteFileResponse
|
||||
20, // 49: hostagent.v1.HostAgentService.ReadFile:output_type -> hostagent.v1.ReadFileResponse
|
||||
32, // 50: hostagent.v1.HostAgentService.ListDir:output_type -> hostagent.v1.ListDirResponse
|
||||
35, // 51: hostagent.v1.HostAgentService.MakeDir:output_type -> hostagent.v1.MakeDirResponse
|
||||
37, // 52: hostagent.v1.HostAgentService.RemovePath:output_type -> hostagent.v1.RemovePathResponse
|
||||
9, // 53: hostagent.v1.HostAgentService.CreateSnapshot:output_type -> hostagent.v1.CreateSnapshotResponse
|
||||
11, // 54: hostagent.v1.HostAgentService.DeleteSnapshot:output_type -> hostagent.v1.DeleteSnapshotResponse
|
||||
22, // 55: hostagent.v1.HostAgentService.ExecStream:output_type -> hostagent.v1.ExecStreamResponse
|
||||
28, // 56: hostagent.v1.HostAgentService.WriteFileStream:output_type -> hostagent.v1.WriteFileStreamResponse
|
||||
30, // 57: hostagent.v1.HostAgentService.ReadFileStream:output_type -> hostagent.v1.ReadFileStreamResponse
|
||||
39, // 58: hostagent.v1.HostAgentService.PingSandbox:output_type -> hostagent.v1.PingSandboxResponse
|
||||
41, // 59: hostagent.v1.HostAgentService.Terminate:output_type -> hostagent.v1.TerminateResponse
|
||||
44, // 60: hostagent.v1.HostAgentService.GetSandboxMetrics:output_type -> hostagent.v1.GetSandboxMetricsResponse
|
||||
46, // 61: hostagent.v1.HostAgentService.FlushSandboxMetrics:output_type -> hostagent.v1.FlushSandboxMetricsResponse
|
||||
48, // 62: hostagent.v1.HostAgentService.FlattenRootfs:output_type -> hostagent.v1.FlattenRootfsResponse
|
||||
50, // 63: hostagent.v1.HostAgentService.PtyAttach:output_type -> hostagent.v1.PtyAttachResponse
|
||||
55, // 64: hostagent.v1.HostAgentService.PtySendInput:output_type -> hostagent.v1.PtySendInputResponse
|
||||
57, // 65: hostagent.v1.HostAgentService.PtyResize:output_type -> hostagent.v1.PtyResizeResponse
|
||||
59, // 66: hostagent.v1.HostAgentService.PtyKill:output_type -> hostagent.v1.PtyKillResponse
|
||||
42, // [42:67] is the sub-list for method output_type
|
||||
17, // [17:42] is the sub-list for method input_type
|
||||
17, // [17:17] is the sub-list for extension type_name
|
||||
17, // [17:17] is the sub-list for extension extendee
|
||||
0, // [0:17] is the sub-list for field type_name
|
||||
}
|
||||
|
||||
func init() { file_hostagent_proto_init() }
|
||||
@ -3886,7 +3939,7 @@ func file_hostagent_proto_init() {
|
||||
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
|
||||
RawDescriptor: unsafe.Slice(unsafe.StringData(file_hostagent_proto_rawDesc), len(file_hostagent_proto_rawDesc)),
|
||||
NumEnums: 0,
|
||||
NumMessages: 61,
|
||||
NumMessages: 63,
|
||||
NumExtensions: 0,
|
||||
NumServices: 1,
|
||||
},
|
||||
|
||||
@ -119,6 +119,12 @@ message CreateSandboxRequest {
|
||||
|
||||
// Template UUID (hex string). Both zeros + team zeros = "minimal" sentinel.
|
||||
string template_id = 8;
|
||||
|
||||
// Default unix user for the sandbox (set in envd via PostInit).
|
||||
string default_user = 9;
|
||||
|
||||
// Default environment variables (set in envd via PostInit).
|
||||
map<string, string> default_env = 10;
|
||||
}
|
||||
|
||||
message CreateSandboxResponse {
|
||||
@ -145,6 +151,12 @@ message ResumeSandboxRequest {
|
||||
// TTL in seconds restored from the DB so the reaper can auto-pause
|
||||
// the sandbox again after inactivity. 0 means no auto-pause.
|
||||
int32 timeout_sec = 2;
|
||||
|
||||
// Default unix user for the sandbox (set in envd via PostInit on resume).
|
||||
string default_user = 3;
|
||||
|
||||
// Default environment variables (set in envd via PostInit on resume).
|
||||
map<string, string> default_env = 4;
|
||||
}
|
||||
|
||||
message ResumeSandboxResponse {
|
||||
|
||||
Reference in New Issue
Block a user