forked from wrenn/wrenn
Add USER, COPY, ENV persistence to template build system
Implement three new recipe commands for the admin template builder: - USER <name>: creates the user (adduser + passwordless sudo), switches execution context so subsequent RUN/START commands run as that user via su wrapping. Last USER becomes the template's default_user. - COPY <src> <dst>: copies files from an uploaded build archive (tar/tar.gz/zip) into the sandbox. Source paths validated against traversal. Ownership set to the current USER. - ENV persistence: accumulated env vars stored in templates.default_env (JSONB) and injected via PostInit when sandboxes are created from the template, mirroring Docker's image metadata approach. Supporting changes: - Pre-build creates wrenn-user as default (via USER command) - WORKDIR now creates the directory if it doesn't exist (mkdir -p) - Per-step progress updates (ProgressFunc callback) for live UI - Multipart form support on POST /v1/admin/builds for archive upload - Proto: default_user/default_env fields on Create/ResumeSandboxRequest - Host agent: SetDefaults calls PostInitWithDefaults on envd - Control plane: reads template defaults, passes on sandbox create/resume - Frontend: file upload widget, recipe copy button, keyword colors for USER/COPY, fixed Svelte whitespace stripping in step display - Admin panel defaults to /admin/templates instead of /admin/hosts - Migration adds default_user and default_env to templates and template_builds tables
This commit is contained in:
@ -3,8 +3,10 @@ package api
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"connectrpc.com/connect"
|
||||
@ -54,6 +56,8 @@ type buildResponse struct {
|
||||
Error *string `json:"error,omitempty"`
|
||||
SandboxID *string `json:"sandbox_id,omitempty"`
|
||||
HostID *string `json:"host_id,omitempty"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv json.RawMessage `json:"default_env"`
|
||||
CreatedAt string `json:"created_at"`
|
||||
StartedAt *string `json:"started_at,omitempty"`
|
||||
CompletedAt *string `json:"completed_at,omitempty"`
|
||||
@ -71,6 +75,8 @@ func buildToResponse(b db.TemplateBuild) buildResponse {
|
||||
CurrentStep: b.CurrentStep,
|
||||
TotalSteps: b.TotalSteps,
|
||||
Logs: b.Logs,
|
||||
DefaultUser: b.DefaultUser,
|
||||
DefaultEnv: b.DefaultEnv,
|
||||
}
|
||||
if b.Healthcheck != "" {
|
||||
resp.Healthcheck = &b.Healthcheck
|
||||
@ -101,11 +107,54 @@ func buildToResponse(b db.TemplateBuild) buildResponse {
|
||||
}
|
||||
|
||||
// Create handles POST /v1/admin/builds.
|
||||
// Accepts either JSON body or multipart/form-data with a "config" JSON part
|
||||
// and an optional "archive" file part (tar/tar.gz/zip for COPY commands).
|
||||
func (h *buildHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
var req createBuildRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "invalid JSON body")
|
||||
return
|
||||
var archive []byte
|
||||
var archiveName string
|
||||
|
||||
ct := r.Header.Get("Content-Type")
|
||||
if strings.HasPrefix(ct, "multipart/") {
|
||||
// 100 MB max for multipart (archive + JSON config).
|
||||
if err := r.ParseMultipartForm(100 << 20); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "failed to parse multipart form")
|
||||
return
|
||||
}
|
||||
|
||||
// Parse JSON config from "config" field.
|
||||
configStr := r.FormValue("config")
|
||||
if configStr == "" {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "multipart form requires a 'config' JSON field")
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal([]byte(configStr), &req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "invalid config JSON in multipart form")
|
||||
return
|
||||
}
|
||||
|
||||
// Read optional archive file (max 100 MB).
|
||||
file, header, err := r.FormFile("archive")
|
||||
if err == nil {
|
||||
defer file.Close()
|
||||
const maxArchiveSize = 100 << 20 // 100 MB
|
||||
lr := io.LimitReader(file, maxArchiveSize+1)
|
||||
archive, err = io.ReadAll(lr)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "failed to read archive file")
|
||||
return
|
||||
}
|
||||
if int64(len(archive)) > maxArchiveSize {
|
||||
writeError(w, http.StatusRequestEntityTooLarge, "invalid_request", "archive exceeds 100 MB limit")
|
||||
return
|
||||
}
|
||||
archiveName = header.Filename
|
||||
}
|
||||
} else {
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid_request", "invalid JSON body")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if req.Name == "" {
|
||||
@ -129,6 +178,8 @@ func (h *buildHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
VCPUs: req.VCPUs,
|
||||
MemoryMB: req.MemoryMB,
|
||||
SkipPrePost: req.SkipPrePost,
|
||||
Archive: archive,
|
||||
ArchiveName: archiveName,
|
||||
})
|
||||
if err != nil {
|
||||
slog.Error("failed to create build", "error", err)
|
||||
|
||||
@ -210,13 +210,15 @@ func (h *snapshotHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
tmpl, err := h.db.InsertTemplate(snapCtx, db.InsertTemplateParams{
|
||||
ID: newTemplateID,
|
||||
Name: req.Name,
|
||||
Type: "snapshot",
|
||||
Vcpus: sb.Vcpus,
|
||||
MemoryMb: sb.MemoryMb,
|
||||
SizeBytes: resp.Msg.SizeBytes,
|
||||
TeamID: ac.TeamID,
|
||||
ID: newTemplateID,
|
||||
Name: req.Name,
|
||||
Type: "snapshot",
|
||||
Vcpus: sb.Vcpus,
|
||||
MemoryMb: sb.MemoryMb,
|
||||
SizeBytes: resp.Msg.SizeBytes,
|
||||
TeamID: ac.TeamID,
|
||||
DefaultUser: "root",
|
||||
DefaultEnv: []byte("{}"),
|
||||
})
|
||||
if err != nil {
|
||||
slog.Error("failed to insert template record", "name", req.Name, "error", err)
|
||||
|
||||
@ -152,14 +152,16 @@ type TeamApiKey struct {
|
||||
}
|
||||
|
||||
type Template struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Vcpus int32 `json:"vcpus"`
|
||||
MemoryMb int32 `json:"memory_mb"`
|
||||
SizeBytes int64 `json:"size_bytes"`
|
||||
CreatedAt pgtype.Timestamptz `json:"created_at"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
ID pgtype.UUID `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Vcpus int32 `json:"vcpus"`
|
||||
MemoryMb int32 `json:"memory_mb"`
|
||||
SizeBytes int64 `json:"size_bytes"`
|
||||
CreatedAt pgtype.Timestamptz `json:"created_at"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
ID pgtype.UUID `json:"id"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv []byte `json:"default_env"`
|
||||
}
|
||||
|
||||
type TemplateBuild struct {
|
||||
@ -183,6 +185,8 @@ type TemplateBuild struct {
|
||||
TemplateID pgtype.UUID `json:"template_id"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
SkipPrePost bool `json:"skip_pre_post"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv []byte `json:"default_env"`
|
||||
}
|
||||
|
||||
type User struct {
|
||||
|
||||
@ -12,7 +12,7 @@ import (
|
||||
)
|
||||
|
||||
const getTemplateBuild = `-- name: GetTemplateBuild :one
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post FROM template_builds WHERE id = $1
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env FROM template_builds WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetTemplateBuild(ctx context.Context, id pgtype.UUID) (TemplateBuild, error) {
|
||||
@ -39,6 +39,8 @@ func (q *Queries) GetTemplateBuild(ctx context.Context, id pgtype.UUID) (Templat
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
@ -46,7 +48,7 @@ func (q *Queries) GetTemplateBuild(ctx context.Context, id pgtype.UUID) (Templat
|
||||
const insertTemplateBuild = `-- name: InsertTemplateBuild :one
|
||||
INSERT INTO template_builds (id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, total_steps, template_id, team_id, skip_pre_post)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, 'pending', $8, $9, $10, $11)
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env
|
||||
`
|
||||
|
||||
type InsertTemplateBuildParams struct {
|
||||
@ -99,12 +101,14 @@ func (q *Queries) InsertTemplateBuild(ctx context.Context, arg InsertTemplateBui
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const listTemplateBuilds = `-- name: ListTemplateBuilds :many
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post FROM template_builds ORDER BY created_at DESC
|
||||
SELECT id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env FROM template_builds ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) ListTemplateBuilds(ctx context.Context) ([]TemplateBuild, error) {
|
||||
@ -137,6 +141,8 @@ func (q *Queries) ListTemplateBuilds(ctx context.Context) ([]TemplateBuild, erro
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -148,6 +154,23 @@ func (q *Queries) ListTemplateBuilds(ctx context.Context) ([]TemplateBuild, erro
|
||||
return items, nil
|
||||
}
|
||||
|
||||
// updateBuildDefaults persists the USER/ENV defaults accumulated while a
// template build's recipe executes (sqlc-generated query).
const updateBuildDefaults = `-- name: UpdateBuildDefaults :exec
UPDATE template_builds
SET default_user = $2, default_env = $3
WHERE id = $1
`

// UpdateBuildDefaultsParams carries the arguments for UpdateBuildDefaults.
type UpdateBuildDefaultsParams struct {
	ID          pgtype.UUID `json:"id"`          // template_builds row to update
	DefaultUser string      `json:"default_user"` // last USER declared by the recipe
	DefaultEnv  []byte      `json:"default_env"`  // accumulated ENV vars as JSONB
}

// UpdateBuildDefaults stores the default user and env JSON on a build row.
func (q *Queries) UpdateBuildDefaults(ctx context.Context, arg UpdateBuildDefaultsParams) error {
	_, err := q.db.Exec(ctx, updateBuildDefaults, arg.ID, arg.DefaultUser, arg.DefaultEnv)
	return err
}
|
||||
|
||||
const updateBuildError = `-- name: UpdateBuildError :exec
|
||||
UPDATE template_builds
|
||||
SET error = $2, status = 'failed', completed_at = NOW()
|
||||
@ -204,7 +227,7 @@ SET status = $2,
|
||||
started_at = CASE WHEN $2 = 'running' AND started_at IS NULL THEN NOW() ELSE started_at END,
|
||||
completed_at = CASE WHEN $2 IN ('success', 'failed', 'cancelled') THEN NOW() ELSE completed_at END
|
||||
WHERE id = $1
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post
|
||||
RETURNING id, name, base_template, recipe, healthcheck, vcpus, memory_mb, status, current_step, total_steps, logs, error, sandbox_id, host_id, created_at, started_at, completed_at, template_id, team_id, skip_pre_post, default_user, default_env
|
||||
`
|
||||
|
||||
type UpdateBuildStatusParams struct {
|
||||
@ -236,6 +259,8 @@ func (q *Queries) UpdateBuildStatus(ctx context.Context, arg UpdateBuildStatusPa
|
||||
&i.TemplateID,
|
||||
&i.TeamID,
|
||||
&i.SkipPrePost,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
@ -45,7 +45,7 @@ func (q *Queries) DeleteTemplatesByTeam(ctx context.Context, teamID pgtype.UUID)
|
||||
}
|
||||
|
||||
const getPlatformTemplateByName = `-- name: GetPlatformTemplateByName :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE team_id = '00000000-0000-0000-0000-000000000000' AND name = $1
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE team_id = '00000000-0000-0000-0000-000000000000' AND name = $1
|
||||
`
|
||||
|
||||
// Check if a global (platform) template exists with the given name.
|
||||
@ -61,12 +61,14 @@ func (q *Queries) GetPlatformTemplateByName(ctx context.Context, name string) (T
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTemplate = `-- name: GetTemplate :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE id = $1
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetTemplate(ctx context.Context, id pgtype.UUID) (Template, error) {
|
||||
@ -81,12 +83,14 @@ func (q *Queries) GetTemplate(ctx context.Context, id pgtype.UUID) (Template, er
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTemplateByName = `-- name: GetTemplateByName :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE team_id = $1 AND name = $2
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE team_id = $1 AND name = $2
|
||||
`
|
||||
|
||||
type GetTemplateByNameParams struct {
|
||||
@ -107,12 +111,14 @@ func (q *Queries) GetTemplateByName(ctx context.Context, arg GetTemplateByNamePa
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTemplateByTeam = `-- name: GetTemplateByTeam :one
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE name = $1 AND (team_id = $2 OR team_id = '00000000-0000-0000-0000-000000000000')
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE name = $1 AND (team_id = $2 OR team_id = '00000000-0000-0000-0000-000000000000')
|
||||
`
|
||||
|
||||
type GetTemplateByTeamParams struct {
|
||||
@ -133,24 +139,28 @@ func (q *Queries) GetTemplateByTeam(ctx context.Context, arg GetTemplateByTeamPa
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertTemplate = `-- name: InsertTemplate :one
|
||||
INSERT INTO templates (id, name, type, vcpus, memory_mb, size_bytes, team_id)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
RETURNING name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id
|
||||
INSERT INTO templates (id, name, type, vcpus, memory_mb, size_bytes, team_id, default_user, default_env)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||
RETURNING name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env
|
||||
`
|
||||
|
||||
type InsertTemplateParams struct {
|
||||
ID pgtype.UUID `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Vcpus int32 `json:"vcpus"`
|
||||
MemoryMb int32 `json:"memory_mb"`
|
||||
SizeBytes int64 `json:"size_bytes"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
ID pgtype.UUID `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Vcpus int32 `json:"vcpus"`
|
||||
MemoryMb int32 `json:"memory_mb"`
|
||||
SizeBytes int64 `json:"size_bytes"`
|
||||
TeamID pgtype.UUID `json:"team_id"`
|
||||
DefaultUser string `json:"default_user"`
|
||||
DefaultEnv []byte `json:"default_env"`
|
||||
}
|
||||
|
||||
func (q *Queries) InsertTemplate(ctx context.Context, arg InsertTemplateParams) (Template, error) {
|
||||
@ -162,6 +172,8 @@ func (q *Queries) InsertTemplate(ctx context.Context, arg InsertTemplateParams)
|
||||
arg.MemoryMb,
|
||||
arg.SizeBytes,
|
||||
arg.TeamID,
|
||||
arg.DefaultUser,
|
||||
arg.DefaultEnv,
|
||||
)
|
||||
var i Template
|
||||
err := row.Scan(
|
||||
@ -173,12 +185,14 @@ func (q *Queries) InsertTemplate(ctx context.Context, arg InsertTemplateParams)
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const listTemplates = `-- name: ListTemplates :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) ListTemplates(ctx context.Context) ([]Template, error) {
|
||||
@ -199,6 +213,8 @@ func (q *Queries) ListTemplates(ctx context.Context) ([]Template, error) {
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -211,7 +227,7 @@ func (q *Queries) ListTemplates(ctx context.Context) ([]Template, error) {
|
||||
}
|
||||
|
||||
const listTemplatesByTeam = `-- name: ListTemplatesByTeam :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
// Platform templates are visible to all teams.
|
||||
@ -233,6 +249,8 @@ func (q *Queries) ListTemplatesByTeam(ctx context.Context, teamID pgtype.UUID) (
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -245,7 +263,7 @@ func (q *Queries) ListTemplatesByTeam(ctx context.Context, teamID pgtype.UUID) (
|
||||
}
|
||||
|
||||
const listTemplatesByTeamAndType = `-- name: ListTemplatesByTeamAndType :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') AND type = $2 ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE (team_id = $1 OR team_id = '00000000-0000-0000-0000-000000000000') AND type = $2 ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
type ListTemplatesByTeamAndTypeParams struct {
|
||||
@ -272,6 +290,8 @@ func (q *Queries) ListTemplatesByTeamAndType(ctx context.Context, arg ListTempla
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -284,7 +304,7 @@ func (q *Queries) ListTemplatesByTeamAndType(ctx context.Context, arg ListTempla
|
||||
}
|
||||
|
||||
const listTemplatesByTeamOnly = `-- name: ListTemplatesByTeamOnly :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE team_id = $1 ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE team_id = $1 ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
// List templates owned by a specific team (NOT including platform templates).
|
||||
@ -306,6 +326,8 @@ func (q *Queries) ListTemplatesByTeamOnly(ctx context.Context, teamID pgtype.UUI
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -318,7 +340,7 @@ func (q *Queries) ListTemplatesByTeamOnly(ctx context.Context, teamID pgtype.UUI
|
||||
}
|
||||
|
||||
const listTemplatesByType = `-- name: ListTemplatesByType :many
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id FROM templates WHERE type = $1 ORDER BY created_at DESC
|
||||
SELECT name, type, vcpus, memory_mb, size_bytes, created_at, team_id, id, default_user, default_env FROM templates WHERE type = $1 ORDER BY created_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) ListTemplatesByType(ctx context.Context, type_ string) ([]Template, error) {
|
||||
@ -339,6 +361,8 @@ func (q *Queries) ListTemplatesByType(ctx context.Context, type_ string) ([]Temp
|
||||
&i.CreatedAt,
|
||||
&i.TeamID,
|
||||
&i.ID,
|
||||
&i.DefaultUser,
|
||||
&i.DefaultEnv,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@ -3,6 +3,7 @@ package envdclient
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
@ -273,10 +274,36 @@ func (c *Client) ReadFile(ctx context.Context, path string) ([]byte, error) {
|
||||
// env vars and the corresponding files under /run/wrenn/ inside the guest.
|
||||
// Must be called after snapshot restore so envd picks up the new sandbox's metadata.
|
||||
func (c *Client) PostInit(ctx context.Context) error {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.base+"/init", nil)
|
||||
return c.PostInitWithDefaults(ctx, "", nil)
|
||||
}
|
||||
|
||||
// PostInitWithDefaults calls envd's POST /init endpoint with optional default
|
||||
// user and environment variables. These are applied to envd's defaults so all
|
||||
// subsequent process executions use them.
|
||||
func (c *Client) PostInitWithDefaults(ctx context.Context, defaultUser string, envVars map[string]string) error {
|
||||
var body io.Reader
|
||||
if defaultUser != "" || len(envVars) > 0 {
|
||||
payload := make(map[string]any)
|
||||
if defaultUser != "" {
|
||||
payload["defaultUser"] = defaultUser
|
||||
}
|
||||
if len(envVars) > 0 {
|
||||
payload["envVars"] = envVars
|
||||
}
|
||||
data, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshal init body: %w", err)
|
||||
}
|
||||
body = bytes.NewReader(data)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.base+"/init", body)
|
||||
if err != nil {
|
||||
return fmt.Errorf("create request: %w", err)
|
||||
}
|
||||
if body != nil {
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
}
|
||||
|
||||
resp, err := c.httpClient.Do(req)
|
||||
if err != nil {
|
||||
@ -285,8 +312,8 @@ func (c *Client) PostInit(ctx context.Context) error {
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusNoContent {
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("post init: status %d: %s", resp.StatusCode, string(body))
|
||||
respBody, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("post init: status %d: %s", resp.StatusCode, string(respBody))
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
@ -69,6 +69,13 @@ func (s *Server) CreateSandbox(
|
||||
return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("create sandbox: %w", err))
|
||||
}
|
||||
|
||||
// Apply template defaults (user, env vars) if provided.
|
||||
if msg.DefaultUser != "" || len(msg.DefaultEnv) > 0 {
|
||||
if err := s.mgr.SetDefaults(ctx, sb.ID, msg.DefaultUser, msg.DefaultEnv); err != nil {
|
||||
slog.Warn("failed to set sandbox defaults", "sandbox", sb.ID, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
return connect.NewResponse(&pb.CreateSandboxResponse{
|
||||
SandboxId: sb.ID,
|
||||
Status: string(sb.Status),
|
||||
@ -100,10 +107,19 @@ func (s *Server) ResumeSandbox(
|
||||
ctx context.Context,
|
||||
req *connect.Request[pb.ResumeSandboxRequest],
|
||||
) (*connect.Response[pb.ResumeSandboxResponse], error) {
|
||||
sb, err := s.mgr.Resume(ctx, req.Msg.SandboxId, int(req.Msg.TimeoutSec))
|
||||
msg := req.Msg
|
||||
sb, err := s.mgr.Resume(ctx, msg.SandboxId, int(msg.TimeoutSec))
|
||||
if err != nil {
|
||||
return nil, connect.NewError(connect.CodeInternal, err)
|
||||
}
|
||||
|
||||
// Apply template defaults (user, env vars) if provided.
|
||||
if msg.DefaultUser != "" || len(msg.DefaultEnv) > 0 {
|
||||
if err := s.mgr.SetDefaults(ctx, sb.ID, msg.DefaultUser, msg.DefaultEnv); err != nil {
|
||||
slog.Warn("failed to set sandbox defaults on resume", "sandbox", sb.ID, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
return connect.NewResponse(&pb.ResumeSandboxResponse{
|
||||
SandboxId: sb.ID,
|
||||
Status: string(sb.Status),
|
||||
|
||||
@ -7,10 +7,11 @@ import (
|
||||
)
|
||||
|
||||
// ExecContext holds mutable state that persists across recipe steps.
// It is initialized empty and updated by ENV, WORKDIR, and USER steps.
type ExecContext struct {
	WorkDir string            // Working directory; prepended as a `cd` preamble when set.
	EnvVars map[string]string // Accumulated KEY=val pairs injected before each command.
	User    string            // Current unix user for command execution.
}
|
||||
|
||||
// This regex matches:
|
||||
@ -25,7 +26,20 @@ var envRegex = regexp.MustCompile(`\$\$|\$\{([a-zA-Z0-9_]*)\}|\$([a-zA-Z0-9_]+)`
|
||||
// If WORKDIR and/or ENV are set, they are prepended as a shell preamble:
|
||||
//
|
||||
// cd '/the/dir' && KEY='val' /bin/sh -c 'original command'
|
||||
//
|
||||
// If USER is set to a non-root user, the entire command is wrapped with su:
|
||||
//
|
||||
// su <user> -s /bin/sh -c '<preamble + command>'
|
||||
func (c *ExecContext) WrappedCommand(cmd string) string {
|
||||
inner := c.innerCommand(cmd)
|
||||
if c.User != "" && c.User != "root" {
|
||||
return "su " + shellescape(c.User) + " -s /bin/sh -c " + shellescape(inner)
|
||||
}
|
||||
return inner
|
||||
}
|
||||
|
||||
// innerCommand builds the command with workdir/env preamble but without user wrapping.
|
||||
func (c *ExecContext) innerCommand(cmd string) string {
|
||||
prefix := c.shellPrefix()
|
||||
if prefix == "" {
|
||||
return cmd
|
||||
@ -42,7 +56,11 @@ func (c *ExecContext) WrappedCommand(cmd string) string {
|
||||
// simultaneously before a healthcheck is evaluated.
|
||||
func (c *ExecContext) StartCommand(cmd string) string {
|
||||
prefix := c.shellPrefix()
|
||||
return prefix + "nohup /bin/sh -c " + shellescape(cmd) + " >/dev/null 2>&1 &"
|
||||
inner := prefix + "nohup /bin/sh -c " + shellescape(cmd) + " >/dev/null 2>&1 &"
|
||||
if c.User != "" && c.User != "root" {
|
||||
return "su " + shellescape(c.User) + " -s /bin/sh -c " + shellescape(inner)
|
||||
}
|
||||
return inner
|
||||
}
|
||||
|
||||
// shellPrefix builds the "cd ... && KEY=val " preamble for a shell command.
|
||||
|
||||
@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@ -16,6 +17,10 @@ import (
|
||||
// explicit --timeout flag.
|
||||
const DefaultStepTimeout = 30 * time.Second
|
||||
|
||||
// BuildFilesDir is the directory inside the sandbox where uploaded build
|
||||
// archives are extracted. COPY instructions reference paths relative to this.
|
||||
const BuildFilesDir = "/tmp/build-files"
|
||||
|
||||
// BuildLogEntry is the per-step record stored in template_builds.logs (JSONB).
|
||||
type BuildLogEntry struct {
|
||||
Step int `json:"step"`
|
||||
@ -32,13 +37,18 @@ type BuildLogEntry struct {
|
||||
// the method on the hostagent Connect RPC client.
|
||||
type ExecFunc func(ctx context.Context, req *connect.Request[pb.ExecRequest]) (*connect.Response[pb.ExecResponse], error)
|
||||
|
||||
// ProgressFunc is called after each step with the current step counter and
|
||||
// accumulated log entries. Used for per-step DB progress updates.
|
||||
type ProgressFunc func(step int, entries []BuildLogEntry)
|
||||
|
||||
// Execute runs steps sequentially against sandboxID using execFn.
|
||||
//
|
||||
// - phase labels the log entries (e.g., "pre-build", "recipe", "post-build").
|
||||
// - startStep is the 1-based offset so entries are globally numbered across phases.
|
||||
// - defaultTimeout applies to RUN steps with no per-step --timeout; 0 → 10 minutes.
|
||||
// - bctx is mutated in place as ENV/WORKDIR steps execute, and carries forward
|
||||
// - bctx is mutated in place as ENV/WORKDIR/USER steps execute, and carries forward
|
||||
// into subsequent phases when the caller passes the same pointer.
|
||||
// - onProgress is called after each step for live progress updates (may be nil).
|
||||
//
|
||||
// Returns all log entries appended during this call, the next step counter
|
||||
// value, and whether all steps succeeded. On false the last entry contains
|
||||
@ -53,6 +63,7 @@ func Execute(
|
||||
defaultTimeout time.Duration,
|
||||
bctx *ExecContext,
|
||||
execFn ExecFunc,
|
||||
onProgress ProgressFunc,
|
||||
) (entries []BuildLogEntry, nextStep int, ok bool) {
|
||||
if defaultTimeout <= 0 {
|
||||
defaultTimeout = 10 * time.Minute
|
||||
@ -72,19 +83,30 @@ func Execute(
|
||||
entries = append(entries, BuildLogEntry{Step: step, Phase: phase, Cmd: st.Raw, Ok: true})
|
||||
|
||||
case KindWORKDIR:
|
||||
// Create the directory if it doesn't exist.
|
||||
mkdirEntry := execRawShell(ctx, st.Raw, sandboxID, phase, step, 10*time.Second, execFn,
|
||||
"mkdir -p "+shellescape(st.Path))
|
||||
if !mkdirEntry.Ok {
|
||||
entries = append(entries, mkdirEntry)
|
||||
return entries, step, false
|
||||
}
|
||||
bctx.WorkDir = st.Path
|
||||
entries = append(entries, BuildLogEntry{Step: step, Phase: phase, Cmd: st.Raw, Ok: true})
|
||||
mkdirEntry.Ok = true
|
||||
entries = append(entries, mkdirEntry)
|
||||
|
||||
case KindUSER, KindCOPY:
|
||||
verb := strings.ToUpper(strings.Fields(st.Raw)[0])
|
||||
entries = append(entries, BuildLogEntry{
|
||||
Step: step,
|
||||
Phase: phase,
|
||||
Cmd: st.Raw,
|
||||
Stderr: verb + " is not yet supported",
|
||||
Ok: false,
|
||||
})
|
||||
return entries, step, false
|
||||
case KindUSER:
|
||||
entry, succeeded := execUser(ctx, st, sandboxID, phase, step, bctx, execFn)
|
||||
entries = append(entries, entry)
|
||||
if !succeeded {
|
||||
return entries, step, false
|
||||
}
|
||||
|
||||
case KindCOPY:
|
||||
entry, succeeded := execCopy(ctx, st, sandboxID, phase, step, bctx, execFn)
|
||||
entries = append(entries, entry)
|
||||
if !succeeded {
|
||||
return entries, step, false
|
||||
}
|
||||
|
||||
case KindSTART:
|
||||
entry, succeeded := execStart(ctx, st, sandboxID, phase, step, bctx, execFn)
|
||||
@ -104,6 +126,10 @@ func Execute(
|
||||
return entries, step, false
|
||||
}
|
||||
}
|
||||
|
||||
if onProgress != nil {
|
||||
onProgress(step, entries)
|
||||
}
|
||||
}
|
||||
return entries, step, true
|
||||
}
|
||||
@ -145,6 +171,106 @@ func execRun(
|
||||
return entry, entry.Ok
|
||||
}
|
||||
|
||||
// execUser creates a unix user (if not exists), grants passwordless sudo,
|
||||
// and updates bctx.User for subsequent steps.
|
||||
func execUser(
|
||||
ctx context.Context,
|
||||
st Step,
|
||||
sandboxID, phase string,
|
||||
step int,
|
||||
bctx *ExecContext,
|
||||
execFn ExecFunc,
|
||||
) (BuildLogEntry, bool) {
|
||||
username := st.Key
|
||||
// Create user if not exists, with home directory and bash shell.
|
||||
// Grant passwordless sudo access (E2B convention).
|
||||
// Uses printf %s to avoid shell injection in the sudoers line.
|
||||
script := fmt.Sprintf(
|
||||
"id %s >/dev/null 2>&1 || (adduser --disabled-password --gecos '' --shell /bin/bash %s && printf '%%s ALL=(ALL) NOPASSWD:ALL\\n' %s >> /etc/sudoers)",
|
||||
shellescape(username), shellescape(username), shellescape(username),
|
||||
)
|
||||
|
||||
entry := execRawShell(ctx, st.Raw, sandboxID, phase, step, 30*time.Second, execFn, script)
|
||||
if entry.Ok {
|
||||
bctx.User = username
|
||||
}
|
||||
return entry, entry.Ok
|
||||
}
|
||||
|
||||
// execCopy copies a file or directory from the build archive (extracted at
|
||||
// BuildFilesDir) to the destination path inside the sandbox. Ownership is
|
||||
// set to the current user from bctx.
|
||||
func execCopy(
|
||||
ctx context.Context,
|
||||
st Step,
|
||||
sandboxID, phase string,
|
||||
step int,
|
||||
bctx *ExecContext,
|
||||
execFn ExecFunc,
|
||||
) (BuildLogEntry, bool) {
|
||||
// Validate source path: must be relative and not escape the archive directory.
|
||||
cleaned := path.Clean(st.Src)
|
||||
if strings.HasPrefix(cleaned, "..") || strings.HasPrefix(cleaned, "/") {
|
||||
return BuildLogEntry{
|
||||
Step: step,
|
||||
Phase: phase,
|
||||
Cmd: st.Raw,
|
||||
Stderr: "COPY source must be a relative path within the archive",
|
||||
}, false
|
||||
}
|
||||
src := BuildFilesDir + "/" + cleaned
|
||||
dst := st.Dst
|
||||
owner := "root"
|
||||
if bctx.User != "" {
|
||||
owner = bctx.User
|
||||
}
|
||||
script := fmt.Sprintf(
|
||||
"cp -r %s %s && chown -R %s:%s %s",
|
||||
shellescape(src), shellescape(dst), shellescape(owner), shellescape(owner), shellescape(dst),
|
||||
)
|
||||
|
||||
entry := execRawShell(ctx, st.Raw, sandboxID, phase, step, 60*time.Second, execFn, script)
|
||||
return entry, entry.Ok
|
||||
}
|
||||
|
||||
// execRawShell runs a shell command directly (as root) without ExecContext
|
||||
// wrapping. Used for internal operations like user creation and file copy.
|
||||
func execRawShell(
|
||||
ctx context.Context,
|
||||
raw, sandboxID, phase string,
|
||||
step int,
|
||||
timeout time.Duration,
|
||||
execFn ExecFunc,
|
||||
shellCmd string,
|
||||
) BuildLogEntry {
|
||||
execCtx, cancel := context.WithTimeout(ctx, timeout)
|
||||
defer cancel()
|
||||
|
||||
start := time.Now()
|
||||
resp, err := execFn(execCtx, connect.NewRequest(&pb.ExecRequest{
|
||||
SandboxId: sandboxID,
|
||||
Cmd: "/bin/sh",
|
||||
Args: []string{"-c", shellCmd},
|
||||
TimeoutSec: int32(timeout.Seconds()),
|
||||
}))
|
||||
|
||||
entry := BuildLogEntry{
|
||||
Step: step,
|
||||
Phase: phase,
|
||||
Cmd: raw,
|
||||
Elapsed: time.Since(start).Milliseconds(),
|
||||
}
|
||||
if err != nil {
|
||||
entry.Stderr = fmt.Sprintf("exec error: %v", err)
|
||||
return entry
|
||||
}
|
||||
entry.Stdout = string(resp.Msg.Stdout)
|
||||
entry.Stderr = string(resp.Msg.Stderr)
|
||||
entry.Exit = resp.Msg.ExitCode
|
||||
entry.Ok = resp.Msg.ExitCode == 0
|
||||
return entry
|
||||
}
|
||||
|
||||
func execStart(
|
||||
ctx context.Context,
|
||||
st Step,
|
||||
|
||||
@ -24,9 +24,11 @@ type Step struct {
|
||||
Raw string // original string, preserved for logging
|
||||
Shell string // KindRUN, KindSTART: the shell command text
|
||||
Timeout time.Duration // KindRUN: 0 means use caller's default
|
||||
Key string // KindENV: variable name
|
||||
Key string // KindENV: variable name; KindUSER: username
|
||||
Value string // KindENV: variable value
|
||||
Path string // KindWORKDIR: directory path
|
||||
Src string // KindCOPY: source path (relative to build archive)
|
||||
Dst string // KindCOPY: destination path inside sandbox
|
||||
}
|
||||
|
||||
// ParseStep parses a single recipe instruction string into a Step.
|
||||
@ -61,9 +63,9 @@ func ParseStep(s string) (Step, error) {
|
||||
case "WORKDIR":
|
||||
return parseWORKDIR(s, rest)
|
||||
case "USER":
|
||||
return Step{Kind: KindUSER, Raw: s}, nil
|
||||
return parseUSER(s, rest)
|
||||
case "COPY":
|
||||
return Step{Kind: KindCOPY, Raw: s}, nil
|
||||
return parseCOPY(s, rest)
|
||||
default:
|
||||
return Step{}, fmt.Errorf("unknown instruction %q (expected RUN, START, ENV, WORKDIR, USER, or COPY)", keyword)
|
||||
}
|
||||
@ -127,3 +129,31 @@ func parseWORKDIR(raw, path string) (Step, error) {
|
||||
}
|
||||
return Step{Kind: KindWORKDIR, Raw: raw, Path: path}, nil
|
||||
}
|
||||
|
||||
func parseUSER(raw, username string) (Step, error) {
|
||||
if username == "" {
|
||||
return Step{}, fmt.Errorf("USER requires a username: %q", raw)
|
||||
}
|
||||
// Validate: alphanumeric, hyphens, underscores only; must start with a letter or underscore.
|
||||
for i, c := range username {
|
||||
if i == 0 && !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_') {
|
||||
return Step{}, fmt.Errorf("USER username must start with a letter or underscore: %q", raw)
|
||||
}
|
||||
if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '-') {
|
||||
return Step{}, fmt.Errorf("USER username contains invalid character %q: %q", string(c), raw)
|
||||
}
|
||||
}
|
||||
return Step{Kind: KindUSER, Raw: raw, Key: username}, nil
|
||||
}
|
||||
|
||||
func parseCOPY(raw, rest string) (Step, error) {
|
||||
if rest == "" {
|
||||
return Step{}, fmt.Errorf("COPY requires <src> <dst>: %q", raw)
|
||||
}
|
||||
src, dst, found := strings.Cut(rest, " ")
|
||||
dst = strings.TrimSpace(dst)
|
||||
if !found || dst == "" {
|
||||
return Step{}, fmt.Errorf("COPY requires <src> <dst>: %q", raw)
|
||||
}
|
||||
return Step{Kind: KindCOPY, Raw: raw, Src: src, Dst: dst}, nil
|
||||
}
|
||||
|
||||
@ -111,16 +111,37 @@ func TestParseStep(t *testing.T) {
|
||||
input: "WORKDIR",
|
||||
wantErr: true,
|
||||
},
|
||||
// USER and COPY stubs
|
||||
// USER
|
||||
{
|
||||
name: "USER stub",
|
||||
name: "USER basic",
|
||||
input: "USER www-data",
|
||||
want: Step{Kind: KindUSER, Raw: "USER www-data"},
|
||||
want: Step{Kind: KindUSER, Raw: "USER www-data", Key: "www-data"},
|
||||
},
|
||||
{
|
||||
name: "COPY stub",
|
||||
name: "USER empty",
|
||||
input: "USER",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "USER invalid chars",
|
||||
input: "USER bad user",
|
||||
wantErr: true,
|
||||
},
|
||||
// COPY
|
||||
{
|
||||
name: "COPY basic",
|
||||
input: "COPY config.yaml /etc/app/config.yaml",
|
||||
want: Step{Kind: KindCOPY, Raw: "COPY config.yaml /etc/app/config.yaml"},
|
||||
want: Step{Kind: KindCOPY, Raw: "COPY config.yaml /etc/app/config.yaml", Src: "config.yaml", Dst: "/etc/app/config.yaml"},
|
||||
},
|
||||
{
|
||||
name: "COPY missing dst",
|
||||
input: "COPY config.yaml",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "COPY empty",
|
||||
input: "COPY",
|
||||
wantErr: true,
|
||||
},
|
||||
// Unknown keyword
|
||||
{
|
||||
|
||||
@ -1223,6 +1223,23 @@ func (m *Manager) GetClient(sandboxID string) (*envdclient.Client, error) {
|
||||
return sb.client, nil
|
||||
}
|
||||
|
||||
// SetDefaults calls envd's PostInit to configure the default user and
|
||||
// environment variables for a running sandbox. This is called by the host
|
||||
// agent after sandbox creation or resume when the template specifies defaults.
|
||||
func (m *Manager) SetDefaults(ctx context.Context, sandboxID, defaultUser string, defaultEnv map[string]string) error {
|
||||
if defaultUser == "" && len(defaultEnv) == 0 {
|
||||
return nil
|
||||
}
|
||||
sb, err := m.get(sandboxID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if sb.Status != models.StatusRunning {
|
||||
return fmt.Errorf("sandbox %s is not running (status: %s)", sandboxID, sb.Status)
|
||||
}
|
||||
return sb.client.PostInitWithDefaults(ctx, defaultUser, defaultEnv)
|
||||
}
|
||||
|
||||
// PtyAttach starts a new PTY process or reconnects to an existing one.
|
||||
// If cmd is non-empty, starts a new process. If empty, reconnects using tag.
|
||||
func (m *Manager) PtyAttach(ctx context.Context, sandboxID, tag, cmd string, args []string, cols, rows uint32, envs map[string]string, cwd string) (<-chan envdclient.PtyEvent, error) {
|
||||
|
||||
@ -27,8 +27,10 @@ const (
|
||||
)
|
||||
|
||||
// preBuildCmds run before the user recipe to prepare the build environment.
|
||||
// apt update runs as root first, then USER switches to wrenn-user for the recipe.
|
||||
var preBuildCmds = []string{
|
||||
"RUN apt update",
|
||||
"USER wrenn-user",
|
||||
}
|
||||
|
||||
// postBuildCmds run after the user recipe to clean up caches and reduce image size.
|
||||
@ -36,6 +38,7 @@ var postBuildCmds = []string{
|
||||
"RUN apt clean",
|
||||
"RUN apt autoremove -y",
|
||||
"RUN rm -rf /var/lib/apt/lists/*",
|
||||
"RUN rm -rf /tmp/build-files /tmp/build-files.*",
|
||||
}
|
||||
|
||||
// buildAgentClient is the subset of the host agent client used by the build worker.
|
||||
@ -43,6 +46,7 @@ type buildAgentClient interface {
|
||||
CreateSandbox(ctx context.Context, req *connect.Request[pb.CreateSandboxRequest]) (*connect.Response[pb.CreateSandboxResponse], error)
|
||||
DestroySandbox(ctx context.Context, req *connect.Request[pb.DestroySandboxRequest]) (*connect.Response[pb.DestroySandboxResponse], error)
|
||||
Exec(ctx context.Context, req *connect.Request[pb.ExecRequest]) (*connect.Response[pb.ExecResponse], error)
|
||||
WriteFile(ctx context.Context, req *connect.Request[pb.WriteFileRequest]) (*connect.Response[pb.WriteFileResponse], error)
|
||||
CreateSnapshot(ctx context.Context, req *connect.Request[pb.CreateSnapshotRequest]) (*connect.Response[pb.CreateSnapshotResponse], error)
|
||||
FlattenRootfs(ctx context.Context, req *connect.Request[pb.FlattenRootfsRequest]) (*connect.Response[pb.FlattenRootfsResponse], error)
|
||||
}
|
||||
@ -56,6 +60,7 @@ type BuildService struct {
|
||||
|
||||
mu sync.Mutex
|
||||
cancelMap map[string]context.CancelFunc // buildID → per-build cancel func
|
||||
filesMap map[string][]byte // buildID → uploaded archive bytes
|
||||
}
|
||||
|
||||
// BuildCreateParams holds the parameters for creating a template build.
|
||||
@ -67,6 +72,27 @@ type BuildCreateParams struct {
|
||||
VCPUs int32
|
||||
MemoryMB int32
|
||||
SkipPrePost bool
|
||||
Archive []byte // Optional tar/tar.gz/zip archive for COPY commands.
|
||||
ArchiveName string // Original filename (used to detect format).
|
||||
}
|
||||
|
||||
// storeArchive stores uploaded archive bytes keyed by build ID for the worker.
|
||||
func (s *BuildService) storeArchive(buildID string, data []byte) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.filesMap == nil {
|
||||
s.filesMap = make(map[string][]byte)
|
||||
}
|
||||
s.filesMap[buildID] = data
|
||||
}
|
||||
|
||||
// takeArchive retrieves and removes stored archive bytes for a build.
|
||||
func (s *BuildService) takeArchive(buildID string) []byte {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
data := s.filesMap[buildID]
|
||||
delete(s.filesMap, buildID)
|
||||
return data
|
||||
}
|
||||
|
||||
// Create inserts a new build record and enqueues it to Redis.
|
||||
@ -117,6 +143,11 @@ func (s *BuildService) Create(ctx context.Context, p BuildCreateParams) (db.Temp
|
||||
return db.TemplateBuild{}, fmt.Errorf("enqueue build: %w", err)
|
||||
}
|
||||
|
||||
// Store archive for the worker if provided.
|
||||
if len(p.Archive) > 0 {
|
||||
s.storeArchive(buildIDStr, p.Archive)
|
||||
}
|
||||
|
||||
return build, nil
|
||||
}
|
||||
|
||||
@ -303,6 +334,16 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
HostID: host.ID,
|
||||
})
|
||||
|
||||
// Upload and extract build archive if provided.
|
||||
archive := s.takeArchive(buildIDStr)
|
||||
if len(archive) > 0 {
|
||||
if err := s.uploadAndExtractArchive(buildCtx, agent, sandboxIDStr, archive, buildIDStr); err != nil {
|
||||
s.destroySandbox(buildCtx, agent, sandboxIDStr)
|
||||
s.failBuild(buildCtx, buildID, fmt.Sprintf("archive upload failed: %v", err))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Parse recipe steps. preBuildCmds and postBuildCmds are hardcoded and always
|
||||
// valid; panic on error is appropriate here since it would be a programmer mistake.
|
||||
preBuildSteps, err := recipe.ParseRecipe(preBuildCmds)
|
||||
@ -331,10 +372,18 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
"HOME": "/root",
|
||||
}
|
||||
}
|
||||
bctx := &recipe.ExecContext{EnvVars: envVars}
|
||||
bctx := &recipe.ExecContext{EnvVars: envVars, User: "root"}
|
||||
|
||||
// Per-step progress callback for live UI updates.
|
||||
progressFn := func(currentStep int, allEntries []recipe.BuildLogEntry) {
|
||||
s.updateLogs(buildCtx, buildID, currentStep, allEntries)
|
||||
}
|
||||
|
||||
runPhase := func(phase string, steps []recipe.Step, defaultTimeout time.Duration) bool {
|
||||
newEntries, nextStep, ok := recipe.Execute(buildCtx, phase, steps, sandboxIDStr, step, defaultTimeout, bctx, agent.Exec)
|
||||
newEntries, nextStep, ok := recipe.Execute(buildCtx, phase, steps, sandboxIDStr, step, defaultTimeout, bctx, agent.Exec, func(currentStep int, phaseEntries []recipe.BuildLogEntry) {
|
||||
// Progress callback: combine prior logs with current phase entries.
|
||||
progressFn(currentStep, append(logs, phaseEntries...))
|
||||
})
|
||||
logs = append(logs, newEntries...)
|
||||
step = nextStep
|
||||
s.updateLogs(buildCtx, buildID, step, logs)
|
||||
@ -344,24 +393,38 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
if buildCtx.Err() != nil {
|
||||
return false
|
||||
}
|
||||
last := newEntries[len(newEntries)-1]
|
||||
reason := last.Stderr
|
||||
if reason == "" {
|
||||
reason = fmt.Sprintf("exit code %d", last.Exit)
|
||||
reason := "unknown error"
|
||||
if len(newEntries) > 0 {
|
||||
last := newEntries[len(newEntries)-1]
|
||||
reason = last.Stderr
|
||||
if reason == "" {
|
||||
reason = fmt.Sprintf("exit code %d", last.Exit)
|
||||
}
|
||||
}
|
||||
s.failBuild(buildCtx, buildID, fmt.Sprintf("%s step %d failed: %s", phase, step, reason))
|
||||
}
|
||||
return ok
|
||||
}
|
||||
|
||||
// Phase 1: Pre-build (as root) — creates wrenn-user, updates apt.
|
||||
if !build.SkipPrePost {
|
||||
if !runPhase("pre-build", preBuildSteps, 0) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Phase 2: User recipe — starts as wrenn-user (set by USER in pre-build)
|
||||
// or root if skip_pre_post.
|
||||
if !runPhase("recipe", userRecipeSteps, buildCommandTimeout) {
|
||||
return
|
||||
}
|
||||
|
||||
// Capture the final user and env vars as template defaults.
|
||||
templateDefaultUser := bctx.User
|
||||
templateDefaultEnv := bctx.EnvVars
|
||||
|
||||
// Phase 3: Post-build (as root) — cleanup.
|
||||
bctx.User = "root"
|
||||
if !build.SkipPrePost {
|
||||
if !runPhase("post-build", postBuildSteps, 0) {
|
||||
return
|
||||
@ -430,19 +493,34 @@ func (s *BuildService) executeBuild(ctx context.Context, buildIDStr string) {
|
||||
templateType = "snapshot"
|
||||
}
|
||||
|
||||
// Serialize env vars for DB storage.
|
||||
defaultEnvJSON, err := json.Marshal(templateDefaultEnv)
|
||||
if err != nil {
|
||||
defaultEnvJSON = []byte("{}")
|
||||
}
|
||||
|
||||
if _, err := s.DB.InsertTemplate(buildCtx, db.InsertTemplateParams{
|
||||
ID: build.TemplateID,
|
||||
Name: build.Name,
|
||||
Type: templateType,
|
||||
Vcpus: build.Vcpus,
|
||||
MemoryMb: build.MemoryMb,
|
||||
SizeBytes: sizeBytes,
|
||||
TeamID: id.PlatformTeamID,
|
||||
ID: build.TemplateID,
|
||||
Name: build.Name,
|
||||
Type: templateType,
|
||||
Vcpus: build.Vcpus,
|
||||
MemoryMb: build.MemoryMb,
|
||||
SizeBytes: sizeBytes,
|
||||
TeamID: id.PlatformTeamID,
|
||||
DefaultUser: templateDefaultUser,
|
||||
DefaultEnv: defaultEnvJSON,
|
||||
}); err != nil {
|
||||
log.Error("failed to insert template record", "error", err)
|
||||
// Build succeeded on disk, just DB record failed — don't mark as failed.
|
||||
}
|
||||
|
||||
// Record defaults on the build record for inspection.
|
||||
_ = s.DB.UpdateBuildDefaults(buildCtx, db.UpdateBuildDefaultsParams{
|
||||
ID: buildID,
|
||||
DefaultUser: templateDefaultUser,
|
||||
DefaultEnv: defaultEnvJSON,
|
||||
})
|
||||
|
||||
// For CreateSnapshot, the sandbox is already destroyed by the snapshot process.
|
||||
// For FlattenRootfs, the sandbox is already destroyed by the flatten process.
|
||||
// No additional destroy needed.
|
||||
@ -603,3 +681,61 @@ func parseSandboxEnv(raw string) map[string]string {
|
||||
|
||||
return envVars
|
||||
}
|
||||
|
||||
// uploadAndExtractArchive writes the archive to the sandbox and extracts it
|
||||
// to /tmp/build-files/. Detects format from content (tar.gz, tar, zip).
|
||||
func (s *BuildService) uploadAndExtractArchive(
|
||||
ctx context.Context,
|
||||
agent buildAgentClient,
|
||||
sandboxID string,
|
||||
archive []byte,
|
||||
buildID string,
|
||||
) error {
|
||||
// Detect archive type from magic bytes.
|
||||
var archivePath, extractCmd string
|
||||
switch {
|
||||
case len(archive) >= 2 && archive[0] == 0x1f && archive[1] == 0x8b:
|
||||
// gzip (tar.gz)
|
||||
archivePath = "/tmp/build-files.tar.gz"
|
||||
extractCmd = "mkdir -p /tmp/build-files && tar xzf /tmp/build-files.tar.gz -C /tmp/build-files"
|
||||
case len(archive) >= 4 && string(archive[:4]) == "PK\x03\x04":
|
||||
// zip
|
||||
archivePath = "/tmp/build-files.zip"
|
||||
extractCmd = "mkdir -p /tmp/build-files && unzip -o /tmp/build-files.zip -d /tmp/build-files"
|
||||
case len(archive) >= 262 && string(archive[257:262]) == "ustar":
|
||||
// tar (ustar magic at offset 257)
|
||||
archivePath = "/tmp/build-files.tar"
|
||||
extractCmd = "mkdir -p /tmp/build-files && tar xf /tmp/build-files.tar -C /tmp/build-files"
|
||||
default:
|
||||
// Fallback: try tar.gz
|
||||
archivePath = "/tmp/build-files.tar.gz"
|
||||
extractCmd = "mkdir -p /tmp/build-files && tar xzf /tmp/build-files.tar.gz -C /tmp/build-files"
|
||||
}
|
||||
|
||||
slog.Info("uploading build archive", "build_id", buildID, "path", archivePath, "size", len(archive))
|
||||
|
||||
// Write archive to VM.
|
||||
if _, err := agent.WriteFile(ctx, connect.NewRequest(&pb.WriteFileRequest{
|
||||
SandboxId: sandboxID,
|
||||
Path: archivePath,
|
||||
Content: archive,
|
||||
})); err != nil {
|
||||
return fmt.Errorf("write archive: %w", err)
|
||||
}
|
||||
|
||||
// Extract.
|
||||
resp, err := agent.Exec(ctx, connect.NewRequest(&pb.ExecRequest{
|
||||
SandboxId: sandboxID,
|
||||
Cmd: "/bin/sh",
|
||||
Args: []string{"-c", extractCmd},
|
||||
TimeoutSec: 120,
|
||||
}))
|
||||
if err != nil {
|
||||
return fmt.Errorf("extract archive: %w", err)
|
||||
}
|
||||
if resp.Msg.ExitCode != 0 {
|
||||
return fmt.Errorf("extract archive: exit code %d: %s", resp.Msg.ExitCode, string(resp.Msg.Stderr))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"time"
|
||||
@ -85,6 +86,8 @@ func (s *SandboxService) Create(ctx context.Context, p SandboxCreateParams) (db.
|
||||
// Resolve template name → (teamID, templateID).
|
||||
templateTeamID := id.PlatformTeamID
|
||||
templateID := id.MinimalTemplateID
|
||||
var templateDefaultUser string
|
||||
var templateDefaultEnv map[string]string
|
||||
if p.Template != "minimal" {
|
||||
tmpl, err := s.DB.GetTemplateByTeam(ctx, db.GetTemplateByTeamParams{Name: p.Template, TeamID: p.TeamID})
|
||||
if err != nil {
|
||||
@ -92,6 +95,11 @@ func (s *SandboxService) Create(ctx context.Context, p SandboxCreateParams) (db.
|
||||
}
|
||||
templateTeamID = tmpl.TeamID
|
||||
templateID = tmpl.ID
|
||||
templateDefaultUser = tmpl.DefaultUser
|
||||
// Parse default_env JSONB into a map.
|
||||
if len(tmpl.DefaultEnv) > 0 {
|
||||
_ = json.Unmarshal(tmpl.DefaultEnv, &templateDefaultEnv)
|
||||
}
|
||||
// If the template is a snapshot, use its baked-in vcpus/memory.
|
||||
if tmpl.Type == "snapshot" {
|
||||
p.VCPUs = tmpl.Vcpus
|
||||
@ -140,14 +148,16 @@ func (s *SandboxService) Create(ctx context.Context, p SandboxCreateParams) (db.
|
||||
}
|
||||
|
||||
resp, err := agent.CreateSandbox(ctx, connect.NewRequest(&pb.CreateSandboxRequest{
|
||||
SandboxId: sandboxIDStr,
|
||||
Template: p.Template,
|
||||
TeamId: id.UUIDString(templateTeamID),
|
||||
TemplateId: id.UUIDString(templateID),
|
||||
Vcpus: p.VCPUs,
|
||||
MemoryMb: p.MemoryMB,
|
||||
TimeoutSec: p.TimeoutSec,
|
||||
DiskSizeMb: p.DiskSizeMB,
|
||||
SandboxId: sandboxIDStr,
|
||||
Template: p.Template,
|
||||
TeamId: id.UUIDString(templateTeamID),
|
||||
TemplateId: id.UUIDString(templateID),
|
||||
Vcpus: p.VCPUs,
|
||||
MemoryMb: p.MemoryMB,
|
||||
TimeoutSec: p.TimeoutSec,
|
||||
DiskSizeMb: p.DiskSizeMB,
|
||||
DefaultUser: templateDefaultUser,
|
||||
DefaultEnv: templateDefaultEnv,
|
||||
}))
|
||||
if err != nil {
|
||||
if _, dbErr := s.DB.UpdateSandboxStatus(ctx, db.UpdateSandboxStatusParams{
|
||||
@ -249,9 +259,24 @@ func (s *SandboxService) Resume(ctx context.Context, sandboxID, teamID pgtype.UU
|
||||
|
||||
sandboxIDStr := id.FormatSandboxID(sandboxID)
|
||||
|
||||
// Look up template defaults for resume.
|
||||
var resumeDefaultUser string
|
||||
var resumeDefaultEnv map[string]string
|
||||
if sb.TemplateID.Valid {
|
||||
tmpl, err := s.DB.GetTemplate(ctx, sb.TemplateID)
|
||||
if err == nil {
|
||||
resumeDefaultUser = tmpl.DefaultUser
|
||||
if len(tmpl.DefaultEnv) > 0 {
|
||||
_ = json.Unmarshal(tmpl.DefaultEnv, &resumeDefaultEnv)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resp, err := agent.ResumeSandbox(ctx, connect.NewRequest(&pb.ResumeSandboxRequest{
|
||||
SandboxId: sandboxIDStr,
|
||||
TimeoutSec: sb.TimeoutSec,
|
||||
SandboxId: sandboxIDStr,
|
||||
TimeoutSec: sb.TimeoutSec,
|
||||
DefaultUser: resumeDefaultUser,
|
||||
DefaultEnv: resumeDefaultEnv,
|
||||
}))
|
||||
if err != nil {
|
||||
return db.Sandbox{}, fmt.Errorf("agent resume: %w", err)
|
||||
|
||||
Reference in New Issue
Block a user