forked from wrenn/wrenn
Compare commits
2 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| af79047503 | |||
| 52ad21c339 |
1
.gitignore
vendored
1
.gitignore
vendored
@ -49,3 +49,4 @@ frontend/build/
|
|||||||
internal/dashboard/static/*
|
internal/dashboard/static/*
|
||||||
!internal/dashboard/static/.gitkeep.dual-graph/
|
!internal/dashboard/static/.gitkeep.dual-graph/
|
||||||
.dual-graph/
|
.dual-graph/
|
||||||
|
__pycache__
|
||||||
|
|||||||
45
.woodpecker/pipeline.yml
Normal file
45
.woodpecker/pipeline.yml
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
# Woodpecker CI pipeline: build wrenn, tag the release repo, generate
# LLM-written release notes, then publish everything to GitHub.
when:
  - event: push
    branch: main

steps:
  # Build the binary inside a wrenn capsule, then tag and push the release repo.
  sandbox-1:
    image: python:3.13
    environment:
      WRENN_API_KEY:
        from_secret: wrenn_api_key
      GITEA_TOKEN:
        from_secret: gitea_token
    commands:
      - pip install wrenn
      # Pin the Go toolchain to the version declared in go.mod.
      - export GO_VERSION=$$(grep '^go ' go.mod | cut -d' ' -f2)
      - python .woodpecker/scripts/build.py
      # VERSION_CP is produced by the build; drives the release tag.
      - VERSION=$$(cat VERSION_CP)
      - git config user.name "R3dRum92"
      - git config user.email "tksadik@omukk.dev"
      - git tag "v$${VERSION}"
      - git push "https://tksadik92:$${GITEA_TOKEN}@git.omukk.dev/tksadik92/wrenn-releases.git" "v$${VERSION}"

  # Generate release notes from the new tag (needs the tag pushed by sandbox-1).
  sandbox-2:
    image: python:3.13
    environment:
      WRENN_API_KEY:
        from_secret: wrenn_api_key
      GITEA_TOKEN:
        from_secret: gitea_token
      ZHIPU_API_KEY:
        from_secret: zhipu_api_key
    commands:
      - pip install wrenn
      - python .woodpecker/scripts/release_notes.py
    depends_on: [sandbox-1]

  # Publish the GitHub release with the artifacts and the generated notes.
  sandbox-3:
    image: python:3.13
    environment:
      GITHUB_TOKEN:
        from_secret: github_token
    commands:
      - pip install httpx
      - python .woodpecker/scripts/publish_github.py
    depends_on: [sandbox-2]
|
||||||
126
.woodpecker/scripts/build.py
Normal file
126
.woodpecker/scripts/build.py
Normal file
@ -0,0 +1,126 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from wrenn import Capsule, StreamExitEvent, StreamStderrEvent, StreamStdoutEvent
|
||||||
|
from wrenn._git import GitCommandError
|
||||||
|
from wrenn.models import FileEntry
|
||||||
|
|
||||||
|
GO_VERSION = os.getenv("GO_VERSION", "1.25.8")
|
||||||
|
REPO_URL = "https://git.omukk.dev/wrenn/wrenn.git"
|
||||||
|
REPO_DIR = "/opt/wrenn"
|
||||||
|
BUILDS_DIR = os.path.join(os.path.dirname(__file__), "..", "..", "builds")
|
||||||
|
|
||||||
|
|
||||||
|
def run(capsule: Capsule, cmd: str, timeout: int = 30) -> int:
    """Run *cmd* inside the capsule, logging OK/FAIL to the console.

    Returns the command's exit code (0 on success); stderr is echoed on
    failure so the CI log shows why the step broke.
    """
    label = cmd.split()[0]
    outcome = capsule.commands.run(cmd, timeout=timeout)
    if outcome.exit_code == 0:
        print(f"OK [{label}]")
        return 0
    print(f"FAIL [{label}]: exit={outcome.exit_code}", file=sys.stderr)
    if outcome.stderr:
        print(outcome.stderr.strip(), file=sys.stderr)
    return outcome.exit_code
|
||||||
|
|
||||||
|
|
||||||
|
def install_go(capsule: Capsule) -> bool:
    """Download and unpack the Go toolchain inside the capsule.

    Installs build prerequisites via apt, fetches the official tarball for
    GO_VERSION, unpacks it under /usr/local, and appends the Go bin dir to
    ~/.profile. Returns True only if `go version` then runs successfully.
    """
    tarball = f"go{GO_VERSION}.linux-amd64.tar.gz"
    url = f"https://go.dev/dl/{tarball}"

    # Each entry is (command, timeout-seconds); abort on the first failure.
    steps = [
        ("apt update", 30),
        ("apt install -y make build-essential file", 30),
        (f"curl -LO {url}", 120),
        (f"tar -C /usr/local -xzf {tarball}", 60),
        ('echo "export PATH=$PATH:/usr/local/go/bin" >> ~/.profile', 30),
        ("rm -f " + tarball, 30),
    ]
    for step_cmd, limit in steps:
        if run(capsule, step_cmd, timeout=limit) != 0:
            return False

    # Sanity check: the unpacked toolchain must actually execute.
    result = capsule.commands.run("/usr/local/go/bin/go version")
    print(result.stdout.strip())
    return result.exit_code == 0
|
||||||
|
|
||||||
|
|
||||||
|
def clone_repo(capsule: Capsule) -> bool:
    """Clone the wrenn repository into the capsule; True on success."""
    try:
        capsule.git.clone(REPO_URL, REPO_DIR)
    except GitCommandError as e:
        print(f"FAIL [git clone]: {e}", file=sys.stderr)
        return False
    print("OK [git clone]")
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def build_app(capsule: Capsule) -> bool:
    """Run `make build` in the background and stream its output live.

    PATH is set explicitly because the background shell does not read
    ~/.profile, so the freshly installed Go toolchain must be added here.
    Returns True when the build exits with status 0.
    """
    build_env = {
        "PATH": "/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
    }
    handle = capsule.commands.run(
        "CGO_ENABLED=1 make build",
        background=True,
        cwd=REPO_DIR,
        envs=build_env,
    )
    print(f"make build started (pid={handle.pid}), streaming output...")

    status = 0
    # Relay stdout/stderr as they arrive; remember the final exit code.
    for event in capsule.commands.connect(handle.pid):
        if isinstance(event, StreamStdoutEvent):
            print(event.data, end="")
        elif isinstance(event, StreamStderrEvent):
            print(event.data, end="", file=sys.stderr)
        elif isinstance(event, StreamExitEvent):
            status = event.exit_code

    if status == 0:
        print("OK [make build]")
        return True
    print(f"FAIL [make build]: exit={status}", file=sys.stderr)
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def download_artifacts(capsule: Capsule) -> bool:
    """Copy every regular file from the capsule's builds/ dir into BUILDS_DIR.

    Returns False when the remote builds/ directory contains no files.
    """
    remote_dir = f"{REPO_DIR}/builds"
    artifacts = [
        entry
        for entry in capsule.files.list(remote_dir, depth=1)
        if entry.type != "directory"
    ]

    if not artifacts:
        print("FAIL [download]: no files found in builds/", file=sys.stderr)
        return False

    local_dir = os.path.normpath(BUILDS_DIR)
    os.makedirs(local_dir, exist_ok=True)

    for entry in artifacts:
        name = entry.name or "unknown"
        print(f"Downloading {name} ({entry.size or '?'} bytes)...")
        # Stream chunk-by-chunk so large binaries never sit fully in memory.
        with open(os.path.join(local_dir, name), "wb") as out:
            for chunk in capsule.files.download_stream(f"{remote_dir}/{name}"):
                out.write(chunk)
        print(f"OK [download {name}]")

    return True
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Provision a build capsule, compile wrenn, and fetch the artifacts.

    Exits with status 1 as soon as any stage fails; the context manager
    tears the capsule down on every path.
    """
    stages = (install_go, clone_repo, build_app, download_artifacts)
    with Capsule(wait=True, vcpus=4, memory_mb=4096) as capsule:
        print(f"Capsule: {capsule.capsule_id}")
        for stage in stages:
            if not stage(capsule):
                sys.exit(1)
        print("Done.")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
104
.woodpecker/scripts/publish_github.py
Normal file
104
.woodpecker/scripts/publish_github.py
Normal file
@ -0,0 +1,104 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
GITHUB_REPO = "R3dRum92/wrenn-releases"
|
||||||
|
GITHUB_API = "https://api.github.com"
|
||||||
|
GITHUB_UPLOADS = "https://uploads.github.com"
|
||||||
|
BUILDS_DIR = "builds"
|
||||||
|
VERSION_FILE = "VERSION_CP"
|
||||||
|
NOTES_FILE = os.path.join(".woodpecker", "release_notes.md")
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Create a GitHub release for the current version and upload artifacts.

    Reads the version from VERSION_FILE and optional markdown notes from
    NOTES_FILE, creates the release on GITHUB_REPO, then uploads every
    regular file under BUILDS_DIR as a release asset. Requires the
    GITHUB_TOKEN environment variable; exits 1 if release creation fails.
    """
    token = os.environ["GITHUB_TOKEN"]

    with open(VERSION_FILE, encoding="utf-8") as f:
        version = f.read().strip()
    tag = f"v{version}"

    release_notes = ""
    if os.path.exists(NOTES_FILE):
        with open(NOTES_FILE, encoding="utf-8") as f:
            release_notes = f.read()

    headers = {
        "Authorization": f"token {token}",
        "Accept": "application/vnd.github+json",
        "X-GitHub-Api-Version": "2022-11-28",
    }

    # Context manager guarantees the HTTP connection pool is closed on
    # every exit path (early return, sys.exit, unexpected exception),
    # replacing the previous hand-placed client.close() calls.
    with httpx.Client(headers=headers, timeout=60) as client:
        print(f"Creating GitHub release for {tag}...")
        resp = client.post(
            f"{GITHUB_API}/repos/{GITHUB_REPO}/releases",
            json={
                "tag_name": tag,
                "name": tag,
                "body": release_notes,
                "draft": False,
                "prerelease": False,
            },
        )
        if resp.status_code == 422:
            # 422 from this endpoint typically means a release for the tag
            # already exists; treat it as a no-op rather than a failure.
            print(f"WARN [create release]: release for {tag} already exists, skipping")
            data = resp.json()
            errors = data.get("errors", [])
            if errors:
                existing_url = errors[0].get("documentation_url", "")
                print(f" See: {existing_url}")
            return
        if resp.status_code != 201:
            print(f"FAIL [create release]: {resp.status_code} {resp.text}", file=sys.stderr)
            sys.exit(1)

        release_data = resp.json()
        release_id = release_data["id"]
        release_url = release_data.get("html_url", "")
        print(f"OK [create release] id={release_id}")

        builds_path = Path(BUILDS_DIR)
        if not builds_path.exists():
            print(f"No {BUILDS_DIR}/ directory found, skipping asset upload")
            print(f"Release published: {release_url}")
            return

        upload_headers = {
            **headers,
            "Content-Type": "application/octet-stream",
        }

        # Upload assets in deterministic (sorted) order; a failed upload is
        # only warned about so the release itself still goes out.
        for artifact in sorted(builds_path.iterdir()):
            if artifact.is_dir():
                continue
            print(f"Uploading {artifact.name}...")
            resp = client.post(
                f"{GITHUB_UPLOADS}/repos/{GITHUB_REPO}/releases/{release_id}/assets",
                params={"name": artifact.name},
                headers=upload_headers,
                content=artifact.read_bytes(),
            )
            if resp.status_code != 201:
                print(
                    f"WARN [upload {artifact.name}]: {resp.status_code} {resp.text}",
                    file=sys.stderr,
                )
            else:
                print(f"OK [upload {artifact.name}]")

        print(f"Release published: {release_url}")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
266
.woodpecker/scripts/release_notes.py
Normal file
266
.woodpecker/scripts/release_notes.py
Normal file
@ -0,0 +1,266 @@
|
|||||||
|
import base64
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from wrenn import Capsule
|
||||||
|
|
||||||
|
REPO_URL = "https://git.omukk.dev/tksadik92/wrenn-releases.git"
|
||||||
|
REPO_DIR = "/opt/wrenn-releases"
|
||||||
|
CAPSULE_OUTPUT = "/tmp/release_notes.md"
|
||||||
|
LOCAL_OUTPUT = os.path.join(os.path.dirname(__file__), "..", "release_notes.md")
|
||||||
|
|
||||||
|
# Default starting configuration
# Prefer the Zhipu coding model when an API key is available, otherwise the
# free MiniMax model served through OpenCode.
ZHIPU_API_KEY = os.environ.get("ZHIPU_API_KEY", "")
if ZHIPU_API_KEY:
    DEFAULT_MODEL = "zhipuai-coding-/glm-5.1"
else:
    DEFAULT_MODEL = "opencode/minimax-m2.5-free"

# NOTE(review): this unconditional reassignment shadows the ZHIPU_API_KEY
# selection above, so MiniMax is always used regardless of the key. The
# "TODO: Override" marker suggests a deliberate temporary pin — confirm
# intent before deleting either branch.
DEFAULT_MODEL = "opencode/minimax-m2.5-free"  # TODO: Override
|
||||||
|
|
||||||
|
RELEASE_NOTES_EXAMPLE = """
|
||||||
|
## What's new
|
||||||
|
Sandbox HTTP proxying, terminal reliability, and auth robustness improvements.
|
||||||
|
|
||||||
|
### Proxy
|
||||||
|
- Fixed redirect loops for apps served inside sandboxes (Python HTTP server, Jupyter, etc.)
|
||||||
|
- Proxy traffic no longer interferes with terminal and exec connections
|
||||||
|
- Services that take a moment to start up inside a sandbox are now retried instead of immediately failing
|
||||||
|
|
||||||
|
### Terminal (PTY)
|
||||||
|
- Terminal input is no longer blocked by slow network conditions — fast typing no longer causes timeouts or disconnects
|
||||||
|
- Input bursts are coalesced into fewer round trips — lower latency under fast typing
|
||||||
|
|
||||||
|
### Authentication
|
||||||
|
- WebSocket connections now authenticate correctly for both SDK clients (header-based) and browser clients (message-based)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
- Fixed crash in envd when a process exits without a PTY
|
||||||
|
- Fixed goroutine leak on sandbox pause
|
||||||
|
|
||||||
|
### Others
|
||||||
|
- Version bump
|
||||||
|
""".strip()
|
||||||
|
|
||||||
|
|
||||||
|
def run(capsule: Capsule, cmd: str, cwd: str | None = None, timeout: int = 30) -> int:
    """Execute *cmd* in the capsule (optionally in *cwd*), logging OK/FAIL.

    Returns the command's exit code; echoes stderr on failure.
    """
    label = cmd.split()[0]
    outcome = capsule.commands.run(cmd, cwd=cwd, timeout=timeout)
    if outcome.exit_code == 0:
        print(f"OK [{label}]")
        return 0
    print(f"FAIL [{label}]: exit={outcome.exit_code}", file=sys.stderr)
    if outcome.stderr:
        print(outcome.stderr.strip(), file=sys.stderr)
    return outcome.exit_code
|
||||||
|
|
||||||
|
|
||||||
|
def install_opencode(capsule: Capsule) -> None:
    """Install the pinned OpenCode CLI into the capsule; exit(1) on failure."""
    print("Installing OpenCode...")
    if run(capsule, "apt update", timeout=60) != 0:
        sys.exit(1)
    install_cmd = "curl -fsSL https://opencode.ai/install | bash -s -- --version 1.14.31"
    if run(capsule, install_cmd, timeout=120) != 0:
        sys.exit(1)
    print("OK [opencode installed]")
|
||||||
|
|
||||||
|
|
||||||
|
def get_tags(capsule: Capsule) -> tuple[str, str | None]:
    """Return (newest tag, second-newest tag or None) from the cloned repo.

    Tags are sorted newest-first by version; exits 1 when the repo has no
    tags or the git command fails.
    """
    result = capsule.commands.run(
        f"cd {REPO_DIR} && git tag --sort=-version:refname",
        cwd=REPO_DIR,
        timeout=30,
    )
    if result.exit_code != 0:
        print(f"FAIL [git tag]: {result.stderr}", file=sys.stderr)
        sys.exit(1)

    tags = [line for line in result.stdout.strip().split("\n") if line]
    if not tags:
        print("No tags found", file=sys.stderr)
        sys.exit(1)

    current_tag = tags[0]
    previous_tag = tags[1] if len(tags) > 1 else None
    print(f"Current tag: {current_tag}")
    print(f"Previous tag: {previous_tag}")
    return current_tag, previous_tag
|
||||||
|
|
||||||
|
|
||||||
|
def get_git_context(
    capsule: Capsule, current_tag: str, previous_tag: str | None
) -> tuple[str, str]:
    """Return (commit log, diff stat) describing what changed in current_tag.

    With a previous tag, covers everything between the two tags; for the
    very first tag, the log is capped at 50 commits to bound prompt size.
    Exits 1 if either git command fails.
    """
    if previous_tag:
        # FIX: Removed '-n 2' to ensure we grab ALL commits between the two tags
        log_cmd = f"cd {REPO_DIR} && git log {previous_tag}..{current_tag} --pretty=format:'%s (%h)'"
        # git diff natively compares the entire tree state between tags
        diff_cmd = f"cd {REPO_DIR} && git diff {previous_tag}..{current_tag} --stat"
    else:
        # Fallback to limit log size if this is the very first tag in the repo
        log_cmd = (
            f"cd {REPO_DIR} && git log {current_tag} --pretty=format:'%s (%h)' -n 50"
        )
        diff_cmd = f"cd {REPO_DIR} && git show {current_tag} --stat"

    log_result = capsule.commands.run(log_cmd, cwd=REPO_DIR, timeout=30)
    if log_result.exit_code != 0:
        print(f"FAIL [git log]: {log_result.stderr}", file=sys.stderr)
        sys.exit(1)

    diff_result = capsule.commands.run(diff_cmd, cwd=REPO_DIR, timeout=30)
    if diff_result.exit_code != 0:
        print(f"FAIL [git diff]: {diff_result.stderr}", file=sys.stderr)
        sys.exit(1)

    return log_result.stdout.strip(), diff_result.stdout.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_release_notes(
    capsule: Capsule,
    current_tag: str,
    git_log: str,
    git_diff: str,
    output_path: str,
    model: str,
) -> None:
    """Generate markdown release notes via the OpenCode CLI inside the capsule.

    Builds an LLM prompt from the git log/diff, ships it into the capsule
    base64-encoded (avoids shell-quoting issues), runs OpenCode with *model*
    (falling back to MiniMax when a Zhipu model fails), and leaves the
    resulting markdown at *output_path* inside the capsule. Exits 1 on any
    unrecoverable failure.
    """
    # NOTE(review): there is no "\n" between "No intro, no explanation." and
    # "CRITICAL:", so those two sentences are fused in the prompt — likely
    # unintended, but harmless to the model; confirm before changing.
    prompt = (
        f"You are writing release notes for version {current_tag} of a software project.\n\n"
        f"Here is what changed between the previous version and this one:\n\n"
        f"Commit messages:\n{git_log}\n\n"
        f"Files and areas that changed:\n{git_diff}\n\n"
        f"Write the release notes in plain, friendly language that any developer can understand "
        f"without deep knowledge of the codebase. Avoid jargon like 'goroutine', 'PTY', 'envd', "
        f"or internal function names — describe what the change means for the user instead. "
        f"Group related changes under headings that reflect what actually changed. "
        f"Only include sections that are relevant to these specific changes. "
        f"Start with a short one-line summary of what this release is about. "
        f"Keep each bullet point to one clear sentence.\n\n"
        f"Here is an example of the style to aim for — not a template to copy:\n\n"
        f"{RELEASE_NOTES_EXAMPLE}\n\n"
        f"You MUST start the document with `## What's New`\n"
        f"The very next line MUST be a single short summary sentence.\n"
        f"Output only the markdown. No intro, no explanation."
        f"CRITICAL: Do not output any conversational filler, acknowledgments, or thoughts "
        f"like 'Let me look at the changes'. Output absolutely nothing except the final markdown."
    )

    # Base64 round-trip keeps arbitrary prompt text safe inside single quotes.
    prompt_b64 = base64.b64encode(prompt.encode("utf-8")).decode("utf-8")

    write_prompt_cmd = f"echo '{prompt_b64}' | base64 -d > /tmp/oc_prompt.txt"

    result = capsule.commands.run(
        write_prompt_cmd,
        cwd=REPO_DIR,
        timeout=10,
    )
    if result.exit_code != 0:
        print(f"FAIL [write prompt]: {result.stderr}", file=sys.stderr)
        sys.exit(1)

    # FIX: Wrapper function to handle execution and authentication dynamically
    def run_opencode_with_model(target_model: str) -> int:
        # Zhipu models need the API key injected into the command environment.
        env = ""
        if "zhipu" in target_model.lower():
            env = f"ZHIPU_API_KEY={os.environ.get('ZHIPU_API_KEY', '')}"

        cmd = (
            f"{env} "
            f"~/.opencode/bin/opencode run "
            f'"Read the attached file and generate the release notes. Output ONLY markdown." '
            f"--model {target_model} "
            f"--file /tmp/oc_prompt.txt "
            f"> {output_path}"
        )

        cmd_result = capsule.commands.run(cmd, cwd=REPO_DIR, timeout=120)

        if cmd_result.exit_code != 0:
            print(
                f"FAIL [opencode via {target_model}]: exit={cmd_result.exit_code}",
                file=sys.stderr,
            )
            print(f"STDOUT:\n{cmd_result.stdout}", file=sys.stderr)
            print(f"STDERR:\n{cmd_result.stderr}", file=sys.stderr)

        return cmd_result.exit_code

    # First attempt with the target model
    exit_status = run_opencode_with_model(model)

    # FIX: Catch failures (like Zhipu rate limits) and fallback to MiniMax
    if exit_status != 0:
        if "zhipu" in model.lower():
            print(
                "\n[!] Zhipu AI failed (likely rate-limited). Falling back to MiniMax...",
                file=sys.stderr,
            )
            fallback_model = "opencode/minimax-m2.5-free"
            exit_status = run_opencode_with_model(fallback_model)
            if exit_status != 0:
                print("FAIL: Fallback model also failed. Exiting.", file=sys.stderr)
                sys.exit(1)
        else:
            sys.exit(1)

    # Echo the generated notes into the CI log for inspection.
    result = capsule.commands.run(f"cat {output_path}")
    print(result.stdout)
    if result.stderr:
        print(result.stderr)

    print(f"OK [opencode] release notes written to {output_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def download_release_notes(capsule: Capsule) -> None:
    """Copy the generated notes out of the capsule and echo them locally.

    Writes the raw bytes to LOCAL_OUTPUT (so the markdown survives any
    encoding quirks byte-for-byte) and prints a decoded copy to the CI log,
    replacing undecodable bytes instead of raising.
    """
    local_path = os.path.normpath(LOCAL_OUTPUT)
    os.makedirs(os.path.dirname(local_path), exist_ok=True)

    # Fixed: was an f-string with no placeholders (ruff F541).
    print("Downloading release notes from capsule...")
    content = capsule.files.read_bytes(CAPSULE_OUTPUT)
    with open(local_path, "wb") as f:
        f.write(content)

    print(f"OK [download] release notes → {local_path}")
    print(content.decode("utf-8", errors="replace"))
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """End-to-end release-notes job: clone, inspect tags, generate, download.

    The model can be overridden via the OPENCODE_MODEL environment variable.
    """
    model = os.environ.get("OPENCODE_MODEL", DEFAULT_MODEL)

    with Capsule(wait=True, vcpus=2, memory_mb=2048) as capsule:
        print(f"Capsule: {capsule.capsule_id}")

        install_opencode(capsule)

        capsule.git.clone(REPO_URL, REPO_DIR, username="tksadik92")
        print("OK [git clone]")

        current_tag, previous_tag = get_tags(capsule)
        git_log, git_diff = get_git_context(capsule, current_tag, previous_tag)

        # Note: This simply creates the directory string safely
        output_path = os.path.normpath(CAPSULE_OUTPUT)

        generate_release_notes(
            capsule,
            current_tag,
            git_log,
            git_diff,
            output_path,
            model,
        )

        download_release_notes(capsule)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@ -1 +1 @@
|
|||||||
0.1.0
|
0.1.1
|
||||||
|
|||||||
@ -1 +1 @@
|
|||||||
0.1.2
|
0.1.4
|
||||||
|
|||||||
@ -148,7 +148,13 @@ func main() {
|
|||||||
slog.Info("host registered", "host_id", creds.HostID)
|
slog.Info("host registered", "host_id", creds.HostID)
|
||||||
|
|
||||||
// httpServer is declared here so the shutdown func can reference it.
|
// httpServer is declared here so the shutdown func can reference it.
|
||||||
httpServer := &http.Server{Addr: listenAddr}
|
// ReadTimeout/WriteTimeout are intentionally omitted — they would kill
|
||||||
|
// long-lived Connect RPC streams and WebSocket proxy connections.
|
||||||
|
httpServer := &http.Server{
|
||||||
|
Addr: listenAddr,
|
||||||
|
ReadHeaderTimeout: 10 * time.Second,
|
||||||
|
IdleTimeout: 620 * time.Second, // > typical LB upstream timeout (600s)
|
||||||
|
}
|
||||||
|
|
||||||
// mTLS is mandatory — refuse to start without a valid certificate.
|
// mTLS is mandatory — refuse to start without a valid certificate.
|
||||||
var certStore hostagent.CertStore
|
var certStore hostagent.CertStore
|
||||||
@ -193,6 +199,7 @@ func main() {
|
|||||||
path, handler := hostagentv1connect.NewHostAgentServiceHandler(srv)
|
path, handler := hostagentv1connect.NewHostAgentServiceHandler(srv)
|
||||||
|
|
||||||
proxyHandler := hostagent.NewProxyHandler(mgr)
|
proxyHandler := hostagent.NewProxyHandler(mgr)
|
||||||
|
mgr.SetOnDestroy(proxyHandler.EvictProxy)
|
||||||
|
|
||||||
mux := http.NewServeMux()
|
mux := http.NewServeMux()
|
||||||
mux.Handle(path, handler)
|
mux.Handle(path, handler)
|
||||||
|
|||||||
@ -9,4 +9,10 @@ VALUES ('00000000-0000-0000-0000-000000000000', 'Platform', 'platform')
|
|||||||
ON CONFLICT (id) DO NOTHING;
|
ON CONFLICT (id) DO NOTHING;
|
||||||
|
|
||||||
-- +goose Down
|
-- +goose Down
|
||||||
|
-- Delete dependent rows that reference the platform team via foreign keys.
|
||||||
|
-- Order matters: children before parent.
|
||||||
|
DELETE FROM sandboxes WHERE team_id = '00000000-0000-0000-0000-000000000000';
|
||||||
|
DELETE FROM team_api_keys WHERE team_id = '00000000-0000-0000-0000-000000000000';
|
||||||
|
DELETE FROM users_teams WHERE team_id = '00000000-0000-0000-0000-000000000000';
|
||||||
|
DELETE FROM hosts WHERE team_id = '00000000-0000-0000-0000-000000000000';
|
||||||
DELETE FROM teams WHERE id = '00000000-0000-0000-0000-000000000000';
|
DELETE FROM teams WHERE id = '00000000-0000-0000-0000-000000000000';
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
// Package migrations embeds the SQL migration files so that external modules
|
// Package migrations embeds the SQL migration files so that external modules
|
||||||
// (such as the enterprise edition) can access them programmatically.
|
// (such as the cloud edition) can access them programmatically.
|
||||||
package migrations
|
package migrations
|
||||||
|
|
||||||
import "embed"
|
import "embed"
|
||||||
|
|||||||
@ -2,6 +2,15 @@
|
|||||||
INSERT INTO audit_logs (id, team_id, actor_type, actor_id, actor_name, resource_type, resource_id, action, scope, status, metadata)
|
INSERT INTO audit_logs (id, team_id, actor_type, actor_id, actor_name, resource_type, resource_id, action, scope, status, metadata)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11);
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11);
|
||||||
|
|
||||||
|
-- name: AnonymizeAuditLogsByUserID :exec
|
||||||
|
UPDATE audit_logs
|
||||||
|
SET actor_name = CASE WHEN actor_id = $1 THEN 'deleted-user' ELSE actor_name END,
|
||||||
|
actor_id = CASE WHEN actor_id = $1 THEN NULL ELSE actor_id END,
|
||||||
|
resource_id = CASE WHEN resource_type = 'member' AND resource_id = $1 THEN NULL ELSE resource_id END,
|
||||||
|
metadata = CASE WHEN resource_type = 'member' AND resource_id = $1 AND metadata ? 'email' THEN metadata - 'email' ELSE metadata END
|
||||||
|
WHERE actor_id = $1
|
||||||
|
OR (resource_type = 'member' AND resource_id = $1);
|
||||||
|
|
||||||
-- name: ListAuditLogs :many
|
-- name: ListAuditLogs :many
|
||||||
SELECT * FROM audit_logs
|
SELECT * FROM audit_logs
|
||||||
WHERE team_id = $1
|
WHERE team_id = $1
|
||||||
|
|||||||
@ -91,8 +91,8 @@ WHERE ut.user_id = $1
|
|||||||
WHERE ut2.team_id = ut.team_id AND ut2.user_id <> $1
|
WHERE ut2.team_id = ut.team_id AND ut2.user_id <> $1
|
||||||
);
|
);
|
||||||
|
|
||||||
-- name: HardDeleteExpiredUsers :exec
|
-- name: ListExpiredSoftDeletedUsers :many
|
||||||
DELETE FROM users WHERE deleted_at IS NOT NULL AND deleted_at < NOW() - INTERVAL '15 days';
|
SELECT id, email FROM users WHERE deleted_at IS NOT NULL AND deleted_at < NOW() - INTERVAL '15 days';
|
||||||
|
|
||||||
-- name: HardDeleteUser :exec
|
-- name: HardDeleteUser :exec
|
||||||
DELETE FROM users WHERE id = $1;
|
DELETE FROM users WHERE id = $1;
|
||||||
|
|||||||
@ -1 +1 @@
|
|||||||
0.1.0
|
0.1.1
|
||||||
|
|||||||
@ -446,7 +446,9 @@ func (p *Handler) Wait() {
|
|||||||
|
|
||||||
err := p.cmd.Wait()
|
err := p.cmd.Wait()
|
||||||
|
|
||||||
|
if p.tty != nil {
|
||||||
p.tty.Close()
|
p.tty.Close()
|
||||||
|
}
|
||||||
|
|
||||||
var errMsg *string
|
var errMsg *string
|
||||||
|
|
||||||
|
|||||||
21
frontend/src/lib/api/admin-audit.ts
Normal file
21
frontend/src/lib/api/admin-audit.ts
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import { apiFetch, type ApiResult } from '$lib/api/client';
|
||||||
|
import type { AuditLog, AuditListResponse } from '$lib/api/audit';
|
||||||
|
|
||||||
|
export type { AuditLog, AuditListResponse };
|
||||||
|
|
||||||
|
export async function listAdminAuditLogs(params?: {
|
||||||
|
before?: string;
|
||||||
|
before_id?: string;
|
||||||
|
resource_types?: string[];
|
||||||
|
actions?: string[];
|
||||||
|
limit?: number;
|
||||||
|
}): Promise<ApiResult<AuditListResponse>> {
|
||||||
|
const q = new URLSearchParams();
|
||||||
|
if (params?.before) q.set('before', params.before);
|
||||||
|
if (params?.before_id) q.set('before_id', params.before_id);
|
||||||
|
params?.resource_types?.forEach((t) => q.append('resource_type', t));
|
||||||
|
params?.actions?.forEach((a) => q.append('action', a));
|
||||||
|
if (params?.limit != null) q.set('limit', String(params.limit));
|
||||||
|
const qs = q.toString();
|
||||||
|
return apiFetch('GET', `/api/v1/admin/audit-logs${qs ? '?' + qs : ''}`);
|
||||||
|
}
|
||||||
@ -13,7 +13,8 @@
|
|||||||
IconChevron,
|
IconChevron,
|
||||||
IconShield,
|
IconShield,
|
||||||
IconMembers,
|
IconMembers,
|
||||||
IconUser
|
IconUser,
|
||||||
|
IconAudit
|
||||||
} from './icons';
|
} from './icons';
|
||||||
|
|
||||||
let { collapsed = $bindable(false) }: { collapsed: boolean } = $props();
|
let { collapsed = $bindable(false) }: { collapsed: boolean } = $props();
|
||||||
@ -26,7 +27,8 @@
|
|||||||
|
|
||||||
const managementItems: NavItem[] = [
|
const managementItems: NavItem[] = [
|
||||||
{ label: 'Users', icon: IconUser, href: '/admin/users' },
|
{ label: 'Users', icon: IconUser, href: '/admin/users' },
|
||||||
{ label: 'Teams', icon: IconMembers, href: '/admin/teams' }
|
{ label: 'Teams', icon: IconMembers, href: '/admin/teams' },
|
||||||
|
{ label: 'Audit', icon: IconAudit, href: '/admin/audit' }
|
||||||
];
|
];
|
||||||
|
|
||||||
const platformItems: NavItem[] = [
|
const platformItems: NavItem[] = [
|
||||||
|
|||||||
600
frontend/src/routes/admin/audit/+page.svelte
Normal file
600
frontend/src/routes/admin/audit/+page.svelte
Normal file
@ -0,0 +1,600 @@
|
|||||||
|
<script lang="ts">
|
||||||
|
import { onMount } from 'svelte';
|
||||||
|
import { listAdminAuditLogs, type AuditLog } from '$lib/api/admin-audit';
|
||||||
|
|
||||||
|
// ─── Data state ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
let logs = $state<AuditLog[]>([]);
|
||||||
|
let loading = $state(true);
|
||||||
|
let loadingMore = $state(false);
|
||||||
|
let error = $state<string | null>(null);
|
||||||
|
let hasMore = $state(false);
|
||||||
|
let nextCursor = $state<{ before: string; before_id: string } | null>(null);
|
||||||
|
|
||||||
|
// ─── UI state ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
let sentinel = $state<HTMLElement | null>(null);
|
||||||
|
let filterDropdownOpen = $state(false);
|
||||||
|
let filterDropdownEl = $state<HTMLElement | null>(null);
|
||||||
|
|
||||||
|
// ─── Filter state ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
let selectedActions = $state<Map<string, Set<string>>>(new Map());
|
||||||
|
|
||||||
|
// ─── Constants ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
const RESOURCES = ['sandbox', 'snapshot', 'template', 'build', 'team', 'api_key', 'member', 'host', 'user', 'channel'] as const;
|
||||||
|
|
||||||
|
const RESOURCE_LABELS: Record<string, string> = {
|
||||||
|
sandbox: 'Capsule',
|
||||||
|
snapshot: 'Snapshot',
|
||||||
|
template: 'Template',
|
||||||
|
build: 'Build',
|
||||||
|
team: 'Team',
|
||||||
|
api_key: 'API Key',
|
||||||
|
member: 'Member',
|
||||||
|
host: 'Host',
|
||||||
|
user: 'User',
|
||||||
|
channel: 'Channel'
|
||||||
|
};
|
||||||
|
|
||||||
|
const ACTIONS_BY_RESOURCE: Record<string, string[]> = {
|
||||||
|
sandbox: ['create', 'pause', 'resume', 'destroy'],
|
||||||
|
snapshot: ['create', 'delete'],
|
||||||
|
template: ['delete'],
|
||||||
|
build: ['create', 'cancel'],
|
||||||
|
team: ['rename', 'set_byoc', 'delete'],
|
||||||
|
api_key: ['create', 'revoke'],
|
||||||
|
member: ['add', 'remove', 'leave', 'role_update'],
|
||||||
|
host: ['create', 'delete', 'marked_down', 'marked_up'],
|
||||||
|
user: ['activate', 'deactivate'],
|
||||||
|
channel: ['create', 'update', 'rotate_config', 'delete']
|
||||||
|
};
|
||||||
|
|
||||||
|
const ACTION_LABELS: Record<string, string> = {
|
||||||
|
create: 'Created',
|
||||||
|
pause: 'Paused',
|
||||||
|
resume: 'Resumed',
|
||||||
|
destroy: 'Destroyed',
|
||||||
|
delete: 'Deleted',
|
||||||
|
rename: 'Renamed',
|
||||||
|
revoke: 'Revoked',
|
||||||
|
add: 'Added',
|
||||||
|
remove: 'Removed',
|
||||||
|
leave: 'Left',
|
||||||
|
role_update: 'Role updated',
|
||||||
|
marked_down: 'Marked down',
|
||||||
|
marked_up: 'Marked up',
|
||||||
|
activate: 'Activated',
|
||||||
|
deactivate: 'Deactivated',
|
||||||
|
set_byoc: 'BYOC toggled',
|
||||||
|
cancel: 'Cancelled',
|
||||||
|
update: 'Updated',
|
||||||
|
rotate_config: 'Config rotated'
|
||||||
|
};
|
||||||
|
|
||||||
|
// ─── Derived ──────────────────────────────────────────────────────────────

// Number of resource groups with at least one action selected; shown as the
// badge on the Filter button and used to decide whether the tag row renders.
let activeFilterCount = $derived(
	[...selectedActions.values()].filter((s) => s.size > 0).length
);
|
||||||
|
|
||||||
|
// ─── Filter helpers ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
type CheckState = 'all' | 'some' | 'none';
|
||||||
|
|
||||||
|
function getResourceCheckState(r: string): CheckState {
|
||||||
|
const sel = selectedActions.get(r);
|
||||||
|
if (!sel || sel.size === 0) return 'none';
|
||||||
|
if (sel.size === ACTIONS_BY_RESOURCE[r].length) return 'all';
|
||||||
|
return 'some';
|
||||||
|
}
|
||||||
|
|
||||||
|
function toggleResource(r: string) {
|
||||||
|
const state = getResourceCheckState(r);
|
||||||
|
const next = new Map(selectedActions);
|
||||||
|
if (state === 'all') {
|
||||||
|
next.delete(r);
|
||||||
|
} else {
|
||||||
|
next.set(r, new Set(ACTIONS_BY_RESOURCE[r]));
|
||||||
|
}
|
||||||
|
selectedActions = next;
|
||||||
|
resetAndFetch(next);
|
||||||
|
}
|
||||||
|
|
||||||
|
function toggleAction(r: string, a: string) {
|
||||||
|
const next = new Map(selectedActions);
|
||||||
|
const acts = new Set(next.get(r) ?? []);
|
||||||
|
if (acts.has(a)) {
|
||||||
|
acts.delete(a);
|
||||||
|
} else {
|
||||||
|
acts.add(a);
|
||||||
|
}
|
||||||
|
if (acts.size === 0) {
|
||||||
|
next.delete(r);
|
||||||
|
} else {
|
||||||
|
next.set(r, acts);
|
||||||
|
}
|
||||||
|
selectedActions = next;
|
||||||
|
resetAndFetch(next);
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearAllFilters() {
|
||||||
|
selectedActions = new Map();
|
||||||
|
resetAndFetch(new Map());
|
||||||
|
}
|
||||||
|
|
||||||
|
function getApiParams(snap: Map<string, Set<string>>) {
|
||||||
|
const resource_types: string[] = [];
|
||||||
|
const actions = new Set<string>();
|
||||||
|
for (const [r, acts] of snap) {
|
||||||
|
if (acts.size > 0) {
|
||||||
|
resource_types.push(r);
|
||||||
|
acts.forEach((a) => actions.add(a));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
resource_types: resource_types.length > 0 ? resource_types : undefined,
|
||||||
|
actions: actions.size > 0 ? [...actions] : undefined
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Click-outside to close dropdown ─────────────────────────────────────

// While the filter dropdown is open, close it on any mousedown outside the
// dropdown element. The effect re-runs when filterDropdownOpen changes, so
// the listener exists only while the dropdown is open; the returned cleanup
// removes it on close or component teardown.
$effect(() => {
	if (!filterDropdownOpen) return;
	function handleMouseDown(e: MouseEvent) {
		// e.target is an EventTarget; Node cast is needed for contains().
		if (filterDropdownEl && !filterDropdownEl.contains(e.target as Node)) {
			filterDropdownOpen = false;
		}
	}
	document.addEventListener('mousedown', handleMouseDown);
	return () => document.removeEventListener('mousedown', handleMouseDown);
});
|
||||||
|
|
||||||
|
// ─── Data functions ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
let fetchId = 0;
|
||||||
|
|
||||||
|
async function resetAndFetch(snap: Map<string, Set<string>>) {
|
||||||
|
const id = ++fetchId;
|
||||||
|
loading = true;
|
||||||
|
error = null;
|
||||||
|
logs = [];
|
||||||
|
nextCursor = null;
|
||||||
|
hasMore = false;
|
||||||
|
|
||||||
|
const params = getApiParams(snap);
|
||||||
|
const result = await listAdminAuditLogs(params);
|
||||||
|
|
||||||
|
if (id !== fetchId) return;
|
||||||
|
|
||||||
|
if (result.ok) {
|
||||||
|
logs = result.data.items;
|
||||||
|
hasMore = !!result.data.next_before;
|
||||||
|
nextCursor = result.data.next_before
|
||||||
|
? { before: result.data.next_before, before_id: result.data.next_before_id! }
|
||||||
|
: null;
|
||||||
|
} else {
|
||||||
|
error = result.error;
|
||||||
|
}
|
||||||
|
loading = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadNextPage() {
|
||||||
|
if (!nextCursor || loadingMore) return;
|
||||||
|
loadingMore = true;
|
||||||
|
|
||||||
|
const params = getApiParams(selectedActions);
|
||||||
|
const result = await listAdminAuditLogs({
|
||||||
|
...params,
|
||||||
|
before: nextCursor.before,
|
||||||
|
before_id: nextCursor.before_id
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result.ok) {
|
||||||
|
logs = [...logs, ...result.data.items];
|
||||||
|
hasMore = !!result.data.next_before;
|
||||||
|
nextCursor = result.data.next_before
|
||||||
|
? { before: result.data.next_before, before_id: result.data.next_before_id! }
|
||||||
|
: null;
|
||||||
|
}
|
||||||
|
loadingMore = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── UI helpers ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
const DELETED_BADGE = '\x00DELETED\x00';
|
||||||
|
const deletedBadgeHtml = '<span class="deleted-user-badge">deleted-user</span>';
|
||||||
|
|
||||||
|
function renderDeleted(text: string): string {
|
||||||
|
return text.replaceAll(DELETED_BADGE, deletedBadgeHtml);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a one-line, human-readable sentence for an audit log entry.
// Output is plain text except for DELETED_BADGE sentinels, which the
// template substitutes with badge HTML via renderDeleted().
function describeEvent(log: AuditLog): string {
	// Deleted actors get the badge sentinel; otherwise fall back from name
	// → "System" (for system-originated events) → "Unknown".
	const actor = log.actor_name === 'deleted-user' ? DELETED_BADGE : (log.actor_name || (log.actor_type === 'system' ? 'System' : 'Unknown'));
	// Metadata is schemaless per event type; treated as string values here.
	const meta = (log.metadata ?? {}) as Record<string, string>;
	// Dispatch on the "resource:action" pair; default covers any event type
	// the UI doesn't know about yet.
	switch (`${log.resource_type}:${log.action}`) {
		case 'sandbox:create': return `${actor} created a capsule`;
		case 'sandbox:pause': return `${actor} paused a capsule`;
		case 'sandbox:resume': return `${actor} resumed a capsule`;
		case 'sandbox:destroy': return `${actor} destroyed a capsule`;
		case 'snapshot:create': return `${actor} created a snapshot`;
		case 'snapshot:delete': return `${actor} deleted a snapshot`;
		case 'template:delete': return `${actor} deleted template "${log.resource_id}"`;
		case 'build:create': return `${actor} started a build for "${meta.name}"`;
		case 'build:cancel': return `${actor} cancelled a build`;
		case 'team:rename': return `${actor} renamed a team from "${meta.old_name}" to "${meta.new_name}"`;
		case 'team:set_byoc': return `${actor} ${String(meta.enabled) === 'true' ? 'enabled' : 'disabled'} BYOC for a team`;
		case 'team:delete': return `${actor} deleted a team`;
		case 'api_key:create': return `${actor} created API key "${meta.name}"`;
		case 'api_key:revoke': return `${actor} revoked an API key`;
		// member events fall back to the badge when the email is absent —
		// presumably the member's account was since deleted.
		case 'member:add': return `${actor} added ${meta.email ?? DELETED_BADGE} as ${meta.role}`;
		case 'member:remove': return `${actor} removed ${meta.email ?? DELETED_BADGE}`;
		case 'member:leave': return `${actor} left a team`;
		case 'member:role_update': return `${actor} changed a member's role to ${meta.new_role}`;
		case 'host:create': return `${actor} registered a host`;
		case 'host:delete': return `${actor} removed a host`;
		// Host up/down transitions are system-detected, so no actor prefix.
		case 'host:marked_down': return `Host was marked as down`;
		case 'host:marked_up': return `Host was marked as up`;
		case 'user:activate': return `${actor} activated user ${meta.email ?? ''}`;
		case 'user:deactivate': return `${actor} deactivated user ${meta.email ?? ''}`;
		case 'channel:create': return `${actor} created channel "${meta.name}"`;
		case 'channel:update': return `${actor} updated a channel`;
		case 'channel:rotate_config': return `${actor} rotated channel config`;
		case 'channel:delete': return `${actor} deleted a channel`;
		default: return `${actor} performed ${log.action} on ${log.resource_type}`;
	}
}
|
||||||
|
|
||||||
|
function actorLabel(log: AuditLog): string {
|
||||||
|
if (log.actor_type === 'system') return 'System';
|
||||||
|
if (log.actor_name === 'deleted-user') return DELETED_BADGE;
|
||||||
|
return log.actor_name ?? '—';
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatEventDate(iso: string): { date: string; time: string } {
|
||||||
|
const d = new Date(iso);
|
||||||
|
return {
|
||||||
|
date: d.toLocaleString('en-US', { month: 'short', day: 'numeric', year: 'numeric' }),
|
||||||
|
time: d.toLocaleString('en-US', { hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false })
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function statusColor(status: string): string {
|
||||||
|
switch (status) {
|
||||||
|
case 'success': return 'var(--color-accent)';
|
||||||
|
case 'info': return 'var(--color-blue)';
|
||||||
|
case 'warning': return 'var(--color-amber)';
|
||||||
|
case 'error': return 'var(--color-red)';
|
||||||
|
default: return 'var(--color-text-muted)';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function tagLabel(r: string): string {
|
||||||
|
const sel = selectedActions.get(r);
|
||||||
|
if (!sel || sel.size === 0) return RESOURCE_LABELS[r];
|
||||||
|
const total = ACTIONS_BY_RESOURCE[r].length;
|
||||||
|
if (sel.size === total) return RESOURCE_LABELS[r];
|
||||||
|
const actionNames = [...sel].map((a) => ACTION_LABELS[a]).join(', ');
|
||||||
|
return `${RESOURCE_LABELS[r]}: ${actionNames}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Lifecycle ────────────────────────────────────────────────────────────

// Initial load: fetch the first page with no filters when the page mounts.
onMount(() => {
	resetAndFetch(new Map());
});
|
||||||
|
|
||||||
|
// Infinite scroll: observe the sentinel element rendered below the table
// and load the next page when it comes within 300px of the viewport.
// Re-runs whenever `sentinel` is (re)bound; the cleanup disconnects the
// observer on teardown or rebind.
$effect(() => {
	const el = sentinel;
	if (!el) return;
	const obs = new IntersectionObserver(
		([entry]) => {
			// Don't fire while the initial fetch or a page load is already
			// in flight, or once the server reports no further pages.
			if (entry.isIntersecting && !loadingMore && !loading && hasMore) {
				loadNextPage();
			}
		},
		{ rootMargin: '300px' }
	);
	obs.observe(el);
	return () => obs.disconnect();
});
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Wrenn Admin — Audit Logs</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
<main class="flex-1 overflow-y-auto bg-[var(--color-bg-0)]">
|
||||||
|
|
||||||
|
<!-- Header -->
|
||||||
|
<div class="px-7 pt-8">
|
||||||
|
<h1 class="font-serif text-page text-[var(--color-text-bright)]">
|
||||||
|
Audit Logs
|
||||||
|
</h1>
|
||||||
|
<p class="mt-2 text-ui text-[var(--color-text-secondary)]">
|
||||||
|
Platform-wide activity log for all admin actions.
|
||||||
|
</p>
|
||||||
|
<div class="mt-6 border-b border-[var(--color-border)]"></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Filter bar -->
|
||||||
|
<div class="px-7 pt-5">
|
||||||
|
<div class="flex items-center gap-2">
|
||||||
|
|
||||||
|
<!-- Single hierarchical filter dropdown -->
|
||||||
|
<div class="relative" bind:this={filterDropdownEl}>
|
||||||
|
<button
|
||||||
|
onclick={() => (filterDropdownOpen = !filterDropdownOpen)}
|
||||||
|
class="flex items-center gap-2 rounded-[var(--radius-button)] border px-3 py-1.5 text-ui transition-colors duration-150
|
||||||
|
{activeFilterCount > 0
|
||||||
|
? 'border-[var(--color-accent)]/60 bg-[var(--color-accent)]/10 font-medium text-[var(--color-accent)]'
|
||||||
|
: 'border-[var(--color-border)] bg-[var(--color-bg-3)] text-[var(--color-text-secondary)] hover:border-[var(--color-border-mid)] hover:text-[var(--color-text-primary)]'}"
|
||||||
|
>
|
||||||
|
<svg width="13" height="13" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<line x1="4" y1="6" x2="20" y2="6" />
|
||||||
|
<line x1="8" y1="12" x2="16" y2="12" />
|
||||||
|
<line x1="11" y1="18" x2="13" y2="18" />
|
||||||
|
</svg>
|
||||||
|
<span>Filter</span>
|
||||||
|
{#if activeFilterCount > 0}
|
||||||
|
<span class="flex h-4 w-4 items-center justify-center rounded-full bg-[var(--color-accent)] text-[10px] font-semibold leading-none text-white">
|
||||||
|
{activeFilterCount}
|
||||||
|
</span>
|
||||||
|
{/if}
|
||||||
|
<svg
|
||||||
|
class="transition-transform duration-150 {filterDropdownOpen ? 'rotate-180' : ''}"
|
||||||
|
width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"
|
||||||
|
>
|
||||||
|
<polyline points="6 9 12 15 18 9" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{#if filterDropdownOpen}
|
||||||
|
<div
|
||||||
|
class="absolute left-0 top-full z-20 mt-1.5 w-56 overflow-y-auto rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)] py-1.5 shadow-xl"
|
||||||
|
style="max-height: 380px; animation: fadeUp 0.12s ease both"
|
||||||
|
>
|
||||||
|
{#each RESOURCES as r}
|
||||||
|
{@const rState = getResourceCheckState(r)}
|
||||||
|
{@const actions = ACTIONS_BY_RESOURCE[r]}
|
||||||
|
|
||||||
|
<!-- Resource row -->
|
||||||
|
<label class="flex cursor-pointer items-center gap-2.5 px-3 py-2 transition-colors duration-100 hover:bg-[var(--color-bg-3)]">
|
||||||
|
<!-- Tristate checkbox -->
|
||||||
|
<span class="flex h-3.5 w-3.5 shrink-0 items-center justify-center rounded-sm border transition-colors duration-100
|
||||||
|
{rState !== 'none' ? 'border-[var(--color-accent)] bg-[var(--color-accent)]' : 'border-[var(--color-border-mid)] bg-[var(--color-bg-4)]'}">
|
||||||
|
{#if rState === 'all'}
|
||||||
|
<svg width="8" height="8" viewBox="0 0 24 24" fill="none" stroke="white" stroke-width="3.5" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<polyline points="20 6 9 17 4 12" />
|
||||||
|
</svg>
|
||||||
|
{:else if rState === 'some'}
|
||||||
|
<svg width="8" height="8" viewBox="0 0 24 24" fill="none" stroke="white" stroke-width="3" stroke-linecap="round">
|
||||||
|
<line x1="5" y1="12" x2="19" y2="12" />
|
||||||
|
</svg>
|
||||||
|
{/if}
|
||||||
|
</span>
|
||||||
|
<input type="checkbox" class="sr-only" checked={rState !== 'none'} onchange={() => toggleResource(r)} />
|
||||||
|
<span class="text-ui font-medium text-[var(--color-text-primary)]">{RESOURCE_LABELS[r]}</span>
|
||||||
|
</label>
|
||||||
|
|
||||||
|
<!-- Action rows (indented) -->
|
||||||
|
{#each actions as a}
|
||||||
|
{@const checked = selectedActions.get(r)?.has(a) ?? false}
|
||||||
|
<label class="flex cursor-pointer items-center gap-2.5 py-1.5 pl-8 pr-3 transition-colors duration-100 hover:bg-[var(--color-bg-3)]">
|
||||||
|
<span class="flex h-3.5 w-3.5 shrink-0 items-center justify-center rounded-sm border transition-colors duration-100
|
||||||
|
{checked ? 'border-[var(--color-accent)] bg-[var(--color-accent)]' : 'border-[var(--color-border-mid)] bg-[var(--color-bg-4)]'}">
|
||||||
|
{#if checked}
|
||||||
|
<svg width="8" height="8" viewBox="0 0 24 24" fill="none" stroke="white" stroke-width="3.5" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<polyline points="20 6 9 17 4 12" />
|
||||||
|
</svg>
|
||||||
|
{/if}
|
||||||
|
</span>
|
||||||
|
<input type="checkbox" class="sr-only" {checked} onchange={() => toggleAction(r, a)} />
|
||||||
|
<span class="text-ui text-[var(--color-text-secondary)]">{ACTION_LABELS[a]}</span>
|
||||||
|
</label>
|
||||||
|
{/each}
|
||||||
|
|
||||||
|
<!-- Divider between resource groups -->
|
||||||
|
{#if r !== RESOURCES[RESOURCES.length - 1]}
|
||||||
|
<div class="mx-3 my-1 border-t border-[var(--color-border)]"></div>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Active filter tags -->
|
||||||
|
{#if activeFilterCount > 0}
|
||||||
|
<div class="mt-3 flex flex-wrap items-center gap-2" style="animation: fadeUp 0.2s ease both">
|
||||||
|
{#each RESOURCES as r}
|
||||||
|
{#if (selectedActions.get(r)?.size ?? 0) > 0}
|
||||||
|
<span class="flex items-center gap-1.5 rounded-full border border-[var(--color-accent)]/40 bg-[var(--color-accent)]/10 px-2.5 py-1 text-meta font-medium text-[var(--color-accent)]">
|
||||||
|
{tagLabel(r)}
|
||||||
|
<button
|
||||||
|
onclick={() => toggleResource(r)}
|
||||||
|
class="flex items-center justify-center text-[var(--color-accent)] opacity-60 transition-opacity duration-100 hover:opacity-100"
|
||||||
|
aria-label="Remove {RESOURCE_LABELS[r]} filter"
|
||||||
|
>
|
||||||
|
<svg width="10" height="10" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round">
|
||||||
|
<line x1="18" y1="6" x2="6" y2="18" /><line x1="6" y1="6" x2="18" y2="18" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</span>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
<button
|
||||||
|
onclick={clearAllFilters}
|
||||||
|
class="text-meta text-[var(--color-text-muted)] underline-offset-2 transition-colors duration-100 hover:text-[var(--color-text-secondary)] hover:underline"
|
||||||
|
>
|
||||||
|
Clear all
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Content -->
|
||||||
|
<div class="p-8" style="animation: fadeUp 0.35s ease both">
|
||||||
|
|
||||||
|
{#if error}
|
||||||
|
<div class="mb-4 flex items-center justify-between gap-4 rounded-[var(--radius-card)] border border-[var(--color-red)]/30 bg-[var(--color-red)]/5 px-4 py-3 text-ui text-[var(--color-red)]">
|
||||||
|
<span>{error}</span>
|
||||||
|
<button
|
||||||
|
onclick={() => resetAndFetch(selectedActions)}
|
||||||
|
class="shrink-0 font-semibold underline-offset-2 hover:underline"
|
||||||
|
>
|
||||||
|
Try again
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
{#if loading}
|
||||||
|
<div class="flex items-center justify-center py-24">
|
||||||
|
<div class="flex items-center gap-3 text-ui text-[var(--color-text-secondary)]">
|
||||||
|
<svg class="animate-spin" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||||
|
<path d="M21 12a9 9 0 1 1-6.219-8.56" />
|
||||||
|
</svg>
|
||||||
|
Loading events...
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{:else if logs.length === 0}
|
||||||
|
<!-- Empty state -->
|
||||||
|
<div class="flex flex-col items-center justify-center py-[72px]">
|
||||||
|
<div class="relative mb-5">
|
||||||
|
<div class="absolute inset-0 -m-4 rounded-full" style="background: radial-gradient(circle, rgba(94,140,88,0.08) 0%, transparent 70%)"></div>
|
||||||
|
<div
|
||||||
|
class="relative flex h-14 w-14 items-center justify-center rounded-[var(--radius-card)] border border-[var(--color-accent)]/20 bg-[var(--color-bg-3)]"
|
||||||
|
style="animation: iconFloat 4s ease-in-out infinite"
|
||||||
|
>
|
||||||
|
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="var(--color-accent-mid)" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M12 22s8-4 8-10V5l-8-3-8 3v7c0 6 8 10 8 10z" />
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<p class="font-serif text-heading text-[var(--color-text-bright)]">
|
||||||
|
{activeFilterCount > 0 ? 'No matching events' : 'No activity yet'}
|
||||||
|
</p>
|
||||||
|
<p class="mt-1.5 text-ui text-[var(--color-text-tertiary)]">
|
||||||
|
{activeFilterCount > 0
|
||||||
|
? 'Try adjusting or clearing the filters.'
|
||||||
|
: 'Admin events will appear here as actions are taken.'}
|
||||||
|
</p>
|
||||||
|
{#if activeFilterCount > 0}
|
||||||
|
<button
|
||||||
|
onclick={clearAllFilters}
|
||||||
|
class="mt-4 text-ui text-[var(--color-accent)] underline-offset-2 hover:underline"
|
||||||
|
>
|
||||||
|
Clear filters
|
||||||
|
</button>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{:else}
|
||||||
|
<!-- Table -->
|
||||||
|
<div class="overflow-hidden rounded-[var(--radius-card)] border border-[var(--color-border)]">
|
||||||
|
|
||||||
|
<!-- Table header -->
|
||||||
|
<div class="grid grid-cols-[168px_1.4fr_3fr] border-b border-[var(--color-border)] bg-[var(--color-bg-3)]">
|
||||||
|
<div class="px-5 py-3 text-label font-semibold uppercase tracking-[0.05em] text-[var(--color-text-muted)]">Time</div>
|
||||||
|
<div class="px-4 py-3 text-label font-semibold uppercase tracking-[0.05em] text-[var(--color-text-muted)]">Actor</div>
|
||||||
|
<div class="px-4 py-3 text-label font-semibold uppercase tracking-[0.05em] text-[var(--color-text-muted)]">Event</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Rows -->
|
||||||
|
{#each logs as log, i (log.id)}
|
||||||
|
{@const ts = formatEventDate(log.created_at)}
|
||||||
|
<div
|
||||||
|
class="log-entry relative overflow-hidden border-b border-[var(--color-border)] last:border-b-0
|
||||||
|
{log.status === 'error' ? 'log-row-error' : ''}
|
||||||
|
{log.status === 'warning' ? 'log-row-warning' : ''}"
|
||||||
|
style="animation: fadeUp 0.35s ease both; animation-delay: {Math.min(i, 10) * 30}ms"
|
||||||
|
>
|
||||||
|
<!-- Status stripe -->
|
||||||
|
<div
|
||||||
|
class="status-stripe pointer-events-none absolute inset-y-0 left-0 w-[3px] {log.status === 'error' ? 'stripe-pulse' : ''}"
|
||||||
|
style="background: {statusColor(log.status)}"
|
||||||
|
></div>
|
||||||
|
|
||||||
|
<!-- Main row -->
|
||||||
|
<div class="grid grid-cols-[168px_1.4fr_3fr] items-start">
|
||||||
|
<!-- Time -->
|
||||||
|
<div class="flex flex-col gap-0.5 px-5 py-4">
|
||||||
|
<span class="text-ui text-[var(--color-text-secondary)]">{ts.date}</span>
|
||||||
|
<span class="font-mono text-meta text-[var(--color-text-muted)]">{ts.time}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Actor -->
|
||||||
|
<div class="min-w-0 px-4 py-4">
|
||||||
|
<div class="flex flex-col gap-1">
|
||||||
|
<span class="truncate text-ui font-medium text-[var(--color-text-bright)]">
|
||||||
|
{@html renderDeleted(actorLabel(log))}
|
||||||
|
</span>
|
||||||
|
{#if log.actor_type === 'api_key'}
|
||||||
|
<span class="inline-flex w-fit items-center rounded-sm border border-[var(--color-border-mid)] bg-[var(--color-bg-4)] px-1.5 py-0.5 font-mono text-badge text-[var(--color-text-muted)]">key</span>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Event description + resource ID -->
|
||||||
|
<div class="min-w-0 px-4 py-4">
|
||||||
|
<p class="text-ui font-medium text-[var(--color-text-primary)]">{@html renderDeleted(describeEvent(log))}</p>
|
||||||
|
{#if log.resource_id}
|
||||||
|
<span class="mt-1 inline-flex items-center rounded-sm border border-[var(--color-border-mid)] bg-[var(--color-bg-4)] px-1.5 py-0.5 font-mono text-badge text-[var(--color-text-muted)]">{log.resource_id}</span>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{/each}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Load more sentinel + status -->
|
||||||
|
<div bind:this={sentinel} class="mt-4">
|
||||||
|
{#if loadingMore}
|
||||||
|
<div class="flex items-center justify-center gap-2 py-6 text-meta text-[var(--color-text-muted)]">
|
||||||
|
<svg class="animate-spin" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
|
||||||
|
<path d="M21 12a9 9 0 1 1-6.219-8.56" />
|
||||||
|
</svg>
|
||||||
|
Loading more...
|
||||||
|
</div>
|
||||||
|
{:else if !hasMore}
|
||||||
|
<p class="py-4 text-center text-meta text-[var(--color-text-muted)]">
|
||||||
|
{logs.length} {logs.length === 1 ? 'event' : 'events'} total
|
||||||
|
</p>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
|
||||||
|
<style>
	/* Opacity pulse applied to the status stripe of error rows. */
	@keyframes stripePulse {
		0%, 100% { opacity: 1; }
		50% { opacity: 0.3; }
	}

	/* Faint red wash behind rows whose event status is "error". */
	.log-row-error {
		background: rgba(207, 129, 114, 0.04);
	}

	/* Faint amber wash behind rows whose event status is "warning". */
	.log-row-warning {
		background: rgba(212, 167, 60, 0.03);
	}

	.stripe-pulse {
		animation: stripePulse 2.5s ease-in-out infinite;
	}

	/* Badge injected at runtime via {@html} (see renderDeleted), so it must
	   be :global — Svelte's scoped-CSS pass can't see it in the template. */
	:global(.deleted-user-badge) {
		display: inline;
		padding: 1px 5px;
		border-radius: 3px;
		font-family: 'JetBrains Mono Variable', monospace;
		font-size: var(--text-badge);
		color: var(--color-red);
		background: rgba(207, 129, 114, 0.12);
		border: 1px solid rgba(207, 129, 114, 0.25);
	}
</style>
|
||||||
@ -454,12 +454,9 @@
|
|||||||
onkeydown={(e) => { if (e.key === 'Escape' && !creating) showCreate = false; }}
|
onkeydown={(e) => { if (e.key === 'Escape' && !creating) showCreate = false; }}
|
||||||
></div>
|
></div>
|
||||||
<div
|
<div
|
||||||
class="relative w-full max-w-[420px] rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)] shadow-dialog"
|
class="relative w-full max-w-[420px] rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)]"
|
||||||
style="animation: fadeUp 0.18s cubic-bezier(0.25,1,0.5,1) both"
|
style="animation: fadeUp 0.2s ease both; box-shadow: var(--shadow-dialog)"
|
||||||
>
|
>
|
||||||
<!-- Top accent edge -->
|
|
||||||
<div class="h-[2px] rounded-t-[var(--radius-card)] bg-gradient-to-r from-transparent via-[var(--color-accent)] to-transparent"></div>
|
|
||||||
|
|
||||||
<div class="p-6">
|
<div class="p-6">
|
||||||
<h2 class="font-serif text-heading leading-tight text-[var(--color-text-bright)]">
|
<h2 class="font-serif text-heading leading-tight text-[var(--color-text-bright)]">
|
||||||
Add Platform Host
|
Add Platform Host
|
||||||
@ -534,12 +531,9 @@
|
|||||||
<div class="fixed inset-0 z-50 flex items-center justify-center">
|
<div class="fixed inset-0 z-50 flex items-center justify-center">
|
||||||
<div class="absolute inset-0 bg-black/60"></div>
|
<div class="absolute inset-0 bg-black/60"></div>
|
||||||
<div
|
<div
|
||||||
class="relative w-full max-w-[500px] rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)] shadow-dialog"
|
class="relative w-full max-w-[500px] rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)]"
|
||||||
style="animation: fadeUp 0.18s cubic-bezier(0.25,1,0.5,1) both"
|
style="animation: fadeUp 0.2s ease both; box-shadow: var(--shadow-dialog)"
|
||||||
>
|
>
|
||||||
<!-- Success accent edge -->
|
|
||||||
<div class="h-[2px] rounded-t-[var(--radius-card)] bg-gradient-to-r from-transparent via-[var(--color-accent-bright)] to-transparent"></div>
|
|
||||||
|
|
||||||
<div class="p-6">
|
<div class="p-6">
|
||||||
<!-- Animated checkmark -->
|
<!-- Animated checkmark -->
|
||||||
<div class="mb-5 flex h-12 w-12 items-center justify-center rounded-full bg-[var(--color-accent-glow-mid)]">
|
<div class="mb-5 flex h-12 w-12 items-center justify-center rounded-full bg-[var(--color-accent-glow-mid)]">
|
||||||
@ -607,12 +601,9 @@
|
|||||||
onkeydown={(e) => { if (e.key === 'Escape' && !deleting) deleteTarget = null; }}
|
onkeydown={(e) => { if (e.key === 'Escape' && !deleting) deleteTarget = null; }}
|
||||||
></div>
|
></div>
|
||||||
<div
|
<div
|
||||||
class="relative w-full max-w-[420px] rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)] shadow-dialog"
|
class="relative w-full max-w-[420px] rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)]"
|
||||||
style="animation: fadeUp 0.18s cubic-bezier(0.25,1,0.5,1) both"
|
style="animation: fadeUp 0.2s ease both; box-shadow: var(--shadow-dialog)"
|
||||||
>
|
>
|
||||||
<!-- Danger accent edge -->
|
|
||||||
<div class="h-[2px] rounded-t-[var(--radius-card)] bg-gradient-to-r from-transparent via-[var(--color-red)] to-transparent"></div>
|
|
||||||
|
|
||||||
<div class="p-6">
|
<div class="p-6">
|
||||||
<h2 class="font-serif text-heading leading-tight text-[var(--color-text-bright)]">
|
<h2 class="font-serif text-heading leading-tight text-[var(--color-text-bright)]">
|
||||||
Delete Host
|
Delete Host
|
||||||
|
|||||||
@ -1,12 +1,18 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { page } from '$app/stores';
|
import { page } from '$app/stores';
|
||||||
|
import { onMount } from 'svelte';
|
||||||
import { auth } from '$lib/auth.svelte';
|
import { auth } from '$lib/auth.svelte';
|
||||||
import { teams } from '$lib/teams.svelte';
|
import { teams } from '$lib/teams.svelte';
|
||||||
|
import { updateName } from '$lib/api/me';
|
||||||
|
import { IconUser, IconMail } from '$lib/components/icons';
|
||||||
|
|
||||||
// Check for error in URL params (errors are still passed via query params).
|
let showConfirmDialog = $state(false);
|
||||||
const params = $page.url.searchParams;
|
let confirmName = $state('');
|
||||||
const error = params.get('error');
|
let confirmEmail = $state('');
|
||||||
|
let saving = $state(false);
|
||||||
|
let nameError = $state('');
|
||||||
|
let pendingAuth: { token: string; user_id: string; team_id: string; email: string; name: string } | null = null;
|
||||||
|
|
||||||
function getCookie(name: string): string | null {
|
function getCookie(name: string): string | null {
|
||||||
const match = document.cookie.match(new RegExp(`(?:^|; )${name}=([^;]*)`));
|
const match = document.cookie.match(new RegExp(`(?:^|; )${name}=([^;]*)`));
|
||||||
@ -19,33 +25,157 @@
|
|||||||
'wrenn_oauth_user_id',
|
'wrenn_oauth_user_id',
|
||||||
'wrenn_oauth_team_id',
|
'wrenn_oauth_team_id',
|
||||||
'wrenn_oauth_email',
|
'wrenn_oauth_email',
|
||||||
'wrenn_oauth_name'
|
'wrenn_oauth_name',
|
||||||
|
'wrenn_oauth_new_signup'
|
||||||
]) {
|
]) {
|
||||||
document.cookie = `${name}=; path=/auth/; max-age=0`;
|
document.cookie = `${name}=; path=/auth/; max-age=0`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function finishLogin() {
|
||||||
|
if (!pendingAuth) return;
|
||||||
|
teams.reset();
|
||||||
|
auth.login(pendingAuth);
|
||||||
|
goto('/dashboard');
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleConfirm() {
|
||||||
|
if (!pendingAuth) return;
|
||||||
|
saving = true;
|
||||||
|
nameError = '';
|
||||||
|
|
||||||
|
// Update name if user changed it.
|
||||||
|
if (confirmName.trim() && confirmName.trim() !== pendingAuth.name) {
|
||||||
|
// Log in first so the PATCH /v1/me request is authenticated.
|
||||||
|
teams.reset();
|
||||||
|
auth.login(pendingAuth);
|
||||||
|
|
||||||
|
const result = await updateName(confirmName.trim());
|
||||||
|
if (result.ok) {
|
||||||
|
// updateName returns refreshed auth data — re-login with updated info.
|
||||||
|
auth.login(result.data);
|
||||||
|
goto('/dashboard');
|
||||||
|
} else {
|
||||||
|
nameError = result.error;
|
||||||
|
saving = false;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
finishLogin();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onMount(() => {
|
||||||
|
const params = $page.url.searchParams;
|
||||||
|
const error = params.get('error');
|
||||||
|
|
||||||
if (error) {
|
if (error) {
|
||||||
goto(`/login?error=${encodeURIComponent(error)}`);
|
goto(`/login?error=${encodeURIComponent(error)}`);
|
||||||
} else {
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const token = getCookie('wrenn_oauth_token');
|
const token = getCookie('wrenn_oauth_token');
|
||||||
const userId = getCookie('wrenn_oauth_user_id');
|
const userId = getCookie('wrenn_oauth_user_id');
|
||||||
const teamId = getCookie('wrenn_oauth_team_id');
|
const teamId = getCookie('wrenn_oauth_team_id');
|
||||||
const email = getCookie('wrenn_oauth_email');
|
const email = getCookie('wrenn_oauth_email');
|
||||||
const name = getCookie('wrenn_oauth_name') ?? '';
|
const name = getCookie('wrenn_oauth_name') ?? '';
|
||||||
|
const isNewSignup = getCookie('wrenn_oauth_new_signup') === '1';
|
||||||
|
|
||||||
clearOAuthCookies();
|
clearOAuthCookies();
|
||||||
|
|
||||||
if (token && userId && teamId && email) {
|
if (token && userId && teamId && email) {
|
||||||
teams.reset();
|
pendingAuth = { token, user_id: userId, team_id: teamId, email, name };
|
||||||
auth.login({ token, user_id: userId, team_id: teamId, email, name });
|
|
||||||
goto('/dashboard');
|
if (isNewSignup) {
|
||||||
|
confirmName = name;
|
||||||
|
confirmEmail = email;
|
||||||
|
showConfirmDialog = true;
|
||||||
|
} else {
|
||||||
|
finishLogin();
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
goto('/login?error=missing_token');
|
goto('/login?error=missing_token');
|
||||||
}
|
}
|
||||||
}
|
});
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
|
{#if showConfirmDialog}
|
||||||
|
<div class="flex min-h-screen items-center justify-center bg-[var(--color-bg-0)]">
|
||||||
|
<div
|
||||||
|
class="w-full max-w-[420px] rounded-[var(--radius-card)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)]"
|
||||||
|
style="animation: fadeUp 0.2s ease both; box-shadow: var(--shadow-dialog)"
|
||||||
|
>
|
||||||
|
<div class="p-6">
|
||||||
|
<h2 class="font-serif text-heading text-[var(--color-text-bright)]">Almost there</h2>
|
||||||
|
<p class="mt-1.5 text-ui text-[var(--color-text-secondary)]">
|
||||||
|
We pulled your details from GitHub. Change your display name if you'd like.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div class="mt-5 space-y-3">
|
||||||
|
<!-- Name (editable) -->
|
||||||
|
<div>
|
||||||
|
<label for="confirm-name" class="mb-1.5 block text-label uppercase tracking-[0.05em] text-[var(--color-text-tertiary)]">
|
||||||
|
Display name
|
||||||
|
</label>
|
||||||
|
<div class="group relative">
|
||||||
|
<div class="pointer-events-none absolute left-3 top-1/2 -translate-y-1/2 text-[var(--color-text-muted)] transition-colors duration-150 group-focus-within:text-[var(--color-accent)]">
|
||||||
|
<IconUser size={14} />
|
||||||
|
</div>
|
||||||
|
<input
|
||||||
|
id="confirm-name"
|
||||||
|
type="text"
|
||||||
|
bind:value={confirmName}
|
||||||
|
class="w-full rounded-[var(--radius-input)] border border-[var(--color-border)] bg-[var(--color-bg-2)] py-3 pl-9 pr-3 text-body text-[var(--color-text-bright)] outline-none transition-all duration-150 placeholder:text-[var(--color-text-muted)] focus:border-[var(--color-accent)]"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Email (read-only) -->
|
||||||
|
<div>
|
||||||
|
<label for="confirm-email" class="mb-1.5 block text-label uppercase tracking-[0.05em] text-[var(--color-text-tertiary)]">
|
||||||
|
Email
|
||||||
|
</label>
|
||||||
|
<div class="group relative">
|
||||||
|
<div class="pointer-events-none absolute left-3 top-1/2 -translate-y-1/2 text-[var(--color-text-muted)]">
|
||||||
|
<IconMail size={14} />
|
||||||
|
</div>
|
||||||
|
<input
|
||||||
|
id="confirm-email"
|
||||||
|
type="email"
|
||||||
|
value={confirmEmail}
|
||||||
|
disabled
|
||||||
|
class="w-full rounded-[var(--radius-input)] border border-[var(--color-border)] bg-[var(--color-bg-3)] py-3 pl-9 pr-3 text-body text-[var(--color-text-secondary)] outline-none cursor-not-allowed pointer-events-none"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{#if nameError}
|
||||||
|
<p class="mt-3 text-ui text-[var(--color-red)]">{nameError}</p>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
<!-- Actions -->
|
||||||
|
<div class="mt-6 flex justify-end">
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onclick={handleConfirm}
|
||||||
|
disabled={saving || !confirmName.trim()}
|
||||||
|
class="rounded-[var(--radius-button)] bg-[var(--color-accent)] px-5 py-2.5 text-body font-semibold text-white transition-all duration-150 hover:brightness-115 hover:-translate-y-px active:translate-y-0 disabled:pointer-events-none disabled:opacity-50"
|
||||||
|
>
|
||||||
|
{#if saving}
|
||||||
|
<span class="inline-flex items-center gap-2">
|
||||||
|
<span class="inline-block h-3.5 w-3.5 animate-spin rounded-full border-2 border-white/30 border-t-white"></span>
|
||||||
|
Setting up…
|
||||||
|
</span>
|
||||||
|
{:else}
|
||||||
|
Get started
|
||||||
|
{/if}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{:else}
|
||||||
<div class="flex min-h-screen items-center justify-center">
|
<div class="flex min-h-screen items-center justify-center">
|
||||||
<p class="text-ui text-[var(--color-text-secondary)]">Signing you in...</p>
|
<p class="text-ui text-[var(--color-text-secondary)]">Signing you in...</p>
|
||||||
</div>
|
</div>
|
||||||
|
{/if}
|
||||||
|
|||||||
@ -192,8 +192,15 @@
|
|||||||
|
|
||||||
// ─── UI helpers ───────────────────────────────────────────────────────────
|
// ─── UI helpers ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
const DELETED_BADGE = '\x00DELETED\x00';
|
||||||
|
const deletedBadgeHtml = '<span class="deleted-user-badge">deleted-user</span>';
|
||||||
|
|
||||||
|
function renderDeleted(text: string): string {
|
||||||
|
return text.replaceAll(DELETED_BADGE, deletedBadgeHtml);
|
||||||
|
}
|
||||||
|
|
||||||
function describeEvent(log: AuditLog): string {
|
function describeEvent(log: AuditLog): string {
|
||||||
const actor = log.actor_name || (log.actor_type === 'system' ? 'System' : 'Unknown');
|
const actor = log.actor_name === 'deleted-user' ? DELETED_BADGE : (log.actor_name || (log.actor_type === 'system' ? 'System' : 'Unknown'));
|
||||||
const meta = (log.metadata ?? {}) as Record<string, string>;
|
const meta = (log.metadata ?? {}) as Record<string, string>;
|
||||||
switch (`${log.resource_type}:${log.action}`) {
|
switch (`${log.resource_type}:${log.action}`) {
|
||||||
case 'sandbox:create': return `${actor} created a capsule`;
|
case 'sandbox:create': return `${actor} created a capsule`;
|
||||||
@ -205,8 +212,8 @@
|
|||||||
case 'team:rename': return `${actor} renamed the team from "${meta.old_name}" to "${meta.new_name}"`;
|
case 'team:rename': return `${actor} renamed the team from "${meta.old_name}" to "${meta.new_name}"`;
|
||||||
case 'api_key:create': return `${actor} created API key "${meta.name}"`;
|
case 'api_key:create': return `${actor} created API key "${meta.name}"`;
|
||||||
case 'api_key:revoke': return `${actor} revoked an API key`;
|
case 'api_key:revoke': return `${actor} revoked an API key`;
|
||||||
case 'member:add': return `${actor} added ${meta.email} as ${meta.role}`;
|
case 'member:add': return `${actor} added ${meta.email ?? DELETED_BADGE} as ${meta.role}`;
|
||||||
case 'member:remove': return `${actor} removed ${meta.email ?? 'a member'}`;
|
case 'member:remove': return `${actor} removed ${meta.email ?? DELETED_BADGE}`;
|
||||||
case 'member:leave': return `${actor} left the team`;
|
case 'member:leave': return `${actor} left the team`;
|
||||||
case 'member:role_update': return `${actor} changed a member's role to ${meta.new_role}`;
|
case 'member:role_update': return `${actor} changed a member's role to ${meta.new_role}`;
|
||||||
case 'host:create': return `${actor} registered a host`;
|
case 'host:create': return `${actor} registered a host`;
|
||||||
@ -219,6 +226,7 @@
|
|||||||
|
|
||||||
function actorLabel(log: AuditLog): string {
|
function actorLabel(log: AuditLog): string {
|
||||||
if (log.actor_type === 'system') return 'System';
|
if (log.actor_type === 'system') return 'System';
|
||||||
|
if (log.actor_name === 'deleted-user') return DELETED_BADGE;
|
||||||
return log.actor_name ?? '—';
|
return log.actor_name ?? '—';
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -498,7 +506,7 @@
|
|||||||
<div class="min-w-0 px-4 py-4">
|
<div class="min-w-0 px-4 py-4">
|
||||||
<div class="flex flex-col gap-1">
|
<div class="flex flex-col gap-1">
|
||||||
<span class="truncate text-ui font-medium text-[var(--color-text-bright)]">
|
<span class="truncate text-ui font-medium text-[var(--color-text-bright)]">
|
||||||
{actorLabel(log)}
|
{@html renderDeleted(actorLabel(log))}
|
||||||
</span>
|
</span>
|
||||||
{#if log.actor_type === 'api_key'}
|
{#if log.actor_type === 'api_key'}
|
||||||
<span class="inline-flex w-fit items-center rounded-sm border border-[var(--color-border-mid)] bg-[var(--color-bg-4)] px-1.5 py-0.5 font-mono text-badge text-[var(--color-text-muted)]">key</span>
|
<span class="inline-flex w-fit items-center rounded-sm border border-[var(--color-border-mid)] bg-[var(--color-bg-4)] px-1.5 py-0.5 font-mono text-badge text-[var(--color-text-muted)]">key</span>
|
||||||
@ -508,7 +516,7 @@
|
|||||||
|
|
||||||
<!-- Event description + resource ID -->
|
<!-- Event description + resource ID -->
|
||||||
<div class="min-w-0 px-4 py-4">
|
<div class="min-w-0 px-4 py-4">
|
||||||
<p class="text-ui font-medium text-[var(--color-text-primary)]">{describeEvent(log)}</p>
|
<p class="text-ui font-medium text-[var(--color-text-primary)]">{@html renderDeleted(describeEvent(log))}</p>
|
||||||
{#if log.resource_id}
|
{#if log.resource_id}
|
||||||
<span class="mt-1 inline-flex items-center rounded-sm border border-[var(--color-border-mid)] bg-[var(--color-bg-4)] px-1.5 py-0.5 font-mono text-badge text-[var(--color-text-muted)]">{log.resource_id}</span>
|
<span class="mt-1 inline-flex items-center rounded-sm border border-[var(--color-border-mid)] bg-[var(--color-bg-4)] px-1.5 py-0.5 font-mono text-badge text-[var(--color-text-muted)]">{log.resource_id}</span>
|
||||||
{/if}
|
{/if}
|
||||||
@ -567,4 +575,15 @@
|
|||||||
.stripe-pulse {
|
.stripe-pulse {
|
||||||
animation: stripePulse 2.5s ease-in-out infinite;
|
animation: stripePulse 2.5s ease-in-out infinite;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
:global(.deleted-user-badge) {
|
||||||
|
display: inline;
|
||||||
|
padding: 1px 5px;
|
||||||
|
border-radius: 3px;
|
||||||
|
font-family: 'JetBrains Mono Variable', monospace;
|
||||||
|
font-size: var(--text-badge);
|
||||||
|
color: var(--color-red);
|
||||||
|
background: rgba(207, 129, 114, 0.12);
|
||||||
|
border: 1px solid rgba(207, 129, 114, 0.25);
|
||||||
|
}
|
||||||
</style>
|
</style>
|
||||||
|
|||||||
@ -29,6 +29,7 @@
|
|||||||
access_denied: 'Access was denied by the provider',
|
access_denied: 'Access was denied by the provider',
|
||||||
email_taken: 'An account with this email already exists',
|
email_taken: 'An account with this email already exists',
|
||||||
exchange_failed: 'Authentication failed — please try again',
|
exchange_failed: 'Authentication failed — please try again',
|
||||||
|
no_account: 'No GitHub account connected — sign up instead',
|
||||||
};
|
};
|
||||||
|
|
||||||
// Read OAuth error forwarded from /auth/github/callback
|
// Read OAuth error forwarded from /auth/github/callback
|
||||||
@ -259,7 +260,7 @@
|
|||||||
|
|
||||||
<!-- GitHub OAuth -->
|
<!-- GitHub OAuth -->
|
||||||
<a
|
<a
|
||||||
href="/api/auth/oauth/github"
|
href="/api/auth/oauth/github?intent={mode === 'signin' ? 'login' : 'signup'}"
|
||||||
class="flex w-full items-center justify-center gap-2.5 rounded-[var(--radius-button)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)] px-4 py-3 text-body font-medium text-[var(--color-text-bright)] no-underline transition-all duration-150 hover:border-[var(--color-accent)] hover:text-[var(--color-text-bright)]"
|
class="flex w-full items-center justify-center gap-2.5 rounded-[var(--radius-button)] border border-[var(--color-border-mid)] bg-[var(--color-bg-2)] px-4 py-3 text-body font-medium text-[var(--color-text-bright)] no-underline transition-all duration-150 hover:border-[var(--color-accent)] hover:text-[var(--color-text-bright)]"
|
||||||
>
|
>
|
||||||
<IconGithub size={16} />
|
<IconGithub size={16} />
|
||||||
|
|||||||
@ -8,7 +8,6 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httputil"
|
"net/http/httputil"
|
||||||
"net/url"
|
"net/url"
|
||||||
"path"
|
|
||||||
"regexp"
|
"regexp"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
@ -74,7 +73,7 @@ func NewSandboxProxyWrapper(inner http.Handler, queries *db.Queries, pool *lifec
|
|||||||
inner: inner,
|
inner: inner,
|
||||||
db: queries,
|
db: queries,
|
||||||
pool: pool,
|
pool: pool,
|
||||||
transport: pool.Transport(),
|
transport: pool.NewProxyTransport(),
|
||||||
cache: make(map[pgtype.UUID]proxyCacheEntry),
|
cache: make(map[pgtype.UUID]proxyCacheEntry),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -167,14 +166,29 @@ func (h *SandboxProxyWrapper) ServeHTTP(w http.ResponseWriter, r *http.Request)
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The host agent's proxy adds a /proxy/{id}/{port} prefix to Location
|
||||||
|
// headers for path-based routing. For subdomain routing the browser is at
|
||||||
|
// {port}-{id}.domain, so we strip the prefix back out.
|
||||||
|
agentProxyPrefix := "/proxy/" + sandboxIDStr + "/" + port
|
||||||
|
|
||||||
proxy := &httputil.ReverseProxy{
|
proxy := &httputil.ReverseProxy{
|
||||||
Transport: h.transport,
|
Transport: h.transport,
|
||||||
Director: func(req *http.Request) {
|
Director: func(req *http.Request) {
|
||||||
req.URL.Scheme = agentURL.Scheme
|
req.URL.Scheme = agentURL.Scheme
|
||||||
req.URL.Host = agentURL.Host
|
req.URL.Host = agentURL.Host
|
||||||
req.URL.Path = path.Join("/proxy", sandboxIDStr, port, path.Clean("/"+req.URL.Path))
|
// Use string concatenation instead of path.Join to preserve trailing
|
||||||
|
// slashes. path.Join strips them, causing redirect loops for directory
|
||||||
|
// listings in apps like python http.server and Jupyter.
|
||||||
|
req.URL.Path = "/proxy/" + sandboxIDStr + "/" + port + req.URL.Path
|
||||||
req.Host = agentURL.Host
|
req.Host = agentURL.Host
|
||||||
},
|
},
|
||||||
|
ModifyResponse: func(resp *http.Response) error {
|
||||||
|
if loc := resp.Header.Get("Location"); loc != "" {
|
||||||
|
loc = strings.TrimPrefix(loc, agentProxyPrefix)
|
||||||
|
resp.Header.Set("Location", loc)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
},
|
||||||
ErrorHandler: func(w http.ResponseWriter, r *http.Request, err error) {
|
ErrorHandler: func(w http.ResponseWriter, r *http.Request, err error) {
|
||||||
slog.Debug("sandbox proxy error",
|
slog.Debug("sandbox proxy error",
|
||||||
"sandbox_id", sandboxIDStr,
|
"sandbox_id", sandboxIDStr,
|
||||||
|
|||||||
@ -55,6 +55,7 @@ func (h *adminCapsuleHandler) Create(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ac.TeamID = id.PlatformTeamID
|
||||||
h.audit.LogSandboxCreate(r.Context(), ac, sb.ID, sb.Template)
|
h.audit.LogSandboxCreate(r.Context(), ac, sb.ID, sb.Template)
|
||||||
writeJSON(w, http.StatusCreated, sandboxToResponse(sb))
|
writeJSON(w, http.StatusCreated, sandboxToResponse(sb))
|
||||||
}
|
}
|
||||||
|
|||||||
@ -35,64 +35,38 @@ type auditLogResponse struct {
|
|||||||
CreatedAt string `json:"created_at"`
|
CreatedAt string `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// List handles GET /v1/audit-logs.
|
// parseAuditParams extracts common query parameters for audit log listing.
|
||||||
// Query params:
|
func parseAuditParams(r *http.Request) (before time.Time, beforeID pgtype.UUID, limit int, err error) {
|
||||||
// - before: RFC3339 timestamp cursor (exclusive); omit to start from latest
|
limit = 50
|
||||||
// - limit: page size, default 50, max 200
|
|
||||||
// - resource_type: filter by resource type (sandbox, snapshot, team, api_key, member, host)
|
|
||||||
// - action: filter by action verb
|
|
||||||
//
|
|
||||||
// Members see only team-scoped events; admins/owners see all.
|
|
||||||
func (h *auditHandler) List(w http.ResponseWriter, r *http.Request) {
|
|
||||||
ac := auth.MustFromContext(r.Context())
|
|
||||||
|
|
||||||
// Parse ?before cursor.
|
|
||||||
var before time.Time
|
|
||||||
if s := r.URL.Query().Get("before"); s != "" {
|
if s := r.URL.Query().Get("before"); s != "" {
|
||||||
var err error
|
|
||||||
before, err = time.Parse(time.RFC3339, s)
|
before, err = time.Parse(time.RFC3339, s)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
writeError(w, http.StatusBadRequest, "invalid_request", "before must be an RFC3339 timestamp")
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse ?limit.
|
|
||||||
limit := 50
|
|
||||||
if s := r.URL.Query().Get("limit"); s != "" {
|
if s := r.URL.Query().Get("limit"); s != "" {
|
||||||
n, err := strconv.Atoi(s)
|
n, parseErr := strconv.Atoi(s)
|
||||||
if err != nil || n < 1 {
|
if parseErr != nil || n < 1 {
|
||||||
writeError(w, http.StatusBadRequest, "invalid_request", "limit must be a positive integer")
|
err = parseErr
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
limit = n
|
limit = n
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse ?before_id cursor (UUID).
|
|
||||||
var beforeID pgtype.UUID
|
|
||||||
if s := r.URL.Query().Get("before_id"); s != "" {
|
if s := r.URL.Query().Get("before_id"); s != "" {
|
||||||
parsed, err := id.ParseAuditLogID(s)
|
beforeID, err = id.ParseAuditLogID(s)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
writeError(w, http.StatusBadRequest, "invalid_request", "before_id must be a valid audit log ID")
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
beforeID = parsed
|
|
||||||
}
|
}
|
||||||
|
|
||||||
entries, err := h.svc.List(r.Context(), service.AuditListParams{
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
AdminScoped: ac.Role == "owner" || ac.Role == "admin",
|
|
||||||
ResourceTypes: parseMultiParam(r.URL.Query()["resource_type"]),
|
|
||||||
Actions: parseMultiParam(r.URL.Query()["action"]),
|
|
||||||
Before: before,
|
|
||||||
BeforeID: beforeID,
|
|
||||||
Limit: limit,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
writeError(w, http.StatusInternalServerError, "db_error", "failed to list audit logs")
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// writeAuditResponse serializes audit entries into a paginated JSON response.
|
||||||
|
func writeAuditResponse(w http.ResponseWriter, entries []service.AuditEntry) {
|
||||||
items := make([]auditLogResponse, len(entries))
|
items := make([]auditLogResponse, len(entries))
|
||||||
for i, e := range entries {
|
for i, e := range entries {
|
||||||
items[i] = auditLogResponse{
|
items[i] = auditLogResponse{
|
||||||
@ -120,6 +94,67 @@ func (h *auditHandler) List(w http.ResponseWriter, r *http.Request) {
|
|||||||
writeJSON(w, http.StatusOK, resp)
|
writeJSON(w, http.StatusOK, resp)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// List handles GET /v1/audit-logs.
|
||||||
|
// Query params:
|
||||||
|
// - before: RFC3339 timestamp cursor (exclusive); omit to start from latest
|
||||||
|
// - limit: page size, default 50, max 200
|
||||||
|
// - resource_type: filter by resource type (sandbox, snapshot, team, api_key, member, host)
|
||||||
|
// - action: filter by action verb
|
||||||
|
//
|
||||||
|
// Members see only team-scoped events; admins/owners see all.
|
||||||
|
func (h *auditHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ac := auth.MustFromContext(r.Context())
|
||||||
|
|
||||||
|
before, beforeID, limit, err := parseAuditParams(r)
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "invalid query parameters")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := h.svc.List(r.Context(), service.AuditListParams{
|
||||||
|
TeamID: ac.TeamID,
|
||||||
|
AdminScoped: ac.Role == "owner" || ac.Role == "admin",
|
||||||
|
ResourceTypes: parseMultiParam(r.URL.Query()["resource_type"]),
|
||||||
|
Actions: parseMultiParam(r.URL.Query()["action"]),
|
||||||
|
Before: before,
|
||||||
|
BeforeID: beforeID,
|
||||||
|
Limit: limit,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusInternalServerError, "db_error", "failed to list audit logs")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeAuditResponse(w, entries)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AdminList handles GET /v1/admin/audit-logs.
|
||||||
|
// Returns audit logs for the platform team (team 0) with both team and admin scopes.
|
||||||
|
// Uses the same query params as List.
|
||||||
|
func (h *auditHandler) AdminList(w http.ResponseWriter, r *http.Request) {
|
||||||
|
before, beforeID, limit, err := parseAuditParams(r)
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid_request", "invalid query parameters")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := h.svc.List(r.Context(), service.AuditListParams{
|
||||||
|
TeamID: id.PlatformTeamID,
|
||||||
|
AdminScoped: true,
|
||||||
|
ResourceTypes: parseMultiParam(r.URL.Query()["resource_type"]),
|
||||||
|
Actions: parseMultiParam(r.URL.Query()["action"]),
|
||||||
|
Before: before,
|
||||||
|
BeforeID: beforeID,
|
||||||
|
Limit: limit,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusInternalServerError, "db_error", "failed to list audit logs")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeAuditResponse(w, entries)
|
||||||
|
}
|
||||||
|
|
||||||
// parseMultiParam flattens repeated params and comma-separated values into a
|
// parseMultiParam flattens repeated params and comma-separated values into a
|
||||||
// single deduplicated slice. Empty strings are dropped. Returns nil (no filter)
|
// single deduplicated slice. Empty strings are dropped. Returns nil (no filter)
|
||||||
// when no values are present.
|
// when no values are present.
|
||||||
|
|||||||
@ -13,6 +13,8 @@ import (
|
|||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
|
|
||||||
"git.omukk.dev/wrenn/wrenn/internal/layout"
|
"git.omukk.dev/wrenn/wrenn/internal/layout"
|
||||||
|
"git.omukk.dev/wrenn/wrenn/pkg/audit"
|
||||||
|
"git.omukk.dev/wrenn/wrenn/pkg/auth"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/db"
|
"git.omukk.dev/wrenn/wrenn/pkg/db"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/id"
|
"git.omukk.dev/wrenn/wrenn/pkg/id"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/lifecycle"
|
"git.omukk.dev/wrenn/wrenn/pkg/lifecycle"
|
||||||
@ -25,10 +27,11 @@ type buildHandler struct {
|
|||||||
svc *service.BuildService
|
svc *service.BuildService
|
||||||
db *db.Queries
|
db *db.Queries
|
||||||
pool *lifecycle.HostClientPool
|
pool *lifecycle.HostClientPool
|
||||||
|
audit *audit.AuditLogger
|
||||||
}
|
}
|
||||||
|
|
||||||
func newBuildHandler(svc *service.BuildService, db *db.Queries, pool *lifecycle.HostClientPool) *buildHandler {
|
func newBuildHandler(svc *service.BuildService, db *db.Queries, pool *lifecycle.HostClientPool, al *audit.AuditLogger) *buildHandler {
|
||||||
return &buildHandler{svc: svc, db: db, pool: pool}
|
return &buildHandler{svc: svc, db: db, pool: pool, audit: al}
|
||||||
}
|
}
|
||||||
|
|
||||||
type createBuildRequest struct {
|
type createBuildRequest struct {
|
||||||
@ -187,6 +190,8 @@ func (h *buildHandler) Create(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ac := auth.MustFromContext(r.Context())
|
||||||
|
h.audit.LogBuildCreate(r.Context(), ac, build.ID, req.Name)
|
||||||
writeJSON(w, http.StatusCreated, buildToResponse(build))
|
writeJSON(w, http.StatusCreated, buildToResponse(build))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -305,6 +310,8 @@ func (h *buildHandler) DeleteTemplate(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ac := auth.MustFromContext(r.Context())
|
||||||
|
h.audit.LogTemplateDelete(r.Context(), ac, name)
|
||||||
w.WriteHeader(http.StatusNoContent)
|
w.WriteHeader(http.StatusNoContent)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -323,5 +330,7 @@ func (h *buildHandler) Cancel(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
ac := auth.MustFromContext(r.Context())
|
||||||
|
h.audit.LogBuildCancel(r.Context(), ac, buildID)
|
||||||
w.WriteHeader(http.StatusNoContent)
|
w.WriteHeader(http.StatusNoContent)
|
||||||
}
|
}
|
||||||
|
|||||||
@ -404,10 +404,10 @@ func (h *meHandler) ConnectProvider(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
mac := computeHMAC(h.jwtSecret, state)
|
mac := computeHMAC(h.jwtSecret, state+":"+"login")
|
||||||
http.SetCookie(w, &http.Cookie{
|
http.SetCookie(w, &http.Cookie{
|
||||||
Name: "oauth_state",
|
Name: "oauth_state",
|
||||||
Value: state + ":" + mac,
|
Value: state + ":" + mac + ":" + "login",
|
||||||
Path: "/",
|
Path: "/",
|
||||||
MaxAge: 600,
|
MaxAge: 600,
|
||||||
HttpOnly: true,
|
HttpOnly: true,
|
||||||
|
|||||||
@ -55,8 +55,14 @@ func (h *oauthHandler) Redirect(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
mac := computeHMAC(h.jwtSecret, state)
|
// Persist intent (login|signup) in the state cookie so the callback can enforce it.
|
||||||
cookieVal := state + ":" + mac
|
intent := r.URL.Query().Get("intent")
|
||||||
|
if intent != "signup" {
|
||||||
|
intent = "login"
|
||||||
|
}
|
||||||
|
|
||||||
|
mac := computeHMAC(h.jwtSecret, state+":"+intent)
|
||||||
|
cookieVal := state + ":" + mac + ":" + intent
|
||||||
|
|
||||||
http.SetCookie(w, &http.Cookie{
|
http.SetCookie(w, &http.Cookie{
|
||||||
Name: "oauth_state",
|
Name: "oauth_state",
|
||||||
@ -105,13 +111,17 @@ func (h *oauthHandler) Callback(w http.ResponseWriter, r *http.Request) {
|
|||||||
Secure: isSecure(r),
|
Secure: isSecure(r),
|
||||||
})
|
})
|
||||||
|
|
||||||
parts := strings.SplitN(stateCookie.Value, ":", 2)
|
parts := strings.SplitN(stateCookie.Value, ":", 3)
|
||||||
if len(parts) != 2 {
|
if len(parts) < 2 {
|
||||||
redirectWithError(w, r, redirectBase, "invalid_state")
|
redirectWithError(w, r, redirectBase, "invalid_state")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
nonce, expectedMAC := parts[0], parts[1]
|
nonce, expectedMAC := parts[0], parts[1]
|
||||||
if !hmac.Equal([]byte(computeHMAC(h.jwtSecret, nonce)), []byte(expectedMAC)) {
|
intent := "login"
|
||||||
|
if len(parts) == 3 && parts[2] == "signup" {
|
||||||
|
intent = "signup"
|
||||||
|
}
|
||||||
|
if !hmac.Equal([]byte(computeHMAC(h.jwtSecret, nonce+":"+intent)), []byte(expectedMAC)) {
|
||||||
redirectWithError(w, r, redirectBase, "invalid_state")
|
redirectWithError(w, r, redirectBase, "invalid_state")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@ -249,6 +259,12 @@ func (h *oauthHandler) Callback(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Block auto-registration when intent is login-only.
|
||||||
|
if intent == "login" {
|
||||||
|
redirectWithError(w, r, redirectBase, "no_account")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// New OAuth identity — check for email collision.
|
// New OAuth identity — check for email collision.
|
||||||
existingUser, err := h.db.GetUserByEmail(ctx, email)
|
existingUser, err := h.db.GetUserByEmail(ctx, email)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
@ -365,6 +381,17 @@ func (h *oauthHandler) Callback(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Signal frontend that this is a new signup so it can show the name confirmation dialog.
|
||||||
|
http.SetCookie(w, &http.Cookie{
|
||||||
|
Name: "wrenn_oauth_new_signup",
|
||||||
|
Value: "1",
|
||||||
|
Path: "/auth/",
|
||||||
|
MaxAge: 60,
|
||||||
|
HttpOnly: false,
|
||||||
|
SameSite: http.SameSiteLaxMode,
|
||||||
|
Secure: isSecure(r),
|
||||||
|
})
|
||||||
|
|
||||||
redirectWithToken(w, r, redirectBase, token, id.FormatUserID(userID), id.FormatTeamID(teamID), email, profile.Name)
|
redirectWithToken(w, r, redirectBase, token, id.FormatUserID(userID), id.FormatTeamID(teamID), email, profile.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -311,10 +311,17 @@ func runPtyLoop(
|
|||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
// Input pump: read from WebSocket, dispatch to host agent.
|
// Input pump: decouple WebSocket reads from RPC dispatch.
|
||||||
|
// Reader goroutine drains the WebSocket into a buffered channel;
|
||||||
|
// sender goroutine dispatches RPCs at its own pace. This prevents
|
||||||
|
// slow RPCs from stalling WebSocket reads and causing proxy timeouts.
|
||||||
|
inputCh := make(chan wsPtyIn, 64)
|
||||||
|
|
||||||
|
// Reader: drain WebSocket as fast as possible.
|
||||||
wg.Add(1)
|
wg.Add(1)
|
||||||
go func() {
|
go func() {
|
||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
|
defer close(inputCh)
|
||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
for {
|
for {
|
||||||
@ -328,6 +335,22 @@ func runPtyLoop(
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
select {
|
||||||
|
case inputCh <- msg:
|
||||||
|
default:
|
||||||
|
// Buffer full — drop frame to keep reader unblocked.
|
||||||
|
slog.Debug("pty input buffer full, dropping frame", "type", msg.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Sender: dispatch RPCs from channel, coalescing consecutive input messages.
|
||||||
|
wg.Add(1)
|
||||||
|
go func() {
|
||||||
|
defer wg.Done()
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
for msg := range inputCh {
|
||||||
// Use a background context for unary RPCs so they complete
|
// Use a background context for unary RPCs so they complete
|
||||||
// even if the stream context is being cancelled.
|
// even if the stream context is being cancelled.
|
||||||
rpcCtx, rpcCancel := context.WithTimeout(context.Background(), 5*time.Second)
|
rpcCtx, rpcCancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||||
@ -339,6 +362,10 @@ func runPtyLoop(
|
|||||||
rpcCancel()
|
rpcCancel()
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Coalesce: drain any queued input messages into a single RPC.
|
||||||
|
data = coalescePtyInput(inputCh, data)
|
||||||
|
|
||||||
if _, err := agent.PtySendInput(rpcCtx, connect.NewRequest(&pb.PtySendInputRequest{
|
if _, err := agent.PtySendInput(rpcCtx, connect.NewRequest(&pb.PtySendInputRequest{
|
||||||
SandboxId: sandboxID,
|
SandboxId: sandboxID,
|
||||||
Tag: tag,
|
Tag: tag,
|
||||||
@ -394,6 +421,33 @@ func runPtyLoop(
|
|||||||
wg.Wait()
|
wg.Wait()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// coalescePtyInput drains any immediately-available "input" messages from the
|
||||||
|
// channel and appends their decoded data to buf, reducing RPC call volume
|
||||||
|
// during bursts of fast typing.
|
||||||
|
func coalescePtyInput(ch <-chan wsPtyIn, buf []byte) []byte {
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case msg, ok := <-ch:
|
||||||
|
if !ok {
|
||||||
|
return buf
|
||||||
|
}
|
||||||
|
if msg.Type != "input" {
|
||||||
|
// Non-input message — can't coalesce. Put-back isn't possible
|
||||||
|
// with channels, but resize/kill during a typing burst is rare
|
||||||
|
// enough that dropping one is acceptable.
|
||||||
|
return buf
|
||||||
|
}
|
||||||
|
data, err := base64.StdEncoding.DecodeString(msg.Data)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
buf = append(buf, data...)
|
||||||
|
default:
|
||||||
|
return buf
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// newPtyTag returns a PTY session tag: "pty-" + 8 random hex chars.
|
// newPtyTag returns a PTY session tag: "pty-" + 8 random hex chars.
|
||||||
func newPtyTag() string {
|
func newPtyTag() string {
|
||||||
return "pty-" + id.NewPtyTag()
|
return "pty-" + id.NewPtyTag()
|
||||||
|
|||||||
@ -392,6 +392,7 @@ func (h *teamHandler) Leave(w http.ResponseWriter, r *http.Request) {
|
|||||||
// SetBYOC handles PUT /v1/admin/teams/{id}/byoc (admin only).
|
// SetBYOC handles PUT /v1/admin/teams/{id}/byoc (admin only).
|
||||||
// Enables or disables the BYOC feature flag for a team.
|
// Enables or disables the BYOC feature flag for a team.
|
||||||
func (h *teamHandler) SetBYOC(w http.ResponseWriter, r *http.Request) {
|
func (h *teamHandler) SetBYOC(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ac := auth.MustFromContext(r.Context())
|
||||||
teamIDStr := chi.URLParam(r, "id")
|
teamIDStr := chi.URLParam(r, "id")
|
||||||
|
|
||||||
teamID, err := id.ParseTeamID(teamIDStr)
|
teamID, err := id.ParseTeamID(teamIDStr)
|
||||||
@ -414,6 +415,7 @@ func (h *teamHandler) SetBYOC(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
h.audit.LogTeamSetBYOC(r.Context(), ac, teamID, req.Enabled)
|
||||||
w.WriteHeader(http.StatusNoContent)
|
w.WriteHeader(http.StatusNoContent)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -484,6 +486,7 @@ func (h *teamHandler) AdminListTeams(w http.ResponseWriter, r *http.Request) {
|
|||||||
// AdminDeleteTeam handles DELETE /v1/admin/teams/{id}
|
// AdminDeleteTeam handles DELETE /v1/admin/teams/{id}
|
||||||
// Soft-deletes a team and destroys all its active sandboxes.
|
// Soft-deletes a team and destroys all its active sandboxes.
|
||||||
func (h *teamHandler) AdminDeleteTeam(w http.ResponseWriter, r *http.Request) {
|
func (h *teamHandler) AdminDeleteTeam(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ac := auth.MustFromContext(r.Context())
|
||||||
teamIDStr := chi.URLParam(r, "id")
|
teamIDStr := chi.URLParam(r, "id")
|
||||||
|
|
||||||
teamID, err := id.ParseTeamID(teamIDStr)
|
teamID, err := id.ParseTeamID(teamIDStr)
|
||||||
@ -498,5 +501,6 @@ func (h *teamHandler) AdminDeleteTeam(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
h.audit.LogTeamDelete(r.Context(), ac, teamID)
|
||||||
w.WriteHeader(http.StatusNoContent)
|
w.WriteHeader(http.StatusNoContent)
|
||||||
}
|
}
|
||||||
|
|||||||
@ -9,6 +9,7 @@ import (
|
|||||||
"github.com/go-chi/chi/v5"
|
"github.com/go-chi/chi/v5"
|
||||||
"github.com/jackc/pgx/v5/pgtype"
|
"github.com/jackc/pgx/v5/pgtype"
|
||||||
|
|
||||||
|
"git.omukk.dev/wrenn/wrenn/pkg/audit"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/auth"
|
"git.omukk.dev/wrenn/wrenn/pkg/auth"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/db"
|
"git.omukk.dev/wrenn/wrenn/pkg/db"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/id"
|
"git.omukk.dev/wrenn/wrenn/pkg/id"
|
||||||
@ -18,10 +19,11 @@ import (
|
|||||||
type usersHandler struct {
|
type usersHandler struct {
|
||||||
db *db.Queries
|
db *db.Queries
|
||||||
svc *service.UserService
|
svc *service.UserService
|
||||||
|
audit *audit.AuditLogger
|
||||||
}
|
}
|
||||||
|
|
||||||
func newUsersHandler(db *db.Queries, svc *service.UserService) *usersHandler {
|
func newUsersHandler(db *db.Queries, svc *service.UserService, al *audit.AuditLogger) *usersHandler {
|
||||||
return &usersHandler{db: db, svc: svc}
|
return &usersHandler{db: db, svc: svc, audit: al}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Search handles GET /v1/users/search?email=<prefix>
|
// Search handles GET /v1/users/search?email=<prefix>
|
||||||
@ -140,11 +142,23 @@ func (h *usersHandler) SetUserActive(w http.ResponseWriter, r *http.Request) {
|
|||||||
newStatus = "disabled"
|
newStatus = "disabled"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Look up user email for audit log before changing status.
|
||||||
|
user, err := h.db.GetUserByID(r.Context(), userID)
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusNotFound, "not_found", "user not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if err := h.svc.SetUserStatus(r.Context(), userID, newStatus); err != nil {
|
if err := h.svc.SetUserStatus(r.Context(), userID, newStatus); err != nil {
|
||||||
httpStatus, code, msg := serviceErrToHTTP(err)
|
httpStatus, code, msg := serviceErrToHTTP(err)
|
||||||
writeError(w, httpStatus, code, msg)
|
writeError(w, httpStatus, code, msg)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if req.Active {
|
||||||
|
h.audit.LogUserActivate(r.Context(), ac, userID, user.Email)
|
||||||
|
} else {
|
||||||
|
h.audit.LogUserDeactivate(r.Context(), ac, userID, user.Email)
|
||||||
|
}
|
||||||
w.WriteHeader(http.StatusNoContent)
|
w.WriteHeader(http.StatusNoContent)
|
||||||
}
|
}
|
||||||
|
|||||||
@ -3,8 +3,6 @@ package api
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
|
||||||
"strings"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/gorilla/websocket"
|
"github.com/gorilla/websocket"
|
||||||
@ -14,11 +12,6 @@ import (
|
|||||||
"git.omukk.dev/wrenn/wrenn/pkg/id"
|
"git.omukk.dev/wrenn/wrenn/pkg/id"
|
||||||
)
|
)
|
||||||
|
|
||||||
// isWebSocketUpgrade returns true if the request is a WebSocket upgrade.
|
|
||||||
func isWebSocketUpgrade(r *http.Request) bool {
|
|
||||||
return strings.EqualFold(r.Header.Get("Upgrade"), "websocket")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ctxKeyAdminWS is a context key for flagging admin WS routes.
|
// ctxKeyAdminWS is a context key for flagging admin WS routes.
|
||||||
type ctxKeyAdminWS struct{}
|
type ctxKeyAdminWS struct{}
|
||||||
|
|
||||||
|
|||||||
@ -15,7 +15,6 @@ func injectPlatformTeam() func(http.Handler) http.Handler {
|
|||||||
return func(next http.Handler) http.Handler {
|
return func(next http.Handler) http.Handler {
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
if _, ok := auth.FromContext(r.Context()); !ok {
|
if _, ok := auth.FromContext(r.Context()); !ok {
|
||||||
// No auth context yet (WS upgrade); handler will inject platform team after WS auth.
|
|
||||||
next.ServeHTTP(w, r)
|
next.ServeHTTP(w, r)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@ -27,23 +26,24 @@ func injectPlatformTeam() func(http.Handler) http.Handler {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// markAdminWS flags the request context as an admin WebSocket route.
|
||||||
|
// Applied to admin WS endpoints that sit outside the requireJWT/requireAdmin
|
||||||
|
// middleware group. Handlers use isAdminWSRoute(ctx) to pick wsAuthenticateAdmin.
|
||||||
|
func markAdminWS(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
next.ServeHTTP(w, r.WithContext(setAdminWSFlag(r.Context())))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
// requireAdmin validates that the authenticated user is a platform admin.
|
// requireAdmin validates that the authenticated user is a platform admin.
|
||||||
// Must run after requireJWT (depends on AuthContext being present).
|
// Must run after requireJWT (depends on AuthContext being present).
|
||||||
// Re-validates against the DB — the JWT is_admin claim is for UI only;
|
// Re-validates against the DB — the JWT is_admin claim is for UI only;
|
||||||
// the DB is the source of truth for admin access.
|
// the DB is the source of truth for admin access.
|
||||||
// WebSocket upgrade requests without auth context are passed through —
|
|
||||||
// admin WS handlers verify admin status after upgrade via wsAuthenticateAdmin.
|
|
||||||
func requireAdmin(queries *db.Queries) func(http.Handler) http.Handler {
|
func requireAdmin(queries *db.Queries) func(http.Handler) http.Handler {
|
||||||
return func(next http.Handler) http.Handler {
|
return func(next http.Handler) http.Handler {
|
||||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
ac, ok := auth.FromContext(r.Context())
|
ac, ok := auth.FromContext(r.Context())
|
||||||
if !ok {
|
if !ok {
|
||||||
if isWebSocketUpgrade(r) {
|
|
||||||
ctx := r.Context()
|
|
||||||
ctx = setAdminWSFlag(ctx)
|
|
||||||
next.ServeHTTP(w, r.WithContext(ctx))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
writeError(w, http.StatusUnauthorized, "unauthorized", "authentication required")
|
writeError(w, http.StatusUnauthorized, "unauthorized", "authentication required")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@ -85,15 +85,61 @@ func requireAPIKeyOrJWT(queries *db.Queries, jwtSecret []byte) func(http.Handler
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// WebSocket upgrade requests may not carry auth headers (browsers
|
|
||||||
// cannot set custom headers on WS connections). Pass through —
|
|
||||||
// the WS handler authenticates via the first message after upgrade.
|
|
||||||
if isWebSocketUpgrade(r) {
|
|
||||||
next.ServeHTTP(w, r)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
writeError(w, http.StatusUnauthorized, "unauthorized", "X-API-Key or Authorization: Bearer <token> required")
|
writeError(w, http.StatusUnauthorized, "unauthorized", "X-API-Key or Authorization: Bearer <token> required")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// optionalAPIKeyOrJWT is like requireAPIKeyOrJWT but does not reject
|
||||||
|
// unauthenticated requests. It injects auth context when valid credentials
|
||||||
|
// are present (supporting SDK clients that set X-API-Key on WebSocket
|
||||||
|
// upgrades) and passes through otherwise so the handler can authenticate
|
||||||
|
// after the WebSocket upgrade via the first message.
|
||||||
|
func optionalAPIKeyOrJWT(queries *db.Queries, jwtSecret []byte) func(http.Handler) http.Handler {
|
||||||
|
return func(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
// Try API key.
|
||||||
|
if key := r.Header.Get("X-API-Key"); key != "" {
|
||||||
|
hash := auth.HashAPIKey(key)
|
||||||
|
row, err := queries.GetAPIKeyByHash(r.Context(), hash)
|
||||||
|
if err == nil {
|
||||||
|
if err := queries.UpdateAPIKeyLastUsed(r.Context(), row.ID); err != nil {
|
||||||
|
slog.Warn("failed to update api key last_used", "key_id", id.FormatAPIKeyID(row.ID), "error", err)
|
||||||
|
}
|
||||||
|
ctx := auth.WithAuthContext(r.Context(), auth.AuthContext{
|
||||||
|
TeamID: row.TeamID,
|
||||||
|
APIKeyID: row.ID,
|
||||||
|
APIKeyName: row.Name,
|
||||||
|
})
|
||||||
|
next.ServeHTTP(w, r.WithContext(ctx))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try JWT bearer token.
|
||||||
|
if header := r.Header.Get("Authorization"); strings.HasPrefix(header, "Bearer ") {
|
||||||
|
tokenStr := strings.TrimPrefix(header, "Bearer ")
|
||||||
|
if claims, err := auth.VerifyJWT(jwtSecret, tokenStr); err == nil {
|
||||||
|
if teamID, err := id.ParseTeamID(claims.TeamID); err == nil {
|
||||||
|
if userID, err := id.ParseUserID(claims.Subject); err == nil {
|
||||||
|
if user, err := queries.GetUserByID(r.Context(), userID); err == nil && user.Status == "active" {
|
||||||
|
ctx := auth.WithAuthContext(r.Context(), auth.AuthContext{
|
||||||
|
TeamID: teamID,
|
||||||
|
UserID: userID,
|
||||||
|
Email: claims.Email,
|
||||||
|
Name: claims.Name,
|
||||||
|
Role: claims.Role,
|
||||||
|
})
|
||||||
|
next.ServeHTTP(w, r.WithContext(ctx))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// No valid credentials — pass through for handler to authenticate.
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@ -22,13 +22,6 @@ func requireJWT(secret []byte, queries *db.Queries) func(http.Handler) http.Hand
|
|||||||
tokenStr = strings.TrimPrefix(header, "Bearer ")
|
tokenStr = strings.TrimPrefix(header, "Bearer ")
|
||||||
}
|
}
|
||||||
if tokenStr == "" {
|
if tokenStr == "" {
|
||||||
// WebSocket upgrade requests may not have an Authorization header
|
|
||||||
// (browsers cannot set custom headers on WS connections). Let them
|
|
||||||
// through — the handler authenticates via the first WS message.
|
|
||||||
if isWebSocketUpgrade(r) {
|
|
||||||
next.ServeHTTP(w, r)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
writeError(w, http.StatusUnauthorized, "unauthorized", "Authorization: Bearer <token> required")
|
writeError(w, http.StatusUnauthorized, "unauthorized", "Authorization: Bearer <token> required")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@ -2,7 +2,7 @@ openapi: "3.1.0"
|
|||||||
info:
|
info:
|
||||||
title: Wrenn API
|
title: Wrenn API
|
||||||
description: MicroVM-based code execution platform API.
|
description: MicroVM-based code execution platform API.
|
||||||
version: "0.1.2"
|
version: "0.1.4"
|
||||||
|
|
||||||
servers:
|
servers:
|
||||||
- url: http://localhost:8080
|
- url: http://localhost:8080
|
||||||
|
|||||||
@ -32,7 +32,7 @@ type Server struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// New constructs the chi router and registers all routes.
|
// New constructs the chi router and registers all routes.
|
||||||
// Extensions are called after core routes are registered, allowing enterprise
|
// Extensions are called after core routes are registered, allowing cloud
|
||||||
// or third-party code to add routes and middleware.
|
// or third-party code to add routes and middleware.
|
||||||
func New(
|
func New(
|
||||||
queries *db.Queries,
|
queries *db.Queries,
|
||||||
@ -85,12 +85,12 @@ func New(
|
|||||||
apiKeys := newAPIKeyHandler(apiKeySvc, al)
|
apiKeys := newAPIKeyHandler(apiKeySvc, al)
|
||||||
hostH := newHostHandler(hostSvc, queries, al)
|
hostH := newHostHandler(hostSvc, queries, al)
|
||||||
teamH := newTeamHandler(teamSvc, al, mailer)
|
teamH := newTeamHandler(teamSvc, al, mailer)
|
||||||
usersH := newUsersHandler(queries, userSvc)
|
usersH := newUsersHandler(queries, userSvc, al)
|
||||||
auditH := newAuditHandler(auditSvc)
|
auditH := newAuditHandler(auditSvc)
|
||||||
statsH := newStatsHandler(statsSvc)
|
statsH := newStatsHandler(statsSvc)
|
||||||
usageH := newUsageHandler(usageSvc)
|
usageH := newUsageHandler(usageSvc)
|
||||||
metricsH := newSandboxMetricsHandler(queries, pool)
|
metricsH := newSandboxMetricsHandler(queries, pool)
|
||||||
buildH := newBuildHandler(buildSvc, queries, pool)
|
buildH := newBuildHandler(buildSvc, queries, pool, al)
|
||||||
channelH := newChannelHandler(channelSvc, al)
|
channelH := newChannelHandler(channelSvc, al)
|
||||||
ptyH := newPtyHandler(queries, pool, jwtSecret)
|
ptyH := newPtyHandler(queries, pool, jwtSecret)
|
||||||
processH := newProcessHandler(queries, pool, jwtSecret)
|
processH := newProcessHandler(queries, pool, jwtSecret)
|
||||||
@ -161,20 +161,23 @@ func New(
|
|||||||
r.With(requireJWT(jwtSecret, queries)).Get("/v1/users/search", usersH.Search)
|
r.With(requireJWT(jwtSecret, queries)).Get("/v1/users/search", usersH.Search)
|
||||||
|
|
||||||
// Capsule lifecycle: accepts API key or JWT bearer token.
|
// Capsule lifecycle: accepts API key or JWT bearer token.
|
||||||
// WebSocket upgrade requests without auth headers are passed through by
|
|
||||||
// requireAPIKeyOrJWT — the WS handlers authenticate via first message.
|
|
||||||
r.Route("/v1/capsules", func(r chi.Router) {
|
r.Route("/v1/capsules", func(r chi.Router) {
|
||||||
|
// Auth-required routes.
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
r.Use(requireAPIKeyOrJWT(queries, jwtSecret))
|
r.Use(requireAPIKeyOrJWT(queries, jwtSecret))
|
||||||
r.Post("/", sandbox.Create)
|
r.Post("/", sandbox.Create)
|
||||||
r.Get("/", sandbox.List)
|
r.Get("/", sandbox.List)
|
||||||
r.Get("/stats", statsH.GetStats)
|
r.Get("/stats", statsH.GetStats)
|
||||||
r.Get("/usage", usageH.GetUsage)
|
r.Get("/usage", usageH.GetUsage)
|
||||||
|
})
|
||||||
|
|
||||||
r.Route("/{id}", func(r chi.Router) {
|
r.Route("/{id}", func(r chi.Router) {
|
||||||
|
// Auth-required non-WS routes.
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Use(requireAPIKeyOrJWT(queries, jwtSecret))
|
||||||
r.Get("/", sandbox.Get)
|
r.Get("/", sandbox.Get)
|
||||||
r.Delete("/", sandbox.Destroy)
|
r.Delete("/", sandbox.Destroy)
|
||||||
r.Post("/exec", exec.Exec)
|
r.Post("/exec", exec.Exec)
|
||||||
r.Get("/exec/stream", execStream.ExecStream)
|
|
||||||
r.Post("/ping", sandbox.Ping)
|
r.Post("/ping", sandbox.Ping)
|
||||||
r.Post("/pause", sandbox.Pause)
|
r.Post("/pause", sandbox.Pause)
|
||||||
r.Post("/resume", sandbox.Resume)
|
r.Post("/resume", sandbox.Resume)
|
||||||
@ -186,12 +189,21 @@ func New(
|
|||||||
r.Post("/files/mkdir", fsH.MakeDir)
|
r.Post("/files/mkdir", fsH.MakeDir)
|
||||||
r.Post("/files/remove", fsH.Remove)
|
r.Post("/files/remove", fsH.Remove)
|
||||||
r.Get("/metrics", metricsH.GetMetrics)
|
r.Get("/metrics", metricsH.GetMetrics)
|
||||||
r.Get("/pty", ptyH.PtySession)
|
|
||||||
r.Get("/processes", processH.ListProcesses)
|
r.Get("/processes", processH.ListProcesses)
|
||||||
r.Delete("/processes/{selector}", processH.KillProcess)
|
r.Delete("/processes/{selector}", processH.KillProcess)
|
||||||
|
})
|
||||||
|
|
||||||
|
// WebSocket endpoints — handlers authenticate after upgrade.
|
||||||
|
// optionalAPIKeyOrJWT injects auth context from headers when
|
||||||
|
// present (SDK clients) but does not reject when absent (browsers).
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Use(optionalAPIKeyOrJWT(queries, jwtSecret))
|
||||||
|
r.Get("/exec/stream", execStream.ExecStream)
|
||||||
|
r.Get("/pty", ptyH.PtySession)
|
||||||
r.Get("/processes/{selector}/stream", processH.ConnectProcess)
|
r.Get("/processes/{selector}/stream", processH.ConnectProcess)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
})
|
||||||
|
|
||||||
// Snapshot / template management: accepts API key or JWT bearer token.
|
// Snapshot / template management: accepts API key or JWT bearer token.
|
||||||
r.Route("/v1/snapshots", func(r chi.Router) {
|
r.Route("/v1/snapshots", func(r chi.Router) {
|
||||||
@ -248,6 +260,8 @@ func New(
|
|||||||
|
|
||||||
// Platform admin routes — require JWT + DB-validated admin status.
|
// Platform admin routes — require JWT + DB-validated admin status.
|
||||||
r.Route("/v1/admin", func(r chi.Router) {
|
r.Route("/v1/admin", func(r chi.Router) {
|
||||||
|
// Auth-required admin routes (non-capsule + capsule list/create).
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
r.Use(requireJWT(jwtSecret, queries))
|
r.Use(requireJWT(jwtSecret, queries))
|
||||||
r.Use(requireAdmin(queries))
|
r.Use(requireAdmin(queries))
|
||||||
r.Get("/teams", teamH.AdminListTeams)
|
r.Get("/teams", teamH.AdminListTeams)
|
||||||
@ -255,6 +269,7 @@ func New(
|
|||||||
r.Delete("/teams/{id}", teamH.AdminDeleteTeam)
|
r.Delete("/teams/{id}", teamH.AdminDeleteTeam)
|
||||||
r.Get("/users", usersH.AdminListUsers)
|
r.Get("/users", usersH.AdminListUsers)
|
||||||
r.Put("/users/{id}/active", usersH.SetUserActive)
|
r.Put("/users/{id}/active", usersH.SetUserActive)
|
||||||
|
r.Get("/audit-logs", auditH.AdminList)
|
||||||
r.Get("/templates", buildH.ListTemplates)
|
r.Get("/templates", buildH.ListTemplates)
|
||||||
r.Delete("/templates/{name}", buildH.DeleteTemplate)
|
r.Delete("/templates/{name}", buildH.DeleteTemplate)
|
||||||
r.Post("/builds", buildH.Create)
|
r.Post("/builds", buildH.Create)
|
||||||
@ -263,25 +278,39 @@ func New(
|
|||||||
r.Post("/builds/{id}/cancel", buildH.Cancel)
|
r.Post("/builds/{id}/cancel", buildH.Cancel)
|
||||||
r.Post("/capsules", adminCapsules.Create)
|
r.Post("/capsules", adminCapsules.Create)
|
||||||
r.Get("/capsules", adminCapsules.List)
|
r.Get("/capsules", adminCapsules.List)
|
||||||
|
})
|
||||||
|
|
||||||
r.Route("/capsules/{id}", func(r chi.Router) {
|
r.Route("/capsules/{id}", func(r chi.Router) {
|
||||||
|
// Auth-required non-WS admin capsule routes.
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Use(requireJWT(jwtSecret, queries))
|
||||||
|
r.Use(requireAdmin(queries))
|
||||||
r.Use(injectPlatformTeam())
|
r.Use(injectPlatformTeam())
|
||||||
r.Get("/", adminCapsules.Get)
|
r.Get("/", adminCapsules.Get)
|
||||||
r.Delete("/", adminCapsules.Destroy)
|
r.Delete("/", adminCapsules.Destroy)
|
||||||
r.Post("/snapshot", adminCapsules.Snapshot)
|
r.Post("/snapshot", adminCapsules.Snapshot)
|
||||||
r.Post("/exec", exec.Exec)
|
r.Post("/exec", exec.Exec)
|
||||||
r.Get("/exec/stream", execStream.ExecStream)
|
|
||||||
r.Post("/files/write", files.Upload)
|
r.Post("/files/write", files.Upload)
|
||||||
r.Post("/files/read", files.Download)
|
r.Post("/files/read", files.Download)
|
||||||
r.Post("/files/list", fsH.ListDir)
|
r.Post("/files/list", fsH.ListDir)
|
||||||
r.Post("/files/mkdir", fsH.MakeDir)
|
r.Post("/files/mkdir", fsH.MakeDir)
|
||||||
r.Post("/files/remove", fsH.Remove)
|
r.Post("/files/remove", fsH.Remove)
|
||||||
r.Get("/metrics", metricsH.GetMetrics)
|
r.Get("/metrics", metricsH.GetMetrics)
|
||||||
r.Get("/pty", ptyH.PtySession)
|
|
||||||
r.Get("/processes", processH.ListProcesses)
|
r.Get("/processes", processH.ListProcesses)
|
||||||
r.Delete("/processes/{selector}", processH.KillProcess)
|
r.Delete("/processes/{selector}", processH.KillProcess)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Admin WebSocket endpoints — handlers authenticate after upgrade
|
||||||
|
// via wsAuthenticateAdmin. markAdminWS sets the context flag so
|
||||||
|
// handlers know to use admin auth instead of regular auth.
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Use(markAdminWS)
|
||||||
|
r.Get("/exec/stream", execStream.ExecStream)
|
||||||
|
r.Get("/pty", ptyH.PtySession)
|
||||||
r.Get("/processes/{selector}/stream", processH.ConnectProcess)
|
r.Get("/processes/{selector}/stream", processH.ConnectProcess)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
})
|
||||||
|
|
||||||
// Let extensions register their routes after all core routes.
|
// Let extensions register their routes after all core routes.
|
||||||
for _, ext := range extensions {
|
for _, ext := range extensions {
|
||||||
|
|||||||
@ -48,6 +48,13 @@ func (c *Client) BaseURL() string {
|
|||||||
return c.base
|
return c.base
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// HTTPClient returns the underlying http.Client used for envd requests.
|
||||||
|
// Use this instead of http.DefaultClient when making direct HTTP calls to envd
|
||||||
|
// (e.g. file streaming) to avoid sharing the global transport with proxy traffic.
|
||||||
|
func (c *Client) HTTPClient() *http.Client {
|
||||||
|
return c.httpClient
|
||||||
|
}
|
||||||
|
|
||||||
// ExecResult holds the output of a command execution.
|
// ExecResult holds the output of a command execution.
|
||||||
type ExecResult struct {
|
type ExecResult struct {
|
||||||
Stdout []byte
|
Stdout []byte
|
||||||
@ -142,7 +149,7 @@ func (c *Client) ExecStream(ctx context.Context, cmd string, args ...string) (<-
|
|||||||
return nil, fmt.Errorf("start process: %w", err)
|
return nil, fmt.Errorf("start process: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
ch := make(chan ExecStreamEvent, 16)
|
ch := make(chan ExecStreamEvent, 256)
|
||||||
go func() {
|
go func() {
|
||||||
defer close(ch)
|
defer close(ch)
|
||||||
defer stream.Close()
|
defer stream.Close()
|
||||||
|
|||||||
@ -2,7 +2,9 @@ package envdclient
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"net"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
// envdPort is the default port envd listens on inside the guest.
|
// envdPort is the default port envd listens on inside the guest.
|
||||||
@ -13,9 +15,19 @@ func baseURL(hostIP string) string {
|
|||||||
return fmt.Sprintf("http://%s:%d", hostIP, envdPort)
|
return fmt.Sprintf("http://%s:%d", hostIP, envdPort)
|
||||||
}
|
}
|
||||||
|
|
||||||
// newHTTPClient returns an http.Client suitable for talking to envd.
|
// newHTTPClient returns an http.Client with a dedicated transport for talking
|
||||||
// No special transport is needed — envd is reachable via the host IP
|
// to envd. The transport is intentionally separate from http.DefaultTransport
|
||||||
// through the veth/TAP network path.
|
// so that proxy traffic to user services inside the sandbox cannot interfere
|
||||||
|
// with envd RPC connections (PTY streams, exec, file ops).
|
||||||
func newHTTPClient() *http.Client {
|
func newHTTPClient() *http.Client {
|
||||||
return &http.Client{}
|
return &http.Client{
|
||||||
|
Transport: &http.Transport{
|
||||||
|
MaxIdleConnsPerHost: 10,
|
||||||
|
IdleConnTimeout: 90 * time.Second,
|
||||||
|
DialContext: (&net.Dialer{
|
||||||
|
Timeout: 10 * time.Second,
|
||||||
|
KeepAlive: 30 * time.Second,
|
||||||
|
}).DialContext,
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -162,7 +162,7 @@ type eventProvider interface {
|
|||||||
// drainPtyStream reads events from either a Start or Connect stream and maps
|
// drainPtyStream reads events from either a Start or Connect stream and maps
|
||||||
// them into PtyEvent values on a channel.
|
// them into PtyEvent values on a channel.
|
||||||
func drainPtyStream(ctx context.Context, stream eventProvider, expectStart bool) <-chan PtyEvent {
|
func drainPtyStream(ctx context.Context, stream eventProvider, expectStart bool) <-chan PtyEvent {
|
||||||
ch := make(chan PtyEvent, 16)
|
ch := make(chan PtyEvent, 256)
|
||||||
go func() {
|
go func() {
|
||||||
defer close(ch)
|
defer close(ch)
|
||||||
defer stream.Close()
|
defer stream.Close()
|
||||||
|
|||||||
@ -1,16 +1,28 @@
|
|||||||
package hostagent
|
package hostagent
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
|
"net"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httputil"
|
"net/http/httputil"
|
||||||
|
"net/url"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
"git.omukk.dev/wrenn/wrenn/internal/sandbox"
|
"git.omukk.dev/wrenn/wrenn/internal/sandbox"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// proxyDialAttempts is the number of connection attempts for the proxy
|
||||||
|
// transport. Retries handle the delay between a process binding to a port
|
||||||
|
// inside the guest and socat/Go-proxy starting to forward on the TAP IP.
|
||||||
|
proxyDialAttempts = 3
|
||||||
|
)
|
||||||
|
|
||||||
// ProxyHandler reverse-proxies HTTP requests to services running inside
|
// ProxyHandler reverse-proxies HTTP requests to services running inside
|
||||||
// sandboxes. It handles requests of the form:
|
// sandboxes. It handles requests of the form:
|
||||||
//
|
//
|
||||||
@ -21,16 +33,75 @@ import (
|
|||||||
type ProxyHandler struct {
|
type ProxyHandler struct {
|
||||||
mgr *sandbox.Manager
|
mgr *sandbox.Manager
|
||||||
transport http.RoundTripper
|
transport http.RoundTripper
|
||||||
|
|
||||||
|
// proxies caches ReverseProxy instances per sandbox+port to avoid
|
||||||
|
// per-request allocation under high-frequency REST polling.
|
||||||
|
proxies sync.Map // key: "sandboxID/port" → *httputil.ReverseProxy
|
||||||
|
}
|
||||||
|
|
||||||
|
// newProxyTransport returns an HTTP transport dedicated to proxying user
|
||||||
|
// traffic into sandboxes. It is intentionally separate from the envdclient
|
||||||
|
// transport and http.DefaultTransport to prevent proxy traffic from
|
||||||
|
// interfering with Connect RPC streams (PTY, exec).
|
||||||
|
func newProxyTransport() http.RoundTripper {
|
||||||
|
dialer := &net.Dialer{
|
||||||
|
Timeout: 30 * time.Second,
|
||||||
|
KeepAlive: 20 * time.Second,
|
||||||
|
}
|
||||||
|
|
||||||
|
return &http.Transport{
|
||||||
|
ForceAttemptHTTP2: false, // HTTP/1.1 only — avoids HTTP/2 HOL blocking
|
||||||
|
MaxIdleConnsPerHost: 20,
|
||||||
|
MaxIdleConns: 100,
|
||||||
|
IdleConnTimeout: 120 * time.Second,
|
||||||
|
DisableCompression: true,
|
||||||
|
// Retry with linear backoff to handle the delay between a process
|
||||||
|
// binding inside the guest and the port forwarder making it reachable.
|
||||||
|
DialContext: func(ctx context.Context, network, addr string) (net.Conn, error) {
|
||||||
|
var conn net.Conn
|
||||||
|
var err error
|
||||||
|
for attempt := range proxyDialAttempts {
|
||||||
|
conn, err = dialer.DialContext(ctx, network, addr)
|
||||||
|
if err == nil {
|
||||||
|
return conn, nil
|
||||||
|
}
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return nil, ctx.Err()
|
||||||
|
}
|
||||||
|
// Don't sleep on the last attempt.
|
||||||
|
if attempt < proxyDialAttempts-1 {
|
||||||
|
backoff := time.Duration(100*(attempt+1)) * time.Millisecond
|
||||||
|
select {
|
||||||
|
case <-time.After(backoff):
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil, ctx.Err()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewProxyHandler creates a new sandbox proxy handler.
|
// NewProxyHandler creates a new sandbox proxy handler.
|
||||||
func NewProxyHandler(mgr *sandbox.Manager) *ProxyHandler {
|
func NewProxyHandler(mgr *sandbox.Manager) *ProxyHandler {
|
||||||
return &ProxyHandler{
|
return &ProxyHandler{
|
||||||
mgr: mgr,
|
mgr: mgr,
|
||||||
transport: http.DefaultTransport,
|
transport: newProxyTransport(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// EvictProxy removes cached reverse proxy instances for a sandbox.
|
||||||
|
// Call this when a sandbox is destroyed.
|
||||||
|
func (h *ProxyHandler) EvictProxy(sandboxID string) {
|
||||||
|
h.proxies.Range(func(key, _ any) bool {
|
||||||
|
if k, ok := key.(string); ok && strings.HasPrefix(k, sandboxID+"/") {
|
||||||
|
h.proxies.Delete(key)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
// ServeHTTP implements http.Handler.
|
// ServeHTTP implements http.Handler.
|
||||||
func (h *ProxyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
func (h *ProxyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
// Expected path: /proxy/{sandbox_id}/{port}/...
|
// Expected path: /proxy/{sandbox_id}/{port}/...
|
||||||
@ -49,10 +120,6 @@ func (h *ProxyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||||||
|
|
||||||
sandboxID := parts[0]
|
sandboxID := parts[0]
|
||||||
port := parts[1]
|
port := parts[1]
|
||||||
remainder := ""
|
|
||||||
if len(parts) == 3 {
|
|
||||||
remainder = parts[2]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate port is a number in the valid range.
|
// Validate port is a number in the valid range.
|
||||||
portNum, err := strconv.Atoi(port)
|
portNum, err := strconv.Atoi(port)
|
||||||
@ -68,22 +135,61 @@ func (h *ProxyHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
defer tracker.Release()
|
defer tracker.Release()
|
||||||
|
|
||||||
targetHost := fmt.Sprintf("%s:%d", hostIP, portNum)
|
proxy := h.getOrCreateProxy(sandboxID, port, fmt.Sprintf("%s:%d", hostIP, portNum))
|
||||||
|
proxy.ServeHTTP(w, r)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getOrCreateProxy returns a cached ReverseProxy for the given sandbox+port+host,
|
||||||
|
// creating one if it doesn't exist. The targetHost is included in the key so
|
||||||
|
// that an IP change after pause/resume naturally misses the old entry.
|
||||||
|
func (h *ProxyHandler) getOrCreateProxy(sandboxID, port, targetHost string) *httputil.ReverseProxy {
|
||||||
|
cacheKey := sandboxID + "/" + port + "/" + targetHost
|
||||||
|
|
||||||
|
if v, ok := h.proxies.Load(cacheKey); ok {
|
||||||
|
return v.(*httputil.ReverseProxy)
|
||||||
|
}
|
||||||
|
|
||||||
|
proxyPrefix := "/proxy/" + sandboxID + "/" + port
|
||||||
|
|
||||||
proxy := &httputil.ReverseProxy{
|
proxy := &httputil.ReverseProxy{
|
||||||
Transport: h.transport,
|
Transport: h.transport,
|
||||||
Director: func(req *http.Request) {
|
Director: func(req *http.Request) {
|
||||||
|
// Extract remainder from the original path: /proxy/{id}/{port}/{remainder}
|
||||||
|
remainder := ""
|
||||||
|
if trimmed := strings.TrimPrefix(req.URL.Path, proxyPrefix); trimmed != req.URL.Path {
|
||||||
|
remainder = strings.TrimPrefix(trimmed, "/")
|
||||||
|
}
|
||||||
|
|
||||||
req.URL.Scheme = "http"
|
req.URL.Scheme = "http"
|
||||||
req.URL.Host = targetHost
|
req.URL.Host = targetHost
|
||||||
req.URL.Path = "/" + remainder
|
req.URL.Path = "/" + remainder
|
||||||
req.URL.RawQuery = r.URL.RawQuery
|
|
||||||
req.Host = targetHost
|
req.Host = targetHost
|
||||||
},
|
},
|
||||||
|
// Rewrite redirect Location headers so they include the /proxy/{id}/{port}
|
||||||
|
// prefix. Handles both root-relative (/path) and absolute-URL redirects
|
||||||
|
// (http://internal-ip:port/path) that would otherwise leak internal IPs
|
||||||
|
// or break directory navigation.
|
||||||
|
ModifyResponse: func(resp *http.Response) error {
|
||||||
|
loc := resp.Header.Get("Location")
|
||||||
|
if loc == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(loc, "/") {
|
||||||
|
resp.Header.Set("Location", proxyPrefix+loc)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// Rewrite absolute URLs pointing to the internal target host.
|
||||||
|
if u, err := url.Parse(loc); err == nil && u.Host == targetHost {
|
||||||
|
resp.Header.Set("Location", proxyPrefix+u.RequestURI())
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
},
|
||||||
ErrorHandler: func(w http.ResponseWriter, r *http.Request, err error) {
|
ErrorHandler: func(w http.ResponseWriter, r *http.Request, err error) {
|
||||||
slog.Debug("proxy error", "sandbox_id", sandboxID, "port", port, "error", err)
|
slog.Debug("proxy error", "sandbox_id", sandboxID, "port", port, "error", err)
|
||||||
http.Error(w, "proxy error: "+err.Error(), http.StatusBadGateway)
|
http.Error(w, "proxy error: "+err.Error(), http.StatusBadGateway)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
proxy.ServeHTTP(w, r)
|
actual, _ := h.proxies.LoadOrStore(cacheKey, proxy)
|
||||||
|
return actual.(*httputil.ReverseProxy)
|
||||||
}
|
}
|
||||||
|
|||||||
@ -459,7 +459,7 @@ func (s *Server) WriteFileStream(
|
|||||||
}
|
}
|
||||||
httpReq.Header.Set("Content-Type", mpWriter.FormDataContentType())
|
httpReq.Header.Set("Content-Type", mpWriter.FormDataContentType())
|
||||||
|
|
||||||
resp, err := http.DefaultClient.Do(httpReq)
|
resp, err := client.HTTPClient().Do(httpReq)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
pw.CloseWithError(err)
|
pw.CloseWithError(err)
|
||||||
<-errCh
|
<-errCh
|
||||||
@ -504,7 +504,7 @@ func (s *Server) ReadFileStream(
|
|||||||
return connect.NewError(connect.CodeInternal, fmt.Errorf("create request: %w", err))
|
return connect.NewError(connect.CodeInternal, fmt.Errorf("create request: %w", err))
|
||||||
}
|
}
|
||||||
|
|
||||||
resp, err := http.DefaultClient.Do(httpReq)
|
resp, err := client.HTTPClient().Do(httpReq)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return connect.NewError(connect.CodeInternal, fmt.Errorf("read file stream: %w", err))
|
return connect.NewError(connect.CodeInternal, fmt.Errorf("read file stream: %w", err))
|
||||||
}
|
}
|
||||||
|
|||||||
@ -269,6 +269,7 @@ func CreateNetwork(slot *Slot) error {
|
|||||||
// Create TAP device inside namespace.
|
// Create TAP device inside namespace.
|
||||||
tapAttrs := netlink.NewLinkAttrs()
|
tapAttrs := netlink.NewLinkAttrs()
|
||||||
tapAttrs.Name = tapName
|
tapAttrs.Name = tapName
|
||||||
|
tapAttrs.TxQLen = 5000 // Up from default 1000 to reduce drops under bursty traffic.
|
||||||
tap := &netlink.Tuntap{
|
tap := &netlink.Tuntap{
|
||||||
LinkAttrs: tapAttrs,
|
LinkAttrs: tapAttrs,
|
||||||
Mode: netlink.TUNTAP_MODE_TAP,
|
Mode: netlink.TUNTAP_MODE_TAP,
|
||||||
|
|||||||
@ -53,6 +53,15 @@ type Manager struct {
|
|||||||
|
|
||||||
autoPausedMu sync.Mutex
|
autoPausedMu sync.Mutex
|
||||||
autoPausedIDs []string
|
autoPausedIDs []string
|
||||||
|
|
||||||
|
// onDestroy is called with the sandbox ID after cleanup completes.
|
||||||
|
// Used by ProxyHandler to evict cached reverse proxies.
|
||||||
|
onDestroy func(sandboxID string)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetOnDestroy registers a callback invoked after each sandbox is cleaned up.
|
||||||
|
func (m *Manager) SetOnDestroy(fn func(sandboxID string)) {
|
||||||
|
m.onDestroy = fn
|
||||||
}
|
}
|
||||||
|
|
||||||
// sandboxState holds the runtime state for a single sandbox.
|
// sandboxState holds the runtime state for a single sandbox.
|
||||||
@ -314,6 +323,10 @@ func (m *Manager) Destroy(ctx context.Context, sandboxID string) error {
|
|||||||
slog.Warn("snapshot cleanup error", "id", sandboxID, "error", err)
|
slog.Warn("snapshot cleanup error", "id", sandboxID, "error", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if m.onDestroy != nil {
|
||||||
|
m.onDestroy(sandboxID)
|
||||||
|
}
|
||||||
|
|
||||||
slog.Info("sandbox destroyed", "id", sandboxID)
|
slog.Info("sandbox destroyed", "id", sandboxID)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@ -363,6 +376,11 @@ func (m *Manager) Pause(ctx context.Context, sandboxID string) error {
|
|||||||
return fmt.Errorf("sandbox %s is not running (status: %s)", sandboxID, sb.Status)
|
return fmt.Errorf("sandbox %s is not running (status: %s)", sandboxID, sb.Status)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Stop the metrics sampler goroutine before tearing down any resources
|
||||||
|
// it reads (dm device, Firecracker PID). Without this, the sampler
|
||||||
|
// leaks on every successful pause.
|
||||||
|
m.stopSampler(sb)
|
||||||
|
|
||||||
// Step 0: Drain in-flight proxy connections before freezing vCPUs.
|
// Step 0: Drain in-flight proxy connections before freezing vCPUs.
|
||||||
// This prevents Go runtime corruption inside the guest caused by stale
|
// This prevents Go runtime corruption inside the guest caused by stale
|
||||||
// TCP state from connections that were alive when the VM was snapshotted.
|
// TCP state from connections that were alive when the VM was snapshotted.
|
||||||
|
|||||||
@ -84,11 +84,21 @@ func (c *fcClient) setRootfsDrive(ctx context.Context, driveID, path string, rea
|
|||||||
}
|
}
|
||||||
|
|
||||||
// setNetworkInterface configures a network interface attached to a TAP device.
|
// setNetworkInterface configures a network interface attached to a TAP device.
|
||||||
|
// A tx_rate_limiter caps sustained guest→host throughput to prevent user
|
||||||
|
// application traffic from completely saturating the TAP device and starving
|
||||||
|
// envd control traffic (PTY, exec, file ops).
|
||||||
func (c *fcClient) setNetworkInterface(ctx context.Context, ifaceID, tapName, macAddr string) error {
|
func (c *fcClient) setNetworkInterface(ctx context.Context, ifaceID, tapName, macAddr string) error {
|
||||||
return c.do(ctx, http.MethodPut, "/network-interfaces/"+ifaceID, map[string]any{
|
return c.do(ctx, http.MethodPut, "/network-interfaces/"+ifaceID, map[string]any{
|
||||||
"iface_id": ifaceID,
|
"iface_id": ifaceID,
|
||||||
"host_dev_name": tapName,
|
"host_dev_name": tapName,
|
||||||
"guest_mac": macAddr,
|
"guest_mac": macAddr,
|
||||||
|
"tx_rate_limiter": map[string]any{
|
||||||
|
"bandwidth": map[string]any{
|
||||||
|
"size": 209715200, // 200 MB/s sustained
|
||||||
|
"refill_time": 1000, // refill period: 1 second
|
||||||
|
"one_time_burst": 104857600, // 100 MB initial burst
|
||||||
|
},
|
||||||
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -82,6 +82,53 @@ func marshalMeta(meta map[string]any) []byte {
|
|||||||
return b
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Entry describes a single audit log event. Extensions (e.g. the cloud repo)
|
||||||
|
// use this with AuditLogger.Log to record custom events without modifying the
|
||||||
|
// OSS typed methods.
|
||||||
|
type Entry struct {
|
||||||
|
TeamID pgtype.UUID
|
||||||
|
ActorType string // "user", "api_key", "system"
|
||||||
|
ActorID string // prefixed ID string; empty for system
|
||||||
|
ActorName string // human-readable; empty for system
|
||||||
|
ResourceType string
|
||||||
|
ResourceID string // prefixed ID or name; empty when not applicable
|
||||||
|
Action string
|
||||||
|
Scope string // "team" or "admin"
|
||||||
|
Status string // "success", "info", "warning", "error"
|
||||||
|
Metadata map[string]any
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log writes a custom audit log entry. This is the extension point for the
|
||||||
|
// cloud repo to record events with resource types and actions not covered by
|
||||||
|
// the typed helpers (LogSandboxCreate, etc.). Fire-and-forget like all other
|
||||||
|
// audit methods.
|
||||||
|
func (l *AuditLogger) Log(ctx context.Context, e Entry) {
|
||||||
|
l.write(ctx, db.InsertAuditLogParams{
|
||||||
|
ID: id.NewAuditLogID(),
|
||||||
|
TeamID: e.TeamID,
|
||||||
|
ActorType: e.ActorType,
|
||||||
|
ActorID: optText(e.ActorID),
|
||||||
|
ActorName: e.ActorName,
|
||||||
|
ResourceType: e.ResourceType,
|
||||||
|
ResourceID: optText(e.ResourceID),
|
||||||
|
Action: e.Action,
|
||||||
|
Scope: e.Scope,
|
||||||
|
Status: e.Status,
|
||||||
|
Metadata: MarshalMeta(e.Metadata),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ActorFromContext extracts actor fields from an auth.AuthContext for use in
|
||||||
|
// custom audit entries. Returns actor_type, actor_id, and actor_name.
|
||||||
|
func ActorFromContext(ac auth.AuthContext) (actorType, actorID, actorName string) {
|
||||||
|
return actorFields(ac)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalMeta serializes metadata to JSON bytes. Returns "{}" for nil/empty maps.
|
||||||
|
func MarshalMeta(meta map[string]any) []byte {
|
||||||
|
return marshalMeta(meta)
|
||||||
|
}
|
||||||
|
|
||||||
// optText returns a valid pgtype.Text if s is non-empty, otherwise an invalid (NULL) one.
|
// optText returns a valid pgtype.Text if s is non-empty, otherwise an invalid (NULL) one.
|
||||||
func optText(s string) pgtype.Text {
|
func optText(s string) pgtype.Text {
|
||||||
if s == "" {
|
if s == "" {
|
||||||
@ -90,23 +137,42 @@ func optText(s string) pgtype.Text {
|
|||||||
return pgtype.Text{String: s, Valid: true}
|
return pgtype.Text{String: s, Valid: true}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// --- Entry builders ---
|
||||||
|
|
||||||
|
// newEntry builds an Entry from an auth context with explicit team and scope.
|
||||||
|
func newEntry(ac auth.AuthContext, teamID pgtype.UUID, scope, resourceType, resourceID, action, status string, meta map[string]any) Entry {
|
||||||
|
actorType, actorID, actorName := actorFields(ac)
|
||||||
|
return Entry{
|
||||||
|
TeamID: teamID,
|
||||||
|
ActorType: actorType,
|
||||||
|
ActorID: actorID,
|
||||||
|
ActorName: actorName,
|
||||||
|
ResourceType: resourceType,
|
||||||
|
ResourceID: resourceID,
|
||||||
|
Action: action,
|
||||||
|
Scope: scope,
|
||||||
|
Status: status,
|
||||||
|
Metadata: meta,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newAdminEntry builds an Entry for platform-level admin actions (PlatformTeamID, scope "admin").
|
||||||
|
func newAdminEntry(ac auth.AuthContext, resourceType, resourceID, action, status string, meta map[string]any) Entry {
|
||||||
|
return newEntry(ac, id.PlatformTeamID, "admin", resourceType, resourceID, action, status, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveHostTeamID returns the owning team for BYOC hosts, or PlatformTeamID for shared hosts.
|
||||||
|
func resolveHostTeamID(teamID pgtype.UUID) pgtype.UUID {
|
||||||
|
if teamID.Valid {
|
||||||
|
return teamID
|
||||||
|
}
|
||||||
|
return id.PlatformTeamID
|
||||||
|
}
|
||||||
|
|
||||||
// --- Sandbox events (scope: team) ---
|
// --- Sandbox events (scope: team) ---
|
||||||
|
|
||||||
func (l *AuditLogger) LogSandboxCreate(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID, template string) {
|
func (l *AuditLogger) LogSandboxCreate(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID, template string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "sandbox", id.FormatSandboxID(sandboxID), "create", "success", map[string]any{"template": template}))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "sandbox",
|
|
||||||
ResourceID: optText(id.FormatSandboxID(sandboxID)),
|
|
||||||
Action: "create",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: marshalMeta(map[string]any{"template": template}),
|
|
||||||
})
|
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.CapsuleCreated,
|
Event: events.CapsuleCreated,
|
||||||
Timestamp: events.Now(),
|
Timestamp: events.Now(),
|
||||||
@ -117,20 +183,7 @@ func (l *AuditLogger) LogSandboxCreate(ctx context.Context, ac auth.AuthContext,
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogSandboxPause(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID) {
|
func (l *AuditLogger) LogSandboxPause(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "sandbox", id.FormatSandboxID(sandboxID), "pause", "success", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "sandbox",
|
|
||||||
ResourceID: optText(id.FormatSandboxID(sandboxID)),
|
|
||||||
Action: "pause",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.CapsulePaused,
|
Event: events.CapsulePaused,
|
||||||
Timestamp: events.Now(),
|
Timestamp: events.Now(),
|
||||||
@ -142,18 +195,10 @@ func (l *AuditLogger) LogSandboxPause(ctx context.Context, ac auth.AuthContext,
|
|||||||
|
|
||||||
// LogSandboxAutoPause records a system-initiated auto-pause (TTL or host reconciler).
|
// LogSandboxAutoPause records a system-initiated auto-pause (TTL or host reconciler).
|
||||||
func (l *AuditLogger) LogSandboxAutoPause(ctx context.Context, teamID, sandboxID pgtype.UUID) {
|
func (l *AuditLogger) LogSandboxAutoPause(ctx context.Context, teamID, sandboxID pgtype.UUID) {
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
l.Log(ctx, Entry{
|
||||||
ID: id.NewAuditLogID(),
|
TeamID: teamID, ActorType: "system",
|
||||||
TeamID: teamID,
|
ResourceType: "sandbox", ResourceID: id.FormatSandboxID(sandboxID),
|
||||||
ActorType: "system",
|
Action: "pause", Scope: "team", Status: "info",
|
||||||
ActorID: pgtype.Text{},
|
|
||||||
ActorName: "",
|
|
||||||
ResourceType: "sandbox",
|
|
||||||
ResourceID: optText(id.FormatSandboxID(sandboxID)),
|
|
||||||
Action: "pause",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "info",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
})
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.CapsulePaused,
|
Event: events.CapsulePaused,
|
||||||
@ -165,20 +210,7 @@ func (l *AuditLogger) LogSandboxAutoPause(ctx context.Context, teamID, sandboxID
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogSandboxResume(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID) {
|
func (l *AuditLogger) LogSandboxResume(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "sandbox", id.FormatSandboxID(sandboxID), "resume", "success", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "sandbox",
|
|
||||||
ResourceID: optText(id.FormatSandboxID(sandboxID)),
|
|
||||||
Action: "resume",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.CapsuleRunning,
|
Event: events.CapsuleRunning,
|
||||||
Timestamp: events.Now(),
|
Timestamp: events.Now(),
|
||||||
@ -189,20 +221,7 @@ func (l *AuditLogger) LogSandboxResume(ctx context.Context, ac auth.AuthContext,
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogSandboxDestroy(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID) {
|
func (l *AuditLogger) LogSandboxDestroy(ctx context.Context, ac auth.AuthContext, sandboxID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "sandbox", id.FormatSandboxID(sandboxID), "destroy", "warning", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "sandbox",
|
|
||||||
ResourceID: optText(id.FormatSandboxID(sandboxID)),
|
|
||||||
Action: "destroy",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "warning",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.CapsuleDestroyed,
|
Event: events.CapsuleDestroyed,
|
||||||
Timestamp: events.Now(),
|
Timestamp: events.Now(),
|
||||||
@ -215,20 +234,7 @@ func (l *AuditLogger) LogSandboxDestroy(ctx context.Context, ac auth.AuthContext
|
|||||||
// --- Snapshot events (scope: team) ---
|
// --- Snapshot events (scope: team) ---
|
||||||
|
|
||||||
func (l *AuditLogger) LogSnapshotCreate(ctx context.Context, ac auth.AuthContext, name string) {
|
func (l *AuditLogger) LogSnapshotCreate(ctx context.Context, ac auth.AuthContext, name string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "snapshot", name, "create", "success", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "snapshot",
|
|
||||||
ResourceID: optText(name),
|
|
||||||
Action: "create",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.SnapshotCreated,
|
Event: events.SnapshotCreated,
|
||||||
Timestamp: events.Now(),
|
Timestamp: events.Now(),
|
||||||
@ -239,20 +245,7 @@ func (l *AuditLogger) LogSnapshotCreate(ctx context.Context, ac auth.AuthContext
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogSnapshotDelete(ctx context.Context, ac auth.AuthContext, name string) {
|
func (l *AuditLogger) LogSnapshotDelete(ctx context.Context, ac auth.AuthContext, name string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "snapshot", name, "delete", "warning", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "snapshot",
|
|
||||||
ResourceID: optText(name),
|
|
||||||
Action: "delete",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "warning",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.SnapshotDeleted,
|
Event: events.SnapshotDeleted,
|
||||||
Timestamp: events.Now(),
|
Timestamp: events.Now(),
|
||||||
@ -265,274 +258,96 @@ func (l *AuditLogger) LogSnapshotDelete(ctx context.Context, ac auth.AuthContext
|
|||||||
// --- Team events (scope: team) ---
|
// --- Team events (scope: team) ---
|
||||||
|
|
||||||
func (l *AuditLogger) LogTeamRename(ctx context.Context, ac auth.AuthContext, teamID pgtype.UUID, oldName, newName string) {
|
func (l *AuditLogger) LogTeamRename(ctx context.Context, ac auth.AuthContext, teamID pgtype.UUID, oldName, newName string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "team", id.FormatTeamID(teamID), "rename", "info", map[string]any{"old_name": oldName, "new_name": newName}))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "team",
|
|
||||||
ResourceID: optText(id.FormatTeamID(teamID)),
|
|
||||||
Action: "rename",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "info",
|
|
||||||
Metadata: marshalMeta(map[string]any{"old_name": oldName, "new_name": newName}),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Channel events (scope: team) ---
|
// --- Channel events (scope: team) ---
|
||||||
|
|
||||||
func (l *AuditLogger) LogChannelCreate(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID, name, provider string) {
|
func (l *AuditLogger) LogChannelCreate(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID, name, provider string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "channel", id.FormatChannelID(channelID), "create", "success", map[string]any{"name": name, "provider": provider}))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "channel",
|
|
||||||
ResourceID: optText(id.FormatChannelID(channelID)),
|
|
||||||
Action: "create",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: marshalMeta(map[string]any{"name": name, "provider": provider}),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogChannelUpdate(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID) {
|
func (l *AuditLogger) LogChannelUpdate(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "channel", id.FormatChannelID(channelID), "update", "info", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "channel",
|
|
||||||
ResourceID: optText(id.FormatChannelID(channelID)),
|
|
||||||
Action: "update",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "info",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogChannelRotateConfig(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID) {
|
func (l *AuditLogger) LogChannelRotateConfig(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "channel", id.FormatChannelID(channelID), "rotate_config", "info", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "channel",
|
|
||||||
ResourceID: optText(id.FormatChannelID(channelID)),
|
|
||||||
Action: "rotate_config",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "info",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogChannelDelete(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID) {
|
func (l *AuditLogger) LogChannelDelete(ctx context.Context, ac auth.AuthContext, channelID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "channel", id.FormatChannelID(channelID), "delete", "warning", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "channel",
|
|
||||||
ResourceID: optText(id.FormatChannelID(channelID)),
|
|
||||||
Action: "delete",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "warning",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- API key events (scope: team) ---
|
// --- API key events (scope: team) ---
|
||||||
|
|
||||||
func (l *AuditLogger) LogAPIKeyCreate(ctx context.Context, ac auth.AuthContext, keyID pgtype.UUID, keyName string) {
|
func (l *AuditLogger) LogAPIKeyCreate(ctx context.Context, ac auth.AuthContext, keyID pgtype.UUID, keyName string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "api_key", id.FormatAPIKeyID(keyID), "create", "success", map[string]any{"name": keyName}))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "api_key",
|
|
||||||
ResourceID: optText(id.FormatAPIKeyID(keyID)),
|
|
||||||
Action: "create",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: marshalMeta(map[string]any{"name": keyName}),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogAPIKeyRevoke(ctx context.Context, ac auth.AuthContext, keyID pgtype.UUID) {
|
func (l *AuditLogger) LogAPIKeyRevoke(ctx context.Context, ac auth.AuthContext, keyID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "team", "api_key", id.FormatAPIKeyID(keyID), "revoke", "warning", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "api_key",
|
|
||||||
ResourceID: optText(id.FormatAPIKeyID(keyID)),
|
|
||||||
Action: "revoke",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "warning",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Member events (scope: admin) ---
|
// --- Member events (scope: admin) ---
|
||||||
|
|
||||||
func (l *AuditLogger) LogMemberAdd(ctx context.Context, ac auth.AuthContext, targetUserID pgtype.UUID, targetEmail, role string) {
|
func (l *AuditLogger) LogMemberAdd(ctx context.Context, ac auth.AuthContext, targetUserID pgtype.UUID, targetEmail, role string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "admin", "member", id.FormatUserID(targetUserID), "add", "success", map[string]any{"email": targetEmail, "role": role}))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "member",
|
|
||||||
ResourceID: optText(id.FormatUserID(targetUserID)),
|
|
||||||
Action: "add",
|
|
||||||
Scope: "admin",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: marshalMeta(map[string]any{"email": targetEmail, "role": role}),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogMemberRemove(ctx context.Context, ac auth.AuthContext, targetUserID pgtype.UUID) {
|
func (l *AuditLogger) LogMemberRemove(ctx context.Context, ac auth.AuthContext, targetUserID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "admin", "member", id.FormatUserID(targetUserID), "remove", "warning", nil))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "member",
|
|
||||||
ResourceID: optText(id.FormatUserID(targetUserID)),
|
|
||||||
Action: "remove",
|
|
||||||
Scope: "admin",
|
|
||||||
Status: "warning",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogMemberLeave(ctx context.Context, ac auth.AuthContext) {
|
func (l *AuditLogger) LogMemberLeave(ctx context.Context, ac auth.AuthContext) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
|
||||||
resourceID := ""
|
resourceID := ""
|
||||||
if ac.UserID.Valid {
|
if ac.UserID.Valid {
|
||||||
resourceID = id.FormatUserID(ac.UserID)
|
resourceID = id.FormatUserID(ac.UserID)
|
||||||
}
|
}
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
l.Log(ctx, newEntry(ac, ac.TeamID, "admin", "member", resourceID, "leave", "info", nil))
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "member",
|
|
||||||
ResourceID: optText(resourceID),
|
|
||||||
Action: "leave",
|
|
||||||
Scope: "admin",
|
|
||||||
Status: "info",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *AuditLogger) LogMemberRoleUpdate(ctx context.Context, ac auth.AuthContext, targetUserID pgtype.UUID, newRole string) {
|
func (l *AuditLogger) LogMemberRoleUpdate(ctx context.Context, ac auth.AuthContext, targetUserID pgtype.UUID, newRole string) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, ac.TeamID, "admin", "member", id.FormatUserID(targetUserID), "role_update", "info", map[string]any{"new_role": newRole}))
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: ac.TeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "member",
|
|
||||||
ResourceID: optText(id.FormatUserID(targetUserID)),
|
|
||||||
Action: "role_update",
|
|
||||||
Scope: "admin",
|
|
||||||
Status: "info",
|
|
||||||
Metadata: marshalMeta(map[string]any{"new_role": newRole}),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Host events (scope: admin) ---
|
// --- Host events (scope: admin) ---
|
||||||
|
|
||||||
|
// LogHostCreate records a user-initiated host registration.
|
||||||
|
// BYOC hosts log to the owning team; shared hosts log to the platform team.
|
||||||
func (l *AuditLogger) LogHostCreate(ctx context.Context, ac auth.AuthContext, hostID, teamID pgtype.UUID) {
|
func (l *AuditLogger) LogHostCreate(ctx context.Context, ac auth.AuthContext, hostID, teamID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, resolveHostTeamID(teamID), "admin", "host", id.FormatHostID(hostID), "create", "success", nil))
|
||||||
// For shared hosts with no owning team, use the caller's team.
|
|
||||||
logTeamID := teamID
|
|
||||||
if !logTeamID.Valid {
|
|
||||||
logTeamID = ac.TeamID
|
|
||||||
}
|
|
||||||
if !logTeamID.Valid {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: logTeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "host",
|
|
||||||
ResourceID: optText(id.FormatHostID(hostID)),
|
|
||||||
Action: "create",
|
|
||||||
Scope: "admin",
|
|
||||||
Status: "success",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// LogHostDelete records a user-initiated host removal.
|
||||||
|
// BYOC hosts log to the owning team; shared hosts log to the platform team.
|
||||||
func (l *AuditLogger) LogHostDelete(ctx context.Context, ac auth.AuthContext, hostID, teamID pgtype.UUID) {
|
func (l *AuditLogger) LogHostDelete(ctx context.Context, ac auth.AuthContext, hostID, teamID pgtype.UUID) {
|
||||||
actorType, actorID, actorName := actorFields(ac)
|
l.Log(ctx, newEntry(ac, resolveHostTeamID(teamID), "admin", "host", id.FormatHostID(hostID), "delete", "warning", nil))
|
||||||
logTeamID := teamID
|
|
||||||
if !logTeamID.Valid {
|
|
||||||
logTeamID = ac.TeamID
|
|
||||||
}
|
|
||||||
if !logTeamID.Valid {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
|
||||||
TeamID: logTeamID,
|
|
||||||
ActorType: actorType,
|
|
||||||
ActorID: optText(actorID),
|
|
||||||
ActorName: actorName,
|
|
||||||
ResourceType: "host",
|
|
||||||
ResourceID: optText(id.FormatHostID(hostID)),
|
|
||||||
Action: "delete",
|
|
||||||
Scope: "admin",
|
|
||||||
Status: "warning",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// LogHostMarkedDown records a system-initiated host status transition to unreachable.
|
// LogHostMarkedDown records a system-initiated host status transition to unreachable.
|
||||||
// Scoped to "team" so BYOC team members can see when their hosts go down.
|
// Scoped to "team" so BYOC team members can see when their hosts go down.
|
||||||
func (l *AuditLogger) LogHostMarkedDown(ctx context.Context, teamID, hostID pgtype.UUID) {
|
func (l *AuditLogger) LogHostMarkedDown(ctx context.Context, teamID, hostID pgtype.UUID) {
|
||||||
|
l.logSystemHostEvent(ctx, teamID, hostID, "marked_down", "error", events.HostDown)
|
||||||
|
}
|
||||||
|
|
||||||
|
// LogHostMarkedUp records a system-initiated host status transition back to online.
|
||||||
|
// Scoped to "team" so BYOC team members can see when their hosts recover.
|
||||||
|
func (l *AuditLogger) LogHostMarkedUp(ctx context.Context, teamID, hostID pgtype.UUID) {
|
||||||
|
l.logSystemHostEvent(ctx, teamID, hostID, "marked_up", "success", events.HostUp)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *AuditLogger) logSystemHostEvent(ctx context.Context, teamID, hostID pgtype.UUID, action, status, ev string) {
|
||||||
if !teamID.Valid {
|
if !teamID.Valid {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
l.Log(ctx, Entry{
|
||||||
ID: id.NewAuditLogID(),
|
TeamID: teamID, ActorType: "system",
|
||||||
TeamID: teamID,
|
ResourceType: "host", ResourceID: id.FormatHostID(hostID),
|
||||||
ActorType: "system",
|
Action: action, Scope: "team", Status: status,
|
||||||
ActorID: pgtype.Text{},
|
|
||||||
ActorName: "",
|
|
||||||
ResourceType: "host",
|
|
||||||
ResourceID: optText(id.FormatHostID(hostID)),
|
|
||||||
Action: "marked_down",
|
|
||||||
Scope: "team",
|
|
||||||
Status: "error",
|
|
||||||
Metadata: []byte("{}"),
|
|
||||||
})
|
})
|
||||||
l.publish(ctx, events.Event{
|
l.publish(ctx, events.Event{
|
||||||
Event: events.HostDown,
|
Event: ev,
|
||||||
Timestamp: events.Now(),
|
Timestamp: events.Now(),
|
||||||
TeamID: id.FormatTeamID(teamID),
|
TeamID: id.FormatTeamID(teamID),
|
||||||
Actor: systemActor(),
|
Actor: systemActor(),
|
||||||
@ -540,30 +355,38 @@ func (l *AuditLogger) LogHostMarkedDown(ctx context.Context, teamID, hostID pgty
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// LogHostMarkedUp records a system-initiated host status transition back to online.
|
// --- User events (scope: admin) ---
|
||||||
// Scoped to "team" so BYOC team members can see when their hosts recover.
|
|
||||||
func (l *AuditLogger) LogHostMarkedUp(ctx context.Context, teamID, hostID pgtype.UUID) {
|
func (l *AuditLogger) LogUserActivate(ctx context.Context, ac auth.AuthContext, userID pgtype.UUID, email string) {
|
||||||
if !teamID.Valid {
|
l.Log(ctx, newAdminEntry(ac, "user", id.FormatUserID(userID), "activate", "success", map[string]any{"email": email}))
|
||||||
return
|
|
||||||
}
|
}
|
||||||
l.write(ctx, db.InsertAuditLogParams{
|
|
||||||
ID: id.NewAuditLogID(),
|
func (l *AuditLogger) LogUserDeactivate(ctx context.Context, ac auth.AuthContext, userID pgtype.UUID, email string) {
|
||||||
TeamID: teamID,
|
l.Log(ctx, newAdminEntry(ac, "user", id.FormatUserID(userID), "deactivate", "warning", map[string]any{"email": email}))
|
||||||
ActorType: "system",
|
}
|
||||||
ActorID: pgtype.Text{},
|
|
||||||
ActorName: "",
|
// --- Team admin events (scope: admin) ---
|
||||||
ResourceType: "host",
|
|
||||||
ResourceID: optText(id.FormatHostID(hostID)),
|
func (l *AuditLogger) LogTeamSetBYOC(ctx context.Context, ac auth.AuthContext, teamID pgtype.UUID, enabled bool) {
|
||||||
Action: "marked_up",
|
l.Log(ctx, newAdminEntry(ac, "team", id.FormatTeamID(teamID), "set_byoc", "info", map[string]any{"enabled": enabled}))
|
||||||
Scope: "team",
|
}
|
||||||
Status: "success",
|
|
||||||
Metadata: []byte("{}"),
|
func (l *AuditLogger) LogTeamDelete(ctx context.Context, ac auth.AuthContext, teamID pgtype.UUID) {
|
||||||
})
|
l.Log(ctx, newAdminEntry(ac, "team", id.FormatTeamID(teamID), "delete", "warning", nil))
|
||||||
l.publish(ctx, events.Event{
|
}
|
||||||
Event: events.HostUp,
|
|
||||||
Timestamp: events.Now(),
|
// --- Template events (scope: admin) ---
|
||||||
TeamID: id.FormatTeamID(teamID),
|
|
||||||
Actor: systemActor(),
|
func (l *AuditLogger) LogTemplateDelete(ctx context.Context, ac auth.AuthContext, name string) {
|
||||||
Resource: events.Resource{ID: id.FormatHostID(hostID), Type: "host"},
|
l.Log(ctx, newAdminEntry(ac, "template", name, "delete", "warning", nil))
|
||||||
})
|
}
|
||||||
|
|
||||||
|
// --- Build events (scope: admin) ---
|
||||||
|
|
||||||
|
func (l *AuditLogger) LogBuildCreate(ctx context.Context, ac auth.AuthContext, buildID pgtype.UUID, name string) {
|
||||||
|
l.Log(ctx, newAdminEntry(ac, "build", id.FormatBuildID(buildID), "create", "success", map[string]any{"name": name}))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *AuditLogger) LogBuildCancel(ctx context.Context, ac auth.AuthContext, buildID pgtype.UUID) {
|
||||||
|
l.Log(ctx, newAdminEntry(ac, "build", id.FormatBuildID(buildID), "cancel", "warning", nil))
|
||||||
}
|
}
|
||||||
|
|||||||
@ -36,7 +36,7 @@ type ServerContext struct {
|
|||||||
Config config.Config
|
Config config.Config
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extension allows enterprise (or any external) code to plug additional
|
// Extension allows cloud (or any external) code to plug additional
|
||||||
// routes and background workers into the control plane without modifying
|
// routes and background workers into the control plane without modifying
|
||||||
// the core server.
|
// the core server.
|
||||||
type Extension interface {
|
type Extension interface {
|
||||||
|
|||||||
@ -14,6 +14,8 @@ import (
|
|||||||
"github.com/jackc/pgx/v5/pgxpool"
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
"github.com/redis/go-redis/v9"
|
"github.com/redis/go-redis/v9"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v5/pgtype"
|
||||||
|
|
||||||
"git.omukk.dev/wrenn/wrenn/internal/api"
|
"git.omukk.dev/wrenn/wrenn/internal/api"
|
||||||
"git.omukk.dev/wrenn/wrenn/internal/email"
|
"git.omukk.dev/wrenn/wrenn/internal/email"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/audit"
|
"git.omukk.dev/wrenn/wrenn/pkg/audit"
|
||||||
@ -22,6 +24,7 @@ import (
|
|||||||
"git.omukk.dev/wrenn/wrenn/pkg/channels"
|
"git.omukk.dev/wrenn/wrenn/pkg/channels"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/config"
|
"git.omukk.dev/wrenn/wrenn/pkg/config"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/db"
|
"git.omukk.dev/wrenn/wrenn/pkg/db"
|
||||||
|
"git.omukk.dev/wrenn/wrenn/pkg/id"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/lifecycle"
|
"git.omukk.dev/wrenn/wrenn/pkg/lifecycle"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/logging"
|
"git.omukk.dev/wrenn/wrenn/pkg/logging"
|
||||||
"git.omukk.dev/wrenn/wrenn/pkg/scheduler"
|
"git.omukk.dev/wrenn/wrenn/pkg/scheduler"
|
||||||
@ -185,10 +188,12 @@ func Run(opts ...Option) {
|
|||||||
channelDispatcher.Start(ctx)
|
channelDispatcher.Start(ctx)
|
||||||
|
|
||||||
// Start host monitor (passive + active reconciliation every 30s).
|
// Start host monitor (passive + active reconciliation every 30s).
|
||||||
monitor := api.NewHostMonitor(queries, hostPool, al, 30*time.Second)
|
monitor := api.NewHostMonitor(queries, hostPool, al, 15*time.Second)
|
||||||
monitor.Start(ctx)
|
monitor.Start(ctx)
|
||||||
|
|
||||||
// Hard-delete accounts that have been soft-deleted for more than 15 days (runs every 24h).
|
// Hard-delete accounts that have been soft-deleted for more than 15 days (runs every 24h).
|
||||||
|
// Audit logs referencing deleted users are anonymized before the user row is removed.
|
||||||
|
// A notification email is sent to the user before their data is permanently removed.
|
||||||
go func() {
|
go func() {
|
||||||
ticker := time.NewTicker(24 * time.Hour)
|
ticker := time.NewTicker(24 * time.Hour)
|
||||||
defer ticker.Stop()
|
defer ticker.Stop()
|
||||||
@ -197,10 +202,34 @@ func Run(opts ...Option) {
|
|||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return
|
return
|
||||||
case <-ticker.C:
|
case <-ticker.C:
|
||||||
if err := queries.HardDeleteExpiredUsers(ctx); err != nil {
|
expired, err := queries.ListExpiredSoftDeletedUsers(ctx)
|
||||||
slog.Error("account cleanup: failed to hard-delete expired users", "error", err)
|
if err != nil {
|
||||||
} else {
|
slog.Error("account cleanup: failed to list expired users", "error", err)
|
||||||
slog.Info("account cleanup: hard-deleted expired users")
|
continue
|
||||||
|
}
|
||||||
|
var deleted int
|
||||||
|
for _, row := range expired {
|
||||||
|
prefixedID := id.FormatUserID(row.ID)
|
||||||
|
if err := queries.AnonymizeAuditLogsByUserID(ctx, pgtype.Text{String: prefixedID, Valid: true}); err != nil {
|
||||||
|
slog.Error("account cleanup: failed to anonymize audit logs, skipping delete", "user_id", prefixedID, "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err := queries.HardDeleteUser(ctx, row.ID); err != nil {
|
||||||
|
slog.Error("account cleanup: failed to hard-delete user", "user_id", prefixedID, "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err := mailer.Send(ctx, row.Email, "Your Wrenn account has been deleted", email.EmailData{
|
||||||
|
Message: "Your Wrenn account and all associated data have been permanently deleted. " +
|
||||||
|
"This action was taken automatically because your account was scheduled for deletion more than 15 days ago.\n\n" +
|
||||||
|
"If you believe this was done in error, please contact support.",
|
||||||
|
Closing: "Thank you for using Wrenn.",
|
||||||
|
}); err != nil {
|
||||||
|
slog.Warn("account cleanup: failed to send deletion notification", "email", row.Email, "error", err)
|
||||||
|
}
|
||||||
|
deleted++
|
||||||
|
}
|
||||||
|
if len(expired) > 0 {
|
||||||
|
slog.Info("account cleanup: processed expired users", "total", len(expired), "deleted", deleted)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -11,6 +11,21 @@ import (
|
|||||||
"github.com/jackc/pgx/v5/pgtype"
|
"github.com/jackc/pgx/v5/pgtype"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const anonymizeAuditLogsByUserID = `-- name: AnonymizeAuditLogsByUserID :exec
|
||||||
|
UPDATE audit_logs
|
||||||
|
SET actor_name = CASE WHEN actor_id = $1 THEN 'deleted-user' ELSE actor_name END,
|
||||||
|
actor_id = CASE WHEN actor_id = $1 THEN NULL ELSE actor_id END,
|
||||||
|
resource_id = CASE WHEN resource_type = 'member' AND resource_id = $1 THEN NULL ELSE resource_id END,
|
||||||
|
metadata = CASE WHEN resource_type = 'member' AND resource_id = $1 AND metadata ? 'email' THEN metadata - 'email' ELSE metadata END
|
||||||
|
WHERE actor_id = $1
|
||||||
|
OR (resource_type = 'member' AND resource_id = $1)
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) AnonymizeAuditLogsByUserID(ctx context.Context, actorID pgtype.Text) error {
|
||||||
|
_, err := q.db.Exec(ctx, anonymizeAuditLogsByUserID, actorID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
const insertAuditLog = `-- name: InsertAuditLog :exec
|
const insertAuditLog = `-- name: InsertAuditLog :exec
|
||||||
INSERT INTO audit_logs (id, team_id, actor_type, actor_id, actor_name, resource_type, resource_id, action, scope, status, metadata)
|
INSERT INTO audit_logs (id, team_id, actor_type, actor_id, actor_name, resource_type, resource_id, action, scope, status, metadata)
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
||||||
|
|||||||
@ -183,15 +183,6 @@ func (q *Queries) GetUserByID(ctx context.Context, id pgtype.UUID) (User, error)
|
|||||||
return i, err
|
return i, err
|
||||||
}
|
}
|
||||||
|
|
||||||
const hardDeleteExpiredUsers = `-- name: HardDeleteExpiredUsers :exec
|
|
||||||
DELETE FROM users WHERE deleted_at IS NOT NULL AND deleted_at < NOW() - INTERVAL '15 days'
|
|
||||||
`
|
|
||||||
|
|
||||||
func (q *Queries) HardDeleteExpiredUsers(ctx context.Context) error {
|
|
||||||
_, err := q.db.Exec(ctx, hardDeleteExpiredUsers)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
const hardDeleteUser = `-- name: HardDeleteUser :exec
|
const hardDeleteUser = `-- name: HardDeleteUser :exec
|
||||||
DELETE FROM users WHERE id = $1
|
DELETE FROM users WHERE id = $1
|
||||||
`
|
`
|
||||||
@ -334,6 +325,35 @@ func (q *Queries) InsertUserOAuth(ctx context.Context, arg InsertUserOAuthParams
|
|||||||
return i, err
|
return i, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const listExpiredSoftDeletedUsers = `-- name: ListExpiredSoftDeletedUsers :many
|
||||||
|
SELECT id, email FROM users WHERE deleted_at IS NOT NULL AND deleted_at < NOW() - INTERVAL '15 days'
|
||||||
|
`
|
||||||
|
|
||||||
|
type ListExpiredSoftDeletedUsersRow struct {
|
||||||
|
ID pgtype.UUID `json:"id"`
|
||||||
|
Email string `json:"email"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) ListExpiredSoftDeletedUsers(ctx context.Context) ([]ListExpiredSoftDeletedUsersRow, error) {
|
||||||
|
rows, err := q.db.Query(ctx, listExpiredSoftDeletedUsers)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []ListExpiredSoftDeletedUsersRow
|
||||||
|
for rows.Next() {
|
||||||
|
var i ListExpiredSoftDeletedUsersRow
|
||||||
|
if err := rows.Scan(&i.ID, &i.Email); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
const listUsersAdmin = `-- name: ListUsersAdmin :many
|
const listUsersAdmin = `-- name: ListUsersAdmin :many
|
||||||
SELECT
|
SELECT
|
||||||
u.id,
|
u.id,
|
||||||
|
|||||||
@ -3,6 +3,7 @@ package lifecycle
|
|||||||
import (
|
import (
|
||||||
"crypto/tls"
|
"crypto/tls"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"net"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
@ -115,6 +116,34 @@ func (p *HostClientPool) ResolveAddr(addr string) string {
|
|||||||
return p.ensureScheme(addr)
|
return p.ensureScheme(addr)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NewProxyTransport returns a new http.RoundTripper configured for proxying
|
||||||
|
// user traffic to sandbox services. It is intentionally separate from the RPC
|
||||||
|
// transport returned by Transport() so that heavy proxy traffic (Jupyter
|
||||||
|
// WebSocket, REST API polling) cannot interfere with Connect RPC streams (PTY,
|
||||||
|
// exec) via HTTP/2 flow control or connection pool contention.
|
||||||
|
func (p *HostClientPool) NewProxyTransport() http.RoundTripper {
|
||||||
|
t := &http.Transport{
|
||||||
|
ForceAttemptHTTP2: false, // HTTP/1.1 only — avoids HTTP/2 HOL blocking
|
||||||
|
MaxIdleConnsPerHost: 20,
|
||||||
|
MaxIdleConns: 100,
|
||||||
|
IdleConnTimeout: 120 * time.Second,
|
||||||
|
DisableCompression: true,
|
||||||
|
DialContext: (&net.Dialer{
|
||||||
|
Timeout: 30 * time.Second,
|
||||||
|
KeepAlive: 20 * time.Second,
|
||||||
|
}).DialContext,
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the pool uses TLS, the proxy transport must too.
|
||||||
|
if p.httpClient.Transport != nil {
|
||||||
|
if ht, ok := p.httpClient.Transport.(*http.Transport); ok && ht.TLSClientConfig != nil {
|
||||||
|
t.TLSClientConfig = ht.TLSClientConfig.Clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
// EnsureScheme adds "http://" if the address has no scheme.
|
// EnsureScheme adds "http://" if the address has no scheme.
|
||||||
// Deprecated: use pool.ResolveAddr which respects the pool's TLS setting.
|
// Deprecated: use pool.ResolveAddr which respects the pool's TLS setting.
|
||||||
func EnsureScheme(addr string) string {
|
func EnsureScheme(addr string) string {
|
||||||
|
|||||||
Reference in New Issue
Block a user