1
0
forked from wrenn/wrenn

Merge pull request 'Fixed crash on non-regular files and connection leaks' (#23) from hotfix/file-browsing-error-for-dev into dev

Reviewed-on: wrenn/wrenn#23
This commit is contained in:
2026-04-12 20:12:46 +00:00
5 changed files with 79 additions and 22 deletions

View File

@@ -11,7 +11,7 @@ WRENN_CP_LISTEN_ADDR=:9725
WRENN_HOST_LISTEN_ADDR=:50051 WRENN_HOST_LISTEN_ADDR=:50051
WRENN_DIR=/var/lib/wrenn WRENN_DIR=/var/lib/wrenn
WRENN_HOST_INTERFACE=eth0 WRENN_HOST_INTERFACE=eth0
WRENN_CP_URL=http://localhost:8080 WRENN_CP_URL=http://localhost:9725
WRENN_DEFAULT_ROOTFS_SIZE=5Gi WRENN_DEFAULT_ROOTFS_SIZE=5Gi
# Lago (billing — external service) # Lago (billing — external service)

View File

@@ -1,4 +1,5 @@
// SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: Apache-2.0
// Modifications by M/S Omukk
package api package api
@@ -106,6 +107,17 @@ func (a *API) GetFiles(w http.ResponseWriter, r *http.Request, params GetFilesPa
return return
} }
// Reject anything that isn't a regular file (devices, pipes, sockets, etc.).
// Reading device files like /dev/zero or /dev/urandom produces infinite data
// and will exhaust memory on all layers of the stack.
if !stat.Mode().IsRegular() {
errMsg = fmt.Errorf("path '%s' is not a regular file", resolvedPath)
errorCode = http.StatusBadRequest
jsonError(w, errorCode, errMsg)
return
}
// Validate Accept-Encoding header // Validate Accept-Encoding header
encoding, err := parseAcceptEncoding(r) encoding, err := parseAcceptEncoding(r)
if err != nil { if err != nil {

View File

@@ -72,7 +72,11 @@ export async function listDir(capsuleId: string, path: string, depth = 1): Promi
} }
} }
export async function readFile(capsuleId: string, path: string): Promise<ApiResult<string>> { export async function readFile(
capsuleId: string,
path: string,
signal?: AbortSignal,
): Promise<ApiResult<string>> {
try { try {
const headers: Record<string, string> = { 'Content-Type': 'application/json' }; const headers: Record<string, string> = { 'Content-Type': 'application/json' };
if (auth.token) headers['Authorization'] = `Bearer ${auth.token}`; if (auth.token) headers['Authorization'] = `Bearer ${auth.token}`;
@@ -81,6 +85,7 @@ export async function readFile(capsuleId: string, path: string): Promise<ApiResu
method: 'POST', method: 'POST',
headers, headers,
body: JSON.stringify({ path }), body: JSON.stringify({ path }),
signal,
}); });
if (!res.ok) { if (!res.ok) {
@@ -95,12 +100,20 @@ export async function readFile(capsuleId: string, path: string): Promise<ApiResu
const blob = await res.blob(); const blob = await res.blob();
const text = await blob.text(); const text = await blob.text();
return { ok: true, data: text }; return { ok: true, data: text };
} catch { } catch (e) {
if (e instanceof DOMException && e.name === 'AbortError') {
return { ok: false, error: 'Request aborted' };
}
return { ok: false, error: 'Unable to connect to the server' }; return { ok: false, error: 'Unable to connect to the server' };
} }
} }
export async function downloadFile(capsuleId: string, path: string, filename: string): Promise<void> { export async function downloadFile(
capsuleId: string,
path: string,
filename: string,
signal?: AbortSignal,
): Promise<void> {
const headers: Record<string, string> = { 'Content-Type': 'application/json' }; const headers: Record<string, string> = { 'Content-Type': 'application/json' };
if (auth.token) headers['Authorization'] = `Bearer ${auth.token}`; if (auth.token) headers['Authorization'] = `Bearer ${auth.token}`;
@@ -108,6 +121,7 @@ export async function downloadFile(capsuleId: string, path: string, filename: st
method: 'POST', method: 'POST',
headers, headers,
body: JSON.stringify({ path }), body: JSON.stringify({ path }),
signal,
}); });
if (!res.ok) throw new Error('Download failed'); if (!res.ok) throw new Error('Download failed');

View File

@@ -1,4 +1,5 @@
<script lang="ts"> <script lang="ts">
import { onDestroy } from 'svelte';
import { import {
listDir, listDir,
readFile, readFile,
@@ -37,6 +38,14 @@
let dirGeneration = 0; let dirGeneration = 0;
let fileGeneration = 0; let fileGeneration = 0;
// AbortController for in-flight file reads — aborted when the user
// selects a different file or the component is torn down.
let fileAbort: AbortController | null = null;
onDestroy(() => {
fileAbort?.abort();
});
const MAX_PREVIEW_LINES = 5000; const MAX_PREVIEW_LINES = 5000;
const MAX_HIGHLIGHT_LINES = 2000; // Don't tokenize huge files — diminishing returns const MAX_HIGHLIGHT_LINES = 2000; // Don't tokenize huge files — diminishing returns
@@ -83,6 +92,10 @@
const canGoUp = $derived(currentPath !== '/' && currentPath.startsWith('/')); const canGoUp = $derived(currentPath !== '/' && currentPath.startsWith('/'));
async function navigateTo(path: string) { async function navigateTo(path: string) {
// Abort any in-flight file read and invalidate stale generation so the
// abort error isn't surfaced in the UI.
fileAbort?.abort();
++fileGeneration;
currentPath = normalizePath(path); currentPath = normalizePath(path);
pathInput = currentPath; pathInput = currentPath;
selectedFile = null; selectedFile = null;
@@ -146,6 +159,9 @@
return; return;
} }
// Abort any in-flight file read before starting a new one.
fileAbort?.abort();
selectedFile = entry; selectedFile = entry;
fileContent = null; fileContent = null;
fileError = null; fileError = null;
@@ -159,26 +175,31 @@
fileLoading = true; fileLoading = true;
const gen = ++fileGeneration; const gen = ++fileGeneration;
const result = await readFile(capsuleId, entry.path); const controller = new AbortController();
if (gen !== fileGeneration) return; // stale response — user clicked another file fileAbort = controller;
if (result.ok) { try {
if (looksLikeBinary(result.data)) { const result = await readFile(capsuleId, entry.path, controller.signal);
fileContent = null; if (gen !== fileGeneration) return; // stale response — user clicked another file
} else { if (result.ok) {
fileContent = result.data; if (looksLikeBinary(result.data)) {
// Kick off highlighting in the background — preview shows plain text immediately. fileContent = null;
// Only tokenize up to MAX_HIGHLIGHT_LINES to avoid freezing on large files. } else {
const linesToHighlight = result.data.split('\n').length > MAX_HIGHLIGHT_LINES fileContent = result.data;
? result.data.split('\n').slice(0, MAX_HIGHLIGHT_LINES).join('\n') // Kick off highlighting in the background — preview shows plain text immediately.
: result.data; // Only tokenize up to MAX_HIGHLIGHT_LINES to avoid freezing on large files.
tokenize(linesToHighlight, entry.name).then((tokens) => { const linesToHighlight = result.data.split('\n').length > MAX_HIGHLIGHT_LINES
if (gen === fileGeneration) highlightedTokens = tokens; ? result.data.split('\n').slice(0, MAX_HIGHLIGHT_LINES).join('\n')
}); : result.data;
tokenize(linesToHighlight, entry.name).then((tokens) => {
if (gen === fileGeneration) highlightedTokens = tokens;
});
}
} else if (result.error !== 'Request aborted') {
fileError = result.error;
} }
} else { } finally {
fileError = result.error; if (gen === fileGeneration) fileLoading = false;
} }
fileLoading = false;
} }
function looksLikeBinary(text: string): boolean { function looksLikeBinary(text: string): boolean {

View File

@@ -516,6 +516,16 @@ func (s *Server) ReadFileStream(
// Stream file content in 64KB chunks. // Stream file content in 64KB chunks.
buf := make([]byte, 64*1024) buf := make([]byte, 64*1024)
for { for {
// Bail out early if the client disconnected or the context was cancelled.
select {
case <-ctx.Done():
if ctx.Err() == context.DeadlineExceeded {
return connect.NewError(connect.CodeDeadlineExceeded, ctx.Err())
}
return connect.NewError(connect.CodeCanceled, ctx.Err())
default:
}
n, err := resp.Body.Read(buf) n, err := resp.Body.Read(buf)
if n > 0 { if n > 0 {
chunk := make([]byte, n) chunk := make([]byte, n)