9 Commits

Author SHA1 Message Date
f298e4a2a6 fix: resolve heredoc parsing error in deploy-oci Makefile target
All checks were successful
Deploy to OCI / deploy (push) Successful in 51s
Extract inline Dockerfile to Dockerfile.oci and scp it instead of
generating it via heredoc over SSH. Make's line-continuation backslash
was causing the heredoc delimiter to be parsed as 'DEOFnFROM' by the
remote shell, breaking the image build step.
2026-04-09 20:32:23 +00:00
b58ba41458 fix: cap watermark dimensions to prevent RangeError on large files
Some checks failed
Deploy to OCI / deploy (push) Failing after 40s
buildWatermark() calls Array.join() on a lines array whose size is
derived from tile dimensions divided by wmFontSize. For files with very
many lines the codeFontSize (and thus wmFontSize) approaches zero,
making charsPerLine and lineCount astronomically large and blowing past
JS's string length limit.

Fix by:
1. Clamping wmFontSize to a minimum of 1.0 to handle pathologically
   large files.
2. Capping charsPerLine at 400 and lineCount at 150 — the watermark is
   purely decorative, so these caps have no visible impact.
2026-04-07 10:56:02 +01:00
f45e842370 fix: use aarch64-linux-musl-gcc linker for cross-compile to musl target
Some checks failed
Deploy to OCI / deploy (push) Failing after 40s
2026-04-07 10:46:34 +01:00
Fam Zheng
720c32c485 ci: use login shell (bash -l) to fix npm PATH on host runner
Some checks failed
Deploy to OCI / deploy (push) Failing after 30s
2026-04-07 10:43:17 +01:00
bd3842f854 ci: use absolute /usr/bin/npm path for host runner
Some checks failed
Deploy to OCI / deploy (push) Failing after 0s
2026-04-07 10:42:29 +01:00
8111812d3b ci: fix PATH for host runner - include /usr/bin and cargo bin
Some checks failed
Deploy to OCI / deploy (push) Failing after 0s
2026-04-07 10:41:27 +01:00
73e05ef0ad ci: fix checkout - replace actions/checkout@v4 with raw git (no node in host runner)
Some checks failed
Deploy to OCI / deploy (push) Failing after 0s
2026-04-07 10:40:36 +01:00
fam
b3652f2cb0 Merge pull request 'perf: lazy-load file content + fix oversized tile labels' (#5) from perf/lazy-load-and-font-fix into master
Some checks failed
Deploy to OCI / deploy (push) Failing after 0s
Reviewed-on: #5
2026-04-07 09:38:49 +00:00
Fam Zheng
398ae64ed9 perf: lazy-load file content and fix oversized tile labels
- Server now returns metadata-only tree on initial load (no file content
  in the JSON payload); content is served on-demand via the new
  GET /api/repos/{key}/file?path=... endpoint
- Cache still stores full content; strip_content() runs in-memory before
  the response is sent
- Frontend fetches file content lazily in _fetchContent() when a tile
  enters the LOD view, preventing a massive upfront JSON download for
  large repos (e.g. claude code)
- computeColorRanges() is now deferred to first _showCode() call instead
  of running synchronously for every file during load()
- Cap label fontSize at 5 world units to prevent giant text on large tiles
2026-04-07 10:37:31 +01:00
7 changed files with 150 additions and 45 deletions

View File

@@ -8,9 +8,17 @@ on:
jobs:
deploy:
runs-on: self-hosted
defaults:
run:
shell: bash -l {0}
steps:
- name: Checkout
uses: actions/checkout@v4
run: |
if [ -d .git ]; then
git fetch origin master && git reset --hard origin/master && git clean -fd
else
git clone ${{ gitea.server_url }}/${{ gitea.repository }} . && git checkout ${{ gitea.sha }}
fi
- name: Install frontend dependencies
run: cd web && npm ci

9
Dockerfile.oci Normal file
View File

@@ -0,0 +1,9 @@
# Runtime image for the repo-vis server; scp'd to the OCI host by the
# deploy-oci Makefile target and built there with docker build.
FROM alpine:3.21
# git: used by the server's clone step (scan_git shells out via Command);
# ca-certificates: needed for cloning over HTTPS.
RUN apk add --no-cache git ca-certificates
WORKDIR /app
# Both artifacts are pre-built on the host and staged into the build context:
# the musl-targeted server binary and the bundled frontend (web/dist).
COPY repo-vis-server ./
COPY dist ./web/dist/
# The server reads PORT and FRONTEND_DIR from the environment.
ENV PORT=8080
ENV FRONTEND_DIR=./web/dist
EXPOSE 8080
CMD ["./repo-vis-server"]

View File

@@ -39,18 +39,9 @@ deploy-oci: build-arm
ssh $(OCI_HOST) "rm -rf $(OCI_TMP) && mkdir -p $(OCI_TMP)"
scp server/target/$(MUSL_TARGET_ARM)/release/repo-vis-server $(OCI_HOST):$(OCI_TMP)/
cd web && tar czf /tmp/_rv_dist.tar.gz dist && scp /tmp/_rv_dist.tar.gz $(OCI_HOST):$(OCI_TMP)/
scp Dockerfile.oci $(OCI_HOST):$(OCI_TMP)/Dockerfile
@echo "==> Building image on OCI..."
ssh $(OCI_HOST) 'cd $(OCI_TMP) && tar xzf _rv_dist.tar.gz && cat > Dockerfile <<DEOF\n\
FROM alpine:3.21\n\
RUN apk add --no-cache git ca-certificates\n\
WORKDIR /app\n\
COPY repo-vis-server ./\n\
COPY dist ./web/dist/\n\
ENV PORT=8080\n\
ENV FRONTEND_DIR=./web/dist\n\
EXPOSE 8080\n\
CMD ["./repo-vis-server"]\n\
DEOF\n\
ssh $(OCI_HOST) 'cd $(OCI_TMP) && tar xzf _rv_dist.tar.gz && \
sudo docker build -t repo-vis:latest . && \
sudo docker save repo-vis:latest -o /tmp/_rv.tar && \
sudo k3s ctr images import /tmp/_rv.tar'

View File

@@ -1,2 +1,2 @@
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-gnu-gcc"
linker = "aarch64-linux-musl-gcc"

View File

@@ -2,7 +2,7 @@ mod cache;
mod scanner;
use axum::{
extract::{DefaultBodyLimit, Multipart, Path, State},
extract::{DefaultBodyLimit, Multipart, Path, Query, State},
http::StatusCode,
response::Json,
routing::{get, post},
@@ -10,7 +10,7 @@ use axum::{
};
use cache::{Cache, RepoEntry};
use scanner::{scan_dir, FileNode};
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::process::Command;
use std::sync::Arc;
@@ -27,6 +27,43 @@ struct GitRequest {
url: String,
}
/// Query parameters for GET /api/repos/{key}/file.
#[derive(Deserialize)]
struct FileQuery {
    // Relative path of the file inside the repo tree (matched against FileNode.path
    // by find_file_content).
    path: String,
}
/// Response wrapper that includes the cache key alongside the (content-stripped) tree.
#[derive(Serialize)]
struct TreeResponse {
    // Key the frontend echoes back to /api/repos/{key}/file to lazily fetch content.
    cache_key: String,
    // Repo tree with per-file content removed (strip_content) — metadata only.
    tree: FileNode,
}
/// Recursively strip file content so the initial response is metadata-only.
/// Mutates the node in place; the cache keeps the full-content copy.
fn strip_content(node: &mut FileNode) {
    node.content = None;
    // Option<Vec<_>>: iterate children when present, no-op when None.
    for child in node.children.iter_mut().flatten() {
        strip_content(child);
    }
}
/// Walk the cached tree to find a single file's content by its relative path.
/// Returns None when no file node with both a matching path and content exists.
fn find_file_content(node: &FileNode, path: &str) -> Option<String> {
    if node.path == path {
        if let Some(content) = &node.content {
            return Some(content.clone());
        }
    }
    // Depth-first over children (if any); `?` bails with None on leaf nodes.
    node.children
        .as_ref()?
        .iter()
        .find_map(|child| find_file_content(child, path))
}
fn count_leaves(node: &FileNode) -> usize {
match &node.children {
Some(children) => children.iter().map(count_leaves).sum(),
@@ -55,6 +92,7 @@ async fn main() {
.route("/api/scan-zip", post(scan_zip))
.route("/api/repos", get(list_repos))
.route("/api/repos/{key}", get(get_repo))
.route("/api/repos/{key}/file", get(get_file))
.layer(DefaultBodyLimit::max(100 * 1024 * 1024))
.with_state(state)
.fallback_service(ServeDir::new(frontend_dir).append_index_html_on_directories(true));
@@ -76,19 +114,38 @@ async fn list_repos(
async fn get_repo(
State(state): State<Arc<AppState>>,
Path(key): Path<String>,
) -> Result<Json<FileNode>, (StatusCode, String)> {
) -> Result<Json<TreeResponse>, (StatusCode, String)> {
state
.cache
.get(&key)
.and_then(|data| serde_json::from_str(&data).ok())
.map(Json)
.and_then(|data| serde_json::from_str::<FileNode>(&data).ok())
.map(|mut tree| {
strip_content(&mut tree);
Json(TreeResponse { cache_key: key, tree })
})
.ok_or((StatusCode::NOT_FOUND, "Repo not found in cache".to_string()))
}
/// GET /api/repos/{key}/file?path=… — serve one file's content from the cached tree.
///
/// Errors: 404 when the repo key is unknown or the path is not in the tree;
/// 500 when the cached JSON fails to deserialize.
async fn get_file(
    State(state): State<Arc<AppState>>,
    Path(key): Path<String>,
    Query(q): Query<FileQuery>,
) -> Result<Json<serde_json::Value>, (StatusCode, String)> {
    let Some(data) = state.cache.get(&key) else {
        return Err((StatusCode::NOT_FOUND, "Repo not found in cache".to_string()));
    };
    let tree = serde_json::from_str::<FileNode>(&data)
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
    match find_file_content(&tree, &q.path) {
        Some(content) => Ok(Json(serde_json::json!({ "content": content }))),
        None => Err((StatusCode::NOT_FOUND, format!("File not found: {}", q.path))),
    }
}
async fn scan_git(
State(state): State<Arc<AppState>>,
Json(req): Json<GitRequest>,
) -> Result<Json<FileNode>, (StatusCode, String)> {
) -> Result<Json<TreeResponse>, (StatusCode, String)> {
let url = req.url.trim().to_string();
if !url.starts_with("http://")
@@ -102,9 +159,10 @@ async fn scan_git(
let key = Cache::make_key(&format!("git:{url}"));
if let Some(cached) = state.cache.get(&key) {
info!("Cache hit for {url}");
let tree: FileNode =
let mut tree: FileNode =
serde_json::from_str(&cached).map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
return Ok(Json(tree));
strip_content(&mut tree);
return Ok(Json(TreeResponse { cache_key: key, tree }));
}
// Clone into temp dir
@@ -138,18 +196,20 @@ async fn scan_git(
let file_count = count_leaves(&tree);
// Cache with full content, then strip for response
if let Ok(json_str) = serde_json::to_string(&tree) {
state.cache.set(&key, &json_str);
state.cache.record_repo(&key, &repo_name, &url, file_count);
}
Ok(Json(tree))
strip_content(&mut tree);
Ok(Json(TreeResponse { cache_key: key, tree }))
}
async fn scan_zip(
State(state): State<Arc<AppState>>,
mut multipart: Multipart,
) -> Result<Json<FileNode>, (StatusCode, String)> {
) -> Result<Json<TreeResponse>, (StatusCode, String)> {
let field = multipart
.next_field()
.await
@@ -173,9 +233,10 @@ async fn scan_zip(
if let Some(cached) = state.cache.get(&key) {
info!("Cache hit for zip {file_name}");
let tree: FileNode =
let mut tree: FileNode =
serde_json::from_str(&cached).map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
return Ok(Json(tree));
strip_content(&mut tree);
return Ok(Json(TreeResponse { cache_key: key, tree }));
}
let tmp = TempDir::new().map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
@@ -235,10 +296,12 @@ async fn scan_zip(
let file_count = count_leaves(&tree);
// Cache with full content, then strip for response
if let Ok(json_str) = serde_json::to_string(&tree) {
state.cache.set(&key, &json_str);
state.cache.record_repo(&key, &zip_name, &format!("zip:{file_name}"), file_count);
}
Ok(Json(tree))
strip_content(&mut tree);
Ok(Json(TreeResponse { cache_key: key, tree }))
}

View File

@@ -31,7 +31,7 @@ function showError(msg) {
alert(msg);
}
async function visualize(tree, repoName) {
async function visualize(tree, repoName, cacheKey) {
showLoading("Building layout...");
// Wait for fonts to load so canvas renders them correctly
@@ -50,7 +50,7 @@ async function visualize(tree, repoName) {
showVisualization();
document.getElementById("osd-info").classList.add("active");
const renderer = new RepoRenderer(viewport, repoName || tree.name);
const renderer = new RepoRenderer(viewport, repoName || tree.name, cacheKey);
await renderer.load(leaves, totalWidth, totalHeight);
}
@@ -85,8 +85,8 @@ async function loadCachedRepo(key, name) {
try {
const res = await fetch(`/api/repos/${key}`);
if (!res.ok) throw new Error("Cache expired");
const tree = await res.json();
await visualize(tree, name);
const { cache_key, tree } = await res.json();
await visualize(tree, name, cache_key);
} catch (err) {
showError(err.message);
}
@@ -121,8 +121,8 @@ btnClone.addEventListener("click", async () => {
throw new Error(err.error || "Clone failed");
}
const tree = await res.json();
await visualize(tree);
const { cache_key, tree } = await res.json();
await visualize(tree, undefined, cache_key);
} catch (err) {
showError(err.message);
} finally {
@@ -174,8 +174,8 @@ async function uploadZip(file) {
throw new Error(err.error || "Upload failed");
}
const tree = await res.json();
await visualize(tree);
const { cache_key, tree } = await res.json();
await visualize(tree, undefined, cache_key);
} catch (err) {
showError(err.message);
}

View File

@@ -42,9 +42,10 @@ function buildWatermark(text, cols, rows) {
// ---------- renderer ----------
export class RepoRenderer {
constructor(container, repoName) {
constructor(container, repoName, cacheKey) {
this.container = container;
this.repoName = repoName || "repo";
this.cacheKey = cacheKey || null;
this.tiles = [];
this.bgMeshes = [];
this.raycaster = new THREE.Raycaster();
@@ -226,7 +227,7 @@ export class RepoRenderer {
// --- Label (always visible, cheap — one per file) ---
const label = new Text();
label.text = leaf.name;
label.fontSize = Math.min(leaf.w, leaf.h) * 0.15;
label.fontSize = Math.min(Math.min(leaf.w, leaf.h) * 0.15, 5);
label.color = 0xffffff;
label.anchorX = "center"; label.anchorY = "middle";
label.rotation.x = -Math.PI / 2;
@@ -235,13 +236,12 @@ export class RepoRenderer {
this.scene.add(label);
label.sync();
// Pre-compute syntax highlight ranges (cheap, no GPU)
const colorRanges = computeColorRanges(leaf.content, leaf.name);
this.tiles.push({
bgMesh, label, darkMat, colorRanges,
bgMesh, label, darkMat,
codeMesh: null, watermark: null, darkMesh: null,
data: leaf, showingCode: false, color, dist: Infinity
// colorRanges computed lazily on first _showCode
colorRanges: undefined,
data: leaf, showingCode: false, loading: false, color, dist: Infinity
});
this.bgMeshes.push(bgMesh);
}
@@ -254,10 +254,43 @@ export class RepoRenderer {
this.tooltip = document.getElementById("tooltip");
}
// -------- lazy content fetch --------
async _fetchContent(tile) {
try {
const res = await fetch(
`/api/repos/${encodeURIComponent(this.cacheKey)}/file?path=${encodeURIComponent(tile.data.path)}`
);
if (res.ok) {
const { content } = await res.json();
tile.data.content = content;
// Pre-compute colorRanges right after fetch (off the hot animation path)
tile.colorRanges = computeColorRanges(content, tile.data.name);
}
} catch {
// network error — leave content null, will retry next LOD cycle
} finally {
tile.loading = false;
}
}
// -------- lazy code/watermark creation --------
_showCode(tile) {
const d = tile.data;
// If content hasn't been loaded yet, kick off a fetch and bail
if (!d.content) {
if (!tile.loading) {
tile.loading = true;
if (this.cacheKey) this._fetchContent(tile);
}
return;
}
// Compute colorRanges lazily (only once, synchronous after content is available)
if (tile.colorRanges === undefined) {
tile.colorRanges = computeColorRanges(d.content, d.name);
}
// Dark bg
if (!tile.darkMesh) {
tile.darkMesh = new THREE.Mesh(new THREE.PlaneGeometry(d.w, d.h), tile.darkMat);
@@ -269,11 +302,12 @@ export class RepoRenderer {
// Watermark — tiled path text, 45° rotated, slightly larger than code font
if (!tile.watermark) {
const codeFontSize = (d.h / d.lines) * 0.65;
const wmFontSize = codeFontSize * 2.5;
// Clamp wmFontSize to avoid degenerate tiny values on files with huge line counts
const wmFontSize = Math.max(codeFontSize * 2.5, 1.0);
const wmLabel = `${this.repoName}/${d.path}`;
// Estimate how many repetitions to fill the area
const charsPerLine = Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 0.5));
const lineCount = Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 1.5));
// Estimate how many repetitions to fill the area; cap to prevent RangeError on massive tiles
const charsPerLine = Math.min(Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 0.5)), 400);
const lineCount = Math.min(Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 1.5)), 150);
const wmContent = buildWatermark(wmLabel, charsPerLine, lineCount);
const wm = new Text();