Initial commit: repo-vis — 3D codebase visualization
Rust (axum) backend with git clone / zip upload / SQLite cache. Three.js frontend with D3 treemap layout and semantic zoom. Docker deployment with musl static binary.
This commit is contained in:
13
.dockerignore
Normal file
13
.dockerignore
Normal file
@@ -0,0 +1,13 @@
|
||||
# Keep the musl release binary, ignore the rest
|
||||
server/target/debug
|
||||
server/target/release
|
||||
server/target/x86_64-unknown-linux-musl/debug
|
||||
server/target/x86_64-unknown-linux-musl/release/build
|
||||
server/target/x86_64-unknown-linux-musl/release/deps
|
||||
server/target/x86_64-unknown-linux-musl/release/.fingerprint
|
||||
server/target/x86_64-unknown-linux-musl/release/incremental
|
||||
server/target/x86_64-unknown-linux-musl/release/examples
|
||||
web/node_modules
|
||||
web/dist
|
||||
*.db
|
||||
data/
|
||||
5
.gitignore
vendored
Normal file
5
.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
server/target/
|
||||
web/node_modules/
|
||||
web/dist/
|
||||
data/
|
||||
*.db
|
||||
21
Dockerfile
Normal file
21
Dockerfile
Normal file
@@ -0,0 +1,21 @@
|
||||
# Stage 1: Build frontend
FROM node:22-slim AS frontend
WORKDIR /build
# Copy only the manifests first so dependency install is layer-cached.
COPY web/package.json web/package-lock.json ./
RUN npm ci
COPY web/ ./
RUN npm run build

# Stage 2: Minimal runtime (binary built locally with musl)
FROM alpine:3.21
# git is required at runtime for the /api/scan-git clone endpoint.
RUN apk add --no-cache git ca-certificates

WORKDIR /app
# Statically linked musl binary is built on the host (see Makefile),
# not inside Docker.
COPY server/target/x86_64-unknown-linux-musl/release/repo-vis-server ./
COPY --from=frontend /build/dist ./web/dist/

# Defaults read by the server at startup (see server/src/main.rs).
ENV PORT=8080
ENV FRONTEND_DIR=./web/dist
EXPOSE 8080

CMD ["./repo-vis-server"]
|
||||
30
Makefile
Normal file
30
Makefile
Normal file
@@ -0,0 +1,30 @@
|
||||
.PHONY: build-server build-web build deploy clean

# Static musl target so the binary runs on Alpine without glibc.
MUSL_TARGET := x86_64-unknown-linux-musl
CONTAINER := repo-vis
IMAGE := repo-vis:latest
PORT := 9120

build-server:
	cd server && cargo build --release --target $(MUSL_TARGET)

build-web:
	cd web && npm run build

build: build-server build-web

# Rebuild the image and (re)start the container; the leading "-" lets
# stop/rm fail harmlessly when no previous container exists.
deploy: build
	-docker stop $(CONTAINER) 2>/dev/null
	-docker rm $(CONTAINER) 2>/dev/null
	docker build -t $(IMAGE) .
	docker run -d \
		--name $(CONTAINER) \
		-p $(PORT):8080 \
		-v repo-vis-data:/app/data \
		--restart unless-stopped \
		$(IMAGE)
	@echo "repo-vis running at http://localhost:$(PORT)"

clean:
	cd server && cargo clean
	rm -rf web/dist
|
||||
1751
server/Cargo.lock
generated
Normal file
1751
server/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
19
server/Cargo.toml
Normal file
19
server/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
|
||||
[package]
|
||||
name = "repo-vis-server"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
axum = { version = "0.8", features = ["multipart"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tower-http = { version = "0.6", features = ["fs", "cors"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
rusqlite = { version = "0.32", features = ["bundled"] }
|
||||
walkdir = "2"
|
||||
sha2 = "0.10"
|
||||
hex = "0.4"
|
||||
zip = "2"
|
||||
tempfile = "3"
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = "0.3"
|
||||
131
server/src/cache.rs
Normal file
131
server/src/cache.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
use rusqlite::Connection;
|
||||
use serde::Serialize;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::path::Path;
|
||||
use std::sync::Mutex;
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
const TTL_SECS: u64 = 24 * 60 * 60; // 24 hours
|
||||
|
||||
/// Metadata row for a previously scanned repository, returned by the
/// `/api/repos` history endpoint (serialized to JSON).
#[derive(Debug, Serialize)]
pub struct RepoEntry {
    // Display name (repo basename or zip name without extension).
    pub name: String,
    pub source: String, // "git:url" or "zip:filename"
    // Number of leaf (file) nodes in the scanned tree.
    pub file_count: usize,
    // SHA-256 key into `scan_cache`; used to refetch the cached tree.
    pub cache_key: String,
    // Unix timestamp (seconds) of when the scan was recorded.
    pub created: u64,
}
|
||||
|
||||
/// SQLite-backed scan cache shared across request handlers.
///
/// The connection is wrapped in a `Mutex` because rusqlite connections
/// are not `Sync`; handlers take the lock per operation.
pub struct Cache {
    conn: Mutex<Connection>,
}
|
||||
|
||||
impl Cache {
|
||||
pub fn new(db_path: &Path) -> Self {
|
||||
if let Some(parent) = db_path.parent() {
|
||||
std::fs::create_dir_all(parent).ok();
|
||||
}
|
||||
|
||||
let conn = Connection::open(db_path).expect("Failed to open cache database");
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS scan_cache (
|
||||
key TEXT PRIMARY KEY,
|
||||
data TEXT NOT NULL,
|
||||
created INTEGER NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS repos (
|
||||
cache_key TEXT PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
source TEXT NOT NULL,
|
||||
file_count INTEGER NOT NULL DEFAULT 0,
|
||||
created INTEGER NOT NULL
|
||||
);",
|
||||
)
|
||||
.expect("Failed to create tables");
|
||||
|
||||
Cache {
|
||||
conn: Mutex::new(conn),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn make_key(input: &str) -> String {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(input.as_bytes());
|
||||
hex::encode(hasher.finalize())
|
||||
}
|
||||
|
||||
pub fn get(&self, key: &str) -> Option<String> {
|
||||
let conn = self.conn.lock().unwrap();
|
||||
let now = now_secs();
|
||||
|
||||
// Prune old entries
|
||||
conn.execute(
|
||||
"DELETE FROM scan_cache WHERE created < ?1",
|
||||
[now.saturating_sub(TTL_SECS)],
|
||||
)
|
||||
.ok();
|
||||
|
||||
conn.query_row(
|
||||
"SELECT data, created FROM scan_cache WHERE key = ?1",
|
||||
[key],
|
||||
|row| {
|
||||
let data: String = row.get(0)?;
|
||||
let created: u64 = row.get(1)?;
|
||||
Ok((data, created))
|
||||
},
|
||||
)
|
||||
.ok()
|
||||
.and_then(|(data, created)| {
|
||||
if now - created < TTL_SECS {
|
||||
Some(data)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set(&self, key: &str, data: &str) {
|
||||
let conn = self.conn.lock().unwrap();
|
||||
conn.execute(
|
||||
"INSERT OR REPLACE INTO scan_cache (key, data, created) VALUES (?1, ?2, ?3)",
|
||||
rusqlite::params![key, data, now_secs()],
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn record_repo(&self, cache_key: &str, name: &str, source: &str, file_count: usize) {
|
||||
let conn = self.conn.lock().unwrap();
|
||||
conn.execute(
|
||||
"INSERT OR REPLACE INTO repos (cache_key, name, source, file_count, created) VALUES (?1, ?2, ?3, ?4, ?5)",
|
||||
rusqlite::params![cache_key, name, source, file_count, now_secs()],
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn list_repos(&self) -> Vec<RepoEntry> {
|
||||
let conn = self.conn.lock().unwrap();
|
||||
let mut stmt = conn
|
||||
.prepare("SELECT cache_key, name, source, file_count, created FROM repos ORDER BY created DESC LIMIT 50")
|
||||
.unwrap();
|
||||
|
||||
stmt.query_map([], |row| {
|
||||
Ok(RepoEntry {
|
||||
cache_key: row.get(0)?,
|
||||
name: row.get(1)?,
|
||||
source: row.get(2)?,
|
||||
file_count: row.get(3)?,
|
||||
created: row.get(4)?,
|
||||
})
|
||||
})
|
||||
.unwrap()
|
||||
.filter_map(|r| r.ok())
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// Current Unix time in whole seconds.
fn now_secs() -> u64 {
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock is set before the Unix epoch");
    since_epoch.as_secs()
}
|
||||
213
server/src/main.rs
Normal file
213
server/src/main.rs
Normal file
@@ -0,0 +1,213 @@
|
||||
mod cache;
|
||||
mod scanner;
|
||||
|
||||
use axum::{
|
||||
extract::{DefaultBodyLimit, Multipart, Path, State},
|
||||
http::StatusCode,
|
||||
response::Json,
|
||||
routing::{get, post},
|
||||
Router,
|
||||
};
|
||||
use cache::{Cache, RepoEntry};
|
||||
use scanner::{scan_dir, FileNode};
|
||||
use serde::Deserialize;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::process::Command;
|
||||
use std::sync::Arc;
|
||||
use tempfile::TempDir;
|
||||
use tower_http::services::ServeDir;
|
||||
use tracing::info;
|
||||
|
||||
/// Shared application state, handed to every handler via `Arc`.
struct AppState {
    // SQLite-backed scan-result cache (see cache.rs).
    cache: Cache,
}
|
||||
|
||||
/// JSON request body for `POST /api/scan-git`: `{ "url": "<git url>" }`.
#[derive(Deserialize)]
struct GitRequest {
    url: String,
}
|
||||
|
||||
fn count_leaves(node: &FileNode) -> usize {
|
||||
match &node.children {
|
||||
Some(children) => children.iter().map(count_leaves).sum(),
|
||||
None => 1,
|
||||
}
|
||||
}
|
||||
|
||||
/// Server entry point: initializes tracing, opens the SQLite cache,
/// builds the axum router, and serves the JSON API plus the static
/// frontend bundle.
#[tokio::main]
async fn main() {
    tracing_subscriber::fmt::init();

    // Directory for cache.db; created on first run. Override via DATA_DIR.
    let data_dir_str = std::env::var("DATA_DIR").unwrap_or_else(|_| "data".to_string());
    let data_dir = std::path::Path::new(&data_dir_str);
    std::fs::create_dir_all(data_dir).ok();

    let state = Arc::new(AppState {
        cache: Cache::new(&data_dir.join("cache.db")),
    });

    // Static frontend bundle; defaults to the sibling Vite build output.
    let frontend_dir_str =
        std::env::var("FRONTEND_DIR").unwrap_or_else(|_| "../web/dist".to_string());
    let frontend_dir = std::path::Path::new(&frontend_dir_str);

    let app = Router::new()
        .route("/api/scan-git", post(scan_git))
        .route("/api/scan-zip", post(scan_zip))
        .route("/api/repos", get(list_repos))
        .route("/api/repos/{key}", get(get_repo))
        // Allow large zip uploads (100 MiB) through the multipart handler.
        .layer(DefaultBodyLimit::max(100 * 1024 * 1024))
        .with_state(state)
        // Everything that isn't /api/* falls through to the static files.
        .fallback_service(ServeDir::new(frontend_dir).append_index_html_on_directories(true));

    let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string());
    let addr = format!("0.0.0.0:{port}");
    info!("repo-vis server running at http://localhost:{port}");

    // Panic on bind/serve failure: there is no useful recovery at startup.
    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
    axum::serve(listener, app).await.unwrap();
}
|
||||
|
||||
/// GET /api/repos — the recent-scan history list for the landing page.
async fn list_repos(
    State(state): State<Arc<AppState>>,
) -> Json<Vec<RepoEntry>> {
    Json(state.cache.list_repos())
}
|
||||
|
||||
async fn get_repo(
|
||||
State(state): State<Arc<AppState>>,
|
||||
Path(key): Path<String>,
|
||||
) -> Result<Json<FileNode>, (StatusCode, String)> {
|
||||
state
|
||||
.cache
|
||||
.get(&key)
|
||||
.and_then(|data| serde_json::from_str(&data).ok())
|
||||
.map(Json)
|
||||
.ok_or((StatusCode::NOT_FOUND, "Repo not found in cache".to_string()))
|
||||
}
|
||||
|
||||
/// POST /api/scan-git — shallow-clone a git repository and return its
/// scanned file tree, caching the serialized result by URL.
async fn scan_git(
    State(state): State<Arc<AppState>>,
    Json(req): Json<GitRequest>,
) -> Result<Json<FileNode>, (StatusCode, String)> {
    let url = req.url.trim().to_string();

    // Accept only plausible git transports; anything else is rejected
    // before it ever reaches the `git` binary.
    if !url.starts_with("http://")
        && !url.starts_with("https://")
        && !url.starts_with("git@")
    {
        return Err((StatusCode::BAD_REQUEST, "Invalid git URL".to_string()));
    }

    // Cache hit skips the clone entirely.
    let key = Cache::make_key(&format!("git:{url}"));
    if let Some(cached) = state.cache.get(&key) {
        info!("Cache hit for {url}");
        let tree: FileNode =
            serde_json::from_str(&cached).map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
        return Ok(Json(tree));
    }

    // Clone into a temp dir that is deleted when `tmp` drops.
    let tmp = TempDir::new().map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;

    info!("Cloning {url} ...");
    // `--depth 1` keeps the clone minimal; `--` prevents the URL from
    // being interpreted as a git option.
    // NOTE(review): std::process::Command blocks this async worker for
    // the whole clone — consider tokio::process::Command or
    // spawn_blocking; confirm against expected concurrency.
    let output = Command::new("git")
        .args(["clone", "--depth", "1", "--", &url])
        .arg(tmp.path())
        .output()
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("git clone failed: {e}")))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err((
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("git clone failed: {stderr}"),
        ));
    }

    let mut tree = scan_dir(tmp.path(), tmp.path());

    // Display name from the last URL segment:
    // "https://host/user/repo.git" -> "repo".
    let repo_name = url
        .trim_end_matches('/')
        .trim_end_matches(".git")
        .rsplit('/')
        .next()
        .unwrap_or("repo")
        .to_string();
    tree.name = repo_name.clone();

    let file_count = count_leaves(&tree);

    // Best-effort cache write; a serialization failure only skips caching.
    if let Ok(json_str) = serde_json::to_string(&tree) {
        state.cache.set(&key, &json_str);
        state.cache.record_repo(&key, &repo_name, &url, file_count);
    }

    Ok(Json(tree))
}
|
||||
|
||||
/// POST /api/scan-zip — accept a multipart zip upload, extract it to a
/// temp dir, and return the scanned tree, cached by content hash.
async fn scan_zip(
    State(state): State<Arc<AppState>>,
    mut multipart: Multipart,
) -> Result<Json<FileNode>, (StatusCode, String)> {
    // The first multipart field is treated as the uploaded archive.
    let field = multipart
        .next_field()
        .await
        .map_err(|e| (StatusCode::BAD_REQUEST, e.to_string()))?
        .ok_or((StatusCode::BAD_REQUEST, "No file uploaded".to_string()))?;

    let file_name = field
        .file_name()
        .unwrap_or("upload.zip")
        .to_string();

    // Buffer the whole upload in memory (bounded by DefaultBodyLimit).
    let data = field
        .bytes()
        .await
        .map_err(|e| (StatusCode::BAD_REQUEST, format!("Failed to read upload: {e}")))?;

    // Key by content hash so re-uploading the same archive hits the
    // cache regardless of its file name.
    let mut hasher = Sha256::new();
    hasher.update(&data);
    let hash = hex::encode(hasher.finalize());
    let key = Cache::make_key(&format!("zip:{hash}"));

    if let Some(cached) = state.cache.get(&key) {
        info!("Cache hit for zip {file_name}");
        let tree: FileNode =
            serde_json::from_str(&cached).map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
        return Ok(Json(tree));
    }

    let tmp = TempDir::new().map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
    let cursor = std::io::Cursor::new(&data);
    let mut archive =
        zip::ZipArchive::new(cursor).map_err(|e| (StatusCode::BAD_REQUEST, format!("Invalid zip: {e}")))?;

    archive
        .extract(tmp.path())
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Extract failed: {e}")))?;

    // If the archive wraps everything in a single top-level folder (the
    // common "repo-main/" layout), scan inside that folder instead so
    // the tree root is the project itself.
    let entries: Vec<_> = std::fs::read_dir(tmp.path())
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?
        .filter_map(|e| e.ok())
        .collect();

    let scan_root = if entries.len() == 1 && entries[0].file_type().map(|t| t.is_dir()).unwrap_or(false) {
        entries[0].path()
    } else {
        tmp.path().to_path_buf()
    };

    let mut tree = scan_dir(&scan_root, &scan_root);
    let zip_name = file_name.trim_end_matches(".zip").to_string();
    tree.name = zip_name.clone();

    let file_count = count_leaves(&tree);

    // Best-effort cache write; failure to serialize only skips caching.
    if let Ok(json_str) = serde_json::to_string(&tree) {
        state.cache.set(&key, &json_str);
        state.cache.record_repo(&key, &zip_name, &format!("zip:{file_name}"), file_count);
    }

    Ok(Json(tree))
}
|
||||
172
server/src/scanner.rs
Normal file
172
server/src/scanner.rs
Normal file
@@ -0,0 +1,172 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
const MAX_FILE_SIZE: u64 = 256 * 1024;
|
||||
|
||||
static IGNORE_DIRS: &[&str] = &[
|
||||
"node_modules", ".git", ".hg", ".svn", "dist", "build", "__pycache__",
|
||||
".next", ".nuxt", ".output", "vendor", ".venv", "venv", "target",
|
||||
".idea", ".vscode", ".cache",
|
||||
];
|
||||
|
||||
static TEXT_EXTENSIONS: &[&str] = &[
|
||||
"js", "jsx", "ts", "tsx", "mjs", "cjs", "py", "pyw", "go", "rs", "c", "h", "cpp", "hpp",
|
||||
"cc", "java", "kt", "scala", "rb", "php", "lua", "pl", "pm", "sh", "bash", "zsh", "fish",
|
||||
"html", "htm", "css", "scss", "less", "sass", "json", "yaml", "yml", "toml", "ini", "cfg",
|
||||
"xml", "svg", "md", "txt", "rst", "sql", "graphql", "gql", "vue", "svelte", "astro", "tf",
|
||||
"hcl", "proto", "thrift", "r", "jl", "ex", "exs", "erl", "hrl", "zig", "nim", "v",
|
||||
"swift", "m", "mm", "cs", "fs", "fsx", "hs", "lhs", "ml", "mli", "clj", "cljs", "cljc",
|
||||
"edn", "el", "lisp", "scm", "rkt", "cmake",
|
||||
];
|
||||
|
||||
static SPECIAL_NAMES: &[&str] = &[
|
||||
"Makefile", "Dockerfile", "Containerfile", "Vagrantfile", "Rakefile", "Gemfile",
|
||||
"Brewfile", "Procfile", "CMakeLists.txt", "meson.build", "BUILD", "WORKSPACE",
|
||||
".gitignore", ".dockerignore", ".editorconfig", ".env.example",
|
||||
];
|
||||
|
||||
/// One node of the scanned repository tree, serialized to JSON for the
/// frontend. Directories carry `children`; files carry `lines`,
/// `max_len`, and `content`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FileNode {
    // File or directory base name.
    pub name: String,
    // Path relative to the scan root ("." for the root itself).
    pub path: String,
    // Line count — set for files only.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lines: Option<usize>,
    // 99th-percentile line length in bytes (files only); JSON key "maxLen".
    #[serde(rename = "maxLen", skip_serializing_if = "Option::is_none")]
    pub max_len: Option<usize>,
    // Full UTF-8 file content (files only).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,
    // Child nodes (directories only; empty dirs are pruned by scan_dir).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub children: Option<Vec<FileNode>>,
}
|
||||
|
||||
fn is_text_file(name: &str, ext: Option<&str>) -> bool {
|
||||
if SPECIAL_NAMES.contains(&name) {
|
||||
return true;
|
||||
}
|
||||
match ext {
|
||||
Some(e) => {
|
||||
let lower = e.to_ascii_lowercase();
|
||||
TEXT_EXTENSIONS.contains(&lower.as_str())
|
||||
}
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn scan_dir(dir: &Path, root: &Path) -> FileNode {
|
||||
let ignore_set: HashSet<&str> = IGNORE_DIRS.iter().copied().collect();
|
||||
let mut children = Vec::new();
|
||||
|
||||
let entries = match fs::read_dir(dir) {
|
||||
Ok(e) => e,
|
||||
Err(_) => {
|
||||
return FileNode {
|
||||
name: dir_name(dir),
|
||||
path: rel_path(dir, root),
|
||||
lines: None,
|
||||
max_len: None,
|
||||
content: None,
|
||||
children: Some(Vec::new()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
let mut sorted: Vec<_> = entries.filter_map(|e| e.ok()).collect();
|
||||
sorted.sort_by(|a, b| a.file_name().cmp(&b.file_name()));
|
||||
|
||||
for entry in sorted {
|
||||
let name = entry.file_name();
|
||||
let name_str = name.to_string_lossy();
|
||||
|
||||
if ignore_set.contains(name_str.as_ref()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let path = entry.path();
|
||||
let ft = match entry.file_type() {
|
||||
Ok(ft) => ft,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
if ft.is_dir() {
|
||||
let subtree = scan_dir(&path, root);
|
||||
if subtree
|
||||
.children
|
||||
.as_ref()
|
||||
.map(|c| !c.is_empty())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
children.push(subtree);
|
||||
}
|
||||
} else if ft.is_file() {
|
||||
let ext = path.extension().and_then(|e| e.to_str());
|
||||
if !is_text_file(&name_str, ext) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let meta = match fs::metadata(&path) {
|
||||
Ok(m) => m,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
if meta.len() == 0 || meta.len() > MAX_FILE_SIZE {
|
||||
continue;
|
||||
}
|
||||
|
||||
let content = match fs::read_to_string(&path) {
|
||||
Ok(c) => c,
|
||||
Err(_) => continue, // binary or encoding error
|
||||
};
|
||||
|
||||
let lines: Vec<&str> = content.split('\n').collect();
|
||||
let line_count = lines.len();
|
||||
// P99 line length to avoid outlier long lines distorting layout
|
||||
let max_len = percentile_line_len(&lines, 99).max(1);
|
||||
|
||||
children.push(FileNode {
|
||||
name: name_str.to_string(),
|
||||
path: rel_path(&path, root),
|
||||
lines: Some(line_count),
|
||||
max_len: Some(max_len),
|
||||
content: Some(content),
|
||||
children: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
FileNode {
|
||||
name: dir_name(dir),
|
||||
path: rel_path(dir, root),
|
||||
lines: None,
|
||||
max_len: None,
|
||||
content: None,
|
||||
children: Some(children),
|
||||
}
|
||||
}
|
||||
|
||||
/// Length (in bytes) of the line at the given percentile of `lines`;
/// e.g. `pct = 99` gives the P99 line length. Returns 0 for no lines.
fn percentile_line_len(lines: &[&str], pct: usize) -> usize {
    if lines.is_empty() {
        return 0;
    }
    let mut sorted_lens: Vec<usize> = Vec::with_capacity(lines.len());
    for line in lines {
        sorted_lens.push(line.len());
    }
    sorted_lens.sort_unstable();
    // Truncating index; clamped so pct >= 100 selects the last element.
    let last = sorted_lens.len() - 1;
    sorted_lens[(sorted_lens.len() * pct / 100).min(last)]
}
|
||||
|
||||
/// Final path component as an owned String, or "." when the path has no
/// file name (e.g. "/" or an empty path) or it is not valid UTF-8.
fn dir_name(dir: &Path) -> String {
    match dir.file_name().and_then(|n| n.to_str()) {
        Some(name) => name.to_string(),
        None => ".".to_string(),
    }
}
|
||||
|
||||
/// `path` rendered relative to `root`; "." when they are equal or when
/// `path` is not under `root` at all.
fn rel_path(path: &Path, root: &Path) -> String {
    let Ok(rel) = path.strip_prefix(root) else {
        return ".".to_string();
    };
    let s = rel.to_string_lossy();
    if s.is_empty() {
        ".".to_string()
    } else {
        s.into_owned()
    }
}
|
||||
288
web/index.html
Normal file
288
web/index.html
Normal file
@@ -0,0 +1,288 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>repo-vis</title>
|
||||
<!-- Terminus (code) + LXGW WenKai Mono (CJK) -->
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@xz/fonts@1/serve/terminus.min.css">
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/lxgw-wenkai-mono-webfont@1.7.0/style.css">
|
||||
<style>
|
||||
* { margin: 0; padding: 0; box-sizing: border-box; }
|
||||
|
||||
body {
|
||||
font-family: -apple-system, "Segoe UI", Roboto, sans-serif;
|
||||
background: #11111b;
|
||||
color: #cdd6f4;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
/* Landing page */
|
||||
#landing {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100vh;
|
||||
gap: 32px;
|
||||
}
|
||||
|
||||
#landing h1 {
|
||||
font-size: 48px;
|
||||
font-weight: 300;
|
||||
letter-spacing: -1px;
|
||||
color: #cba6f7;
|
||||
}
|
||||
|
||||
#landing .subtitle {
|
||||
font-size: 16px;
|
||||
color: #6c7086;
|
||||
margin-top: -20px;
|
||||
}
|
||||
|
||||
.input-group {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 16px;
|
||||
width: 500px;
|
||||
max-width: 90vw;
|
||||
}
|
||||
|
||||
.input-row {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.input-row input[type="text"] {
|
||||
flex: 1;
|
||||
padding: 12px 16px;
|
||||
border: 1px solid #313244;
|
||||
border-radius: 8px;
|
||||
background: #1e1e2e;
|
||||
color: #cdd6f4;
|
||||
font-size: 15px;
|
||||
outline: none;
|
||||
transition: border-color 0.2s;
|
||||
}
|
||||
|
||||
.input-row input[type="text"]:focus {
|
||||
border-color: #cba6f7;
|
||||
}
|
||||
|
||||
.input-row input[type="text"]::placeholder {
|
||||
color: #585b70;
|
||||
}
|
||||
|
||||
button {
|
||||
padding: 12px 24px;
|
||||
border: none;
|
||||
border-radius: 8px;
|
||||
background: #cba6f7;
|
||||
color: #1e1e2e;
|
||||
font-size: 15px;
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
button:hover { background: #b4befe; }
|
||||
button:disabled { opacity: 0.5; cursor: not-allowed; }
|
||||
|
||||
.divider {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
color: #585b70;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.divider::before, .divider::after {
|
||||
content: "";
|
||||
flex: 1;
|
||||
height: 1px;
|
||||
background: #313244;
|
||||
}
|
||||
|
||||
.drop-zone {
|
||||
border: 2px dashed #313244;
|
||||
border-radius: 12px;
|
||||
padding: 32px;
|
||||
text-align: center;
|
||||
color: #6c7086;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.2s, background 0.2s;
|
||||
}
|
||||
|
||||
.drop-zone:hover, .drop-zone.dragover {
|
||||
border-color: #cba6f7;
|
||||
background: rgba(203, 166, 247, 0.05);
|
||||
}
|
||||
|
||||
.drop-zone input { display: none; }
|
||||
|
||||
/* Loading state */
|
||||
#loading {
|
||||
display: none;
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: #11111b;
|
||||
z-index: 100;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
#loading.active { display: flex; }
|
||||
|
||||
.spinner {
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
border: 3px solid #313244;
|
||||
border-top-color: #cba6f7;
|
||||
border-radius: 50%;
|
||||
animation: spin 0.8s linear infinite;
|
||||
}
|
||||
|
||||
@keyframes spin { to { transform: rotate(360deg); } }
|
||||
|
||||
#loading-text { color: #6c7086; font-size: 14px; }
|
||||
|
||||
/* Visualization */
|
||||
#viewport {
|
||||
display: none;
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
}
|
||||
|
||||
#viewport.active { display: block; }
|
||||
|
||||
/* Tooltip */
|
||||
#tooltip {
|
||||
display: none;
|
||||
position: fixed;
|
||||
bottom: 16px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
padding: 8px 16px;
|
||||
background: rgba(30, 30, 46, 0.9);
|
||||
backdrop-filter: blur(8px);
|
||||
border: 1px solid #313244;
|
||||
border-radius: 8px;
|
||||
font-size: 13px;
|
||||
font-family: Terminus, "LXGW WenKai Mono", monospace;
|
||||
color: #cdd6f4;
|
||||
pointer-events: none;
|
||||
z-index: 50;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
/* Controls hint */
|
||||
#controls-hint {
|
||||
position: fixed;
|
||||
top: 16px;
|
||||
right: 16px;
|
||||
padding: 8px 12px;
|
||||
background: rgba(30, 30, 46, 0.8);
|
||||
border: 1px solid #313244;
|
||||
border-radius: 8px;
|
||||
font-size: 12px;
|
||||
color: #585b70;
|
||||
line-height: 1.6;
|
||||
z-index: 50;
|
||||
display: none;
|
||||
}
|
||||
|
||||
#controls-hint.active { display: block; }
|
||||
|
||||
/* History */
|
||||
#history {
|
||||
display: none;
|
||||
width: 500px;
|
||||
max-width: 90vw;
|
||||
margin-top: 8px;
|
||||
}
|
||||
|
||||
#history.has-items { display: block; }
|
||||
|
||||
#history h3 {
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
color: #585b70;
|
||||
margin-bottom: 8px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
.history-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.history-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 10px 14px;
|
||||
background: #1e1e2e;
|
||||
border: 1px solid #313244;
|
||||
border-radius: 8px;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.2s;
|
||||
}
|
||||
|
||||
.history-item:hover { border-color: #cba6f7; }
|
||||
|
||||
.history-item .name {
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
color: #cdd6f4;
|
||||
}
|
||||
|
||||
.history-item .meta {
|
||||
font-size: 12px;
|
||||
color: #585b70;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div id="landing">
|
||||
<h1>repo-vis</h1>
|
||||
<p class="subtitle">Visualize any codebase in 3D</p>
|
||||
|
||||
<div class="input-group">
|
||||
<div class="input-row">
|
||||
<input type="text" id="git-url" placeholder="https://github.com/user/repo">
|
||||
<button id="btn-clone">Clone & Visualize</button>
|
||||
</div>
|
||||
|
||||
<div class="divider">or</div>
|
||||
|
||||
<div class="drop-zone" id="drop-zone">
|
||||
<p>Drop a .zip file here, or click to browse</p>
|
||||
<input type="file" id="file-input" accept=".zip">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="history">
|
||||
<h3>Recent</h3>
|
||||
<div class="history-list" id="history-list"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="loading">
|
||||
<div class="spinner"></div>
|
||||
<div id="loading-text">Cloning repository...</div>
|
||||
</div>
|
||||
|
||||
<div id="viewport"></div>
|
||||
<div id="tooltip"></div>
|
||||
<div id="controls-hint">
|
||||
LMB drag — rotate | RMB drag — pan | scroll — zoom | double-click — focus file
|
||||
</div>
|
||||
|
||||
<script type="module" src="/src/app.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
1058
web/package-lock.json
generated
Normal file
1058
web/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
17
web/package.json
Normal file
17
web/package.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"name": "repo-vis-web",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build"
|
||||
},
|
||||
"dependencies": {
|
||||
"d3-hierarchy": "^3.1.2",
|
||||
"three": "^0.170.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vite": "^6.0.0"
|
||||
}
|
||||
}
|
||||
181
web/src/app.js
Normal file
181
web/src/app.js
Normal file
@@ -0,0 +1,181 @@
|
||||
import { computeLayout } from "./layout.js";
|
||||
import { RepoRenderer } from "./renderer.js";
|
||||
|
||||
const landing = document.getElementById("landing");
|
||||
const loading = document.getElementById("loading");
|
||||
const loadingText = document.getElementById("loading-text");
|
||||
const viewport = document.getElementById("viewport");
|
||||
const controlsHint = document.getElementById("controls-hint");
|
||||
const gitUrlInput = document.getElementById("git-url");
|
||||
const btnClone = document.getElementById("btn-clone");
|
||||
const dropZone = document.getElementById("drop-zone");
|
||||
const fileInput = document.getElementById("file-input");
|
||||
const historyEl = document.getElementById("history");
|
||||
const historyList = document.getElementById("history-list");
|
||||
|
||||
// Swap the landing page for the loading overlay, showing `msg`.
function showLoading(msg) {
  loadingText.textContent = msg;
  landing.style.display = "none";
  loading.classList.add("active");
}
|
||||
|
||||
// Hide the loading overlay and reveal the 3D viewport plus controls hint.
function showVisualization() {
  for (const el of [viewport, controlsHint]) el.classList.add("active");
  loading.classList.remove("active");
}
|
||||
|
||||
// Return to the landing page and surface the message to the user.
function showError(message) {
  loading.classList.remove("active");
  landing.style.display = "";
  alert(message);
}
|
||||
|
||||
// Compute the treemap layout for `tree` and hand the result to the 3D
// renderer, updating the loading message as work progresses.
async function visualize(tree) {
  showLoading("Building layout...");

  // Wait for fonts to load so canvas renders them correctly
  await document.fonts.ready;
  // Yield briefly so the loading text actually paints before the
  // synchronous layout work blocks the main thread.
  await new Promise((r) => setTimeout(r, 50));

  const { leaves, totalWidth, totalHeight } = computeLayout(tree);

  if (leaves.length === 0) {
    showError("No source files found in repository.");
    return;
  }

  showLoading(`Rendering ${leaves.length} files...`);
  await new Promise((r) => setTimeout(r, 50));

  showVisualization();
  const renderer = new RepoRenderer(viewport);
  renderer.load(leaves, totalWidth, totalHeight);
}
|
||||
|
||||
// --- History ---
// Fetch recently scanned repos from the server and render them as
// clickable entries under the landing form. Fails silently: history is
// purely cosmetic.
async function loadHistory() {
  try {
    const res = await fetch("/api/repos");
    if (!res.ok) return;
    const repos = await res.json();
    if (repos.length === 0) return;

    historyEl.classList.add("has-items");
    historyList.innerHTML = "";

    for (const repo of repos) {
      const item = document.createElement("div");
      item.className = "history-item";
      // Repo names come from user input — escape before interpolating.
      item.innerHTML = `
        <span class="name">${escapeHtml(repo.name)}</span>
        <span class="meta">${repo.file_count} files</span>
      `;
      item.addEventListener("click", () => loadCachedRepo(repo.cache_key, repo.name));
      historyList.appendChild(item);
    }
  } catch {
    // ignore
  }
}
|
||||
|
||||
// Load a previously scanned repo from the server-side cache by its key.
// A 404 here means the 24h cache TTL expired since the history was listed.
async function loadCachedRepo(key, name) {
  showLoading(`Loading ${name}...`);
  try {
    const res = await fetch(`/api/repos/${key}`);
    if (!res.ok) throw new Error("Cache expired");
    const tree = await res.json();
    await visualize(tree);
  } catch (err) {
    showError(err.message);
  }
}
|
||||
|
||||
// Escape a string for safe interpolation into innerHTML by round-tripping
// it through a detached element's textContent.
function escapeHtml(s) {
  const scratch = document.createElement("div");
  scratch.textContent = s;
  return scratch.innerHTML;
}
|
||||
|
||||
// Load history on page load
|
||||
loadHistory();
|
||||
|
||||
// --- Git clone ---
// Submit the git URL to the backend and visualize the returned tree.
// The button is disabled while a request is in flight.
btnClone.addEventListener("click", async () => {
  const url = gitUrlInput.value.trim();
  if (!url) return;

  btnClone.disabled = true;
  showLoading("Cloning repository...");

  try {
    const res = await fetch("/api/scan-git", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ url }),
    });

    if (!res.ok) {
      // The backend reports errors as a plain-text body, not JSON —
      // parsing it with res.json() (as before) threw a parse error and
      // hid the real message.
      const msg = await res.text();
      throw new Error(msg || "Clone failed");
    }

    const tree = await res.json();
    await visualize(tree);
  } catch (err) {
    showError(err.message);
  } finally {
    btnClone.disabled = false;
  }
});
|
||||
|
||||
// Pressing Enter in the URL field triggers the clone button.
gitUrlInput.addEventListener("keydown", (e) => {
  if (e.key !== "Enter") return;
  btnClone.click();
});
|
||||
|
||||
// --- Zip upload ---
// The drop zone doubles as a click proxy for the hidden file input,
// and accepts drag-and-drop with a visual "dragover" highlight.
dropZone.addEventListener("click", () => fileInput.click());

dropZone.addEventListener("dragover", (e) => {
  e.preventDefault(); // required, or the browser rejects the drop
  dropZone.classList.add("dragover");
});

dropZone.addEventListener("dragleave", () => {
  dropZone.classList.remove("dragover");
});

dropZone.addEventListener("drop", (e) => {
  e.preventDefault();
  dropZone.classList.remove("dragover");
  // Only the first dropped file is used.
  const [file] = e.dataTransfer.files;
  if (file) uploadZip(file);
});

fileInput.addEventListener("change", () => {
  const file = fileInput.files[0];
  if (file) uploadZip(file);
});
|
||||
|
||||
/**
 * POST a zip archive to the backend scanner and visualize the result.
 * Any failure (network, non-OK status, render) is routed to the error
 * screen.
 *
 * @param {File} file - The .zip the user picked or dropped.
 */
async function uploadZip(file) {
  showLoading("Uploading and scanning zip...");

  try {
    const form = new FormData();
    form.append("file", file);

    const res = await fetch("/api/scan-zip", { method: "POST", body: form });

    if (res.ok) {
      await visualize(await res.json());
    } else {
      // Backend reports failures as { error: "..." } JSON bodies.
      const body = await res.json();
      throw new Error(body.error || "Upload failed");
    }
  } catch (err) {
    showError(err.message);
  }
}
|
||||
50
web/src/layout.js
Normal file
50
web/src/layout.js
Normal file
@@ -0,0 +1,50 @@
|
||||
/**
|
||||
* Treemap layout using d3-hierarchy.
|
||||
* Each file rectangle is sized by: lines × maxLen (the "true" code area).
|
||||
* Returns a flat array of positioned file nodes.
|
||||
*/
|
||||
|
||||
import { treemap, treemapSquarify, hierarchy } from "d3-hierarchy";
|
||||
|
||||
/**
 * Compute a squarified treemap layout for a scanned repo tree.
 *
 * Each file's rectangle area is proportional to lines × maxLen — the
 * character-grid area its code would occupy if laid out flat.
 *
 * @param {object} tree - The scanned repo tree from scanner.js
 * @param {number} charW - Character width in world units
 * @param {number} charH - Character height (line height) in world units
 * @param {number} padding - Padding between files in world units
 * @returns {{leaves: Array<{path, name, x, z, w, h, lines, maxLen, content}>, totalWidth: number, totalHeight: number}}
 */
export function computeLayout(tree, charW = 0.15, charH = 0.3, padding = 2) {
  // Only leaf files (nodes carrying content) contribute area.
  const root = hierarchy(tree)
    .sum((node) => (node.content != null ? node.lines * node.maxLen : 0))
    .sort((a, b) => b.value - a.value);

  // Square side length ≈ sqrt(total char area) in world units, with a
  // 20% margin so padding doesn't squeeze the tiles.
  const side = Math.sqrt(root.value) * charW * 1.2;

  treemap()
    .tile(treemapSquarify.ratio(1))
    .size([side, side])
    .paddingInner(padding)
    .paddingOuter(padding)(root);

  // Flatten to positioned file rectangles; treemap y maps to world z.
  const leaves = root
    .leaves()
    .filter((leaf) => Boolean(leaf.data.content))
    .map((leaf) => ({
      path: leaf.data.path,
      name: leaf.data.name,
      x: leaf.x0,
      z: leaf.y0,
      w: leaf.x1 - leaf.x0,
      h: leaf.y1 - leaf.y0,
      lines: leaf.data.lines,
      maxLen: leaf.data.maxLen,
      content: leaf.data.content,
    }));

  return { leaves, totalWidth: side, totalHeight: side };
}
|
||||
373
web/src/renderer.js
Normal file
373
web/src/renderer.js
Normal file
@@ -0,0 +1,373 @@
|
||||
/**
|
||||
* Three.js renderer for the repo visualization.
|
||||
* - Files are flat planes on the XZ ground plane
|
||||
* - 3D OrbitControls camera
|
||||
* - Semantic zoom: color blocks → code texture based on camera distance
|
||||
*/
|
||||
|
||||
import * as THREE from "three";
|
||||
import { OrbitControls } from "three/addons/controls/OrbitControls.js";
|
||||
|
||||
// Color palette by file extension (GitHub-linguist-inspired hues).
const EXT_COLORS = {
  // JavaScript family
  ".js": 0xf7df1e,
  ".jsx": 0x61dafb,
  ".ts": 0x3178c6,
  ".tsx": 0x61dafb,
  // Systems / backend languages
  ".py": 0x3572a5,
  ".go": 0x00add8,
  ".rs": 0xdea584,
  ".c": 0x555555,
  ".h": 0x555555,
  ".cpp": 0xf34b7d,
  ".hpp": 0xf34b7d,
  ".java": 0xb07219,
  ".rb": 0xcc342d,
  ".php": 0x4f5d95,
  // Web / markup / styles
  ".html": 0xe34c26,
  ".css": 0x563d7c,
  ".scss": 0xc6538c,
  ".vue": 0x41b883,
  ".svelte": 0xff3e00,
  // Data / config / docs
  ".json": 0x40b5a4,
  ".yaml": 0xcb171e,
  ".yml": 0xcb171e,
  ".md": 0x083fa1,
  ".sh": 0x89e051,
  ".sql": 0xe38c00,
};

// Fallback for unknown or missing extensions.
const DEFAULT_COLOR = 0x8899aa;

/**
 * Map a file name to its extension color.
 * The extension is everything from the last "." (inclusive), compared
 * case-insensitively; names without a dot get the default color.
 *
 * @param {string} filename
 * @returns {number} 24-bit RGB color.
 */
function getColor(filename) {
  const dotIdx = filename.lastIndexOf(".");
  if (dotIdx < 0) return DEFAULT_COLOR;
  return EXT_COLORS[filename.slice(dotIdx).toLowerCase()] ?? DEFAULT_COLOR;
}
|
||||
|
||||
/**
 * Render code text onto a Canvas and return it as a texture.
 */
// Monospace font stack with CJK fallback
const CODE_FONT = 'Terminus, "LXGW WenKai Mono", "Noto Sans Mono CJK SC", "Microsoft YaHei", monospace';

/**
 * Paint a file's source text onto a 2D canvas and wrap it in a
 * THREE.CanvasTexture (the near-distance LOD).
 *
 * Lines longer than maxLen are hard-wrapped at maxLen characters so the
 * canvas width stays bounded. The canvas is capped at 8192px per side;
 * when the ideal size exceeds that, the drawing context is uniformly
 * scaled down instead.
 *
 * @param {string} content - Full file text.
 * @param {number} lines - Line count (kept for signature compatibility).
 * @param {number} maxLen - Longest line length, in characters.
 * @returns {THREE.CanvasTexture}
 */
function createCodeTexture(content, lines, maxLen) {
  const fontSize = 28; // Higher base resolution for sharp zoom
  const charW = fontSize * 0.6;
  const lineH = fontSize * 1.4;

  // Hard-wrap any line longer than maxLen characters.
  const wrapped = [];
  content.split("\n").forEach((line) => {
    if (line.length <= maxLen) {
      wrapped.push(line);
    } else {
      for (let start = 0; start < line.length; start += maxLen) {
        wrapped.push(line.substring(start, start + maxLen));
      }
    }
  });

  // Ideal pixel size: character grid plus a 10px margin on each side.
  const idealW = Math.ceil(maxLen * charW) + 20;
  const idealH = Math.ceil(wrapped.length * lineH) + 20;

  // Cap canvas size — 8192 for sharper textures on modern GPUs.
  const maxDim = 8192;
  const scale = Math.min(
    idealW > maxDim ? maxDim / idealW : 1,
    idealH > maxDim ? maxDim / idealH : 1
  );

  const w = Math.ceil(idealW * scale);
  const h = Math.ceil(idealH * scale);

  const canvas = document.createElement("canvas");
  canvas.width = w;
  canvas.height = h;
  const ctx = canvas.getContext("2d");

  // Dark background, painted before scaling so it covers every pixel.
  ctx.fillStyle = "#1e1e2e";
  ctx.fillRect(0, 0, w, h);

  // All text drawing happens in unscaled "ideal" coordinates.
  ctx.scale(scale, scale);
  ctx.font = `${fontSize}px ${CODE_FONT}`;
  ctx.fillStyle = "#cdd6f4";
  ctx.textBaseline = "top";

  wrapped.forEach((text, row) => {
    ctx.fillText(text, 10, 10 + row * lineH);
  });

  const tex = new THREE.CanvasTexture(canvas);
  // Mipmapped minification + linear magnification keeps the code legible
  // across the semantic-zoom range.
  tex.minFilter = THREE.LinearMipmapLinearFilter;
  tex.magFilter = THREE.LinearFilter;
  tex.anisotropy = 8;
  return tex;
}
|
||||
|
||||
/**
 * Create a simple color block texture with the file name — the far-away
 * LOD shown before a tile's code texture is generated.
 *
 * @param {string} name - File name to print in the center.
 * @param {number} color - 24-bit RGB fill color.
 * @returns {THREE.CanvasTexture}
 */
function createBlockTexture(name, color) {
  const SIZE = 256;
  const canvas = document.createElement("canvas");
  canvas.width = SIZE;
  canvas.height = SIZE;
  const ctx = canvas.getContext("2d");

  // Solid fill in the extension color (number → "#rrggbb").
  ctx.fillStyle = "#" + color.toString(16).padStart(6, "0");
  ctx.fillRect(0, 0, SIZE, SIZE);

  // Slightly darker border
  ctx.strokeStyle = "rgba(0,0,0,0.3)";
  ctx.lineWidth = 4;
  ctx.strokeRect(2, 2, SIZE - 4, SIZE - 4);

  // Centered file name, truncated so it fits the 256px tile.
  ctx.fillStyle = "#ffffff";
  ctx.font = "bold 18px sans-serif";
  ctx.textAlign = "center";
  ctx.textBaseline = "middle";
  const label = name.length > 28 ? `${name.substring(0, 25)}...` : name;
  ctx.fillText(label, SIZE / 2, SIZE / 2);

  const tex = new THREE.CanvasTexture(canvas);
  tex.minFilter = THREE.LinearFilter;
  return tex;
}
|
||||
|
||||
/**
 * @typedef {Object} FileTile
 * @property {THREE.Mesh} mesh
 * @property {object} data - layout data (path, name, content, etc.)
 * @property {THREE.Texture|null} codeTexture - lazily created
 * @property {THREE.Texture} blockTexture
 * @property {boolean} showingCode
 */

/**
 * Owns the whole Three.js visualization: scene/camera/controls setup,
 * per-frame render loop, distance-based LOD swapping between block and
 * code textures, hover raycasting for the tooltip, and double-click
 * fly-to-file camera animation.
 *
 * Construction immediately starts the requestAnimationFrame loop; call
 * load() afterwards to populate the scene with file tiles.
 */
export class RepoRenderer {
  constructor(container) {
    // DOM element the WebGL canvas is appended into.
    this.container = container;
    // FileTile[] — one entry per file plane, populated by load().
    this.tiles = [];
    this.raycaster = new THREE.Raycaster();
    // Mouse position in normalized device coords, updated on mousemove.
    this.mouse = new THREE.Vector2();
    // Tile currently under the cursor, or null (set by _updateHover).
    this.hoveredTile = null;

    this._initScene();
    this._initControls();
    this._initEvents();
    // Kick off the self-perpetuating render loop.
    this._animate();
  }

  /** Create scene, perspective camera, WebGL renderer, and lights. */
  _initScene() {
    this.scene = new THREE.Scene();
    this.scene.background = new THREE.Color(0x11111b);

    // Far plane of 50000 accommodates very large repos (layout side
    // grows with sqrt of total code area).
    this.camera = new THREE.PerspectiveCamera(
      60,
      window.innerWidth / window.innerHeight,
      0.1,
      50000
    );
    this.camera.position.set(0, 200, 200);
    this.camera.lookAt(0, 0, 0);

    this.renderer3d = new THREE.WebGLRenderer({ antialias: true });
    this.renderer3d.setSize(window.innerWidth, window.innerHeight);
    // Cap pixel ratio at 2 to limit fill-rate cost on high-DPI screens.
    this.renderer3d.setPixelRatio(Math.min(window.devicePixelRatio, 2));
    this.container.appendChild(this.renderer3d.domElement);

    // Ambient + directional light
    this.scene.add(new THREE.AmbientLight(0xffffff, 0.7));
    const dirLight = new THREE.DirectionalLight(0xffffff, 0.5);
    dirLight.position.set(100, 200, 100);
    this.scene.add(dirLight);
  }

  /** Configure OrbitControls: damping, zoom limits, touch gestures. */
  _initControls() {
    this.controls = new OrbitControls(this.camera, this.renderer3d.domElement);
    this.controls.enableDamping = true;
    this.controls.dampingFactor = 0.1;
    this.controls.maxPolarAngle = Math.PI / 2.1; // Don't go below the ground
    this.controls.minDistance = 1;
    this.controls.maxDistance = 10000;
    this.controls.zoomSpeed = 1.5;
    this.controls.target.set(0, 0, 0);

    // Google Maps-style touch: 1 finger pan, 2 finger pinch zoom + rotate
    this.controls.touches = {
      ONE: THREE.TOUCH.PAN,
      TWO: THREE.TOUCH.DOLLY_ROTATE,
    };
  }

  /** Wire up window resize, mouse tracking, and double-click zoom. */
  _initEvents() {
    window.addEventListener("resize", () => {
      this.camera.aspect = window.innerWidth / window.innerHeight;
      this.camera.updateProjectionMatrix();
      this.renderer3d.setSize(window.innerWidth, window.innerHeight);
    });

    // Hover for tooltip — just record NDC coords here; the raycast
    // itself happens once per frame in _updateHover().
    this.renderer3d.domElement.addEventListener("mousemove", (e) => {
      this.mouse.x = (e.clientX / window.innerWidth) * 2 - 1;
      this.mouse.y = -(e.clientY / window.innerHeight) * 2 + 1;
    });

    // Double-click to zoom into a file
    this.renderer3d.domElement.addEventListener("dblclick", () => {
      if (this.hoveredTile) {
        const t = this.hoveredTile;
        const d = t.data;
        // Aim at the tile's center; d.x/d.z are already world-space
        // (load() applies the centering offset to the leaf data).
        const targetX = d.x + d.w / 2;
        const targetZ = d.z + d.h / 2;
        // Viewing height proportional to tile size so the file fills
        // most of the screen.
        const viewDist = Math.max(d.w, d.h) * 0.8;

        // Animate camera
        this._animateTo(
          new THREE.Vector3(targetX, viewDist, targetZ + viewDist * 0.5),
          new THREE.Vector3(targetX, 0, targetZ)
        );
      }
    });
  }

  /**
   * Smoothly fly the camera (and its orbit target) to a new pose.
   * Runs its own rAF loop alongside _animate(); the per-frame lerp
   * writes camera.position/controls.target directly.
   *
   * @param {THREE.Vector3} position - Destination camera position.
   * @param {THREE.Vector3} target - Destination orbit target.
   * @param {number} duration - Animation length in ms.
   */
  _animateTo(position, target, duration = 800) {
    const startPos = this.camera.position.clone();
    const startTarget = this.controls.target.clone();
    const startTime = performance.now();

    const step = () => {
      const elapsed = performance.now() - startTime;
      const t = Math.min(elapsed / duration, 1);
      // Smooth ease (cubic ease-in-out)
      const ease = t < 0.5 ? 4 * t * t * t : 1 - Math.pow(-2 * t + 2, 3) / 2;

      this.camera.position.lerpVectors(startPos, position, ease);
      this.controls.target.lerpVectors(startTarget, target, ease);

      if (t < 1) requestAnimationFrame(step);
    };
    requestAnimationFrame(step);
  }

  /**
   * Load layout data and create meshes.
   *
   * Builds one textured plane per file, centered around the origin on
   * the XZ ground plane, then frames the camera on the whole layout.
   * NOTE: mutates each leaf's x/z in place to world coordinates so the
   * dblclick/LOD code can use them directly.
   */
  load(leaves, totalWidth, totalHeight) {
    // Center the layout around origin
    const offsetX = -totalWidth / 2;
    const offsetZ = -totalHeight / 2;

    for (const leaf of leaves) {
      const color = getColor(leaf.name);

      // Block texture (far LOD)
      const blockTex = createBlockTexture(leaf.name, color);
      const material = new THREE.MeshStandardMaterial({
        map: blockTex,
        roughness: 0.8,
        metalness: 0.1,
      });

      const geometry = new THREE.PlaneGeometry(leaf.w, leaf.h);
      const mesh = new THREE.Mesh(geometry, material);

      // Lay flat on XZ plane, slight Y offset to avoid z-fighting
      mesh.rotation.x = -Math.PI / 2;
      mesh.position.set(
        offsetX + leaf.x + leaf.w / 2,
        0.01,
        offsetZ + leaf.z + leaf.h / 2
      );

      this.scene.add(mesh);

      // Adjust leaf coords to world space for camera targeting
      leaf.x += offsetX;
      leaf.z += offsetZ;

      this.tiles.push({
        mesh,
        data: leaf,
        codeTexture: null, // created lazily on first close approach
        blockTexture: blockTex,
        showingCode: false,
        color,
      });
    }

    // Set initial camera to see the whole scene
    const maxDim = Math.max(totalWidth, totalHeight);
    this.camera.position.set(0, maxDim * 0.7, maxDim * 0.5);
    this.controls.target.set(0, 0, 0);

    // Tooltip element
    this.tooltip = document.getElementById("tooltip");
  }

  /**
   * Semantic zoom: per tile, compare camera distance to a per-tile
   * threshold (3× its larger side) and swap between the cheap block
   * texture and the lazily-built code texture. Runs every frame.
   */
  _updateLOD() {
    const camPos = this.camera.position;

    for (const tile of this.tiles) {
      // Distance from camera to the tile's center (tiles sit at y≈0,
      // hence the bare camPos.y term).
      const cx = tile.data.x + tile.data.w / 2;
      const cz = tile.data.z + tile.data.h / 2;
      const dist = Math.sqrt(
        (camPos.x - cx) ** 2 + camPos.y ** 2 + (camPos.z - cz) ** 2
      );

      const fileSize = Math.max(tile.data.w, tile.data.h);
      // Switch to code texture when close enough to read
      const threshold = fileSize * 3;

      if (dist < threshold && !tile.showingCode) {
        // Create code texture lazily
        if (!tile.codeTexture) {
          tile.codeTexture = createCodeTexture(
            tile.data.content,
            tile.data.lines,
            tile.data.maxLen
          );
        }
        tile.mesh.material.map = tile.codeTexture;
        tile.mesh.material.needsUpdate = true;
        tile.showingCode = true;
      } else if (dist >= threshold && tile.showingCode) {
        // Back out of range — revert to the cached block texture.
        tile.mesh.material.map = tile.blockTexture;
        tile.mesh.material.needsUpdate = true;
        tile.showingCode = false;
      }
    }
  }

  /**
   * Raycast from the last known mouse position, record the hovered tile
   * (used by the dblclick handler), and show/hide the path tooltip.
   * Runs every frame.
   */
  _updateHover() {
    this.raycaster.setFromCamera(this.mouse, this.camera);
    const meshes = this.tiles.map((t) => t.mesh);
    const intersects = this.raycaster.intersectObjects(meshes);

    if (intersects.length > 0) {
      // intersects[0] is the closest hit.
      const mesh = intersects[0].object;
      const tile = this.tiles.find((t) => t.mesh === mesh);
      this.hoveredTile = tile;

      // this.tooltip is only set after load(); guard for the pre-load frames.
      if (this.tooltip && tile) {
        this.tooltip.style.display = "block";
        this.tooltip.textContent = `${tile.data.path} (${tile.data.lines} lines)`;
      }
    } else {
      this.hoveredTile = null;
      if (this.tooltip) this.tooltip.style.display = "none";
    }
  }

  /** Main render loop: controls damping → LOD → hover → draw. */
  _animate() {
    requestAnimationFrame(() => this._animate());
    this.controls.update();
    this._updateLOD();
    this._updateHover();
    this.renderer3d.render(this.scene, this.camera);
  }
}
|
||||
12
web/vite.config.js
Normal file
12
web/vite.config.js
Normal file
@@ -0,0 +1,12 @@
|
||||
import { defineConfig } from "vite";

// Vite configuration for the repo-vis frontend.
export default defineConfig({
  server: {
    // Dev-only: forward API calls to the backend on port 3000 so the
    // frontend can use same-origin paths like fetch("/api/repos").
    proxy: {
      "/api": "http://localhost:3000",
    },
  },
  build: {
    // Output directory; the production image copies web/dist and serves
    // it via FRONTEND_DIR=./web/dist.
    outDir: "dist",
  },
});
|
||||
Reference in New Issue
Block a user