Compare commits

..

No commits in common. "master" and "ci/gitea-actions" have entirely different histories.

7 changed files with 45 additions and 150 deletions

View File

@ -8,17 +8,9 @@ on:
jobs:
deploy:
runs-on: self-hosted
defaults:
run:
shell: bash -l {0}
steps:
- name: Checkout
run: |
if [ -d .git ]; then
git fetch origin master && git reset --hard origin/master && git clean -fd
else
git clone ${{ gitea.server_url }}/${{ gitea.repository }} . && git checkout ${{ gitea.sha }}
fi
uses: actions/checkout@v4
- name: Install frontend dependencies
run: cd web && npm ci

View File

@ -1,9 +0,0 @@
# Minimal runtime image: a prebuilt server binary plus static frontend assets.
FROM alpine:3.21
# git is required at runtime — the server shells out to clone repos
# (see `std::process::Command` / "Clone into temp dir" in main.rs);
# ca-certificates enables HTTPS clones.
RUN apk add --no-cache git ca-certificates
WORKDIR /app
# Server binary built outside the image (the Makefile scp's a musl release build in).
COPY repo-vis-server ./
# Prebuilt frontend bundle, served as static files by the server.
COPY dist ./web/dist/
ENV PORT=8080
ENV FRONTEND_DIR=./web/dist
EXPOSE 8080
CMD ["./repo-vis-server"]

View File

@ -39,9 +39,18 @@ deploy-oci: build-arm
ssh $(OCI_HOST) "rm -rf $(OCI_TMP) && mkdir -p $(OCI_TMP)"
scp server/target/$(MUSL_TARGET_ARM)/release/repo-vis-server $(OCI_HOST):$(OCI_TMP)/
cd web && tar czf /tmp/_rv_dist.tar.gz dist && scp /tmp/_rv_dist.tar.gz $(OCI_HOST):$(OCI_TMP)/
scp Dockerfile.oci $(OCI_HOST):$(OCI_TMP)/Dockerfile
@echo "==> Building image on OCI..."
ssh $(OCI_HOST) 'cd $(OCI_TMP) && tar xzf _rv_dist.tar.gz && \
ssh $(OCI_HOST) 'cd $(OCI_TMP) && tar xzf _rv_dist.tar.gz && cat > Dockerfile <<DEOF\n\
FROM alpine:3.21\n\
RUN apk add --no-cache git ca-certificates\n\
WORKDIR /app\n\
COPY repo-vis-server ./\n\
COPY dist ./web/dist/\n\
ENV PORT=8080\n\
ENV FRONTEND_DIR=./web/dist\n\
EXPOSE 8080\n\
CMD ["./repo-vis-server"]\n\
DEOF\n\
sudo docker build -t repo-vis:latest . && \
sudo docker save repo-vis:latest -o /tmp/_rv.tar && \
sudo k3s ctr images import /tmp/_rv.tar'

View File

@ -1,2 +1,2 @@
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-musl-gcc"
linker = "aarch64-linux-gnu-gcc"

View File

@ -2,7 +2,7 @@ mod cache;
mod scanner;
use axum::{
extract::{DefaultBodyLimit, Multipart, Path, Query, State},
extract::{DefaultBodyLimit, Multipart, Path, State},
http::StatusCode,
response::Json,
routing::{get, post},
@ -10,7 +10,7 @@ use axum::{
};
use cache::{Cache, RepoEntry};
use scanner::{scan_dir, FileNode};
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::process::Command;
use std::sync::Arc;
@ -27,43 +27,6 @@ struct GitRequest {
url: String,
}
/// Query-string parameters for the per-file content endpoint
/// (`GET /api/repos/{key}/file?path=...`).
#[derive(Deserialize)]
struct FileQuery {
// Relative path of the file inside the cached tree (matched against `FileNode.path`).
path: String,
}
/// Response wrapper that includes the cache key alongside the (content-stripped) tree.
#[derive(Serialize)]
struct TreeResponse {
// Key the client echoes back to the `/file` endpoint to lazily fetch file contents.
cache_key: String,
// Directory tree with per-file `content` removed — metadata only.
tree: FileNode,
}
/// Recursively strip file content so the initial response is metadata-only.
///
/// Sets `content` to `None` on this node and every descendant, leaving the
/// rest of the tree (names, paths, children) untouched. Callers cache the
/// full tree first and strip only the copy that goes over the wire.
fn strip_content(node: &mut FileNode) {
node.content = None;
if let Some(children) = &mut node.children {
for child in children {
strip_content(child);
}
}
}
/// Walk the cached tree to find a single file's content by its relative path.
///
/// Depth-first search. Returns a clone of the first node whose `path` equals
/// `path` AND whose `content` is present; `None` if no such node exists.
/// Directory nodes (no `content`) never match, even on a path collision.
fn find_file_content(node: &FileNode, path: &str) -> Option<String> {
if node.content.is_some() && node.path == path {
return node.content.clone();
}
// Recurse into children; the first match anywhere in the subtree wins.
if let Some(children) = &node.children {
for child in children {
if let Some(content) = find_file_content(child, path) {
return Some(content);
}
}
}
None
}
fn count_leaves(node: &FileNode) -> usize {
match &node.children {
Some(children) => children.iter().map(count_leaves).sum(),
@ -92,7 +55,6 @@ async fn main() {
.route("/api/scan-zip", post(scan_zip))
.route("/api/repos", get(list_repos))
.route("/api/repos/{key}", get(get_repo))
.route("/api/repos/{key}/file", get(get_file))
.layer(DefaultBodyLimit::max(100 * 1024 * 1024))
.with_state(state)
.fallback_service(ServeDir::new(frontend_dir).append_index_html_on_directories(true));
@ -114,38 +76,19 @@ async fn list_repos(
async fn get_repo(
State(state): State<Arc<AppState>>,
Path(key): Path<String>,
) -> Result<Json<TreeResponse>, (StatusCode, String)> {
) -> Result<Json<FileNode>, (StatusCode, String)> {
state
.cache
.get(&key)
.and_then(|data| serde_json::from_str::<FileNode>(&data).ok())
.map(|mut tree| {
strip_content(&mut tree);
Json(TreeResponse { cache_key: key, tree })
})
.and_then(|data| serde_json::from_str(&data).ok())
.map(Json)
.ok_or((StatusCode::NOT_FOUND, "Repo not found in cache".to_string()))
}
/// Handler for `GET /api/repos/{key}/file?path=...` — fetch one file's
/// content from the fully-populated cached tree.
///
/// Returns `Json({"content": ...})` on success. Errors:
/// * 404 — `key` is not in the cache, or `path` names no file in the tree;
/// * 500 — the cached JSON fails to deserialize into a `FileNode`.
async fn get_file(
State(state): State<Arc<AppState>>,
Path(key): Path<String>,
Query(q): Query<FileQuery>,
) -> Result<Json<serde_json::Value>, (StatusCode, String)> {
// The cache stores the whole tree (with content) as one JSON string per repo key.
let data = state
.cache
.get(&key)
.ok_or((StatusCode::NOT_FOUND, "Repo not found in cache".to_string()))?;
let tree: FileNode = serde_json::from_str(&data)
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let content = find_file_content(&tree, &q.path)
.ok_or((StatusCode::NOT_FOUND, format!("File not found: {}", q.path)))?;
Ok(Json(serde_json::json!({ "content": content })))
}
async fn scan_git(
State(state): State<Arc<AppState>>,
Json(req): Json<GitRequest>,
) -> Result<Json<TreeResponse>, (StatusCode, String)> {
) -> Result<Json<FileNode>, (StatusCode, String)> {
let url = req.url.trim().to_string();
if !url.starts_with("http://")
@ -159,10 +102,9 @@ async fn scan_git(
let key = Cache::make_key(&format!("git:{url}"));
if let Some(cached) = state.cache.get(&key) {
info!("Cache hit for {url}");
let mut tree: FileNode =
let tree: FileNode =
serde_json::from_str(&cached).map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
strip_content(&mut tree);
return Ok(Json(TreeResponse { cache_key: key, tree }));
return Ok(Json(tree));
}
// Clone into temp dir
@ -196,20 +138,18 @@ async fn scan_git(
let file_count = count_leaves(&tree);
// Cache with full content, then strip for response
if let Ok(json_str) = serde_json::to_string(&tree) {
state.cache.set(&key, &json_str);
state.cache.record_repo(&key, &repo_name, &url, file_count);
}
strip_content(&mut tree);
Ok(Json(TreeResponse { cache_key: key, tree }))
Ok(Json(tree))
}
async fn scan_zip(
State(state): State<Arc<AppState>>,
mut multipart: Multipart,
) -> Result<Json<TreeResponse>, (StatusCode, String)> {
) -> Result<Json<FileNode>, (StatusCode, String)> {
let field = multipart
.next_field()
.await
@ -233,10 +173,9 @@ async fn scan_zip(
if let Some(cached) = state.cache.get(&key) {
info!("Cache hit for zip {file_name}");
let mut tree: FileNode =
let tree: FileNode =
serde_json::from_str(&cached).map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
strip_content(&mut tree);
return Ok(Json(TreeResponse { cache_key: key, tree }));
return Ok(Json(tree));
}
let tmp = TempDir::new().map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
@ -296,12 +235,10 @@ async fn scan_zip(
let file_count = count_leaves(&tree);
// Cache with full content, then strip for response
if let Ok(json_str) = serde_json::to_string(&tree) {
state.cache.set(&key, &json_str);
state.cache.record_repo(&key, &zip_name, &format!("zip:{file_name}"), file_count);
}
strip_content(&mut tree);
Ok(Json(TreeResponse { cache_key: key, tree }))
Ok(Json(tree))
}

View File

@ -31,7 +31,7 @@ function showError(msg) {
alert(msg);
}
async function visualize(tree, repoName, cacheKey) {
async function visualize(tree, repoName) {
showLoading("Building layout...");
// Wait for fonts to load so canvas renders them correctly
@ -50,7 +50,7 @@ async function visualize(tree, repoName, cacheKey) {
showVisualization();
document.getElementById("osd-info").classList.add("active");
const renderer = new RepoRenderer(viewport, repoName || tree.name, cacheKey);
const renderer = new RepoRenderer(viewport, repoName || tree.name);
await renderer.load(leaves, totalWidth, totalHeight);
}
@ -85,8 +85,8 @@ async function loadCachedRepo(key, name) {
try {
const res = await fetch(`/api/repos/${key}`);
if (!res.ok) throw new Error("Cache expired");
const { cache_key, tree } = await res.json();
await visualize(tree, name, cache_key);
const tree = await res.json();
await visualize(tree, name);
} catch (err) {
showError(err.message);
}
@ -121,8 +121,8 @@ btnClone.addEventListener("click", async () => {
throw new Error(err.error || "Clone failed");
}
const { cache_key, tree } = await res.json();
await visualize(tree, undefined, cache_key);
const tree = await res.json();
await visualize(tree);
} catch (err) {
showError(err.message);
} finally {
@ -174,8 +174,8 @@ async function uploadZip(file) {
throw new Error(err.error || "Upload failed");
}
const { cache_key, tree } = await res.json();
await visualize(tree, undefined, cache_key);
const tree = await res.json();
await visualize(tree);
} catch (err) {
showError(err.message);
}

View File

@ -42,10 +42,9 @@ function buildWatermark(text, cols, rows) {
// ---------- renderer ----------
export class RepoRenderer {
constructor(container, repoName, cacheKey) {
constructor(container, repoName) {
this.container = container;
this.repoName = repoName || "repo";
this.cacheKey = cacheKey || null;
this.tiles = [];
this.bgMeshes = [];
this.raycaster = new THREE.Raycaster();
@ -227,7 +226,7 @@ export class RepoRenderer {
// --- Label (always visible, cheap — one per file) ---
const label = new Text();
label.text = leaf.name;
label.fontSize = Math.min(Math.min(leaf.w, leaf.h) * 0.15, 5);
label.fontSize = Math.min(leaf.w, leaf.h) * 0.15;
label.color = 0xffffff;
label.anchorX = "center"; label.anchorY = "middle";
label.rotation.x = -Math.PI / 2;
@ -236,12 +235,13 @@ export class RepoRenderer {
this.scene.add(label);
label.sync();
// Pre-compute syntax highlight ranges (cheap, no GPU)
const colorRanges = computeColorRanges(leaf.content, leaf.name);
this.tiles.push({
bgMesh, label, darkMat,
bgMesh, label, darkMat, colorRanges,
codeMesh: null, watermark: null, darkMesh: null,
// colorRanges computed lazily on first _showCode
colorRanges: undefined,
data: leaf, showingCode: false, loading: false, color, dist: Infinity
data: leaf, showingCode: false, color, dist: Infinity
});
this.bgMeshes.push(bgMesh);
}
@ -254,43 +254,10 @@ export class RepoRenderer {
this.tooltip = document.getElementById("tooltip");
}
// -------- lazy content fetch --------
async _fetchContent(tile) {
try {
const res = await fetch(
`/api/repos/${encodeURIComponent(this.cacheKey)}/file?path=${encodeURIComponent(tile.data.path)}`
);
if (res.ok) {
const { content } = await res.json();
tile.data.content = content;
// Pre-compute colorRanges right after fetch (off the hot animation path)
tile.colorRanges = computeColorRanges(content, tile.data.name);
}
} catch {
// network error — leave content null, will retry next LOD cycle
} finally {
tile.loading = false;
}
}
// -------- lazy code/watermark creation --------
_showCode(tile) {
const d = tile.data;
// If content hasn't been loaded yet, kick off a fetch and bail
if (!d.content) {
if (!tile.loading) {
tile.loading = true;
if (this.cacheKey) this._fetchContent(tile);
}
return;
}
// Compute colorRanges lazily (only once, synchronous after content is available)
if (tile.colorRanges === undefined) {
tile.colorRanges = computeColorRanges(d.content, d.name);
}
// Dark bg
if (!tile.darkMesh) {
tile.darkMesh = new THREE.Mesh(new THREE.PlaneGeometry(d.w, d.h), tile.darkMat);
@ -302,12 +269,11 @@ export class RepoRenderer {
// Watermark — tiled path text, 45° rotated, slightly larger than code font
if (!tile.watermark) {
const codeFontSize = (d.h / d.lines) * 0.65;
// Clamp wmFontSize to avoid degenerate tiny values on files with huge line counts
const wmFontSize = Math.max(codeFontSize * 2.5, 1.0);
const wmFontSize = codeFontSize * 2.5;
const wmLabel = `${this.repoName}/${d.path}`;
// Estimate how many repetitions to fill the area; cap to prevent RangeError on massive tiles
const charsPerLine = Math.min(Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 0.5)), 400);
const lineCount = Math.min(Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 1.5)), 150);
// Estimate how many repetitions to fill the area
const charsPerLine = Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 0.5));
const lineCount = Math.ceil(Math.max(d.w, d.h) * 1.5 / (wmFontSize * 1.5));
const wmContent = buildWatermark(wmLabel, charsPerLine, lineCount);
const wm = new Text();