wip render repo

This commit is contained in:
Pascal Engélibert 2026-03-29 12:00:57 +02:00
commit 1c0156f512
6 changed files with 203 additions and 56 deletions

View file

@ -1,6 +1,9 @@
use std::{collections::HashSet, io::Write, path::PathBuf}; use std::{collections::HashSet, io::Write, path::PathBuf};
use crate::config::Config; use crate::{
config::Config,
repo::{RepoMetadata, WriteRepoMetadataError},
};
use rand::Rng; use rand::Rng;
use serde::Deserialize; use serde::Deserialize;
@ -108,6 +111,7 @@ pub enum FetchRepoError {
TooManyEntries, TooManyEntries,
Client(ClientError), Client(ClientError),
UrlParsing, UrlParsing,
WriteRepoMetadata(WriteRepoMetadataError),
} }
impl From<ClientError> for FetchRepoError { impl From<ClientError> for FetchRepoError {
@ -140,14 +144,20 @@ impl From<url::ParseError> for FetchRepoError {
} }
} }
/// Converts metadata-write failures so `?` can be used in repo-fetching
/// code paths that return `FetchRepoError`.
impl From<WriteRepoMetadataError> for FetchRepoError {
fn from(value: WriteRepoMetadataError) -> Self {
Self::WriteRepoMetadata(value)
}
}
#[derive(Default)] #[derive(Default)]
pub struct RepoIndex { pub struct RepoIndex {
files: Vec<RepoIndexFile>, pub files: Vec<RepoIndexFile>,
} }
pub struct RepoIndexFile { pub struct RepoIndexFile {
path: String, pub path: String,
url: String, pub url: String,
} }
pub async fn fetch_repo_tree_index_at_commit( pub async fn fetch_repo_tree_index_at_commit(
@ -155,10 +165,11 @@ pub async fn fetch_repo_tree_index_at_commit(
repo_url: &str, repo_url: &str,
commit_hash: &str, commit_hash: &str,
token: Option<&str>, token: Option<&str>,
) -> Result<RepoIndex, FetchRepoError> { ) -> Result<(RepoIndex, RepoMetadata), FetchRepoError> {
let parsed = url::Url::parse(repo_url)?; let parsed = url::Url::parse(repo_url)?;
let mut base = parsed.clone(); let mut base = parsed.clone();
base.set_fragment(None); base.set_fragment(None);
base.set_path("");
let base_url = base.as_str(); let base_url = base.as_str();
// Is the URL always /owner/repo? // Is the URL always /owner/repo?
let mut segments = parsed.path_segments().ok_or(FetchRepoError::UrlParsing)?; let mut segments = parsed.path_segments().ok_or(FetchRepoError::UrlParsing)?;
@ -196,7 +207,15 @@ pub async fn fetch_repo_tree_index_at_commit(
} }
} }
Ok(repo_index) Ok((
repo_index,
RepoMetadata {
date: std::time::UNIX_EPOCH.elapsed().unwrap().as_secs(),
repo_url: repo_url.to_string(),
commit_hash: commit_hash.to_string(),
content: Vec::new(),
},
))
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -208,6 +227,7 @@ pub async fn fetch_repo_files(
config: &Config, config: &Config,
client: &mut Client, client: &mut Client,
repo_index: &RepoIndex, repo_index: &RepoIndex,
repo_metadata: &mut RepoMetadata,
token: Option<&str>, token: Option<&str>,
) -> Result<(), FetchRepoError> { ) -> Result<(), FetchRepoError> {
let mut hasher = sha2::Sha256::default(); let mut hasher = sha2::Sha256::default();
@ -246,6 +266,15 @@ pub async fn fetch_repo_files(
base64_turbo::URL_SAFE base64_turbo::URL_SAFE
.encode_into(file_name, &mut file_name_str) .encode_into(file_name, &mut file_name_str)
.expect("unreachable"); .expect("unreachable");
repo_metadata
.content
.extend_from_slice(&(file.path.len() as u32).to_be_bytes());
repo_metadata
.content
.extend_from_slice(file.path.as_bytes());
repo_metadata.content.extend_from_slice(&file_name_str);
let file_name_str = str::from_utf8(&file_name_str).expect("unreachable"); let file_name_str = str::from_utf8(&file_name_str).expect("unreachable");
if let Ok(mut file) = std::fs::OpenOptions::new() if let Ok(mut file) = std::fs::OpenOptions::new()
.create_new(true) .create_new(true)
@ -258,5 +287,7 @@ pub async fn fetch_repo_files(
} }
} }
repo_metadata.write_to_file(config, &repo_dir)?;
Ok(()) Ok(())
} }

View file

@ -1,3 +1,5 @@
#![feature(btree_set_entry)]
use std::{path::PathBuf, sync::Arc}; use std::{path::PathBuf, sync::Arc};
mod api_client; mod api_client;
@ -10,6 +12,8 @@ mod templates;
use std::str::FromStr; use std::str::FromStr;
use log::info;
const SUBDIR_REPOS: &str = "repos"; const SUBDIR_REPOS: &str = "repos";
fn log_level_from_str(input: &str) -> Result<log::LevelFilter, String> { fn log_level_from_str(input: &str) -> Result<log::LevelFilter, String> {
@ -49,6 +53,10 @@ fn main() {
.recursive(true) .recursive(true)
.create(PathBuf::from(&config.data_dir).join(SUBDIR_REPOS)) .create(PathBuf::from(&config.data_dir).join(SUBDIR_REPOS))
.expect("Cannot create repos dir"); .expect("Cannot create repos dir");
info!(
"Listening http://{}:{}",
config.listen_host, config.listen_port
);
trillium_smol::config() trillium_smol::config()
.with_host(&config.listen_host) .with_host(&config.listen_host)
.with_port(config.listen_port) .with_port(config.listen_port)

View file

@ -14,8 +14,9 @@ const VERSION: [u8; 1] = [0];
#[derive(Debug)] #[derive(Debug)]
pub struct RepoMetadata { pub struct RepoMetadata {
pub date: u64, pub date: u64,
pub commit_url: String, pub repo_url: String,
content: Vec<u8>, pub commit_hash: String,
pub content: Vec<u8>,
} }
#[derive(Debug)] #[derive(Debug)]
@ -74,9 +75,13 @@ impl RepoMetadata {
let now = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs(); let now = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs();
file.write_all(&now.to_be_bytes())?; file.write_all(&now.to_be_bytes())?;
file.write_all(&(self.commit_url.len() as u32).to_be_bytes())?; file.write_all(&(self.repo_url.len() as u32).to_be_bytes())?;
file.write_all(self.commit_url.as_bytes())?; file.write_all(self.repo_url.as_bytes())?;
file.write_all(&(self.commit_hash.len() as u32).to_be_bytes())?;
file.write_all(self.commit_hash.as_bytes())?;
file.write_all(&self.content)?; file.write_all(&self.content)?;
@ -99,23 +104,35 @@ impl RepoMetadata {
file.read_exact(&mut date)?; file.read_exact(&mut date)?;
let date = u64::from_be_bytes(date); let date = u64::from_be_bytes(date);
let mut commit_url_len = [0u8; 4]; let mut repo_url_len = [0u8; 4];
file.read_exact(&mut commit_url_len)?; file.read_exact(&mut repo_url_len)?;
let commit_url_len = u32::from_be_bytes(commit_url_len) as usize; let repo_url_len = u32::from_be_bytes(repo_url_len) as usize;
if commit_url_len > MAX_URL_SIZE { if repo_url_len > MAX_URL_SIZE {
return Err(ReadRepoMetadataError::InvalidFormat); return Err(ReadRepoMetadataError::InvalidFormat);
} }
let mut commit_url = vec![0; commit_url_len]; let mut repo_url = vec![0; repo_url_len];
file.read_exact(&mut commit_url)?; file.read_exact(&mut repo_url)?;
let commit_url = String::from_utf8(commit_url)?; let repo_url = String::from_utf8(repo_url)?;
let mut commit_hash_len = [0u8; 4];
file.read_exact(&mut commit_hash_len)?;
let commit_hash_len = u32::from_be_bytes(commit_hash_len) as usize;
if commit_hash_len > MAX_URL_SIZE {
return Err(ReadRepoMetadataError::InvalidFormat);
}
let mut commit_hash = vec![0; commit_hash_len];
file.read_exact(&mut commit_hash)?;
let commit_hash = String::from_utf8(commit_hash)?;
let mut content = Vec::new(); let mut content = Vec::new();
file.read_to_end(&mut content)?; file.read_to_end(&mut content)?;
Ok(Self { Ok(Self {
date, date,
commit_url, repo_url,
commit_hash,
content, content,
}) })
} }

View file

@ -1,9 +1,13 @@
use crate::{cache, config::Config, repo::ReadRepoMetadataError}; use crate::{cache, config::Config, repo::ReadRepoMetadataError, templates};
use askama::Template; use askama::Template;
use async_lock::Mutex; use async_lock::Mutex;
use log::error; use log::error;
use std::{collections::HashMap, io::ErrorKind, path::PathBuf}; use std::{
collections::{BTreeMap, BTreeSet, HashMap},
io::ErrorKind,
path::PathBuf,
};
use trillium::{Conn, Handler}; use trillium::{Conn, Handler};
use trillium_router::{Router, RouterConnExt}; use trillium_router::{Router, RouterConnExt};
@ -51,7 +55,8 @@ pub fn make_router(config: &'static Config) -> impl Handler {
}; };
let mut client = client.lock().await; let mut client = client.lock().await;
let repo_index = crate::api_client::fetch_repo_tree_index_at_commit( let (repo_index, mut repo_metadata) =
crate::api_client::fetch_repo_tree_index_at_commit(
&mut client, &mut client,
&repo_url, &repo_url,
&commit_hash, &commit_hash,
@ -59,7 +64,13 @@ pub fn make_router(config: &'static Config) -> impl Handler {
) )
.await .await
.expect("todo handle error"); .expect("todo handle error");
crate::api_client::fetch_repo_files(config, &mut client, &repo_index, None) crate::api_client::fetch_repo_files(
config,
&mut client,
&repo_index,
&mut repo_metadata,
None,
)
.await .await
.expect("todo handle error"); .expect("todo handle error");
} }
@ -74,9 +85,7 @@ pub fn make_router(config: &'static Config) -> impl Handler {
let cache_fetch = |key| { let cache_fetch = |key| {
let mut repo_hash = [0; 32]; let mut repo_hash = [0; 32];
if base64_turbo::URL_SAFE.decode_into(repo_hash_str, &mut repo_hash) if base64_turbo::URL_SAFE.decode_into(key, &mut repo_hash) != Ok(32) {
!= Ok(32)
{
return None; return None;
} }
let repo_dir = PathBuf::from(&config.data_dir) let repo_dir = PathBuf::from(&config.data_dir)
@ -95,11 +104,16 @@ pub fn make_router(config: &'static Config) -> impl Handler {
return None; return None;
} }
}; };
let mut files = HashMap::new(); let mut entries = templates::Directory::default();
for file in repo_metadata.iter_files() { for entry in repo_metadata.iter_files() {
match file { match entry {
Ok(file) => { Ok(entry) => {
files.insert(file.file_path.to_string(), file.hash.to_string()); let Some(name) = entry.file_path.rsplit('/').next() else {
error!("Entry has no name");
continue;
};
let path = entry.file_path.split('/').peekable();
entries.insert(entry, path);
} }
Err(e) => { Err(e) => {
error!("Reading repo metadata file index: {e:?}") error!("Reading repo metadata file index: {e:?}")
@ -109,10 +123,13 @@ pub fn make_router(config: &'static Config) -> impl Handler {
Some(files) Some(files)
}; };
// TODO replace mutex with better thing (less contention or async mutex) // TODO replace mutex with better thing (less contention or async mutex)
metadata_cache let Some(metadata) = metadata_cache
.lock() .lock()
.await .await
.fetch(repo_hash_str.to_string(), cache_fetch); .fetch(repo_hash_str.to_string(), cache_fetch)
else {
return conn.with_status(404);
};
let hl_options = giallo::HighlightOptions::new( let hl_options = giallo::HighlightOptions::new(
"py", "py",
@ -130,6 +147,7 @@ pub fn make_router(config: &'static Config) -> impl Handler {
); );
conn.ok(crate::templates::Repo { conn.ok(crate::templates::Repo {
content: html.clone(), content: html.clone(),
entries: Vec::new(),
} }
.render() .render()
.unwrap()) .unwrap())

View file

@ -1,3 +1,9 @@
use std::{
collections::{BTreeMap, BTreeSet, HashMap, btree_map, btree_set},
iter::Peekable,
};
use log::warn;
use trillium_askama::Template; use trillium_askama::Template;
#[derive(Template)] #[derive(Template)]
@ -8,4 +14,88 @@ pub struct Home {}
#[template(path = "repo.html")] #[template(path = "repo.html")]
pub struct Repo { pub struct Repo {
pub content: String, pub content: String,
pub entries: BTreeSet<Entry>,
}
/// A node of the rendered file tree; recursively renders itself as an HTML
/// `<li>` (a `<details>` block for directories, a single line for files)
/// via the inline Askama template below.
///
/// `escape = "none"` disables auto-escaping so nested `entry.render()?`
/// output is injected verbatim; entry names are escaped explicitly with
/// the `|escape` filter.
#[derive(Template)]
#[template(
source = r#"
{% match self %}
{% when Entry::Directory(dir) %}
<li class="tree-dir">
<details>
<summary><span class="filename">{{ dir.name|escape }}</span></summary>
<ul class="tree">
{% for entry in dir.entries.values() %}
{{ entry.render()? }}
{% endfor %}
</ul>
</details>
</li>
{% when Entry::File(file) %}
<li class="tree-file"><span class="filename">{{ file.name|escape }}</span></li>
{% endmatch %}
"#,
ext = "html",
escape = "none"
)]
pub enum Entry {
/// A directory with nested child entries.
Directory(Directory),
/// A leaf file.
File(File),
}
/// A leaf file in the rendered repository tree.
pub struct File {
// Display name (last component of the path).
name: String,
// Full path from the repository root.
// NOTE(review): not read anywhere in this view — confirm intended use.
path: String,
}
/// A directory node in the rendered repository tree.
/// `Default` yields the (nameless) root directory.
#[derive(Default)]
pub struct Directory {
// Display name (last component of the path); empty for the root.
name: String,
// Full path from the repository root.
// NOTE(review): not read anywhere in this view — confirm intended use.
path: String,
// Children keyed by entry name; BTreeMap keeps them sorted for display.
entries: BTreeMap<String, Entry>,
}
impl Entry {
fn name(&self) -> &str {
match self {
Entry::Directory(dir) => &dir.name,
Entry::File(file) => &file.name,
}
}
}
// Entries compare by display name only: a file and a directory with the
// same name compare equal (they cannot coexist in one directory anyway).
// A total order (`Eq` + `Ord`) is required because `Entry` is stored in
// `BTreeSet`/`BTreeMap` containers; `PartialEq`/`PartialOrd` alone do not
// satisfy those bounds. `PartialOrd` delegates to `Ord` per convention.
impl PartialEq for Entry {
    fn eq(&self, other: &Entry) -> bool {
        self.name() == other.name()
    }
}

impl Eq for Entry {}

impl Ord for Entry {
    fn cmp(&self, other: &Entry) -> std::cmp::Ordering {
        self.name().cmp(other.name())
    }
}

impl PartialOrd for Entry {
    fn partial_cmp(&self, other: &Entry) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Directory {
pub fn insert<'a>(&mut self, entry: Entry, mut path: Peekable<impl Iterator<Item = &'a str>>) {
let Some(next_path_level) = path.next() else {
warn!("Cannot insert entry: no more path");
return;
};
if path.peek().is_some() {
if let Some(parent) = self.entries.get_mut(next_path_level) {
match parent {
Entry::Directory(dir) => dir.insert(entry, path),
Entry::File(_file) => {
warn!("Cannot insert entry: file");
}
}
} else {
warn!("Cannot insert entry: not found");
}
} else {
self.entries.insert(entry.name().to_string(), entry);
}
}
} }

View file

@ -29,26 +29,9 @@ html, body {
</header> </header>
<div id="tree"> <div id="tree">
<ul class="tree"> <ul class="tree">
<li class="tree-file"><span class="filename">Fichier</span></li> {% for entry in entries %}
<li class="tree-file"><span class="filename">Fichier</span></li> {{ entry.render()? }}
<li class="tree-dir"> {% endfor %}
<details>
<summary><span class="filename">Dossier</span></summary>
<ul class="tree">
<li class="tree-file"><span class="filename">Fichier</span></li>
<li class="tree-file"><span class="filename">Fichier</span></li>
<li class="tree-dir">
<details>
<summary><span class="filename">Dossier</span></summary>
</details>
</li>
<li class="tree-file"><span class="filename">Fichier</span></li>
</ul>
</details>
</li>
<li class="tree-file"><span class="filename">Fichier</span></li>
</ul> </ul>
</div> </div>
<div id="page"> <div id="page">