wip render repo

This commit is contained in:
Pascal Engélibert 2026-03-29 12:00:57 +02:00
commit 145e3c592e
6 changed files with 190 additions and 50 deletions

View file

@ -1,6 +1,9 @@
use std::{collections::HashSet, io::Write, path::PathBuf};
use crate::config::Config;
use crate::{
config::Config,
repo::{RepoMetadata, WriteRepoMetadataError},
};
use rand::Rng;
use serde::Deserialize;
@ -108,6 +111,7 @@ pub enum FetchRepoError {
TooManyEntries,
Client(ClientError),
UrlParsing,
WriteRepoMetadata(WriteRepoMetadataError),
}
impl From<ClientError> for FetchRepoError {
@ -140,14 +144,20 @@ impl From<url::ParseError> for FetchRepoError {
}
}
impl From<WriteRepoMetadataError> for FetchRepoError {
fn from(value: WriteRepoMetadataError) -> Self {
Self::WriteRepoMetadata(value)
}
}
#[derive(Default)]
pub struct RepoIndex {
files: Vec<RepoIndexFile>,
pub files: Vec<RepoIndexFile>,
}
pub struct RepoIndexFile {
path: String,
url: String,
pub path: String,
pub url: String,
}
pub async fn fetch_repo_tree_index_at_commit(
@ -155,10 +165,11 @@ pub async fn fetch_repo_tree_index_at_commit(
repo_url: &str,
commit_hash: &str,
token: Option<&str>,
) -> Result<RepoIndex, FetchRepoError> {
) -> Result<(RepoIndex, RepoMetadata), FetchRepoError> {
let parsed = url::Url::parse(repo_url)?;
let mut base = parsed.clone();
base.set_fragment(None);
base.set_path("");
let base_url = base.as_str();
// Is the URL always /owner/repo?
let mut segments = parsed.path_segments().ok_or(FetchRepoError::UrlParsing)?;
@ -196,7 +207,15 @@ pub async fn fetch_repo_tree_index_at_commit(
}
}
Ok(repo_index)
Ok((
repo_index,
RepoMetadata {
date: std::time::UNIX_EPOCH.elapsed().unwrap().as_secs(),
repo_url: repo_url.to_string(),
commit_hash: commit_hash.to_string(),
content: Vec::new(),
},
))
}
#[derive(Deserialize)]
@ -208,6 +227,7 @@ pub async fn fetch_repo_files(
config: &Config,
client: &mut Client,
repo_index: &RepoIndex,
repo_metadata: &mut RepoMetadata,
token: Option<&str>,
) -> Result<(), FetchRepoError> {
let mut hasher = sha2::Sha256::default();
@ -246,6 +266,15 @@ pub async fn fetch_repo_files(
base64_turbo::URL_SAFE
.encode_into(file_name, &mut file_name_str)
.expect("unreachable");
repo_metadata
.content
.extend_from_slice(&(file.path.len() as u32).to_be_bytes());
repo_metadata
.content
.extend_from_slice(file.path.as_bytes());
repo_metadata.content.extend_from_slice(&file_name_str);
let file_name_str = str::from_utf8(&file_name_str).expect("unreachable");
if let Ok(mut file) = std::fs::OpenOptions::new()
.create_new(true)
@ -258,5 +287,7 @@ pub async fn fetch_repo_files(
}
}
repo_metadata.write_to_file(config, &repo_dir)?;
Ok(())
}

View file

@ -1,3 +1,5 @@
#![feature(btree_set_entry)]
use std::{path::PathBuf, sync::Arc};
mod api_client;
@ -10,6 +12,8 @@ mod templates;
use std::str::FromStr;
use log::info;
const SUBDIR_REPOS: &str = "repos";
fn log_level_from_str(input: &str) -> Result<log::LevelFilter, String> {
@ -49,6 +53,10 @@ fn main() {
.recursive(true)
.create(PathBuf::from(&config.data_dir).join(SUBDIR_REPOS))
.expect("Cannot create repos dir");
info!(
"Listening http://{}:{}",
config.listen_host, config.listen_port
);
trillium_smol::config()
.with_host(&config.listen_host)
.with_port(config.listen_port)

View file

@ -14,8 +14,9 @@ const VERSION: [u8; 1] = [0];
#[derive(Debug)]
pub struct RepoMetadata {
pub date: u64,
pub commit_url: String,
content: Vec<u8>,
pub repo_url: String,
pub commit_hash: String,
pub content: Vec<u8>,
}
#[derive(Debug)]
@ -74,9 +75,13 @@ impl RepoMetadata {
let now = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs();
file.write_all(&now.to_be_bytes())?;
file.write_all(&(self.commit_url.len() as u32).to_be_bytes())?;
file.write_all(&(self.repo_url.len() as u32).to_be_bytes())?;
file.write_all(self.commit_url.as_bytes())?;
file.write_all(self.repo_url.as_bytes())?;
file.write_all(&(self.commit_hash.len() as u32).to_be_bytes())?;
file.write_all(self.commit_hash.as_bytes())?;
file.write_all(&self.content)?;
@ -99,23 +104,35 @@ impl RepoMetadata {
file.read_exact(&mut date)?;
let date = u64::from_be_bytes(date);
let mut commit_url_len = [0u8; 4];
file.read_exact(&mut commit_url_len)?;
let commit_url_len = u32::from_be_bytes(commit_url_len) as usize;
if commit_url_len > MAX_URL_SIZE {
let mut repo_url_len = [0u8; 4];
file.read_exact(&mut repo_url_len)?;
let repo_url_len = u32::from_be_bytes(repo_url_len) as usize;
if repo_url_len > MAX_URL_SIZE {
return Err(ReadRepoMetadataError::InvalidFormat);
}
let mut commit_url = vec![0; commit_url_len];
file.read_exact(&mut commit_url)?;
let commit_url = String::from_utf8(commit_url)?;
let mut repo_url = vec![0; repo_url_len];
file.read_exact(&mut repo_url)?;
let repo_url = String::from_utf8(repo_url)?;
let mut commit_hash_len = [0u8; 4];
file.read_exact(&mut commit_hash_len)?;
let commit_hash_len = u32::from_be_bytes(commit_hash_len) as usize;
if commit_hash_len > MAX_URL_SIZE {
return Err(ReadRepoMetadataError::InvalidFormat);
}
let mut commit_hash = vec![0; commit_hash_len];
file.read_exact(&mut commit_hash)?;
let commit_hash = String::from_utf8(commit_hash)?;
let mut content = Vec::new();
file.read_to_end(&mut content)?;
Ok(Self {
date,
commit_url,
repo_url,
commit_hash,
content,
})
}

View file

@ -1,9 +1,9 @@
use crate::{cache, config::Config, repo::ReadRepoMetadataError};
use crate::{cache, config::Config, repo::ReadRepoMetadataError, templates};
use askama::Template;
use async_lock::Mutex;
use log::error;
use std::{collections::HashMap, io::ErrorKind, path::PathBuf};
use std::{collections::{BTreeMap, BTreeSet, HashMap}, io::ErrorKind, path::PathBuf};
use trillium::{Conn, Handler};
use trillium_router::{Router, RouterConnExt};
@ -51,7 +51,7 @@ pub fn make_router(config: &'static Config) -> impl Handler {
};
let mut client = client.lock().await;
let repo_index = crate::api_client::fetch_repo_tree_index_at_commit(
let (repo_index, mut repo_metadata) = crate::api_client::fetch_repo_tree_index_at_commit(
&mut client,
&repo_url,
&commit_hash,
@ -59,7 +59,7 @@ pub fn make_router(config: &'static Config) -> impl Handler {
)
.await
.expect("todo handle error");
crate::api_client::fetch_repo_files(config, &mut client, &repo_index, None)
crate::api_client::fetch_repo_files(config, &mut client, &repo_index, &mut repo_metadata, None)
.await
.expect("todo handle error");
}
@ -74,7 +74,7 @@ pub fn make_router(config: &'static Config) -> impl Handler {
let cache_fetch = |key| {
let mut repo_hash = [0; 32];
if base64_turbo::URL_SAFE.decode_into(repo_hash_str, &mut repo_hash)
if base64_turbo::URL_SAFE.decode_into(key, &mut repo_hash)
!= Ok(32)
{
return None;
@ -95,11 +95,19 @@ pub fn make_router(config: &'static Config) -> impl Handler {
return None;
}
};
let mut files = HashMap::new();
for file in repo_metadata.iter_files() {
match file {
Ok(file) => {
files.insert(file.file_path.to_string(), file.hash.to_string());
let mut entries = templates::Directory::default();
for entry in repo_metadata.iter_files() {
match entry {
Ok(entry) => {
let Some(name) = entry.file_path.rsplit('/').next() else {
error!("Entry has no name");
continue;
};
let path = entry.file_path.split('/').peekable();
let template_entry = match entry {
}
entries.insert(, path);
}
Err(e) => {
error!("Reading repo metadata file index: {e:?}")
@ -109,10 +117,12 @@ pub fn make_router(config: &'static Config) -> impl Handler {
Some(files)
};
// TODO replace mutex with better thing (less contention or async mutex)
metadata_cache
let Some(metadata) = metadata_cache
.lock()
.await
.fetch(repo_hash_str.to_string(), cache_fetch);
.fetch(repo_hash_str.to_string(), cache_fetch) else {
return conn.with_status(404);
};
let hl_options = giallo::HighlightOptions::new(
"py",
@ -130,6 +140,7 @@ pub fn make_router(config: &'static Config) -> impl Handler {
);
conn.ok(crate::templates::Repo {
content: html.clone(),
entries: Vec::new(),
}
.render()
.unwrap())

View file

@ -1,3 +1,9 @@
use std::{
collections::{BTreeMap, BTreeSet, HashMap, btree_map, btree_set},
iter::Peekable,
};
use log::warn;
use trillium_askama::Template;
#[derive(Template)]
@ -8,4 +14,88 @@ pub struct Home {}
#[template(path = "repo.html")]
// Template context for the repository page: the rendered file `content`
// plus the file-tree `entries` shown in the sidebar.
pub struct Repo {
    // Pre-rendered (highlighted) HTML of the currently displayed file.
    pub content: String,
    // Root entries of the repository file tree.
    // NOTE(review): the router currently constructs `Repo { entries: Vec::new(), .. }`,
    // which does not match `BTreeSet<Entry>` — confirm which type is intended (wip).
    pub entries: BTreeSet<Entry>,
}
// A node of the rendered repository file tree. The inline Askama template
// renders a directory as a collapsible <details> list, recursing into its
// children via `entry.render()?`, and a file as a plain list item.
// NOTE(review): the template sets `escape = "none"` and relies on the explicit
// `|escape` filter for names — confirm this escapes HTML as intended.
#[derive(Template)]
#[template(
    source = r#"
{% match self %}
{% when Entry::Directory(dir) %}
<li class="tree-dir">
<details>
<summary><span class="filename">{{ dir.name|escape }}</span></summary>
<ul class="tree">
{% for entry in dir.entries.values() %}
{{ entry.render()? }}
{% endfor %}
</ul>
</details>
</li>
{% when Entry::File(file) %}
<li class="tree-file"><span class="filename">{{ file.name|escape }}</span></li>
{% endmatch %}
"#,
    ext = "html",
    escape = "none"
)]
pub enum Entry {
    // A subdirectory, holding its own child entries.
    Directory(Directory),
    // A single file (leaf node).
    File(File),
}
/// A leaf node of the repository tree: a single file.
pub struct File {
    // File name (last path component); used as the display label in the template.
    name: String,
    // Path of the file within the repository — presumably for building links;
    // TODO confirm (not read anywhere in this view).
    path: String,
}
/// An inner node of the repository tree; owns its children keyed by name.
#[derive(Default)]
pub struct Directory {
    // Directory name (last path component); shown as the <summary> label.
    name: String,
    // Path of the directory within the repository — TODO confirm intended use
    // (not read anywhere in this view).
    path: String,
    // Children keyed by entry name; BTreeMap keeps siblings sorted by name.
    entries: BTreeMap<String, Entry>,
}
impl Entry {
fn name(&self) -> &str {
match self {
Entry::Directory(dir) => &dir.name,
Entry::File(file) => &file.name,
}
}
}
// Entries are compared by name only, so siblings within a directory are
// ordered and deduplicated by their file/directory name.
//
// Fix: `Repo.entries` is a `BTreeSet<Entry>`, which requires `Entry: Ord`;
// the original only provided `PartialEq`/`PartialOrd`, so any use of the set
// would not compile. `Eq` and `Ord` are added (total order is sound because
// `str::cmp` is total), and `PartialOrd` now delegates to `Ord` per the
// canonical pattern (clippy: non_canonical_partial_ord_impl).
impl PartialEq for Entry {
    fn eq(&self, other: &Self) -> bool {
        self.name() == other.name()
    }
}

impl Eq for Entry {}

impl PartialOrd for Entry {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Entry {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.name().cmp(other.name())
    }
}
impl Directory {
    /// Inserts `entry` into the tree at the location described by `path`
    /// (an iterator over path components). If components remain after the
    /// next one, descends into the matching existing subdirectory; otherwise
    /// the entry is stored directly in this directory, keyed by its name.
    /// Failures (empty path, missing intermediate directory, or a file where
    /// a directory is expected) are logged and the entry is dropped.
    /// NOTE(review): missing intermediate directories are not auto-created —
    /// confirm whether the caller guarantees parents exist (wip).
    pub fn insert<'a>(&mut self, entry: Entry, mut path: Peekable<impl Iterator<Item = &'a str>>) {
        let next_path_level = match path.next() {
            Some(level) => level,
            None => {
                warn!("Cannot insert entry: no more path");
                return;
            }
        };
        if path.peek().is_none() {
            // Last component: the entry belongs directly in this directory.
            self.entries.insert(entry.name().to_string(), entry);
            return;
        }
        // More components remain: recurse into the named subdirectory.
        match self.entries.get_mut(next_path_level) {
            Some(Entry::Directory(dir)) => dir.insert(entry, path),
            Some(Entry::File(_)) => warn!("Cannot insert entry: file"),
            None => warn!("Cannot insert entry: not found"),
        }
    }
}

View file

@ -29,26 +29,9 @@ html, body {
</header>
<div id="tree">
<ul class="tree">
<li class="tree-file"><span class="filename">Fichier</span></li>
<li class="tree-file"><span class="filename">Fichier</span></li>
<li class="tree-dir">
<details>
<summary><span class="filename">Dossier</span></summary>
<ul class="tree">
<li class="tree-file"><span class="filename">Fichier</span></li>
<li class="tree-file"><span class="filename">Fichier</span></li>
<li class="tree-dir">
<details>
<summary><span class="filename">Dossier</span></summary>
</details>
</li>
<li class="tree-file"><span class="filename">Fichier</span></li>
</ul>
</details>
</li>
<li class="tree-file"><span class="filename">Fichier</span></li>
{% for entry in entries %}
{{ entry.render()? }}
{% endfor %}
</ul>
</div>
<div id="page">