feat: expose posts pages to both server and build #85

Merged
merged 1 commit on Mar 4, 2025
9 changes: 7 additions & 2 deletions src/cmd/build.rs
@@ -102,6 +102,7 @@ async fn generate_public_build(
site_config: config::SiteConfig,
minify: bool,
) -> Result<()> {
let posts = shared::collect_all_posts_metadata(&paths.build, &site_config.root_url).await?;
let entries = WalkDir::new(&paths.build)
.into_iter()
.filter_map(|e| e.ok());
@@ -112,6 +113,7 @@
// Parallel processing
futures_util::stream::iter(entries)
.for_each_concurrent(num_cpus::get(), |entry| {
let posts = posts.clone();
let site_config = site_config.clone();
let validation_errors = Arc::clone(&validation_errors);

@@ -123,6 +125,7 @@
&site_config,
minify,
&validation_errors,
&posts,
)
.await
{
@@ -144,14 +147,15 @@ async fn generate_public_build(
///
/// Handles template rendering, metadata validation, and output path determination.
/// Skips draft content and applies minification when enabled.
#[instrument(skip(tera, paths, site_config, validation_errors))]
#[instrument(skip(tera, paths, site_config, validation_errors, posts))]
async fn process_build_entry(
entry: DirEntry,
tera: &Tera,
paths: &SitePaths,
site_config: &config::SiteConfig,
minify: bool,
validation_errors: &Arc<Mutex<Vec<String>>>,
posts: &[toml::Value],
) -> Result<()> {
let path = entry.path();

@@ -207,6 +211,7 @@ async fn process_build_entry(
let mut context = Context::new();
context.insert("content", &html);
context.insert("config", &site_config);
context.insert("posts", &posts);
context.insert("metadata", &metadata);

// Render template
@@ -337,7 +342,7 @@ async fn write_public_file(public_path: &Path, rendered: String) -> Result<()> {
fn should_minify_asset(src: &Path) -> bool {
let file_stem = src.file_stem().and_then(|s| s.to_str()).unwrap_or_default();
let file_ext = src.extension().and_then(|s| s.to_str()).unwrap_or_default();
file_stem != "min" && (file_ext == "js" || file_ext == "css")
!file_stem.ends_with(".min") && (file_ext == "js" || file_ext == "css")
}

/// Minifies HTML content using optimized settings for production builds.
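As a sanity check on the minification change, the snippet below copies the revised predicate and exercises it on a few hypothetical asset paths. The assertions reflect the intent of the diff: pre-minified `*.min.js` / `*.min.css` files are now skipped, whereas the old `file_stem != "min"` comparison only skipped files literally named `min.js` or `min.css`.

```rust
use std::path::Path;

// Copy of the revised predicate from src/cmd/build.rs, reproduced here so the
// example compiles on its own.
fn should_minify_asset(src: &Path) -> bool {
    let file_stem = src.file_stem().and_then(|s| s.to_str()).unwrap_or_default();
    let file_ext = src.extension().and_then(|s| s.to_str()).unwrap_or_default();
    // `file_stem()` of "app.min.js" is "app.min", so the suffix check catches
    // pre-minified bundles regardless of their base name.
    !file_stem.ends_with(".min") && (file_ext == "js" || file_ext == "css")
}

fn main() {
    assert!(should_minify_asset(Path::new("assets/app.js"))); // will be minified
    assert!(should_minify_asset(Path::new("assets/site.css"))); // will be minified
    assert!(!should_minify_asset(Path::new("assets/app.min.js"))); // already minified, skipped
    assert!(!should_minify_asset(Path::new("assets/logo.svg"))); // not js/css, skipped
}
```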
28 changes: 24 additions & 4 deletions src/cmd/serve.rs
@@ -81,6 +81,7 @@ struct ServerState {
paths: SitePaths,
build_drafts: bool,
routes_url: String,
posts: Arc<RwLock<Vec<toml::Value>>>,
}

impl ServerState {
@@ -149,7 +150,7 @@ impl ServerState {
/// This struct defines the actions that should be performed when file system events
/// are detected. It includes flags for reloading templates and assets, as well as
/// lists of paths to rebuild or clean up.
#[derive(Default, Debug)]
#[derive(Default, Debug, Clone)]
struct FileActions {
reload_templates: bool,
reload_assets: bool,
@@ -340,7 +341,8 @@ async fn execute_actions(actions: FileActions, state: Arc<ServerState>) {
}

// Rebuild changed content
for path in actions.rebuild_paths {
for path in &actions.rebuild_paths {
let path = path.clone();
let state = Arc::clone(&state);
let content_path = match path.strip_prefix(&state.paths.content) {
Ok(p) => p.to_path_buf(),
@@ -391,6 +393,18 @@ async fn execute_actions(actions: FileActions, state: Arc<ServerState>) {
}
});
}

// Re-collect pages metadata on content changes
if !actions.rebuild_paths.is_empty() || !actions.cleanup_paths.is_empty() {
let build_dir = state.paths.content.parent().unwrap().join(".build");
match shared::collect_all_posts_metadata(&build_dir, &state.routes_url).await {
Ok(new_posts) => {
let mut posts_lock = state.posts.write().await;
*posts_lock = new_posts;
}
Err(e) => error!("Failed to update pages metadata: {}", e),
}
}
}

/// Helper function to handle content retrieval errors.
@@ -725,10 +739,12 @@ async fn handle_html_content(
.get("layout")
.and_then(|v| v.as_str())
.unwrap_or("default");
let posts = state.posts.read().await.clone();

let mut context = Context::new();
context.insert("content", content);
context.insert("config", &state.config);
context.insert("posts", &posts);
context.insert("metadata", &metadata);

let tera = state.tera.read().await;
@@ -852,9 +868,9 @@ async fn handle_server_request(

let response = match handle_request(req, state).await {
Ok(res) => res,
Err(_e) => {
Err(e) => {
// XXX: this may be too verbose sometimes, do we want to keep it?
// error!("Request handling error: {}", e);
error!("Request handling error: {}", e);
Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(Body::from("500 Internal Server Error"))
@@ -905,13 +921,17 @@ async fn setup_server_state(

let (reload_tx, _) = broadcast::channel(16);

let build_dir = root_dir.join(".build");
let posts = shared::collect_all_posts_metadata(&build_dir, &routes_url).await?;

Ok(Arc::new(ServerState {
reload_tx: Arc::new(reload_tx),
tera,
config: site_config,
paths,
build_drafts: drafts,
routes_url,
posts: Arc::new(RwLock::new(posts)),
}))
}

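The serve-side changes hinge on caching the collected metadata behind an `Arc<RwLock<...>>`, so the file-watcher task can refresh it while request handlers take cheap read snapshots. A minimal sketch of that pattern, with illustrative names rather than the crate's own types, and assuming the `tokio` (with `macros` and a runtime feature) and `toml` crates:

```rust
use std::sync::Arc;
use tokio::sync::RwLock;

#[derive(Clone)]
struct State {
    // Shared cache of post metadata, analogous to ServerState::posts in the diff.
    posts: Arc<RwLock<Vec<toml::Value>>>,
}

// Writer side: the watcher replaces the cache after content changes.
async fn refresh(state: &State, new_posts: Vec<toml::Value>) {
    let mut guard = state.posts.write().await;
    *guard = new_posts;
}

// Reader side: handlers clone a snapshot so the lock is not held across
// template rendering.
async fn snapshot(state: &State) -> Vec<toml::Value> {
    state.posts.read().await.clone()
}

#[tokio::main]
async fn main() {
    let state = State {
        posts: Arc::new(RwLock::new(Vec::new())),
    };
    refresh(&state, vec![toml::Value::String("placeholder".into())]).await;
    assert_eq!(snapshot(&state).await.len(), 1);
}
```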
114 changes: 105 additions & 9 deletions src/shared/mod.rs
@@ -4,6 +4,7 @@ use std::time::Instant;
use eyre::{bail, eyre, Result};
use tera::Tera;
use tracing::{error, info};
use walkdir::WalkDir;

use crate::converter;
use crate::schema::{format_errors, validate_metadata, ContentSchema};
@@ -195,16 +196,20 @@ pub async fn init_tera(templates_dir: &str, theme_templates_dir: &Path) -> Resul
// Initialize Tera with the user-defined templates first
let mut tera = match Tera::parse(&(templates_dir.to_owned() + "/**/*.html")) {
Ok(t) => t,
Err(e) => bail!("Error parsing templates from the templates directory: {}", e),
Err(e) => bail!(
"Error parsing templates from the templates directory: {}",
e
),
};

// Theme templates will override the user-defined templates by design if they are named exactly
// the same in both the user's templates directory and the theme templates directory
if tokio::fs::try_exists(&theme_templates_dir).await? {
let tera_theme = match Tera::parse(&(theme_templates_dir.display().to_string() + "/**/*.html")) {
Ok(t) => t,
Err(e) => bail!("Error parsing templates from themes: {}", e),
};
let tera_theme =
match Tera::parse(&(theme_templates_dir.display().to_string() + "/**/*.html")) {
Ok(t) => t,
Err(e) => bail!("Error parsing templates from themes: {}", e),
};
tera.extend(&tera_theme)?;
}
tera.build_inheritance_chains()?;
@@ -227,10 +232,23 @@ pub async fn init_tera(templates_dir: &str, theme_templates_dir: &Path) -> Resul
/// * `toml::Value` - The parsed metadata or an empty table if an error occurs.
pub async fn load_metadata(path: PathBuf) -> toml::Value {
match tokio::fs::read_to_string(&path).await {
Ok(content) => toml::from_str(&content).unwrap_or_else(|e| {
error!("Metadata parse error: {}", e);
toml::Value::Table(toml::map::Map::new())
}),
Ok(content) => {
let mut value = toml::from_str(&content).unwrap_or_else(|e| {
error!("Metadata parse error: {}", e);
toml::Value::Table(toml::map::Map::new())
});

// Convert TOML datetimes to RFC3339 strings
if let Some(table) = value.as_table_mut() {
for (_k, v) in table.iter_mut() {
if let toml::Value::Datetime(dt) = v {
*v = toml::Value::String(dt.to_string());
}
}
}

value
}
Err(e) => {
error!("Metadata file not found: {}", e);
toml::Value::Table(toml::map::Map::new())
@@ -286,3 +304,81 @@ pub async fn validate_content_metadata(
}
Ok(String::new())
}

pub async fn collect_all_posts_metadata(
build_dir: &Path,
routes_url: &str,
) -> Result<Vec<toml::Value>> {
let mut posts = Vec::new();

for entry in WalkDir::new(build_dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
let path = e.path();
let file_name = path.file_name().and_then(|name| name.to_str());
let is_meta_file = file_name.is_some_and(|name| name.ends_with(".meta.toml"));
let is_post = path
.strip_prefix(build_dir)
.is_ok_and(|p| p.starts_with("posts") && !p.ends_with("posts/index.meta.toml"));
is_post && is_meta_file
})
{
let meta_path = entry.path();
let mut metadata = load_metadata(entry.path().to_path_buf()).await;

// TODO: this won't hot reload if the content changes, should be passed as an argument instead
// Get the raw html content
let html_file = entry.path().with_extension("").with_extension("html");
let html = tokio::fs::read_to_string(&html_file).await?;

// Generate permalink from file structure
let rel_path = meta_path.strip_prefix(build_dir)?;
// Remove .meta.toml
let mut permalink_path = rel_path.with_extension("").with_extension("");

// Handle index pages
if let Some(file_name) = permalink_path.file_name() {
if file_name == "index" {
permalink_path = permalink_path
.parent()
.unwrap_or_else(|| Path::new(""))
.to_path_buf();
}
}

// Convert to URL path
let permalink_str = permalink_path
.to_string_lossy()
.trim_start_matches('/')
.to_string();

let permalink = if permalink_str.is_empty() {
format!("{}/", routes_url)
} else {
format!("{}/{}/", routes_url, permalink_str)
};

// Add permalink and html content to metadata
if let toml::Value::Table(ref mut table) = metadata {
table.insert("permalink".to_string(), toml::Value::String(permalink));
table.insert("raw".to_string(), toml::Value::String(html));
}
posts.push(metadata);
}

posts.sort_by(|a, b| {
let a_date = a.get("date").and_then(|v| v.as_str()).unwrap_or_default();
let b_date = b.get("date").and_then(|v| v.as_str()).unwrap_or_default();

let parse_date = |s: &str| {
chrono::DateTime::parse_from_str(s, "%Y-%m-%d")
.unwrap_or_else(|_| chrono::DateTime::from_timestamp(0, 0).unwrap().into())
.with_timezone(&chrono::Utc)
};

parse_date(b_date).cmp(&parse_date(a_date))
});

Ok(posts)
}
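For context on how the new `posts` variable might be consumed, here is a small, self-contained sketch that renders an inline Tera template against two hand-written metadata tables. The inline template and the example.org permalinks are illustrative only; real layouts live in the site's templates directory and receive whatever keys the `.meta.toml` front matter defines, plus the `permalink` and `raw` keys the collector inserts.

```rust
use tera::{Context, Tera};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Two fake entries shaped like the metadata the collector emits: "permalink"
    // is injected by collect_all_posts_metadata, "date" comes from front matter.
    let posts: Vec<toml::Value> = vec![
        toml::from_str(
            "date = \"2025-03-01\"\npermalink = \"https://example.org/posts/newer/\"",
        )?,
        toml::from_str(
            "date = \"2024-12-31\"\npermalink = \"https://example.org/posts/older/\"",
        )?,
    ];

    // A one-off inline template standing in for a user layout that receives `posts`.
    let mut tera = Tera::default();
    tera.add_raw_template(
        "list.html",
        r#"<ul>{% for post in posts %}<li><a href="{{ post.permalink }}">{{ post.date }}</a></li>{% endfor %}</ul>"#,
    )?;

    let mut context = Context::new();
    context.insert("posts", &posts);
    println!("{}", tera.render("list.html", &context)?);
    Ok(())
}
```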
3 changes: 2 additions & 1 deletion src/tera_functions.rs
@@ -1,5 +1,6 @@
use eyre::Result;
use std::collections::HashMap;

use eyre::Result;
use tera::{Error, Function, Value};

/// Now function