extract ssg logic to crate

Maciej Jur 2024-07-20 17:08:11 +02:00
parent 005d4c15e5
commit b5049bba09
Signed by: kamov
GPG key ID: 191CBFF5F72ECAFD
22 changed files with 616 additions and 527 deletions

.gitignore (vendored)

@@ -27,5 +27,5 @@ target/
# JavaScript
js/**/node_modules/
# Hashed images
.hash
# Hashed file cache
.cache

Cargo.lock (generated)

@@ -107,9 +107,9 @@ checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236"
[[package]]
name = "async-trait"
version = "0.1.80"
version = "0.1.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca"
checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
dependencies = [
"proc-macro2",
"quote",
@@ -348,9 +348,9 @@ dependencies = [
[[package]]
name = "crossbeam-channel"
version = "0.5.12"
version = "0.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95"
checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
dependencies = [
"crossbeam-utils",
]
@@ -720,6 +720,26 @@ dependencies = [
"hashbrown 0.14.3",
]
[[package]]
name = "hauchiwa"
version = "0.0.1"
dependencies = [
"camino",
"chrono",
"glob",
"grass",
"gray_matter",
"hayagriva",
"hypertext",
"image",
"notify",
"notify-debouncer-mini",
"rayon",
"serde",
"sha256",
"tungstenite",
]
[[package]]
name = "hayagriva"
version = "0.5.3"
@@ -781,9 +801,9 @@ dependencies = [
[[package]]
name = "httparse"
version = "1.8.0"
version = "1.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9"
[[package]]
name = "hypertext"
@@ -1438,9 +1458,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.114"
version = "1.0.120"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0"
checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5"
dependencies = [
"itoa",
"ryu",
@@ -1530,21 +1550,16 @@ dependencies = [
"clap",
"emojis",
"glob",
"grass",
"gray_matter",
"hauchiwa",
"hayagriva",
"hypertext",
"image",
"katex",
"notify",
"notify-debouncer-mini",
"npezza93-tree-sitter-nix",
"once_cell",
"pulldown-cmark",
"rayon",
"regex",
"serde",
"sha256",
"tree-sitter",
"tree-sitter-css",
"tree-sitter-haskell",
@@ -1559,7 +1574,6 @@ dependencies = [
"tree-sitter-scss",
"tree-sitter-toml-ng",
"tree-sitter-typescript",
"tungstenite",
]
[[package]]

Cargo.toml

@@ -3,28 +3,24 @@ name = "ssg"
version = "0.1.0"
edition = "2021"
[workspace]
[dependencies.hauchiwa]
path = "hauchiwa"
[dependencies]
camino = "1.1.7"
chrono = "0.4.38"
clap = { version = "4.5.7", features = ["derive"] }
emojis = "0.6.2"
glob = "0.3.1"
grass = { version = "0.13.3", default-features = false, features = ["random"] }
gray_matter = { version = "0.2.8", default-features = false, features = ["yaml"] }
hayagriva = "0.5.3"
hypertext = "0.5.1"
image = "0.24.0"
katex = "0.4.6"
once_cell = "1.19.0"
rayon = "1.10.0"
regex = "1.10.5"
serde = { version = "1.0.203", features = ["derive"] }
sha256 = { version = "1.5.0", default-features = false }
# Watch
notify = "6.1.1"
notify-debouncer-mini = "0.4.1"
tungstenite = "0.23.0"
# Markdown
pulldown-cmark = "0.11.0"

hauchiwa/Cargo.toml (new file)

@@ -0,0 +1,25 @@
[package]
name = "hauchiwa"
version = "0.0.1"
edition = "2021"
authors = ["Maciej Jur <maciej@kamoshi.org>"]
keywords = ["ssg"]
description = "Static site generator library"
[dependencies]
camino = "1.1.7"
gray_matter = { version = "0.2.8", default-features = false, features = ["yaml"] }
chrono = "0.4.38"
glob = "0.3.1"
grass = { version = "0.13.3", default-features = false, features = ["random"] }
hayagriva = "0.5.3"
hypertext = "0.5.1"
image = "0.24.0"
rayon = "1.10.0"
serde = { version = "1.0.203", features = ["derive"] }
sha256 = { version = "1.5.0", default-features = false }
# Watch
notify = "6.1.1"
notify-debouncer-mini = "0.4.1"
tungstenite = "0.23.0"

hauchiwa/README.md (new file)

@@ -0,0 +1,3 @@
# Hauchiwa
Static site generator library
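For context, here is a minimal sketch of how the extracted crate is meant to be driven, matching the builder API this commit adds in hauchiwa/src/site.rs and the call sites in src/main.rs further down. The page body is a stand-in; real sites also register glob sources via `add_source` with `process_content::<T>`, where `T` is a user-defined front-matter type implementing `Content`.

```rust
use hauchiwa::Website;

fn main() {
    // Build a site consisting of a single virtual page. Real sites also
    // register content sources, e.g.
    //     .add_source("content/posts/**/*", ["md", "mdx"].into(),
    //                 process_content::<Post>)
    // where `Post` is a user-defined front-matter type implementing `Content`.
    let website = Website::new()
        .add_virtual(|_sack| String::from("<h1>Hello</h1>"), "index.html".into())
        .finish();

    website.build(); // `website.watch()` instead rebuilds and live-reloads on change
}
```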

hauchiwa/src/build.rs

@@ -5,11 +5,12 @@ use std::io;
use std::io::Write;
use std::path::Path;
use std::process::Command;
use std::rc::Rc;
use camino::{Utf8Path, Utf8PathBuf};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use crate::pipeline::{AssetKind, Output, OutputKind, Sack, Virtual};
use crate::site::Source;
use crate::tree::{Asset, AssetKind, FileItemKind, Output, OutputKind, PipelineItem, Sack, Virtual};
use crate::BuildContext;
pub(crate) fn clean_dist() {
@@ -20,6 +21,27 @@ pub(crate) fn clean_dist() {
fs::create_dir("dist").unwrap();
}
pub(crate) fn build_hash(
content: &[Output],
cache: &Utf8Path,
) -> HashMap<Utf8PathBuf, Utf8PathBuf> {
println!("Optimizing images. Cache in {}", cache);
let now = std::time::Instant::now();
let images: Vec<&Output> = content
.iter()
.filter(|&e| match e.kind {
OutputKind::Asset(ref a) => matches!(a.kind, AssetKind::Image),
_ => false,
})
.collect();
let hashes = crate::hash::hash_assets(cache, &images);
copy_recursively(cache, Path::new("dist/hash")).unwrap();
println!("Finished optimizing. Elapsed: {:.2?}", now.elapsed());
hashes
}
pub(crate) fn build_styles() {
let css = grass::from_path("styles/styles.scss", &grass::Options::default()).unwrap();
fs::write("dist/styles.css", css).unwrap();
@@ -34,7 +56,6 @@ pub(crate) fn build_content(
let now = std::time::Instant::now();
render_all(ctx, pending, hole, hash);
println!("Elapsed: {:.2?}", now.elapsed());
copy_recursively(Path::new(".hash"), Path::new("dist/hash")).unwrap();
}
pub(crate) fn build_static() {
@@ -108,69 +129,6 @@ fn render_all(
.collect()
}
fn add_hash(buffer: &[u8], file: &Utf8Path, path: &Utf8Path) {
println!("Hashing image {} -> {}", file, path);
let img = image::load_from_memory(buffer).expect("Couldn't load image");
let dim = (img.width(), img.height());
let mut out = Vec::new();
let encoder = image::codecs::webp::WebPEncoder::new_lossless(&mut out);
encoder
.encode(&img.to_rgba8(), dim.0, dim.1, image::ColorType::Rgba8)
.expect("Encoding error");
fs::write(path, out).expect("Couldn't output optimized image");
}
fn ensure_hashed(cache: &Utf8Path, buffer: &[u8], file: &Utf8Path) -> Utf8PathBuf {
let hash = sha256::digest(buffer);
let path = cache.join(&hash).with_extension("webp");
if !path.exists() {
add_hash(buffer, file, &path)
}
Utf8Path::new("/")
.join("hash")
.join(hash)
.with_extension("webp")
}
pub(crate) fn store_hash_all(items: &[&Output]) -> Vec<Hashed> {
let cache = Utf8Path::new(".hash");
fs::create_dir_all(cache).unwrap();
items
.par_iter()
.filter_map(|item| match item.kind {
OutputKind::Asset(ref asset) => match asset.kind {
AssetKind::Image => {
let buffer = std::fs::read(&asset.meta.path).expect("Couldn't read file");
let format = image::guess_format(&buffer).expect("Couldn't read format");
if matches!(format, image::ImageFormat::Gif) {
return None;
}
Some(Hashed {
file: item.path.to_owned(),
hash: ensure_hashed(cache, &buffer, &asset.meta.path),
})
}
_ => None,
},
_ => None,
})
.collect()
}
#[derive(Debug)]
pub(crate) struct Hashed {
pub file: Utf8PathBuf,
pub hash: Utf8PathBuf,
}
fn render(item: &Output, sack: Sack) {
let dist = Utf8Path::new("dist");
let o = dist.join(&item.path);
@@ -201,3 +159,76 @@ fn render(item: &Output, sack: Sack) {
}
}
}
pub(crate) fn build(
ctx: &BuildContext,
sources: &[Source],
special: &[Rc<Output>],
) -> Vec<Rc<Output>> {
crate::build::clean_dist();
let content: Vec<Output> = sources
.iter()
.flat_map(Source::get)
.map(to_bundle)
.filter_map(Option::from)
.collect();
let assets: Vec<_> = content
.iter()
.chain(special.iter().map(AsRef::as_ref))
.collect();
let hashes = crate::build::build_hash(&content, ".cache".into());
crate::build::build_content(ctx, &assets, &assets, Some(hashes));
crate::build::build_static();
crate::build::build_styles();
crate::build::build_pagefind();
crate::build::build_js();
content
.into_iter()
.map(Rc::new)
.chain(special.iter().map(ToOwned::to_owned))
.collect()
}
fn to_bundle(item: PipelineItem) -> PipelineItem {
let meta = match item {
PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta,
_ => return item,
};
let path = meta.path.strip_prefix("content").unwrap().to_owned();
match meta.path.extension() {
// any image
Some("jpg" | "png" | "gif") => Output {
kind: Asset {
kind: AssetKind::Image,
meta,
}
.into(),
path,
link: None,
}
.into(),
// bibliography
Some("bib") => {
let data = fs::read_to_string(&meta.path).unwrap();
let data = hayagriva::io::from_biblatex_str(&data).unwrap();
Output {
kind: Asset {
kind: AssetKind::Bibtex(data),
meta,
}
.into(),
path,
link: None,
}
.into()
}
_ => meta.into(),
}
}

hauchiwa/src/hash.rs (new file)

@@ -0,0 +1,65 @@
use std::{collections::HashMap, fs};
use camino::{Utf8Path, Utf8PathBuf};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use crate::tree::{AssetKind, Output, OutputKind};
pub(crate) fn hash_assets(
cache: &Utf8Path,
items: &[&Output],
) -> HashMap<Utf8PathBuf, Utf8PathBuf> {
fs::create_dir_all(cache).unwrap();
items
.par_iter()
.filter_map(|item| match item.kind {
OutputKind::Asset(ref asset) => match asset.kind {
AssetKind::Image => {
let buffer = std::fs::read(&asset.meta.path).expect("Couldn't read file");
let format = image::guess_format(&buffer).expect("Couldn't read format");
if matches!(format, image::ImageFormat::Gif) {
return None;
}
let path = item.path.to_owned();
let hash = hash_image(cache, &buffer, &asset.meta.path);
Some((path, hash))
}
_ => None,
},
_ => None,
})
.collect()
}
fn optimize_image(buffer: &[u8], file: &Utf8Path, path: &Utf8Path) -> Vec<u8> {
println!("Hashing image {} -> {}", file, path);
let img = image::load_from_memory(buffer).expect("Couldn't load image");
let dim = (img.width(), img.height());
let mut out = Vec::new();
let encoder = image::codecs::webp::WebPEncoder::new_lossless(&mut out);
encoder
.encode(&img.to_rgba8(), dim.0, dim.1, image::ColorType::Rgba8)
.expect("Encoding error");
out
}
fn hash_image(cache: &Utf8Path, buffer: &[u8], file: &Utf8Path) -> Utf8PathBuf {
let hash = sha256::digest(buffer);
let path = cache.join(&hash).with_extension("webp");
if !path.exists() {
let img = optimize_image(buffer, file, &path);
fs::write(path, img).expect("Couldn't output optimized image");
}
Utf8Path::new("/")
.join("hash")
.join(hash)
.with_extension("webp")
}
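The cache is content-addressed: the SHA-256 digest of the original bytes names the optimized WebP both on disk and under the public /hash/ prefix, so an unchanged image is converted at most once across rebuilds. A sketch restating the path scheme from `hash_image`:

```rust
use camino::{Utf8Path, Utf8PathBuf};

// Restates the scheme from `hash_image` above: one digest, two derived paths.
fn cache_paths(cache: &Utf8Path, buffer: &[u8]) -> (Utf8PathBuf, Utf8PathBuf) {
    let digest = sha256::digest(buffer);
    // Where the optimized image is stored, e.g. ".cache/<digest>.webp".
    let stored = cache.join(&digest).with_extension("webp");
    // The URL it is served under once the cache is copied to "dist/hash".
    let public = Utf8Path::new("/")
        .join("hash")
        .join(&digest)
        .with_extension("webp");
    (stored, public)
}
```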

hauchiwa/src/lib.rs (new file)

@@ -0,0 +1,147 @@
mod hash;
mod tree;
mod site;
mod build;
mod watch;
use std::{fs, process::Command};
use camino::{Utf8Path, Utf8PathBuf};
use chrono::{DateTime, Datelike, Utc};
use gray_matter::Matter;
use gray_matter::engine::YAML;
use hypertext::{Raw, Renderable};
use serde::Deserialize;
use tree::{Asset, FileItemKind, Output, PipelineItem};
pub use crate::tree::{Content, Outline, Sack, TreePage};
pub use crate::site::Website;
#[derive(Debug, Clone, Copy)]
pub enum Mode {
Build,
Watch,
}
#[derive(Debug, Clone)]
pub struct BuildContext {
pub mode: Mode,
pub year: i32,
pub date: String,
pub link: String,
pub hash: String,
}
impl BuildContext {
fn new() -> Self {
let time = chrono::Utc::now();
Self {
mode: Mode::Build,
year: time.year(),
date: time.format("%Y/%m/%d %H:%M").to_string(),
link: "https://git.kamoshi.org/kamov/website".into(),
hash: String::from_utf8(
Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.expect("Couldn't load git revision")
.stdout,
)
.expect("Invalid UTF8")
.trim()
.into(),
}
}
}
impl Default for BuildContext {
fn default() -> Self {
Self::new()
}
}
#[derive(Debug, Clone)]
pub struct Link {
pub path: Utf8PathBuf,
pub name: String,
pub desc: Option<String>,
}
#[derive(Debug, Clone)]
pub struct LinkDate {
pub link: Link,
pub date: DateTime<Utc>,
}
#[derive(Debug, Clone)]
pub enum Linkable {
Link(Link),
Date(LinkDate),
}
pub fn process_content<T>(item: PipelineItem) -> PipelineItem
where
T: for<'de> Deserialize<'de> + Content + Clone + Send + Sync + 'static,
{
let meta = match item {
PipelineItem::Skip(e) if matches!(e.kind, FileItemKind::Index) => e,
_ => return item,
};
let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap();
let dir = match meta.path.file_stem().unwrap() {
"index" => dir.to_owned(),
name => dir.join(name),
};
let path = dir.join("index.html");
match meta.path.extension() {
Some("md" | "mdx" | "lhs") => {
let raw = fs::read_to_string(&meta.path).unwrap();
let (matter, parsed) = parse_frontmatter::<T>(&raw);
let link = T::as_link(&matter, Utf8Path::new("/").join(&dir));
Output {
kind: Asset {
kind: crate::tree::AssetKind::html(move |sack| {
let lib = sack.get_library();
let (outline, parsed, bib) = T::parse(
parsed.clone(),
lib,
dir.clone(),
sack.hash
.as_ref()
.map(ToOwned::to_owned)
.unwrap_or_default(),
);
T::render(matter.clone(), sack, Raw(parsed), outline, bib)
.render()
.into()
}),
meta,
}
.into(),
path,
link,
}
.into()
}
_ => meta.into(),
}
}
fn parse_frontmatter<D>(raw: &str) -> (D, String)
where
D: for<'de> Deserialize<'de>,
{
let parser = Matter::<YAML>::new();
let result = parser.parse_with_struct::<D>(raw).unwrap();
(
// Just the front matter
result.data,
// The rest of the content
result.content,
)
}
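For reference, the split performed by the crate-internal `parse_frontmatter` looks like this on a typical document; `Front` is an illustrative front-matter type, not part of the crate:

```rust
use serde::Deserialize;

// Illustrative front-matter type; any `Deserialize` struct works.
#[derive(Deserialize, Clone)]
struct Front {
    title: String,
}

fn demo() {
    let raw = "---\ntitle: Hello\n---\n\nBody text.";
    // The YAML between the `---` fences becomes the typed front matter;
    // everything after the closing fence is returned as the content string.
    let (front, content) = parse_frontmatter::<Front>(raw);
    assert_eq!(front.title, "Hello");
    assert_eq!(content.trim(), "Body text.");
}
```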

hauchiwa/src/site.rs (new file)

@@ -0,0 +1,109 @@
use std::{collections::HashSet, rc::Rc};
use camino::{Utf8Path, Utf8PathBuf};
use crate::tree::{Output, PipelineItem, Sack, Virtual};
use crate::{BuildContext, Mode};
#[derive(Debug)]
pub struct Website {
sources: Vec<Source>,
special: Vec<Rc<Output>>,
}
impl Website {
pub fn new() -> WebsiteBuilder {
WebsiteBuilder::default()
}
pub fn build(&self) {
let ctx = BuildContext {
mode: Mode::Build,
..Default::default()
};
let _ = crate::build::build(&ctx, &self.sources, &self.special.clone());
}
pub fn watch(&self) {
let ctx = BuildContext {
mode: Mode::Watch,
..Default::default()
};
let state = crate::build::build(&ctx, &self.sources, &self.special.clone());
crate::watch::watch(&ctx, &self.sources, state).unwrap()
}
}
#[derive(Debug, Default)]
pub struct WebsiteBuilder {
sources: Vec<Source>,
special: Vec<Rc<Output>>,
}
impl WebsiteBuilder {
pub fn add_source(
mut self,
path: &'static str,
exts: HashSet<&'static str>,
func: fn(PipelineItem) -> PipelineItem,
) -> Self {
self.sources.push(Source { path, exts, func });
self
}
pub fn add_virtual(
mut self,
func: fn(&Sack) -> String,
path: Utf8PathBuf,
) -> Self {
self.special.push(Output {
kind: Virtual::new(func).into(),
path,
link: None,
}.into());
self
}
pub fn add_output(mut self, output: Output) -> Self {
self.special.push(Rc::new(output));
self
}
pub fn finish(self) -> Website {
Website {
sources: self.sources,
special: self.special,
}
}
}
#[derive(Debug)]
pub(crate) struct Source {
pub path: &'static str,
pub exts: HashSet<&'static str>,
pub func: fn(PipelineItem) -> PipelineItem,
}
impl Source {
pub(crate) fn get(&self) -> Vec<PipelineItem> {
crate::tree::gather(self.path, &self.exts)
.into_iter()
.map(self.func)
.collect()
}
pub(crate) fn get_maybe(&self, path: &Utf8Path) -> Option<PipelineItem> {
let pattern = glob::Pattern::new(self.path).expect("Bad pattern");
if !pattern.matches_path(path.as_std_path()) {
return None;
};
let item = match path.is_file() {
true => Some(crate::tree::to_source(path.to_owned(), &self.exts)),
false => None,
};
item.map(Into::into).map(self.func)
}
}
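`get_maybe` is the hook the watcher uses: each filesystem event is matched against every source's glob, and only matching paths are re-run through that source's pipeline function. The check itself is plain `glob::Pattern` matching, sketched here with assumed example paths:

```rust
use camino::Utf8Path;
use glob::Pattern;

fn demo() {
    // The same check `get_maybe` performs: does a changed file belong to this source?
    let pattern = Pattern::new("content/posts/**/*").expect("Bad pattern");
    assert!(pattern.matches_path(Utf8Path::new("content/posts/foo/index.md").as_std_path()));
    assert!(!pattern.matches_path(Utf8Path::new("content/wiki/page.md").as_std_path()));
}
```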

hauchiwa/src/tree.rs

@@ -10,14 +10,15 @@ use glob::glob;
use hayagriva::Library;
use hypertext::Renderable;
use crate::text::md::Outline;
use crate::{BuildContext, Link, LinkDate, Linkable};
pub struct Outline(pub Vec<(String, String)>);
/// Represents a piece of content that can be rendered as a page. This trait needs to be
/// implemented for the front matter associated with some web page as that is what ultimately
/// matters when rendering the page. Each front matter *definition* maps to exactly one kind of
/// rendered page on the website.
pub(crate) trait Content {
pub trait Content {
/// Parse the document. Pass an optional library for bibliography.
/// This generates the initial HTML markup from content.
fn parse(
@@ -154,7 +155,7 @@ pub(crate) struct Output {
/// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as
/// `Take`, which means it needs to have an output location assigned to itself.
#[derive(Debug)]
pub(crate) enum PipelineItem {
pub enum PipelineItem {
/// Unclaimed file.
Skip(FileItem),
/// Data ready to be processed.
@@ -184,7 +185,7 @@ impl From<PipelineItem> for Option<Output> {
/// This struct allows for querying the website hierarchy. It is passed to each rendered website
/// page, so that it can easily access the website metadata.
pub(crate) struct Sack<'a> {
pub struct Sack<'a> {
pub ctx: &'a BuildContext,
/// Literally all of the content
pub hole: &'a [&'a Output],
@@ -193,7 +194,7 @@ pub(crate) struct Sack<'a> {
/// Original file location for this page
pub file: Option<&'a Utf8PathBuf>,
/// Hashed optimized images
pub hash: Option<HashMap<Utf8PathBuf, Utf8PathBuf>>
pub hash: Option<HashMap<Utf8PathBuf, Utf8PathBuf>>,
}
impl<'a> Sack<'a> {
@@ -257,7 +258,7 @@ impl<'a> Sack<'a> {
}
#[derive(Debug)]
pub(crate) struct TreePage {
pub struct TreePage {
pub link: Option<Link>,
pub subs: HashMap<String, TreePage>,
}

hauchiwa/src/watch.rs

@@ -15,63 +15,11 @@ use notify_debouncer_mini::new_debouncer;
use tungstenite::WebSocket;
use crate::build::{build_content, build_styles};
use crate::pipeline::Output;
use crate::{BuildContext, Source};
use crate::site::Source;
use crate::tree::Output;
use crate::BuildContext;
fn new_thread_ws_incoming(
server: TcpListener,
client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
) -> JoinHandle<()> {
std::thread::spawn(move || {
for stream in server.incoming() {
let socket = tungstenite::accept(stream.unwrap()).unwrap();
client.lock().unwrap().push(socket);
}
})
}
fn new_thread_ws_reload(
client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
) -> (Sender<()>, JoinHandle<()>) {
let (tx, rx) = std::sync::mpsc::channel();
let thread = std::thread::spawn(move || {
while rx.recv().is_ok() {
let mut clients = client.lock().unwrap();
let mut broken = vec![];
for (i, socket) in clients.iter_mut().enumerate() {
match socket.send("reload".into()) {
Ok(_) => {}
Err(tungstenite::error::Error::Io(e)) => {
if e.kind() == std::io::ErrorKind::BrokenPipe {
broken.push(i);
}
}
Err(e) => {
eprintln!("Error: {:?}", e);
}
}
}
for i in broken.into_iter().rev() {
clients.remove(i);
}
// Close all but the last 10 connections
let len = clients.len();
if len > 10 {
for mut socket in clients.drain(0..len - 10) {
socket.close(None).ok();
}
}
}
});
(tx, thread)
}
pub fn watch(ctx: &BuildContext, sources: &[Source], mut state: Vec<Rc<Output>>) -> Result<()> {
pub(crate) fn watch(ctx: &BuildContext, sources: &[Source], mut state: Vec<Rc<Output>>) -> Result<()> {
let root = env::current_dir().unwrap();
let server = TcpListener::bind("127.0.0.1:1337")?;
let client = Arc::new(Mutex::new(vec![]));
@@ -142,8 +90,61 @@ fn update_stream(old: &[Rc<Output>], new: &[Rc<Output>]) -> Vec<Rc<Output>> {
let mut map: HashMap<&Utf8Path, Rc<Output>> = HashMap::new();
for output in old.iter().chain(new) {
map.insert(&*output.path, output.clone());
map.insert(&output.path, output.clone());
}
map.into_values().collect()
}
fn new_thread_ws_incoming(
server: TcpListener,
client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
) -> JoinHandle<()> {
std::thread::spawn(move || {
for stream in server.incoming() {
let socket = tungstenite::accept(stream.unwrap()).unwrap();
client.lock().unwrap().push(socket);
}
})
}
fn new_thread_ws_reload(
client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
) -> (Sender<()>, JoinHandle<()>) {
let (tx, rx) = std::sync::mpsc::channel();
let thread = std::thread::spawn(move || {
while rx.recv().is_ok() {
let mut clients = client.lock().unwrap();
let mut broken = vec![];
for (i, socket) in clients.iter_mut().enumerate() {
match socket.send("reload".into()) {
Ok(_) => {}
Err(tungstenite::error::Error::Io(e)) => {
if e.kind() == std::io::ErrorKind::BrokenPipe {
broken.push(i);
}
}
Err(e) => {
eprintln!("Error: {:?}", e);
}
}
}
for i in broken.into_iter().rev() {
clients.remove(i);
}
// Close all but the last 10 connections
let len = clients.len();
if len > 10 {
for mut socket in clients.drain(0..len - 10) {
socket.close(None).ok();
}
}
}
});
(tx, thread)
}
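The live-reload protocol stays deliberately small: the watcher accepts WebSocket connections on 127.0.0.1:1337 and broadcasts the text message `reload` after each rebuild, pruning dead sockets as it goes. A throwaway client sketch for poking at it, assuming tungstenite's blocking `connect`/`read` API as of 0.23:

```rust
// Throwaway test client for the reload socket; uses the same `tungstenite`
// crate as the server side.
fn demo() -> tungstenite::Result<()> {
    let (mut socket, _response) = tungstenite::connect("ws://127.0.0.1:1337")?;
    // The watcher pushes "reload" after every successful rebuild.
    let msg = socket.read()?;
    assert_eq!(msg.to_text()?, "reload");
    Ok(())
}
```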

src/html/home.rs

@@ -1,8 +1,8 @@
use std::collections::HashMap;
use hauchiwa::Sack;
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
use crate::pipeline::Sack;
use crate::text::md::parse;
const INTRO: &str = r#"

src/html/list.rs

@@ -1,8 +1,7 @@
use hauchiwa::{LinkDate, Sack};
use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable};
use crate::html::page;
use crate::pipeline::Sack;
use crate::LinkDate;
pub fn list<'s, 'g, 'html>(
sack: &'s Sack,

src/html/misc.rs

@@ -1,7 +1,6 @@
use hauchiwa::{Outline, Sack, TreePage};
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use crate::pipeline::{Sack, TreePage};
use crate::text::md::Outline;
/// Render the outline for a document
pub(crate) fn show_outline(outline: Outline) -> impl Renderable {

src/html/mod.rs

@@ -11,6 +11,7 @@ use std::collections::HashMap;
use camino::Utf8Path;
use chrono::Datelike;
use hauchiwa::{LinkDate, Mode, Sack};
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
pub(crate) use home::home;
@@ -18,8 +19,6 @@ pub(crate) use post::Post;
pub(crate) use slideshow::Slideshow;
pub(crate) use wiki::Wiki;
use crate::{pipeline::Sack, Mode};
const JS_RELOAD: &str = r#"
const socket = new WebSocket("ws://localhost:1337");
socket.addEventListener("message", (event) => {
@@ -198,7 +197,7 @@ where
)
}
pub(crate) fn to_list(sack: &Sack, list: Vec<crate::LinkDate>, title: String) -> String {
pub(crate) fn to_list(sack: &Sack, list: Vec<LinkDate>, title: String) -> String {
let mut groups = HashMap::<i32, Vec<_>>::new();
for page in list {

src/html/post.rs

@@ -2,14 +2,11 @@ use std::collections::HashMap;
use camino::Utf8PathBuf;
use chrono::{DateTime, Utc};
use hauchiwa::{Content, Link, LinkDate, Linkable, Outline, Sack};
use hayagriva::Library;
use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable};
use serde::Deserialize;
use crate::pipeline::{Content, Sack};
use crate::text::md::Outline;
use crate::{LinkDate, Linkable};
/// Represents a simple post.
#[derive(Deserialize, Debug, Clone)]
pub(crate) struct Post {
@@ -45,7 +42,7 @@ impl Content for Post {
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
Some(Linkable::Date(LinkDate {
link: crate::Link {
link: Link {
path,
name: self.title.to_owned(),
desc: self.desc.to_owned(),

src/html/slideshow.rs

@@ -2,14 +2,11 @@ use std::collections::HashMap;
use camino::Utf8PathBuf;
use chrono::{DateTime, Utc};
use hauchiwa::{Content, Link, LinkDate, Linkable, Outline, Sack};
use hayagriva::Library;
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use serde::Deserialize;
use crate::pipeline::{Content, Sack};
use crate::text::md::Outline;
use crate::{Link, LinkDate, Linkable};
const CSS: &str = r#"
.slides img {
margin-left: auto;

src/html/wiki.rs

@@ -1,14 +1,11 @@
use std::collections::HashMap;
use camino::Utf8PathBuf;
use hauchiwa::{Content, Link, Linkable, Outline, Sack};
use hayagriva::Library;
use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable};
use serde::Deserialize;
use crate::pipeline::{Content, Sack};
use crate::text::md::Outline;
use crate::{Link, Linkable};
/// Represents a wiki page
#[derive(Deserialize, Debug, Clone)]
pub struct Wiki {

src/main.rs

@@ -1,29 +1,10 @@
mod build;
mod html;
mod pipeline;
mod text;
mod ts;
mod utils;
mod watch;
mod website;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::process::Command;
use std::rc::Rc;
use build::Hashed;
use camino::{Utf8Path, Utf8PathBuf};
use chrono::{DateTime, Datelike, Utc};
use clap::{Parser, ValueEnum};
use gray_matter::engine::YAML;
use gray_matter::Matter;
use hauchiwa::{process_content, Website};
use hypertext::{Raw, Renderable};
use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, OutputKind, PipelineItem};
use serde::Deserialize;
use website::WebsiteDesigner;
use crate::pipeline::Virtual;
#[derive(Parser, Debug, Clone)]
struct Args {
@@ -37,297 +18,75 @@ enum Mode {
Watch,
}
#[derive(Debug)]
struct BuildContext {
pub mode: Mode,
pub year: i32,
pub date: String,
pub link: String,
pub hash: String,
}
#[derive(Debug, Clone)]
pub struct Link {
pub path: Utf8PathBuf,
pub name: String,
pub desc: Option<String>,
}
#[derive(Debug, Clone)]
pub struct LinkDate {
pub link: Link,
pub date: DateTime<Utc>,
}
#[derive(Debug, Clone)]
pub enum Linkable {
Link(Link),
Date(LinkDate),
}
fn main() {
let args = Args::parse();
let time = chrono::Utc::now();
let ctx = BuildContext {
mode: args.mode,
year: time.year(),
date: time.format("%Y/%m/%d %H:%M").to_string(),
link: "https://git.kamoshi.org/kamov/website".into(),
hash: String::from_utf8(
Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.expect("Couldn't load git revision")
.stdout,
let website = Website::new()
.add_source(
"content/about.md",
["md"].into(),
process_content::<crate::html::Post>,
)
.expect("Invalid UTF8")
.trim()
.into(),
};
let website = WebsiteDesigner::default()
.add_source(Source {
path: "content/about.md",
exts: ["md"].into(),
func: process_content::<crate::html::Post>,
})
.add_source(Source {
path: "content/posts/**/*",
exts: ["md", "mdx"].into(),
func: process_content::<crate::html::Post>,
})
.add_source(Source {
path: "content/slides/**/*",
exts: ["md", "lhs"].into(),
func: process_content::<crate::html::Slideshow>,
})
.add_source(Source {
path: "content/wiki/**/*",
exts: ["md"].into(),
func: process_content::<crate::html::Wiki>,
})
.add_output(Output {
kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(),
path: "map/index.html".into(),
link: None,
})
.add_output(Output {
kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into()).into(),
path: "search/index.html".into(),
link: None,
})
.add_output(Output {
kind: Asset {
kind: pipeline::AssetKind::html(|sack| {
let data = std::fs::read_to_string("content/index.md").unwrap();
let (_, html, _) = text::md::parse(data, None, "".into(), HashMap::new());
crate::html::home(sack, Raw(html))
.render()
.to_owned()
.into()
}),
meta: pipeline::FileItem {
kind: pipeline::FileItemKind::Index,
path: "content/index.md".into(),
},
}
.into(),
path: "index.html".into(),
link: None,
})
.add_output(Output {
kind: Virtual::new(|sack| {
crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into())
})
.into(),
path: "posts/index.html".into(),
link: None,
})
.add_output(Output {
kind: Virtual::new(|sack| {
.add_source(
"content/posts/**/*",
["md", "mdx"].into(),
process_content::<crate::html::Post>,
)
.add_source(
"content/slides/**/*",
["md", "lhs"].into(),
process_content::<crate::html::Slideshow>,
)
.add_source(
"content/wiki/**/*",
["md"].into(),
process_content::<crate::html::Wiki>,
)
.add_virtual(
|sack| crate::html::map(sack).render().to_owned().into(),
"map/index.html".into(),
)
.add_virtual(
|sack| crate::html::search(sack).render().to_owned().into(),
"search/index.html".into(),
)
.add_virtual(
|sack| crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into()),
"posts/index.html".into(),
)
.add_virtual(
|sack| {
crate::html::to_list(
sack,
sack.get_links("slides/**/*.html"),
"Slideshows".into(),
)
})
.into(),
path: "slides/index.html".into(),
link: None,
})
},
"slides/index.html".into(),
)
.add_virtual(
|sack| {
let data = std::fs::read_to_string("content/index.md").unwrap();
let (_, html, _) = text::md::parse(
data,
None,
"".into(),
sack.hash
.as_ref()
.map(ToOwned::to_owned)
.unwrap_or_default(),
);
crate::html::home(sack, Raw(html))
.render()
.to_owned()
.into()
},
"index.html".into(),
)
.finish();
match args.mode {
Mode::Build => website.build(&ctx),
Mode::Watch => website.watch(&ctx),
}
}
#[derive(Debug)]
struct Source {
pub path: &'static str,
pub exts: HashSet<&'static str>,
pub func: fn(PipelineItem) -> PipelineItem,
}
impl Source {
fn get(&self) -> Vec<PipelineItem> {
pipeline::gather(self.path, &self.exts)
.into_iter()
.map(self.func)
.collect()
}
fn get_maybe(&self, path: &Utf8Path) -> Option<PipelineItem> {
let pattern = glob::Pattern::new(self.path).expect("Bad pattern");
if !pattern.matches_path(path.as_std_path()) {
return None;
};
let item = match path.is_file() {
true => Some(crate::pipeline::to_source(path.to_owned(), &self.exts)),
false => None,
};
item.map(Into::into).map(self.func)
}
}
fn build(ctx: &BuildContext, sources: &[Source], special: &[Rc<Output>]) -> Vec<Rc<Output>> {
crate::build::clean_dist();
let content: Vec<Output> = sources
.iter()
.flat_map(Source::get)
.map(to_bundle)
.filter_map(Option::from)
.collect();
let images: Vec<&Output> = content
.iter()
.filter(|&e| match e.kind {
OutputKind::Asset(ref a) => matches!(a.kind, AssetKind::Image),
_ => false,
})
.collect();
let hashes = crate::build::store_hash_all(&images);
let hashes = HashMap::from_iter(hashes.into_iter().map(|Hashed { file, hash }| (file, hash)));
let assets: Vec<_> = content.iter().chain(special.iter().map(AsRef::as_ref)).collect();
crate::build::build_content(ctx, &assets, &assets, Some(hashes));
crate::build::build_static();
crate::build::build_styles();
crate::build::build_pagefind();
crate::build::build_js();
content.into_iter().map(Rc::new).chain(special.iter().map(ToOwned::to_owned)).collect()
}
pub fn parse_frontmatter<D>(raw: &str) -> (D, String)
where
D: for<'de> Deserialize<'de>,
{
let parser = Matter::<YAML>::new();
let result = parser.parse_with_struct::<D>(raw).unwrap();
(
// Just the front matter
result.data,
// The rest of the content
result.content,
)
}
fn process_content<T>(item: PipelineItem) -> PipelineItem
where
T: for<'de> Deserialize<'de> + Content + Clone + Send + Sync + 'static,
{
let meta = match item {
PipelineItem::Skip(e) if matches!(e.kind, FileItemKind::Index) => e,
_ => return item,
};
let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap();
let dir = match meta.path.file_stem().unwrap() {
"index" => dir.to_owned(),
name => dir.join(name),
};
let path = dir.join("index.html");
match meta.path.extension() {
Some("md" | "mdx" | "lhs") => {
let raw = fs::read_to_string(&meta.path).unwrap();
let (matter, parsed) = parse_frontmatter::<T>(&raw);
let link = T::as_link(&matter, Utf8Path::new("/").join(&dir));
Output {
kind: Asset {
kind: pipeline::AssetKind::html(move |sack| {
let lib = sack.get_library();
let (outline, parsed, bib) = T::parse(
parsed.clone(),
lib,
dir.clone(),
sack.hash
.as_ref()
.map(ToOwned::to_owned)
.unwrap_or_default(),
);
T::render(matter.clone(), sack, Raw(parsed), outline, bib)
.render()
.into()
}),
meta,
}
.into(),
path,
link,
}
.into()
}
_ => meta.into(),
}
}
fn to_bundle(item: PipelineItem) -> PipelineItem {
let meta = match item {
PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta,
_ => return item,
};
let path = meta.path.strip_prefix("content").unwrap().to_owned();
match meta.path.extension() {
// any image
Some("jpg" | "png" | "gif") => Output {
kind: Asset {
kind: AssetKind::Image,
meta,
}
.into(),
path,
link: None,
}
.into(),
// bibliography
Some("bib") => {
let data = fs::read_to_string(&meta.path).unwrap();
let data = hayagriva::io::from_biblatex_str(&data).unwrap();
Output {
kind: Asset {
kind: AssetKind::Bibtex(data),
meta,
}
.into(),
path,
link: None,
}
.into()
}
_ => meta.into(),
Mode::Build => website.build(),
Mode::Watch => website.watch(),
}
}

src/text/md.rs

@@ -1,6 +1,7 @@
use std::collections::HashMap;
use camino::Utf8PathBuf;
use hauchiwa::Outline;
use hayagriva::{
archive::ArchivedStyle,
citationberg::{IndependentStyle, Locale, Style},
@@ -46,8 +47,6 @@ static STYLE: Lazy<IndependentStyle> =
Style::Dependent(_) => unreachable!(),
});
pub struct Outline(pub Vec<(String, String)>);
pub fn parse(
text: String,
lib: Option<&Library>,

src/website.rs (deleted)

@@ -1,49 +0,0 @@
use std::rc::Rc;
use crate::{build, pipeline::Output, watch, BuildContext, Source};
#[derive(Debug)]
pub(crate) struct Website {
sources: Vec<Source>,
special: Vec<Rc<Output>>,
}
impl Website {
pub(crate) fn designer() -> WebsiteDesigner {
WebsiteDesigner::default()
}
pub(crate) fn build(&self, ctx: &BuildContext) {
let _ = build(ctx, &self.sources, &self.special.clone());
}
pub(crate) fn watch(&self, ctx: &BuildContext) {
let state = build(&ctx, &self.sources, &self.special.clone());
watch::watch(&ctx, &self.sources, state).unwrap()
}
}
#[derive(Debug, Default)]
pub(crate) struct WebsiteDesigner {
sources: Vec<Source>,
special: Vec<Rc<Output>>,
}
impl WebsiteDesigner {
pub(crate) fn add_source(mut self, source: Source) -> WebsiteDesigner {
self.sources.push(source);
self
}
pub(crate) fn add_output(mut self, output: Output) -> WebsiteDesigner {
self.special.push(Rc::new(output));
self
}
pub(crate) fn finish(self) -> Website {
Website {
sources: self.sources,
special: self.special,
}
}
}