Maciej Jur 2024-07-05 13:59:07 +02:00
parent 46705d707f
commit fc01a1ed2a
Signed by: kamov
GPG key ID: 191CBFF5F72ECAFD
14 changed files with 1399 additions and 1316 deletions

rustfmt.toml Normal file
View file

@@ -0,0 +1,2 @@
+tab_spaces = 4
+hard_tabs = true

View file

@@ -1,6 +1,6 @@
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
-use crate::text::md::parse;
+use crate::{pipeline::Sack, text::md::parse};
const INTRO: &str = r#"
##
@@ -15,7 +1,6 @@ const INTRO: &str = r#"
"#;
fn intro() -> impl Renderable {
-let (_, html, _) = parse(INTRO, None);
+let (_, html, _) = parse(INTRO.into(), None);
maud!(
section .p-card.intro-jp lang="ja-JP" {
(Raw(html))
@@ -23,18 +23,18 @@ fn intro() -> impl Renderable {
)
}
-fn kanji() -> impl Renderable {
-maud!(
-section .p-card {
-h2 .p-card__heading {
-"Kanji of the Day"
-}
-div {
-// <Widget client:load/>
-}
-}
-)
-}
+// fn kanji() -> impl Renderable {
+// maud!(
+// section .p-card {
+// h2 .p-card__heading {
+// "Kanji of the Day"
+// }
+// div {
+// // <Widget client:load/>
+// }
+// }
+// )
+// }
fn photo() -> impl Renderable {
maud!(
@@ -51,10 +51,13 @@ fn photo() -> impl Renderable {
)
}
-pub fn home<'data, 'home, R>(main: R) -> impl Renderable + 'home
+pub(crate) fn home<'s, 'p, 'html>(
+sack: &'s Sack,
+main: impl Renderable + 'p,
+) -> impl Renderable + 'html
where
-'data: 'home,
-R: Renderable + 'data,
+'s: 'html,
+'p: 'html,
{
let main = maud_move!(
main .l-home {
@@ -69,5 +72,5 @@ where
}
);
-crate::html::page("Home", main, None)
+crate::html::page(sack, main, "Home".into())
}

View file

@@ -1,20 +1,24 @@
-use crate::{html::page, LinkDate};
-use camino::Utf8PathBuf;
-use chrono::{DateTime, Utc};
use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable};
-pub fn list<'data, 'list>(
-title: &'data str,
-groups: &'data [(i32, Vec<LinkDate>)],
-) -> impl Renderable + 'list
+use crate::html::page;
+use crate::pipeline::Sack;
+use crate::LinkDate;
+pub fn list<'s, 'g, 'html>(
+sack: &'s Sack,
+groups: &'g [(i32, Vec<LinkDate>)],
+title: String,
+) -> impl Renderable + 'html
where
-'data: 'list,
+'s: 'html,
+'g: 'html,
{
+let heading = title.clone();
let list = maud_move!(
main .page-list-main {
article .page-list {
header .markdown {
-h1 { (title) }
+h1 { (heading) }
}
@for (year, group) in groups {
@@ -24,7 +28,7 @@ where
}
);
-page(title, list, None)
+page(sack, list, title)
}
fn section(year: i32, group: &[LinkDate]) -> impl Renderable + '_ {

View file

@@ -3,7 +3,6 @@ use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use crate::pipeline::{Sack, TreePage};
use crate::text::md::Outline;
/// Render the outline for a document
pub(crate) fn show_outline(outline: Outline) -> impl Renderable {
maud_move!(

View file

@@ -13,13 +13,13 @@ use camino::Utf8Path;
use chrono::Datelike;
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
-use crate::REPO;
pub(crate) use home::home;
pub(crate) use post::Post;
pub(crate) use slideshow::Slideshow;
pub(crate) use wiki::Wiki;
+use crate::{pipeline::Sack, Mode};
const JS_RELOAD: &str = r#"
const socket = new WebSocket("ws://localhost:1337");
socket.addEventListener("message", (event) => {
@@ -37,7 +37,10 @@ const JS_IMPORTS: &str = r#"
}
"#;
-fn head(title: &str) -> impl Renderable + '_ {
+fn head<'s, 'html>(sack: &'s Sack, title: String) -> impl Renderable + 'html
+where
+'s: 'html,
+{
let title = format!("{} | kamoshi.org", title);
maud_move!(
@@ -60,7 +63,9 @@ fn head(title: &str) -> impl Renderable + '_ {
script type="importmap" {(Raw(JS_IMPORTS))}
+@if matches!(sack.ctx.mode, Mode::Watch) {
script { (Raw(JS_RELOAD)) }
+}
)
}
@@ -109,14 +114,17 @@ fn navbar() -> impl Renderable {
)
}
-pub fn footer(path: Option<&Utf8Path>) -> impl Renderable {
-let copy = format!("Copyright &copy; {} Maciej Jur", &REPO.year);
+pub fn footer<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
+where
+'s: 'html,
+{
+let copy = format!("Copyright &copy; {} Maciej Jur", &sack.ctx.year);
let mail = "maciej@kamoshi.org";
let href = format!("mailto:{}", mail);
-let link = Utf8Path::new(&REPO.link)
+let link = Utf8Path::new(&sack.ctx.link)
.join("src/commit")
-.join(&REPO.hash);
-let link = match path {
+.join(&sack.ctx.hash);
+let link = match sack.get_file() {
Some(path) => link.join(path),
None => link,
};
@@ -133,10 +141,10 @@ pub fn footer(path: Option<&Utf8Path>) -> impl Renderable {
}
div .repo {
a href=(link.as_str()) {
-(&REPO.hash)
+(&sack.ctx.hash)
}
div {
-(&REPO.date)
+(&sack.ctx.date)
}
}
a .right.footer__cc-wrap rel="license" href="http://creativecommons.org/licenses/by/4.0/" {
@@ -146,15 +154,19 @@ pub fn footer(path: Option<&Utf8Path>) -> impl Renderable {
)
}
-fn bare<'data, 'html, R>(title: &'data str, main: R) -> impl Renderable + 'html
-where
-'data : 'html,
-R: Renderable + 'data
+fn bare<'s, 'p, 'html>(
+sack: &'s Sack,
+main: impl Renderable + 'p,
+title: String,
+) -> impl Renderable + 'html
+where
+'s: 'html,
+'p: 'html,
{
maud_move!(
(Raw("<!DOCTYPE html>"))
html lang="en" {
-(head(title))
+(head(sack, title))
body {
(main)
@@ -163,31 +175,30 @@ fn bare<'data, 'html, R>(title: &'data str, main: R) -> impl Renderable + 'html
)
}
-fn page<'data, 'main, 'html, T>(
-title: &'data str,
-main: T,
-path: Option<&'data Utf8Path>,
+fn page<'s, 'p, 'html>(
+sack: &'s Sack,
+main: impl Renderable + 'p,
+title: String,
) -> impl Renderable + 'html
where
-'main : 'html,
-'data : 'html,
-T: Renderable + 'main
+'s: 'html,
+'p: 'html,
{
maud_move!(
(Raw("<!DOCTYPE html>"))
html lang="en" {
-(head(title))
+(head(sack, title))
body {
(navbar())
(main)
-(footer(path))
+(footer(sack))
}
}
)
}
-pub(crate) fn to_list(list: Vec<crate::LinkDate>) -> String {
+pub(crate) fn to_list(sack: &Sack, list: Vec<crate::LinkDate>, title: String) -> String {
let mut groups = HashMap::<i32, Vec<_>>::new();
for page in list {
@@ -204,12 +215,15 @@ pub(crate) fn to_list(list: Vec<crate::LinkDate>) -> String {
groups.sort_by(|a, b| b.0.cmp(&a.0));
-list::list("", &groups).render().into()
+list::list(sack, &groups, title).render().into()
}
-pub(crate) fn map() -> impl Renderable {
+pub(crate) fn map<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
+where
+'s: 'html,
+{
page(
-"Map",
+sack,
maud!(
main {
div #map style="height: 100%; width: 100%" {}
@@ -219,17 +233,20 @@ pub(crate) fn map() -> impl Renderable {
}
}
),
-None,
+String::from("Map"),
)
}
-pub(crate) fn search() -> impl Renderable {
+pub(crate) fn search<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
+where
+'s: 'html,
+{
page(
-"Search",
+sack,
maud!(
main #app {}
script type="module" src="/js/search/dist/search.js" {}
),
-None,
+String::from("Search"),
)
}

View file

@@ -6,7 +6,7 @@ use serde::Deserialize;
use crate::pipeline::{Content, Sack};
use crate::text::md::Outline;
-use crate::{Linkable, LinkDate};
+use crate::{LinkDate, Linkable};
/// Represents a simple post.
#[derive(Deserialize, Debug, Clone)]
@@ -18,24 +18,22 @@ pub(crate) struct Post {
}
impl Content for Post {
-fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
+fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
crate::text::md::parse(data, lib)
}
-fn transform<'f, 'm, 's, 'html, T>(
-&'f self,
-content: T,
-outline: Outline,
+fn render<'s, 'p, 'html>(
+self,
sack: &'s Sack,
+parsed: impl Renderable + 'p,
+outline: Outline,
bib: Option<Vec<String>>,
) -> impl Renderable + 'html
where
-'f: 'html,
-'m: 'html,
's: 'html,
-T: Renderable + 'm,
+'p: 'html,
{
-post(self, content, outline, bib, sack)
+post(self, sack, parsed, outline, bib)
}
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
@@ -50,19 +48,18 @@
}
}
-pub fn post<'f, 'm, 's, 'html, T>(
-fm: &'f Post,
-content: T,
+pub fn post<'s, 'p, 'html>(
+fm: Post,
+sack: &'s Sack,
+content: impl Renderable + 'p,
outline: Outline,
bib: Option<Vec<String>>,
-sack: &'s Sack,
) -> impl Renderable + 'html
where
-'f: 'html,
-'m: 'html,
's: 'html,
-T: Renderable + 'm
+'p: 'html,
{
+let heading = fm.title.clone();
let main = maud_move!(
main .wiki-main {
@@ -79,7 +76,7 @@ pub fn post<'f, 'm, 's, 'html, T>(
article .wiki-article /*class:list={classlist)*/ {
header class="markdown" {
-h1 #top { (fm.title.clone()) }
+h1 #top { (heading) }
}
section .wiki-article__markdown.markdown {
(content)
@@ -92,5 +89,5 @@ pub fn post<'f, 'm, 's, 'html, T>(
}
);
-crate::html::page(&fm.title, main, sack.get_file())
+crate::html::page(sack, main, fm.title.clone())
}

View file

@@ -1,13 +1,20 @@
use camino::Utf8PathBuf;
use chrono::{DateTime, Utc};
use hayagriva::Library;
-use hypertext::{html_elements, maud_move, Renderable, GlobalAttributes, Raw};
+use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use serde::Deserialize;
use crate::pipeline::{Content, Sack};
use crate::text::md::Outline;
use crate::{Link, LinkDate, Linkable};
+const CSS: &str = r#"
+.slides img {
+margin-left: auto;
+margin-right: auto;
+max-height: 60vh;
+}
+"#;
/// Represents a slideshow
#[derive(Deserialize, Debug, Clone)]
@@ -19,19 +26,42 @@ pub(crate) struct Slideshow {
}
impl Content for Slideshow {
-fn transform<'f, 'm, 's, 'html, T>(
-&'f self,
-content: T,
+fn parse(data: String, _: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
+let html = data
+.split("\n-----\n")
+.map(|chunk| {
+chunk
+.split("\n---\n")
+.map(|s| crate::text::md::parse(s.to_owned(), None))
+.map(|e| e.1)
+.collect::<Vec<_>>()
+})
+.map(|stack| match stack.len() > 1 {
+true => format!(
+"<section>{}</section>",
+stack
+.into_iter()
+.map(|slide| format!("<section>{slide}</section>"))
+.collect::<String>()
+),
+false => format!("<section>{}</section>", stack[0]),
+})
+.collect::<String>();
+(Outline(vec![]), html, None)
+}
+fn render<'s, 'p, 'html>(
+self,
+sack: &'s Sack,
+parsed: impl Renderable + 'p,
_: Outline,
-_: &'s Sack,
-_bib: Option<Vec<String>>,
+_: Option<Vec<String>>,
) -> impl Renderable + 'html
where
-'f: 'html,
-'m: 'html,
's: 'html,
-T: Renderable + 'm {
-show(self, content)
+'p: 'html,
+{
+show(self, sack, parsed)
}
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
@@ -44,28 +74,20 @@ impl Content for Slideshow {
date: self.date.to_owned(),
}))
}
-fn parse(data: &str, _: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
-let html = data
-.split("\n-----\n")
-.map(|chunk| chunk.split("\n---\n").map(|s| crate::text::md::parse(s, None)).map(|e| e.1).collect::<Vec<_>>())
-.map(|stack| match stack.len() > 1 {
-true => format!("<section>{}</section>", stack.into_iter().map(|slide| format!("<section>{slide}</section>")).collect::<String>()),
-false => format!("<section>{}</section>", stack[0])
-})
-.collect::<String>();
-(Outline(vec![]), html, None)
-}
}
-pub fn show<'data, 'show>(
-fm: &'data Slideshow,
-slides: impl Renderable + 'data
-) -> impl Renderable + 'show
-where
-'data: 'show
+pub fn show<'s, 'p, 'html>(
+fm: Slideshow,
+sack: &'s Sack,
+slides: impl Renderable + 'p,
+) -> impl Renderable + 'html
+where
+'s: 'html,
+'p: 'html,
{
-crate::html::bare(&fm.title, maud_move!(
+crate::html::bare(
+sack,
+maud_move!(
div .reveal {
div .slides {
(slides)
@@ -76,12 +98,8 @@ pub fn show<'data, 'show>(
(Raw("import 'reveal';"))
}
-style {r#"
-.slides img {
-margin-left: auto;
-margin-right: auto;
-max-height: 60vh;
-}
-"#}
-))
+style { (Raw(CSS)) }
+),
+fm.title.clone(),
+)
}

View file

@@ -14,19 +14,22 @@ pub struct Wiki {
}
impl Content for Wiki {
-fn transform<'f, 'm, 's, 'html, T>(
-&'f self,
-content: T,
-outline: Outline,
+fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
+crate::text::md::parse(data, lib)
+}
+fn render<'s, 'p, 'html>(
+self,
sack: &'s Sack,
+parsed: impl Renderable + 'p,
+outline: Outline,
bib: Option<Vec<String>>,
) -> impl Renderable + 'html
where
-'f: 'html,
-'m: 'html,
's: 'html,
-T: Renderable + 'm {
-wiki(self, content, outline, sack, bib)
+'p: 'html,
+{
+wiki(self, sack, parsed, outline, bib)
}
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
@@ -36,24 +39,20 @@ impl Content for Wiki {
desc: None,
}))
}
-fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
-crate::text::md::parse(data, lib)
-}
}
-fn wiki<'data, 'html, 'sack, T>(
-fm: &'data Wiki,
-content: T,
+fn wiki<'s, 'p, 'html>(
+matter: Wiki,
+sack: &'s Sack,
+parsed: impl Renderable + 'p,
_: Outline,
-sack: &'sack Sack,
bib: Option<Vec<String>>,
) -> impl Renderable + 'html
where
-'sack: 'html,
-'data: 'html,
-T: Renderable + 'data
+'s: 'html,
+'p: 'html,
{
+let heading = matter.title.clone();
let main = maud_move!(
main .wiki-main {
@@ -75,10 +74,10 @@ fn wiki<'data, 'html, 'sack, T>(
article .wiki-article /*class:list={classlist)*/ {
header class="markdown" {
-h1 #top { (fm.title.clone()) }
+h1 #top { (heading) }
}
section .wiki-article__markdown.markdown {
-(content)
+(parsed)
}
@if let Some(bib) = bib {
@@ -88,5 +87,5 @@ fn wiki<'data, 'html, 'sack, T>(
}
);
-crate::html::page(&fm.title, main, sack.get_file())
+crate::html::page(sack, main, matter.title.to_owned())
}

View file

@@ -1,25 +1,26 @@
mod build;
mod html;
-mod md;
mod pipeline;
mod text;
mod ts;
mod utils;
mod watch;
+use std::collections::HashSet;
use std::fs;
use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf};
use chrono::{DateTime, Datelike, Utc};
use clap::{Parser, ValueEnum};
-use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem, Sack};
+use gray_matter::engine::YAML;
+use gray_matter::Matter;
use hypertext::{Raw, Renderable};
-use once_cell::sync::Lazy;
+use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem};
use serde::Deserialize;
-use crate::pipeline::Virtual;
use crate::build::build_styles;
+use crate::pipeline::Virtual;
#[derive(Parser, Debug, Clone)]
struct Args {
@@ -34,35 +35,14 @@ enum Mode {
}
#[derive(Debug)]
-struct BuildInfo {
+struct BuildContext {
+pub mode: Mode,
pub year: i32,
pub date: String,
pub link: String,
pub hash: String,
}
-static REPO: Lazy<BuildInfo> = Lazy::new(|| {
-let time = chrono::Utc::now();
-BuildInfo {
-year: time.year(),
-date: time.format("%Y/%m/%d %H:%M").to_string(),
-link: "https://git.kamoshi.org/kamov/website".into(),
-hash: String::from_utf8(
-Command::new("git")
-.args(["rev-parse", "--short", "HEAD"])
-.output()
-.unwrap()
-.stdout
-)
-.unwrap()
-.trim()
-.into()
-}
-});
#[derive(Debug, Clone)]
pub struct Link {
pub path: Utf8PathBuf,
@@ -82,88 +62,54 @@ pub enum Linkable {
Date(LinkDate),
}
-fn to_index<T>(item: PipelineItem) -> PipelineItem
-where
-T: for<'de> Deserialize<'de> + Content + 'static,
-{
-let meta = match item {
-PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Index) => meta,
-_ => return item,
+fn main() {
+let args = Args::parse();
+let time = chrono::Utc::now();
+let ctx = BuildContext {
+mode: args.mode,
+year: time.year(),
+date: time.format("%Y/%m/%d %H:%M").to_string(),
+link: "https://git.kamoshi.org/kamov/website".into(),
+hash: String::from_utf8(
+Command::new("git")
+.args(["rev-parse", "--short", "HEAD"])
+.output()
+.expect("Couldn't load git revision")
+.stdout,
+)
+.expect("Invalid UTF8")
+.trim()
+.into(),
};
-let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap();
-let dir = match meta.path.file_stem().unwrap() {
-"index" => dir.to_owned(),
-name => dir.join(name),
-};
-let path = dir.join("index.html");
-match meta.path.extension() {
-Some("md" | "mdx" | "lhs") => {
-let data = fs::read_to_string(&meta.path).unwrap();
-let (fm, md) = md::preflight::<T>(&data);
-let link = T::as_link(&fm, Utf8Path::new("/").join(dir));
-let call = move |sack: &Sack| {
-let lib = sack.get_library();
-let (outline, html, bib) = T::parse(&md, lib);
-T::transform(&fm, Raw(html), outline, sack, bib).render().into()
-};
-Output {
-kind: Asset {
-kind: pipeline::AssetKind::Html(Box::new(call)),
-meta,
-}.into(),
-path,
-link,
-}.into()
-},
-_ => meta.into(),
+match args.mode {
+Mode::Build => {
+build(&ctx);
+}
+Mode::Watch => {
+build(&ctx);
+watch::watch().unwrap()
+}
}
}
-fn to_bundle(item: PipelineItem) -> PipelineItem {
-let meta = match item {
-PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta,
-_ => return item,
-};
-let path = meta.path.strip_prefix("content").unwrap().to_owned();
-match meta.path.extension() {
-// any image
-Some("jpg" | "png" | "gif") => {
-Output {
-kind: Asset {
-kind: AssetKind::Image,
-meta,
-}.into(),
-path,
-link: None,
-}.into()
-},
-// bibliography
-Some("bib") => {
-let data = fs::read_to_string(&meta.path).unwrap();
-let data = hayagriva::io::from_biblatex_str(&data).unwrap();
-Output {
-kind: Asset {
-kind: AssetKind::Bibtex(data),
-meta,
-}.into(),
-path,
-link: None,
-}.into()
-},
-_ => meta.into(),
+struct Source {
+path: &'static str,
+exts: HashSet<&'static str>,
+func: fn(PipelineItem) -> PipelineItem,
+}
+impl Source {
+fn get(&self) -> Vec<PipelineItem> {
+pipeline::gather(self.path, &self.exts)
+.into_iter()
+.map(self.func)
+.collect()
}
}
-fn build() {
+fn build(ctx: &BuildContext) {
if fs::metadata("dist").is_ok() {
println!("Cleaning dist");
fs::remove_dir_all("dist").unwrap();
@@ -171,28 +117,38 @@ fn build() {
fs::create_dir("dist").unwrap();
-let assets: Vec<Output> = [
-pipeline::gather("content/about.md", &["md"].into())
-.into_iter()
-.map(to_index::<crate::html::Post> as fn(PipelineItem) -> PipelineItem),
-pipeline::gather("content/posts/**/*", &["md", "mdx"].into())
-.into_iter()
-.map(to_index::<crate::html::Post>),
-pipeline::gather("content/slides/**/*", &["md", "lhs"].into())
-.into_iter()
-.map(to_index::<crate::html::Slideshow>),
-pipeline::gather("content/wiki/**/*", &["md"].into())
-.into_iter()
-.map(to_index::<crate::html::Wiki>),
-]
-.into_iter()
-.flatten()
+let sources = vec![
+Source {
+path: "content/about.md",
+exts: ["md"].into(),
+func: as_index::<crate::html::Post>,
+},
+Source {
+path: "content/posts/**/*",
+exts: ["md", "mdx"].into(),
+func: as_index::<crate::html::Post>,
+},
+Source {
+path: "content/slides/**/*",
+exts: ["md", "lhs"].into(),
+func: as_index::<crate::html::Slideshow>,
+},
+Source {
+path: "content/wiki/**/*",
+exts: ["md"].into(),
+func: as_index::<crate::html::Wiki>,
+},
+];
+let assets: Vec<Output> = sources
+.iter()
+.flat_map(Source::get)
.map(to_bundle)
.filter_map(|item| match item {
PipelineItem::Skip(skip) => {
println!("Skipping {}", skip.path);
None
-},
+}
PipelineItem::Take(take) => Some(take),
})
.collect();
@@ -201,37 +157,48 @@ fn build() {
assets,
vec![
Output {
-kind: Virtual::new(|_| crate::html::map().render().to_owned().into()).into(),
+kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(),
path: "map/index.html".into(),
link: None,
},
Output {
-kind: Virtual::new(|_| crate::html::search().render().to_owned().into()).into(),
+kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into())
+.into(),
path: "search/index.html".into(),
link: None,
},
Output {
kind: Asset {
-kind: pipeline::AssetKind::Html(Box::new(|_| {
+kind: pipeline::AssetKind::html(|sack| {
let data = std::fs::read_to_string("content/index.md").unwrap();
-let (_, html, _) = text::md::parse(&data, None);
-crate::html::home(Raw(html)).render().to_owned().into()
-})),
+let (_, html, _) = text::md::parse(data, None);
+crate::html::home(sack, Raw(html))
+.render()
+.to_owned()
+.into()
+}),
meta: pipeline::FileItem {
kind: pipeline::FileItemKind::Index,
-path: "content/index.md".into()
-}
-}.into(),
+path: "content/index.md".into(),
+},
+}
+.into(),
path: "index.html".into(),
link: None,
},
Output {
-kind: Virtual::new(|sack| crate::html::to_list(sack.get_links("posts/**/*.html"))).into(),
+kind: Virtual::new(|sack| {
+crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into())
+})
+.into(),
path: "posts/index.html".into(),
link: None,
},
Output {
-kind: Virtual::new(|sack| crate::html::to_list(sack.get_links("slides/**/*.html"))).into(),
+kind: Virtual::new(|sack| {
+crate::html::to_list(sack, sack.get_links("slides/**/*.html"), "Slideshows".into())
+})
+.into(),
path: "slides/index.html".into(),
link: None,
},
@@ -243,7 +210,7 @@ fn build() {
{
let now = std::time::Instant::now();
-pipeline::render_all(&assets);
+pipeline::render_all(ctx, &assets);
println!("Elapsed: {:.2?}", now.elapsed());
}
@@ -273,14 +240,100 @@ fn build() {
println!("{}", String::from_utf8(res.stderr).unwrap());
}
-fn main() {
-let args = Args::parse();
-match args.mode {
-Mode::Build => build(),
-Mode::Watch => {
-build();
-watch::watch().unwrap()
-},
+pub fn parse_frontmatter<T>(raw: &str) -> (T, String)
+where
+T: for<'de> Deserialize<'de>,
+{
+let matter = Matter::<YAML>::new();
+let result = matter.parse(raw);
+(
+// Just the front matter
+result.data.unwrap().deserialize::<T>().unwrap(),
+// The rest of the content
+result.content,
+)
+}
+fn as_index<T>(item: PipelineItem) -> PipelineItem
+where
+T: for<'de> Deserialize<'de> + Content + Clone + 'static,
+{
+let meta = match item {
+PipelineItem::Skip(e) if matches!(e.kind, FileItemKind::Index) => e,
+_ => return item,
+};
+let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap();
+let dir = match meta.path.file_stem().unwrap() {
+"index" => dir.to_owned(),
+name => dir.join(name),
+};
+let path = dir.join("index.html");
+match meta.path.extension() {
+Some("md" | "mdx" | "lhs") => {
+let data = fs::read_to_string(&meta.path).unwrap();
+let (fm, md) = parse_frontmatter::<T>(&data);
+let link = T::as_link(&fm, Utf8Path::new("/").join(dir));
+Output {
+kind: Asset {
+kind: pipeline::AssetKind::html(move |sack| {
+let lib = sack.get_library();
+let (outline, parsed, bib) = T::parse(md.clone(), lib);
+T::render(fm.clone(), sack, Raw(parsed), outline, bib)
+.render()
+.into()
+}),
+meta,
+}
+.into(),
+path,
+link,
+}
+.into()
+}
+_ => meta.into(),
+}
+}
+fn to_bundle(item: PipelineItem) -> PipelineItem {
+let meta = match item {
+PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta,
+_ => return item,
+};
+let path = meta.path.strip_prefix("content").unwrap().to_owned();
+match meta.path.extension() {
+// any image
+Some("jpg" | "png" | "gif") => Output {
+kind: Asset {
+kind: AssetKind::Image,
+meta,
+}
+.into(),
+path,
+link: None,
+}
+.into(),
+// bibliography
+Some("bib") => {
+let data = fs::read_to_string(&meta.path).unwrap();
+let data = hayagriva::io::from_biblatex_str(&data).unwrap();
+Output {
+kind: Asset {
+kind: AssetKind::Bibtex(data),
+meta,
+}
+.into(),
+path,
+link: None,
+}
+.into()
+}
+_ => meta.into(),
+}
}
}

View file

@@ -1,43 +0,0 @@
-use gray_matter::{engine::YAML, Matter};
-use serde::Deserialize;
-pub fn preflight<T>(raw: &str) -> (T, String)
-where
-T: for<'de> Deserialize<'de>,
-{
-let matter = Matter::<YAML>::new();
-let result = matter.parse(raw);
-(
-// Just the front matter
-result.data.unwrap().deserialize::<T>().unwrap(),
-// The actual markdown content
-result.content,
-)
-}
-mod isodate {
-use chrono::{DateTime, Utc};
-use serde::{self, Deserialize, Deserializer};
-// pub fn serialize<S>(
-// date: &DateTime<Utc>,
-// serializer: S,
-// ) -> Result<S::Ok, S::Error>
-// where
-// S: Serializer,
-// {
-// let s = date.to_rfc3339();
-// serializer.serialize_str(&s)
-// }
-pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
-where
-D: Deserializer<'de>,
-{
-let s = String::deserialize(deserializer)?;
-let dt = chrono::DateTime::parse_from_rfc3339(&s).map_err(serde::de::Error::custom)?;
-Ok(dt.into())
-}
-}

View file

@@ -1,3 +0,0 @@
-mod matter;
-pub use matter::preflight;

View file

@@ -11,7 +11,7 @@ use hayagriva::Library;
use hypertext::Renderable;
use crate::text::md::Outline;
-use crate::{Link, LinkDate, Linkable};
+use crate::{BuildContext, Link, LinkDate, Linkable};
/// Represents a piece of content that can be rendered as a page. This trait needs to be
/// implemented for the front matter associated with some web page as that is what ultimately
@@ -19,21 +19,23 @@ use crate::{Link, LinkDate, Linkable};
/// rendered page on the website.
pub(crate) trait Content {
/// Parse the document. Pass an optional library for bibliography.
-fn parse(document: &str, library: Option<&Library>) -> (Outline, String, Option<Vec<String>>);
-fn transform<'fm, 'md, 'sack, 'html, T>(
-&'fm self,
-content: T,
+/// This generates the initial HTML markup from content.
+fn parse(document: String, library: Option<&Library>)
+-> (Outline, String, Option<Vec<String>>);
+/// Render the full page from parsed content.
+fn render<'s, 'p, 'html>(
+self,
+sack: &'s Sack,
+parsed: impl Renderable + 'p,
outline: Outline,
-sack: &'sack Sack,
bib: Option<Vec<String>>,
) -> impl Renderable + 'html
where
-'fm: 'html,
-'md: 'html,
-'sack: 'html,
-T: Renderable + 'md;
+'s: 'html,
+'p: 'html;
+/// Get link for this content
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable>;
}
@@ -66,6 +68,12 @@ pub(crate) enum AssetKind {
Image,
}
+impl AssetKind {
+pub fn html(f: impl Fn(&Sack) -> String + 'static) -> Self {
+Self::Html(Box::new(f))
+}
+}
/// Asset corresponding to a file on disk.
pub(crate) struct Asset {
/// The kind of a processed asset.
@@ -138,6 +146,7 @@ impl From<Output> for PipelineItem {
/// This struct allows for querying the website hierarchy. It is passed to each rendered website
/// page, so that it can easily access the website metadata.
pub(crate) struct Sack<'a> {
+pub ctx: &'a BuildContext,
/// Literally all of the content
hole: &'a [Output],
/// Current path for the page being rendered
@@ -147,10 +156,6 @@ pub(crate) struct Sack<'a> {
}
impl<'a> Sack<'a> {
-pub fn new(hole: &'a [Output], path: &'a Utf8PathBuf, file: Option<&'a Utf8PathBuf>) -> Self {
-Self { hole, path, file }
-}
pub fn get_links(&self, path: &str) -> Vec<LinkDate> {
let pattern = glob::Pattern::new(path).expect("Bad glob pattern");
self.hole
@@ -260,17 +265,25 @@ fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
FileItem { kind, path }
}
-pub fn render_all(items: &[Output]) {
+pub fn render_all(ctx: &BuildContext, items: &[Output]) {
for item in items {
let file = match &item.kind {
OutputKind::Asset(a) => Some(&a.meta.path),
OutputKind::Virtual(_) => None,
};
-render(item, &Sack::new(items, &item.path, file));
+render(
+item,
+Sack {
+ctx,
+hole: items,
+path: &item.path,
+file,
+},
+);
}
}
-fn render(item: &Output, sack: &Sack) {
+fn render(item: &Output, sack: Sack) {
let o = Utf8Path::new("dist").join(&item.path);
fs::create_dir_all(o.parent().unwrap()).unwrap();
@@ -281,7 +294,7 @@ fn render(item: &Output, sack: &Sack) {
match &real.kind {
AssetKind::Html(closure) => {
let mut file = File::create(&o).unwrap();
-file.write_all(closure(sack).as_bytes()).unwrap();
+file.write_all(closure(&sack).as_bytes()).unwrap();
println!("HTML: {} -> {}", i, o);
}
AssetKind::Bibtex(_) => {}
@@ -294,7 +307,7 @@ fn render(item: &Output, sack: &Sack) {
}
OutputKind::Virtual(Virtual(ref closure)) => {
let mut file = File::create(&o).unwrap();
-file.write_all(closure(sack).as_bytes()).unwrap();
+file.write_all(closure(&sack).as_bytes()).unwrap();
println!("Virtual: -> {}", o);
}
}

View file

@@ -1,6 +1,11 @@
use std::collections::HashMap;
-use hayagriva::{archive::ArchivedStyle, citationberg::{IndependentStyle, Locale, Style}, BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest, Library};
+use hayagriva::{
+archive::ArchivedStyle,
+citationberg::{IndependentStyle, Locale, Style},
+BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest,
+Library,
+};
use hypertext::Renderable;
use once_cell::sync::Lazy;
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd, TextMergeStream};
@@ -8,52 +13,50 @@ use regex::Regex;
use crate::ts;
-static OPTS: Lazy<Options> = Lazy::new(||
+static OPTS: Lazy<Options> = Lazy::new(|| {
Options::empty()
.union(Options::ENABLE_MATH)
.union(Options::ENABLE_TABLES)
.union(Options::ENABLE_TASKLISTS)
.union(Options::ENABLE_STRIKETHROUGH)
.union(Options::ENABLE_SMART_PUNCTUATION)
-);
+});
-static KATEX_I: Lazy<katex::Opts> = Lazy::new(||
+static KATEX_I: Lazy<katex::Opts> = Lazy::new(|| {
katex::opts::Opts::builder()
.output_type(katex::OutputType::Mathml)
.build()
.unwrap()
-);
+});
-static KATEX_B: Lazy<katex::Opts> = Lazy::new(||
+static KATEX_B: Lazy<katex::Opts> = Lazy::new(|| {
katex::opts::Opts::builder()
.output_type(katex::OutputType::Mathml)
.display_mode(true)
.build()
.unwrap()
-);
+});
static LOCALE: Lazy<Vec<Locale>> = Lazy::new(hayagriva::archive::locales);
-static STYLE: Lazy<IndependentStyle> = Lazy::new(||
-match ArchivedStyle::InstituteOfPhysicsNumeric.get() {
+static STYLE: Lazy<IndependentStyle> =
+Lazy::new(|| match ArchivedStyle::InstituteOfPhysicsNumeric.get() {
Style::Independent(style) => style,
Style::Dependent(_) => unreachable!(),
-}
-);
+});
pub struct Outline(pub Vec<(String, String)>);
-pub fn parse(text: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
+pub fn parse(text: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
let (outline, stream) = {
-let stream = Parser::new_ext(text, *OPTS);
+let stream = Parser::new_ext(&text, *OPTS);
let mut stream: Vec<_> = TextMergeStream::new(stream).collect();
let outline = set_heading_ids(&mut stream);
(outline, stream)
};
-let stream = stream.into_iter()
+let stream = stream
+.into_iter()
.map(make_math)
.map(make_emoji)
.collect::<Vec<_>>();
@@ -75,13 +78,20 @@ pub fn parse(text: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<
(outline, html, bib)
}
-fn make_bib<'a, 'b>(stream: Vec<Event<'a>>, lib: &'b Library) -> (Vec<Event<'a>>, Option<Vec<String>>) {
+fn make_bib<'a, 'b>(
+stream: Vec<Event<'a>>,
+lib: &'b Library,
+) -> (Vec<Event<'a>>, Option<Vec<String>>) {
let mut driver = BibliographyDriver::new();
for event in stream.iter() {
match event {
Event::InlineMath(ref text) => match lib.get(text) {
-Some(entry) => driver.citation(CitationRequest::from_items(vec![CitationItem::with_entry(entry)], &STYLE, &LOCALE)),
+Some(entry) => driver.citation(CitationRequest::from_items(
+vec![CitationItem::with_entry(entry)],
+&STYLE,
+&LOCALE,
+)),
None => (),
},
_ => (),
@@ -89,36 +99,51 @@ fn make_bib<'a, 'b>(stream: Vec<Event<'a>>, lib: &'b Library) -> (Vec<Event<'a>>
}
// add fake citation to make all entries show up
-driver.citation(CitationRequest::from_items(lib.iter().map(CitationItem::with_entry).collect(), &STYLE, &LOCALE));
+driver.citation(CitationRequest::from_items(
+lib.iter().map(CitationItem::with_entry).collect(),
+&STYLE,
+&LOCALE,
+));
-let res = driver.finish(BibliographyRequest { style: &STYLE, locale: None, locale_files: &LOCALE });
+let res = driver.finish(BibliographyRequest {
+style: &STYLE,
+locale: None,
+locale_files: &LOCALE,
+});
let mut n = 0;
-let stream = stream.into_iter()
+let stream = stream
+.into_iter()
.map(|event| match event {
Event::InlineMath(name) => {
let mut buffer = String::from("<cite>");
match res.citations.get(n) {
-Some(rf) => rf.citation.write_buf(&mut buffer, BufWriteFormat::Html).unwrap(),
+Some(rf) => rf
+.citation
+.write_buf(&mut buffer, BufWriteFormat::Html)
+.unwrap(),
None => buffer.push_str(&name),
};
buffer.push_str("</cite>");
n += 1;
Event::InlineHtml(buffer.into())
-},
-_ => event
+}
+_ => event,
})
.collect();
-let bib = res.bibliography.map(|bib|
-bib.items.iter()
+let bib = res.bibliography.map(|bib| {
+bib.items
+.iter()
.map(|x| {
let mut buffer = String::new();
-x.content.write_buf(&mut buffer, BufWriteFormat::Html).unwrap();
+x.content
+.write_buf(&mut buffer, BufWriteFormat::Html)
+.unwrap();
buffer
})
.collect::<Vec<_>>()
-);
+});
(stream, bib)
}
@@ -156,15 +181,13 @@ fn annotate_(input: &str) -> Vec<Annotated_> {
fn make_cite(event: Event) -> Vec<Event> {
match event {
-Event::Text(ref text) => {
-annotate_(text)
+Event::Text(ref text) => annotate_(text)
.into_iter()
.map(|e| match e {
Annotated_::Text(text) => Event::Text(text.to_owned().into()),
Annotated_::Cite(cite) => Event::InlineMath(cite.to_owned().into()),
})
-.collect()
-},
+.collect(),
_ => vec![event],
}
}
@@ -177,38 +200,45 @@ fn set_heading_ids(events: &mut [Event]) -> Outline {
for event in events {
match event {
-Event::Start(ref mut tag @ Tag::Heading {..}) => {
+Event::Start(ref mut tag @ Tag::Heading { .. }) => {
ptr = Some(tag);
-},
-Event::Text(ref text) if ptr.is_some() => {
-buf.push_str(text)
-},
+}
+Event::Text(ref text) if ptr.is_some() => buf.push_str(text),
Event::End(TagEnd::Heading(..)) => {
let txt = std::mem::take(&mut buf);
let url = txt.to_lowercase().replace(' ', "-");
let url = match cnt.get_mut(&url) {
-Some(ptr) => { *ptr += 1; format!("{url}-{ptr}") },
-None => { cnt.insert(url.clone(), 0); url },
+Some(ptr) => {
+*ptr += 1;
+format!("{url}-{ptr}")
+}
+None => {
+cnt.insert(url.clone(), 0);
+url
+}
};
match ptr.take().unwrap() {
Tag::Heading { ref mut id, .. } => *id = Some(url.clone().into()),
_ => unreachable!(),
}
out.push((txt, url));
-},
+}
_ => (),
}
-};
+}
Outline(out)
}
fn make_math(event: Event) -> Event {
match event {
-Event::InlineMath(math) => Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into()),
-Event::DisplayMath(math) => Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into()),
-_ => event
+Event::InlineMath(math) => {
+Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into())
+}
+Event::DisplayMath(math) => {
+Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into())
+}
+_ => event,
}
}
@@ -228,21 +258,19 @@ fn make_code(es: Vec<Event>) -> Vec<Event> {
let html = ts::highlight(&lang, &code).render().as_str().to_owned();
buff.push(Event::Html(html.into()));
code.clear();
-},
+}
Event::Text(text) => match lang {
None => buff.push(Event::Text(text)),
Some(_) => code.push_str(&text),
},
-_ => buff.push(event)
+_ => buff.push(event),
}
}
buff
}
-static RE_RUBY: Lazy<Regex> = Lazy::new(||
-Regex::new(r"\[([^\]]+)\]\{([^}]+)\}").unwrap()
-);
+static RE_RUBY: Lazy<Regex> = Lazy::new(|| Regex::new(r"\[([^\]]+)\]\{([^}]+)\}").unwrap());
#[derive(Debug)]
enum Annotated<'a> {
@@ -250,7 +278,6 @@ enum Annotated<'a> {
Ruby(&'a str, &'a str),
}
fn annotate(input: &str) -> Vec<Annotated> {
let mut parts: Vec<Annotated> = Vec::new();
let mut last_index = 0;
@@ -281,7 +308,9 @@ fn make_ruby(event: Event) -> Vec<Event> {
.into_iter()
.map(|el| match el {
Annotated::Text(text) => Event::Text(text.to_owned().into()),
-Annotated::Ruby(t, f) => Event::InlineHtml(format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into()),
+Annotated::Ruby(t, f) => Event::InlineHtml(
+format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into(),
+),
})
.collect(),
_ => vec![event],
@@ -300,7 +329,7 @@ fn make_emoji(event: Event) -> Event {
if let Some(emoji) = emojis::get_by_shortcode(key) {
let buf = buf.get_or_insert_with(|| String::with_capacity(text.len()));
-buf.push_str(&text[top..old-1]);
+buf.push_str(&text[top..old - 1]);
buf.push_str(emoji.as_str());
top = idx + 1;
}
@@ -314,9 +343,9 @@ fn make_emoji(event: Event) -> Event {
match buf {
None => event,
-Some(buf) => Event::Text(buf.into())
+Some(buf) => Event::Text(buf.into()),
+}
}
-},
_ => event,
}
}

View file

@@ -1,11 +1,10 @@
-use std::borrow::Cow;
-use hypertext::{html_elements, maud_move, Raw, Renderable, GlobalAttributes};
-use tree_sitter_highlight::{Highlighter, HighlightEvent};
mod captures;
mod configs;
+use std::borrow::Cow;
+use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
+use tree_sitter_highlight::{HighlightEvent, Highlighter};
pub enum Event {
Write(String),
@@ -13,13 +12,9 @@ pub enum Event {
Close,
}
-pub fn highlight<'data, 'html>(
-lang: &'data str,
-code: &'data str
-) -> impl Renderable + 'html
-where
-'data: 'html
+pub fn highlight<'data, 'html>(lang: &'data str, code: &'data str) -> impl Renderable + 'html
+where
+'data: 'html,
{
maud_move!(
figure .listing.kanagawa data-lang=(lang) {
@@ -37,11 +32,13 @@ fn to_html(lang: &str, code: &str) -> String {
.into_iter()
.map(|event| match event {
Event::Write(text) => Cow::from(
-text.replace('&', "&amp;").replace('<', "&lt;").replace('>', "&gt;")
-),
-Event::Enter(class) => Cow::from(
-format!("<span class=\"{}\">", class.replace('.', "-"))
+text.replace('&', "&amp;")
+.replace('<', "&lt;")
+.replace('>', "&gt;"),
),
+Event::Enter(class) => {
+Cow::from(format!("<span class=\"{}\">", class.replace('.', "-")))
+}
Event::Close => Cow::from("</span>"),
})
.collect()
@@ -50,17 +47,15 @@ fn to_html(lang: &str, code: &str) -> String {
fn get_events(lang: &str, src: &str) -> Vec<Event> {
let config = match configs::get_config(lang) {
Some(c) => c,
-None => return vec![Event::Write(src.into())]
+None => return vec![Event::Write(src.into())],
};
let mut hl = Highlighter::new();
-let highlights = hl.highlight(
-config,
-src.as_bytes(),
-None,
-|name| configs::get_config(name)
-).unwrap();
+let highlights = hl
+.highlight(config, src.as_bytes(), None, |name| {
+configs::get_config(name)
+})
+.unwrap();
let mut out = vec![];
for event in highlights {
@@ -73,7 +68,7 @@ fn get_events(lang: &str, src: &str) -> Vec<Event> {
fn map_event(event: HighlightEvent, src: &str) -> Event {
match event {
-HighlightEvent::Source {start, end} => Event::Write(src[start..end].into()),
+HighlightEvent::Source { start, end } => Event::Write(src[start..end].into()),
HighlightEvent::HighlightStart(s) => Event::Enter(captures::NAMES[s.0].into()),
HighlightEvent::HighlightEnd => Event::Close,
}