Maciej Jur 2024-07-05 13:59:07 +02:00
parent 46705d707f
commit fc01a1ed2a
Signed by: kamov
GPG key ID: 191CBFF5F72ECAFD
14 changed files with 1399 additions and 1316 deletions

rustfmt.toml Normal file

@ -0,0 +1,2 @@
tab_spaces = 4
hard_tabs = true


@ -1,6 +1,6 @@
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable}; use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
use crate::text::md::parse; use crate::{pipeline::Sack, text::md::parse};
const INTRO: &str = r#" const INTRO: &str = r#"
## ##
@ -15,59 +15,62 @@ const INTRO: &str = r#"
"#; "#;
fn intro() -> impl Renderable { fn intro() -> impl Renderable {
let (_, html, _) = parse(INTRO, None); let (_, html, _) = parse(INTRO.into(), None);
maud!( maud!(
section .p-card.intro-jp lang="ja-JP" { section .p-card.intro-jp lang="ja-JP" {
(Raw(html)) (Raw(html))
} }
) )
} }
-fn kanji() -> impl Renderable {
-	maud!(
-		section .p-card {
-			h2 .p-card__heading {
-				"Kanji of the Day"
-			}
-			div {
-				// <Widget client:load/>
-			}
-		}
-	)
-}
+// fn kanji() -> impl Renderable {
+// 	maud!(
+// 		section .p-card {
+// 			h2 .p-card__heading {
+// 				"Kanji of the Day"
+// 			}
+// 			div {
+// 				// <Widget client:load/>
+// 			}
+// 		}
+// 	)
+// }
fn photo() -> impl Renderable { fn photo() -> impl Renderable {
maud!( maud!(
section .p-card.home-card-image { section .p-card.home-card-image {
h2 .p-card__heading { h2 .p-card__heading {
"Image of the Month" "Image of the Month"
} }
a .home-card-image__link href="/static/IMG_20231029_111650.jpg" { a .home-card-image__link href="/static/IMG_20231029_111650.jpg" {
img .home-card-image__image img .home-card-image__image
src="/static/IMG_20231029_111650.jpg" src="/static/IMG_20231029_111650.jpg"
alt="Autumn park with colorful trees and fallen leaves"; alt="Autumn park with colorful trees and fallen leaves";
} }
} }
) )
} }
-pub fn home<'data, 'home, R>(main: R) -> impl Renderable + 'home
+pub(crate) fn home<'s, 'p, 'html>(
+	sack: &'s Sack,
+	main: impl Renderable + 'p,
+) -> impl Renderable + 'html
 where
-	'data: 'home,
-	R: Renderable + 'data,
+	's: 'html,
+	'p: 'html,
 {
let main = maud_move!( let main = maud_move!(
main .l-home { main .l-home {
article .l-home__article.markdown { article .l-home__article.markdown {
(main) (main)
} }
aside .l-home__aside { aside .l-home__aside {
(intro()) (intro())
// (kanji()) // (kanji())
(photo()) (photo())
} }
} }
); );
crate::html::page("Home", main, None) crate::html::page(sack, main, "Home".into())
} }


@ -1,62 +1,66 @@
-use crate::{html::page, LinkDate};
-use camino::Utf8PathBuf;
-use chrono::{DateTime, Utc};
 use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable};

-pub fn list<'data, 'list>(
-	title: &'data str,
-	groups: &'data [(i32, Vec<LinkDate>)],
-) -> impl Renderable + 'list
+use crate::html::page;
+use crate::pipeline::Sack;
+use crate::LinkDate;
+
+pub fn list<'s, 'g, 'html>(
+	sack: &'s Sack,
+	groups: &'g [(i32, Vec<LinkDate>)],
+	title: String,
+) -> impl Renderable + 'html
 where
-	'data: 'list,
+	's: 'html,
+	'g: 'html,
 {
+	let heading = title.clone();
 	let list = maud_move!(
 		main .page-list-main {
 			article .page-list {
 				header .markdown {
-					h1 { (title) }
+					h1 { (heading) }
 				}

 				@for (year, group) in groups {
 					(section(*year, group))
 				}
 			}
 		}
 	);

-	page(title, list, None)
+	page(sack, list, title)
 }
fn section(year: i32, group: &[LinkDate]) -> impl Renderable + '_ { fn section(year: i32, group: &[LinkDate]) -> impl Renderable + '_ {
maud_move!( maud_move!(
section .page-list-year { section .page-list-year {
header .page-list-year__header { header .page-list-year__header {
h2 { (year) } h2 { (year) }
} }
@for item in group.iter() { @for item in group.iter() {
(link(item)) (link(item))
} }
} }
) )
} }
fn link(data: &LinkDate) -> impl Renderable + '_ { fn link(data: &LinkDate) -> impl Renderable + '_ {
let time = data.date.format("%m/%d"); let time = data.date.format("%m/%d");
maud_move!( maud_move!(
a .page-item href=(data.link.path.as_str()) { a .page-item href=(data.link.path.as_str()) {
div .page-item__header { div .page-item__header {
h3 { h3 {
(&data.link.name) (&data.link.name)
} }
time datetime=(data.date.to_rfc3339()) { time datetime=(data.date.to_rfc3339()) {
(time.to_string()) (time.to_string())
} }
} }
@if let Some(ref desc) = data.link.desc { @if let Some(ref desc) = data.link.desc {
div .page-item__desc { div .page-item__desc {
(desc) (desc)
} }
} }
} }
) )
} }


@ -3,93 +3,92 @@ use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use crate::pipeline::{Sack, TreePage}; use crate::pipeline::{Sack, TreePage};
use crate::text::md::Outline; use crate::text::md::Outline;
/// Render the outline for a document /// Render the outline for a document
pub(crate) fn show_outline(outline: Outline) -> impl Renderable { pub(crate) fn show_outline(outline: Outline) -> impl Renderable {
maud_move!( maud_move!(
section .link-tree { section .link-tree {
h2 .link-tree__heading { h2 .link-tree__heading {
a .link-tree__heading-text href="#top" { "Content" } a .link-tree__heading-text href="#top" { "Content" }
} }
nav #table-of-contents .link-tree__nav { nav #table-of-contents .link-tree__nav {
ul .link-tree__nav-list { ul .link-tree__nav-list {
@for (title, id) in outline.0 { @for (title, id) in outline.0 {
li .link-tree__nav-list-item { li .link-tree__nav-list-item {
a .link-tree__nav-list-text.link href=(format!("#{}", id)) { a .link-tree__nav-list-text.link href=(format!("#{}", id)) {
(title) (title)
} }
} }
} }
} }
} }
} }
) )
} }
/// Render the bibliography for a document /// Render the bibliography for a document
pub(crate) fn show_bibliography(bib: Vec<String>) -> impl Renderable { pub(crate) fn show_bibliography(bib: Vec<String>) -> impl Renderable {
maud_move!( maud_move!(
section .markdown { section .markdown {
h2 { h2 {
"Bibliography" "Bibliography"
} }
ol .bibliography { ol .bibliography {
@for item in bib { @for item in bib {
li { li {
(Raw(item)) (Raw(item))
} }
} }
} }
} }
) )
} }
/// Render the page tree /// Render the page tree
pub(crate) fn show_page_tree(sack: &Sack, glob: &str) -> impl Renderable { pub(crate) fn show_page_tree(sack: &Sack, glob: &str) -> impl Renderable {
let tree = sack.get_tree(glob); let tree = sack.get_tree(glob);
maud_move!( maud_move!(
h2 .link-tree__heading { h2 .link-tree__heading {
// {pages.chain(x => x.prefix) // {pages.chain(x => x.prefix)
// .map(pathify) // .map(pathify)
// .mapOrDefault(href => // .mapOrDefault(href =>
// <a class="link-tree__heading-text" href={href}>{heading}</a>, // <a class="link-tree__heading-text" href={href}>{heading}</a>,
// <span class="link-tree__heading-text">{heading}</span> // <span class="link-tree__heading-text">{heading}</span>
// )} // )}
} }
nav .link-tree__nav { nav .link-tree__nav {
(show_page_tree_level(&tree)) (show_page_tree_level(&tree))
} }
) )
} }
fn show_page_tree_level(tree: &TreePage) -> impl Renderable + '_ { fn show_page_tree_level(tree: &TreePage) -> impl Renderable + '_ {
let subs = { let subs = {
let mut subs: Vec<_> = tree.subs.iter().collect(); let mut subs: Vec<_> = tree.subs.iter().collect();
subs.sort_by(|a, b| a.0.cmp(b.0)); subs.sort_by(|a, b| a.0.cmp(b.0));
subs subs
}; };
maud_move!( maud_move!(
ul .link-tree__nav-list { ul .link-tree__nav-list {
@for (key, next) in subs { @for (key, next) in subs {
li .link-tree__nav-list-item { li .link-tree__nav-list-item {
span .link-tree__nav-list-text { span .link-tree__nav-list-text {
@if let Some(ref link) = next.link { @if let Some(ref link) = next.link {
a .link-tree__nav-list-text.link href=(link.path.as_str()) { a .link-tree__nav-list-text.link href=(link.path.as_str()) {
(&link.name) (&link.name)
} }
} @else { } @else {
span .link-tree__nav-list-text { span .link-tree__nav-list-text {
(key) (key)
} }
} }
} }
@if !next.subs.is_empty() { @if !next.subs.is_empty() {
(show_page_tree_level(next)) (show_page_tree_level(next))
} }
} }
} }
} }
) )
} }


@ -13,223 +13,240 @@ use camino::Utf8Path;
use chrono::Datelike; use chrono::Datelike;
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable}; use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
use crate::REPO;
pub(crate) use home::home; pub(crate) use home::home;
pub(crate) use post::Post; pub(crate) use post::Post;
pub(crate) use slideshow::Slideshow; pub(crate) use slideshow::Slideshow;
pub(crate) use wiki::Wiki; pub(crate) use wiki::Wiki;
use crate::{pipeline::Sack, Mode};
const JS_RELOAD: &str = r#" const JS_RELOAD: &str = r#"
const socket = new WebSocket("ws://localhost:1337"); const socket = new WebSocket("ws://localhost:1337");
socket.addEventListener("message", (event) => { socket.addEventListener("message", (event) => {
console.log(event); console.log(event);
window.location.reload(); window.location.reload();
}); });
"#; "#;
const JS_IMPORTS: &str = r#" const JS_IMPORTS: &str = r#"
{ {
"imports": { "imports": {
"reveal": "/js/vanilla/reveal.js", "reveal": "/js/vanilla/reveal.js",
"photos": "/js/vanilla/photos.js" "photos": "/js/vanilla/photos.js"
} }
} }
"#; "#;
-fn head(title: &str) -> impl Renderable + '_ {
-	let title = format!("{} | kamoshi.org", title);
+fn head<'s, 'html>(sack: &'s Sack, title: String) -> impl Renderable + 'html
+where
+	's: 'html,
+{
+	let title = format!("{} | kamoshi.org", title);
 	maud_move!(
 		meta charset="utf-8";
 		meta name="viewport" content="width=device-width, initial-scale=1";
 		title {
 			(title)
 		}
 		// link rel="sitemap" href="/sitemap.xml";
 		link rel="stylesheet" href="/styles.css";
 		link rel="stylesheet" href="/static/css/reveal.css";
 		link rel="stylesheet" href="/static/css/leaflet.css";
 		link rel="stylesheet" href="/static/css/MarkerCluster.css";
 		link rel="stylesheet" href="/static/css/MarkerCluster.Default.css";
 		link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png";
 		link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png";
 		link rel="icon" href="/favicon.ico" sizes="any";
 		script type="importmap" {(Raw(JS_IMPORTS))}
-		script { (Raw(JS_RELOAD)) }
+		@if matches!(sack.ctx.mode, Mode::Watch) {
+			script { (Raw(JS_RELOAD)) }
+		}
 	)
 }
fn navbar() -> impl Renderable { fn navbar() -> impl Renderable {
static ITEMS: &[(&str, &str)] = &[ static ITEMS: &[(&str, &str)] = &[
("Posts", "/posts/"), ("Posts", "/posts/"),
("Slides", "/slides/"), ("Slides", "/slides/"),
("Wiki", "/wiki/"), ("Wiki", "/wiki/"),
("Map", "/map/"), ("Map", "/map/"),
("About", "/about/"), ("About", "/about/"),
("Search", "/search/"), ("Search", "/search/"),
]; ];
maud!( maud!(
nav .p-nav { nav .p-nav {
input #p-nav-toggle type="checkbox" hidden; input #p-nav-toggle type="checkbox" hidden;
div .p-nav__bar { div .p-nav__bar {
a .p-nav__logo href="/" { a .p-nav__logo href="/" {
img .p-nav__logo-icon height="48px" width="51px" src="/static/svg/aya.svg" alt=""; img .p-nav__logo-icon height="48px" width="51px" src="/static/svg/aya.svg" alt="";
div .p-nav__logo-text { div .p-nav__logo-text {
div .p-nav__logo-main { div .p-nav__logo-main {
(Raw(include_str!("logotype.svg"))) (Raw(include_str!("logotype.svg")))
} }
div #p-nav-splash .p-nav__logo-sub { div #p-nav-splash .p-nav__logo-sub {
"夢現の遥か彼方" "夢現の遥か彼方"
} }
} }
} }
label .p-nav__burger for="p-nav-toggle" tabindex="0" { label .p-nav__burger for="p-nav-toggle" tabindex="0" {
span .p-nav__burger-icon {} span .p-nav__burger-icon {}
} }
} }
menu .p-nav__menu { menu .p-nav__menu {
@for (name, url) in ITEMS { @for (name, url) in ITEMS {
li .p-nav__menu-item { li .p-nav__menu-item {
a .p-nav__menu-link href=(*url) { a .p-nav__menu-link href=(*url) {
(*name) (*name)
} }
} }
} }
} }
} }
) )
} }
-pub fn footer(path: Option<&Utf8Path>) -> impl Renderable {
-	let copy = format!("Copyright &copy; {} Maciej Jur", &REPO.year);
+pub fn footer<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
+where
+	's: 'html,
+{
+	let copy = format!("Copyright &copy; {} Maciej Jur", &sack.ctx.year);
 	let mail = "maciej@kamoshi.org";
 	let href = format!("mailto:{}", mail);
-	let link = Utf8Path::new(&REPO.link)
+	let link = Utf8Path::new(&sack.ctx.link)
 		.join("src/commit")
-		.join(&REPO.hash);
-	let link = match path {
+		.join(&sack.ctx.hash);
+	let link = match sack.get_file() {
 		Some(path) => link.join(path),
 		None => link,
 	};

 	maud_move!(
 		footer .footer {
 			div .left {
 				div {
 					(Raw(copy))
 				}
 				a href=(href) {
 					(mail)
 				}
 			}
 			div .repo {
 				a href=(link.as_str()) {
-					(&REPO.hash)
+					(&sack.ctx.hash)
 				}
 				div {
-					(&REPO.date)
+					(&sack.ctx.date)
 				}
 			}
 			a .right.footer__cc-wrap rel="license" href="http://creativecommons.org/licenses/by/4.0/" {
 				img .footer__cc-stamp alt="Creative Commons License" width="88" height="31" src="/static/svg/by.svg";
 			}
 		}
 	)
 }

-fn bare<'data, 'html, R>(title: &'data str, main: R) -> impl Renderable + 'html
+fn bare<'s, 'p, 'html>(
+	sack: &'s Sack,
+	main: impl Renderable + 'p,
+	title: String,
+) -> impl Renderable + 'html
 where
-	'data : 'html,
-	R: Renderable + 'data
+	's: 'html,
+	'p: 'html,
 {
 	maud_move!(
 		(Raw("<!DOCTYPE html>"))
 		html lang="en" {
-			(head(title))
+			(head(sack, title))
 			body {
 				(main)
 			}
 		}
 	)
 }

-fn page<'data, 'main, 'html, T>(
-	title: &'data str,
-	main: T,
-	path: Option<&'data Utf8Path>,
+fn page<'s, 'p, 'html>(
+	sack: &'s Sack,
+	main: impl Renderable + 'p,
+	title: String,
 ) -> impl Renderable + 'html
 where
-	'main : 'html,
-	'data : 'html,
-	T: Renderable + 'main
+	's: 'html,
+	'p: 'html,
 {
 	maud_move!(
 		(Raw("<!DOCTYPE html>"))
 		html lang="en" {
-			(head(title))
+			(head(sack, title))
 			body {
 				(navbar())
 				(main)
-				(footer(path))
+				(footer(sack))
 			}
 		}
 	)
 }

-pub(crate) fn to_list(list: Vec<crate::LinkDate>) -> String {
+pub(crate) fn to_list(sack: &Sack, list: Vec<crate::LinkDate>, title: String) -> String {
 	let mut groups = HashMap::<i32, Vec<_>>::new();

 	for page in list {
 		groups.entry(page.date.year()).or_default().push(page);
 	}

 	let mut groups: Vec<_> = groups
 		.into_iter()
 		.map(|(k, mut v)| {
 			v.sort_by(|a, b| b.date.cmp(&a.date));
 			(k, v)
 		})
 		.collect();

 	groups.sort_by(|a, b| b.0.cmp(&a.0));

-	list::list("", &groups).render().into()
+	list::list(sack, &groups, title).render().into()
 }

-pub(crate) fn map() -> impl Renderable {
+pub(crate) fn map<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
+where
+	's: 'html,
+{
 	page(
-		"Map",
+		sack,
 		maud!(
 			main {
 				div #map style="height: 100%; width: 100%" {}
 				script type="module" {
 					(Raw("import 'photos';"))
 				}
 			}
 		),
-		None,
+		String::from("Map"),
 	)
 }

-pub(crate) fn search() -> impl Renderable {
+pub(crate) fn search<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
+where
+	's: 'html,
+{
 	page(
-		"Search",
+		sack,
 		maud!(
 			main #app {}
 			script type="module" src="/js/search/dist/search.js" {}
 		),
-		None,
+		String::from("Search"),
 	)
 }


@ -6,91 +6,88 @@ use serde::Deserialize;
use crate::pipeline::{Content, Sack}; use crate::pipeline::{Content, Sack};
use crate::text::md::Outline; use crate::text::md::Outline;
use crate::{Linkable, LinkDate}; use crate::{LinkDate, Linkable};
/// Represents a simple post. /// Represents a simple post.
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
pub(crate) struct Post { pub(crate) struct Post {
pub(crate) title: String, pub(crate) title: String,
#[serde(with = "super::isodate")] #[serde(with = "super::isodate")]
pub(crate) date: DateTime<Utc>, pub(crate) date: DateTime<Utc>,
pub(crate) desc: Option<String>, pub(crate) desc: Option<String>,
} }
impl Content for Post { impl Content for Post {
-	fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
+	fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
 		crate::text::md::parse(data, lib)
 	}

-	fn transform<'f, 'm, 's, 'html, T>(
-		&'f self,
-		content: T,
-		outline: Outline,
-		sack: &'s Sack,
-		bib: Option<Vec<String>>,
-	) -> impl Renderable + 'html
-	where
-		'f: 'html,
-		'm: 'html,
-		's: 'html,
-		T: Renderable + 'm,
-	{
-		post(self, content, outline, bib, sack)
-	}
+	fn render<'s, 'p, 'html>(
+		self,
+		sack: &'s Sack,
+		parsed: impl Renderable + 'p,
+		outline: Outline,
+		bib: Option<Vec<String>>,
+	) -> impl Renderable + 'html
+	where
+		's: 'html,
+		'p: 'html,
+	{
+		post(self, sack, parsed, outline, bib)
+	}
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> { fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
Some(Linkable::Date(LinkDate { Some(Linkable::Date(LinkDate {
link: crate::Link { link: crate::Link {
path, path,
name: self.title.to_owned(), name: self.title.to_owned(),
desc: self.desc.to_owned(), desc: self.desc.to_owned(),
}, },
date: self.date.to_owned(), date: self.date.to_owned(),
})) }))
} }
} }
-pub fn post<'f, 'm, 's, 'html, T>(
-	fm: &'f Post,
-	content: T,
+pub fn post<'s, 'p, 'html>(
+	fm: Post,
+	sack: &'s Sack,
+	content: impl Renderable + 'p,
 	outline: Outline,
 	bib: Option<Vec<String>>,
-	sack: &'s Sack,
 ) -> impl Renderable + 'html
 where
-	'f: 'html,
-	'm: 'html,
-	's: 'html,
-	T: Renderable + 'm
+	's: 'html,
+	'p: 'html,
 {
+	let heading = fm.title.clone();
 	let main = maud_move!(
 		main .wiki-main {
// Slide in/out for mobile // Slide in/out for mobile
input #wiki-aside-shown type="checkbox" hidden; input #wiki-aside-shown type="checkbox" hidden;
aside .wiki-aside { aside .wiki-aside {
// Slide button // Slide button
label .wiki-aside__slider for="wiki-aside-shown" { label .wiki-aside__slider for="wiki-aside-shown" {
img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24";
} }
(crate::html::misc::show_outline(outline)) (crate::html::misc::show_outline(outline))
} }
article .wiki-article /*class:list={classlist)*/ { article .wiki-article /*class:list={classlist)*/ {
header class="markdown" { header class="markdown" {
h1 #top { (fm.title.clone()) } h1 #top { (heading) }
} }
section .wiki-article__markdown.markdown { section .wiki-article__markdown.markdown {
(content) (content)
} }
@if let Some(bib) = bib { @if let Some(bib) = bib {
(crate::html::misc::show_bibliography(bib)) (crate::html::misc::show_bibliography(bib))
} }
} }
} }
); );
crate::html::page(&fm.title, main, sack.get_file()) crate::html::page(sack, main, fm.title.clone())
} }


@ -1,87 +1,105 @@
use camino::Utf8PathBuf; use camino::Utf8PathBuf;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use hayagriva::Library; use hayagriva::Library;
use hypertext::{html_elements, maud_move, Renderable, GlobalAttributes, Raw}; use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use serde::Deserialize; use serde::Deserialize;
use crate::pipeline::{Content, Sack}; use crate::pipeline::{Content, Sack};
use crate::text::md::Outline; use crate::text::md::Outline;
use crate::{Link, LinkDate, Linkable}; use crate::{Link, LinkDate, Linkable};
const CSS: &str = r#"
.slides img {
margin-left: auto;
margin-right: auto;
max-height: 60vh;
}
"#;
/// Represents a slideshow /// Represents a slideshow
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
pub(crate) struct Slideshow { pub(crate) struct Slideshow {
pub title: String, pub title: String,
#[serde(with = "super::isodate")] #[serde(with = "super::isodate")]
pub date: DateTime<Utc>, pub date: DateTime<Utc>,
pub desc: Option<String>, pub desc: Option<String>,
} }
impl Content for Slideshow { impl Content for Slideshow {
-	fn transform<'f, 'm, 's, 'html, T>(
-		&'f self,
-		content: T,
-		_: Outline,
-		_: &'s Sack,
-		_bib: Option<Vec<String>>,
-	) -> impl Renderable + 'html
-	where
-		'f: 'html,
-		'm: 'html,
-		's: 'html,
-		T: Renderable + 'm {
-		show(self, content)
-	}
-
-	fn parse(data: &str, _: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
-		let html = data
-			.split("\n-----\n")
-			.map(|chunk| chunk.split("\n---\n").map(|s| crate::text::md::parse(s, None)).map(|e| e.1).collect::<Vec<_>>())
-			.map(|stack| match stack.len() > 1 {
-				true => format!("<section>{}</section>", stack.into_iter().map(|slide| format!("<section>{slide}</section>")).collect::<String>()),
-				false => format!("<section>{}</section>", stack[0])
-			})
-			.collect::<String>();
-		(Outline(vec![]), html, None)
-	}
-
-	fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
-		Some(Linkable::Date(LinkDate {
-			link: Link {
-				path,
-				name: self.title.to_owned(),
-				desc: self.desc.to_owned(),
-			},
-			date: self.date.to_owned(),
-		}))
-	}
+	fn parse(data: String, _: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
+		let html = data
+			.split("\n-----\n")
+			.map(|chunk| {
+				chunk
+					.split("\n---\n")
+					.map(|s| crate::text::md::parse(s.to_owned(), None))
+					.map(|e| e.1)
+					.collect::<Vec<_>>()
+			})
+			.map(|stack| match stack.len() > 1 {
+				true => format!(
+					"<section>{}</section>",
+					stack
+						.into_iter()
+						.map(|slide| format!("<section>{slide}</section>"))
+						.collect::<String>()
+				),
+				false => format!("<section>{}</section>", stack[0]),
+			})
+			.collect::<String>();
+		(Outline(vec![]), html, None)
+	}
+
+	fn render<'s, 'p, 'html>(
+		self,
+		sack: &'s Sack,
+		parsed: impl Renderable + 'p,
+		_: Outline,
+		_: Option<Vec<String>>,
+	) -> impl Renderable + 'html
+	where
+		's: 'html,
+		'p: 'html,
+	{
+		show(self, sack, parsed)
+	}
+
+	fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
+		Some(Linkable::Date(LinkDate {
+			link: Link {
+				path,
+				name: self.title.to_owned(),
+				desc: self.desc.to_owned(),
+			},
+			date: self.date.to_owned(),
+		}))
+	}
 }

-pub fn show<'data, 'show>(
-	fm: &'data Slideshow,
-	slides: impl Renderable + 'data
-) -> impl Renderable + 'show
+pub fn show<'s, 'p, 'html>(
+	fm: Slideshow,
+	sack: &'s Sack,
+	slides: impl Renderable + 'p,
+) -> impl Renderable + 'html
 where
-	'data: 'show
+	's: 'html,
+	'p: 'html,
 {
-	crate::html::bare(&fm.title, maud_move!(
-		div .reveal {
-			div .slides {
-				(slides)
-			}
-		}
-
-		script type="module" {
-			(Raw("import 'reveal';"))
-		}
-
-		style {r#"
-			.slides img {
-				margin-left: auto;
-				margin-right: auto;
-				max-height: 60vh;
-			}
-		"#}
-	))
+	crate::html::bare(
+		sack,
+		maud_move!(
+			div .reveal {
+				div .slides {
+					(slides)
+				}
+			}
+
+			script type="module" {
+				(Raw("import 'reveal';"))
+			}
+
+			style { (Raw(CSS)) }
+		),
+		fm.title.clone(),
+	)
 }
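A quick illustration of the splitting rules that Slideshow::parse keeps from the old inline implementation: "\n-----\n" separates vertical stacks, "\n---\n" separates slides inside a stack, and multi-slide stacks are wrapped in an extra <section>. This is only a sketch under those assumptions; the sample input is made up, and the per-slide markup still comes from crate::text::md::parse.

#[test]
fn nests_slides_into_sections() {
	// One standalone slide, then a vertical stack of two slides.
	let src = String::from("# One\n-----\n# Two\n---\n# Three");
	let (_, html, _) = <Slideshow as Content>::parse(src, None);
	// Expected shape: <section>One</section>, plus an outer <section>
	// wrapping two inner <section>s for the Two/Three stack.
	assert_eq!(html.matches("<section>").count(), 4);
}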


@ -10,83 +10,82 @@ use crate::{Link, Linkable};
/// Represents a wiki page /// Represents a wiki page
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
pub struct Wiki { pub struct Wiki {
pub title: String, pub title: String,
} }
impl Content for Wiki { impl Content for Wiki {
-	fn transform<'f, 'm, 's, 'html, T>(
-		&'f self,
-		content: T,
-		outline: Outline,
-		sack: &'s Sack,
-		bib: Option<Vec<String>>,
-	) -> impl Renderable + 'html
-	where
-		'f: 'html,
-		'm: 'html,
-		's: 'html,
-		T: Renderable + 'm {
-		wiki(self, content, outline, sack, bib)
-	}
-
-	fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
-		Some(Linkable::Link(Link {
-			path,
-			name: self.title.to_owned(),
-			desc: None,
-		}))
-	}
-
-	fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
-		crate::text::md::parse(data, lib)
-	}
+	fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
+		crate::text::md::parse(data, lib)
+	}
+
+	fn render<'s, 'p, 'html>(
+		self,
+		sack: &'s Sack,
+		parsed: impl Renderable + 'p,
+		outline: Outline,
+		bib: Option<Vec<String>>,
+	) -> impl Renderable + 'html
+	where
+		's: 'html,
+		'p: 'html,
+	{
+		wiki(self, sack, parsed, outline, bib)
+	}
+
+	fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
+		Some(Linkable::Link(Link {
+			path,
+			name: self.title.to_owned(),
+			desc: None,
+		}))
+	}
 }

-fn wiki<'data, 'html, 'sack, T>(
-	fm: &'data Wiki,
-	content: T,
-	_: Outline,
-	sack: &'sack Sack,
-	bib: Option<Vec<String>>,
-) -> impl Renderable + 'html
-where
-	'sack: 'html,
-	'data: 'html,
-	T: Renderable + 'data
+fn wiki<'s, 'p, 'html>(
+	matter: Wiki,
+	sack: &'s Sack,
+	parsed: impl Renderable + 'p,
+	_: Outline,
+	bib: Option<Vec<String>>,
+) -> impl Renderable + 'html
+where
+	's: 'html,
+	'p: 'html,
 {
+	let heading = matter.title.clone();
 	let main = maud_move!(
 		main .wiki-main {
// Slide in/out for mobile // Slide in/out for mobile
input #wiki-aside-shown type="checkbox" hidden; input #wiki-aside-shown type="checkbox" hidden;
aside .wiki-aside { aside .wiki-aside {
// Slide button // Slide button
label .wiki-aside__slider for="wiki-aside-shown" { label .wiki-aside__slider for="wiki-aside-shown" {
img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24";
} }
// Navigation tree // Navigation tree
section .link-tree { section .link-tree {
div { div {
(crate::html::misc::show_page_tree(sack, "wiki/**/*.html")) (crate::html::misc::show_page_tree(sack, "wiki/**/*.html"))
} }
} }
} }
article .wiki-article /*class:list={classlist)*/ { article .wiki-article /*class:list={classlist)*/ {
header class="markdown" { header class="markdown" {
h1 #top { (fm.title.clone()) } h1 #top { (heading) }
} }
section .wiki-article__markdown.markdown { section .wiki-article__markdown.markdown {
(content) (parsed)
} }
@if let Some(bib) = bib { @if let Some(bib) = bib {
(crate::html::misc::show_bibliography(bib)) (crate::html::misc::show_bibliography(bib))
} }
} }
} }
); );
crate::html::page(&fm.title, main, sack.get_file()) crate::html::page(sack, main, matter.title.to_owned())
} }


@ -1,286 +1,339 @@
mod build; mod build;
mod html; mod html;
mod md;
mod pipeline; mod pipeline;
mod text; mod text;
mod ts; mod ts;
mod utils; mod utils;
mod watch; mod watch;
use std::collections::HashSet;
use std::fs; use std::fs;
use std::process::Command; use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf}; use camino::{Utf8Path, Utf8PathBuf};
use chrono::{DateTime, Datelike, Utc}; use chrono::{DateTime, Datelike, Utc};
use clap::{Parser, ValueEnum}; use clap::{Parser, ValueEnum};
use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem, Sack}; use gray_matter::engine::YAML;
use gray_matter::Matter;
use hypertext::{Raw, Renderable}; use hypertext::{Raw, Renderable};
use once_cell::sync::Lazy; use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem};
use serde::Deserialize; use serde::Deserialize;
use crate::pipeline::Virtual;
use crate::build::build_styles; use crate::build::build_styles;
use crate::pipeline::Virtual;
#[derive(Parser, Debug, Clone)] #[derive(Parser, Debug, Clone)]
struct Args { struct Args {
#[clap(value_enum, index = 1, default_value = "build")] #[clap(value_enum, index = 1, default_value = "build")]
mode: Mode, mode: Mode,
} }
#[derive(ValueEnum, Debug, Clone, Copy)] #[derive(ValueEnum, Debug, Clone, Copy)]
enum Mode { enum Mode {
Build, Build,
Watch, Watch,
} }
 #[derive(Debug)]
-struct BuildInfo {
+struct BuildContext {
+	pub mode: Mode,
 	pub year: i32,
 	pub date: String,
 	pub link: String,
 	pub hash: String,
 }
static REPO: Lazy<BuildInfo> = Lazy::new(|| {
let time = chrono::Utc::now();
BuildInfo {
year: time.year(),
date: time.format("%Y/%m/%d %H:%M").to_string(),
link: "https://git.kamoshi.org/kamov/website".into(),
hash: String::from_utf8(
Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.unwrap()
.stdout
)
.unwrap()
.trim()
.into()
}
});
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Link { pub struct Link {
pub path: Utf8PathBuf, pub path: Utf8PathBuf,
pub name: String, pub name: String,
pub desc: Option<String>, pub desc: Option<String>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct LinkDate { pub struct LinkDate {
pub link: Link, pub link: Link,
pub date: DateTime<Utc>, pub date: DateTime<Utc>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Linkable { pub enum Linkable {
Link(Link), Link(Link),
Date(LinkDate), Date(LinkDate),
}
fn to_index<T>(item: PipelineItem) -> PipelineItem
where
T: for<'de> Deserialize<'de> + Content + 'static,
{
let meta = match item {
PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Index) => meta,
_ => return item,
};
let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap();
let dir = match meta.path.file_stem().unwrap() {
"index" => dir.to_owned(),
name => dir.join(name),
};
let path = dir.join("index.html");
match meta.path.extension() {
Some("md" | "mdx" | "lhs") => {
let data = fs::read_to_string(&meta.path).unwrap();
let (fm, md) = md::preflight::<T>(&data);
let link = T::as_link(&fm, Utf8Path::new("/").join(dir));
let call = move |sack: &Sack| {
let lib = sack.get_library();
let (outline, html, bib) = T::parse(&md, lib);
T::transform(&fm, Raw(html), outline, sack, bib).render().into()
};
Output {
kind: Asset {
kind: pipeline::AssetKind::Html(Box::new(call)),
meta,
}.into(),
path,
link,
}.into()
},
_ => meta.into(),
}
}
fn to_bundle(item: PipelineItem) -> PipelineItem {
let meta = match item {
PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta,
_ => return item,
};
let path = meta.path.strip_prefix("content").unwrap().to_owned();
match meta.path.extension() {
// any image
Some("jpg" | "png" | "gif") => {
Output {
kind: Asset {
kind: AssetKind::Image,
meta,
}.into(),
path,
link: None,
}.into()
},
// bibliography
Some("bib") => {
let data = fs::read_to_string(&meta.path).unwrap();
let data = hayagriva::io::from_biblatex_str(&data).unwrap();
Output {
kind: Asset {
kind: AssetKind::Bibtex(data),
meta,
}.into(),
path,
link: None,
}.into()
},
_ => meta.into(),
}
}
fn build() {
if fs::metadata("dist").is_ok() {
println!("Cleaning dist");
fs::remove_dir_all("dist").unwrap();
}
fs::create_dir("dist").unwrap();
let assets: Vec<Output> = [
pipeline::gather("content/about.md", &["md"].into())
.into_iter()
.map(to_index::<crate::html::Post> as fn(PipelineItem) -> PipelineItem),
pipeline::gather("content/posts/**/*", &["md", "mdx"].into())
.into_iter()
.map(to_index::<crate::html::Post>),
pipeline::gather("content/slides/**/*", &["md", "lhs"].into())
.into_iter()
.map(to_index::<crate::html::Slideshow>),
pipeline::gather("content/wiki/**/*", &["md"].into())
.into_iter()
.map(to_index::<crate::html::Wiki>),
]
.into_iter()
.flatten()
.map(to_bundle)
.filter_map(|item| match item {
PipelineItem::Skip(skip) => {
println!("Skipping {}", skip.path);
None
},
PipelineItem::Take(take) => Some(take),
})
.collect();
let assets: Vec<Output> = vec![
assets,
vec![
Output {
kind: Virtual::new(|_| crate::html::map().render().to_owned().into()).into(),
path: "map/index.html".into(),
link: None,
},
Output {
kind: Virtual::new(|_| crate::html::search().render().to_owned().into()).into(),
path: "search/index.html".into(),
link: None,
},
Output {
kind: Asset {
kind: pipeline::AssetKind::Html(Box::new(|_| {
let data = std::fs::read_to_string("content/index.md").unwrap();
let (_, html, _) = text::md::parse(&data, None);
crate::html::home(Raw(html)).render().to_owned().into()
})),
meta: pipeline::FileItem {
kind: pipeline::FileItemKind::Index,
path: "content/index.md".into()
}
}.into(),
path: "index.html".into(),
link: None,
},
Output {
kind: Virtual::new(|sack| crate::html::to_list(sack.get_links("posts/**/*.html"))).into(),
path: "posts/index.html".into(),
link: None,
},
Output {
kind: Virtual::new(|sack| crate::html::to_list(sack.get_links("slides/**/*.html"))).into(),
path: "slides/index.html".into(),
link: None,
},
],
]
.into_iter()
.flatten()
.collect();
{
let now = std::time::Instant::now();
pipeline::render_all(&assets);
println!("Elapsed: {:.2?}", now.elapsed());
}
utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
build_styles();
let res = Command::new("pagefind")
.args(["--site", "dist"])
.output()
.unwrap();
println!("{}", String::from_utf8(res.stdout).unwrap());
let res = Command::new("esbuild")
.arg("js/vanilla/reveal.js")
.arg("js/vanilla/photos.ts")
.arg("js/search/dist/search.js")
.arg("--format=esm")
.arg("--bundle")
.arg("--splitting")
.arg("--minify")
.arg("--outdir=dist/js/")
.output()
.unwrap();
println!("{}", String::from_utf8(res.stderr).unwrap());
} }
fn main() { fn main() {
let args = Args::parse(); let args = Args::parse();
+	let time = chrono::Utc::now();
+	let ctx = BuildContext {
+		mode: args.mode,
+		year: time.year(),
+		date: time.format("%Y/%m/%d %H:%M").to_string(),
+		link: "https://git.kamoshi.org/kamov/website".into(),
+		hash: String::from_utf8(
+			Command::new("git")
+				.args(["rev-parse", "--short", "HEAD"])
+				.output()
+				.expect("Couldn't load git revision")
+				.stdout,
+		)
+		.expect("Invalid UTF8")
+		.trim()
+		.into(),
+	};
 	match args.mode {
-		Mode::Build => build(),
+		Mode::Build => {
+			build(&ctx);
+		}
 		Mode::Watch => {
-			build();
+			build(&ctx);
 			watch::watch().unwrap()
-		},
+		}
 	}
 }
struct Source {
path: &'static str,
exts: HashSet<&'static str>,
func: fn(PipelineItem) -> PipelineItem,
}
impl Source {
fn get(&self) -> Vec<PipelineItem> {
pipeline::gather(self.path, &self.exts)
.into_iter()
.map(self.func)
.collect()
}
}
fn build(ctx: &BuildContext) {
if fs::metadata("dist").is_ok() {
println!("Cleaning dist");
fs::remove_dir_all("dist").unwrap();
}
fs::create_dir("dist").unwrap();
let sources = vec![
Source {
path: "content/about.md",
exts: ["md"].into(),
func: as_index::<crate::html::Post>,
},
Source {
path: "content/posts/**/*",
exts: ["md", "mdx"].into(),
func: as_index::<crate::html::Post>,
},
Source {
path: "content/slides/**/*",
exts: ["md", "lhs"].into(),
func: as_index::<crate::html::Slideshow>,
},
Source {
path: "content/wiki/**/*",
exts: ["md"].into(),
func: as_index::<crate::html::Wiki>,
},
];
let assets: Vec<Output> = sources
.iter()
.flat_map(Source::get)
.map(to_bundle)
.filter_map(|item| match item {
PipelineItem::Skip(skip) => {
println!("Skipping {}", skip.path);
None
}
PipelineItem::Take(take) => Some(take),
})
.collect();
let assets: Vec<Output> = vec![
assets,
vec![
Output {
kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(),
path: "map/index.html".into(),
link: None,
},
Output {
kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into())
.into(),
path: "search/index.html".into(),
link: None,
},
Output {
kind: Asset {
kind: pipeline::AssetKind::html(|sack| {
let data = std::fs::read_to_string("content/index.md").unwrap();
let (_, html, _) = text::md::parse(data, None);
crate::html::home(sack, Raw(html))
.render()
.to_owned()
.into()
}),
meta: pipeline::FileItem {
kind: pipeline::FileItemKind::Index,
path: "content/index.md".into(),
},
}
.into(),
path: "index.html".into(),
link: None,
},
Output {
kind: Virtual::new(|sack| {
crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into())
})
.into(),
path: "posts/index.html".into(),
link: None,
},
Output {
kind: Virtual::new(|sack| {
crate::html::to_list(sack, sack.get_links("slides/**/*.html"), "Slideshows".into())
})
.into(),
path: "slides/index.html".into(),
link: None,
},
],
]
.into_iter()
.flatten()
.collect();
{
let now = std::time::Instant::now();
pipeline::render_all(ctx, &assets);
println!("Elapsed: {:.2?}", now.elapsed());
}
utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
build_styles();
let res = Command::new("pagefind")
.args(["--site", "dist"])
.output()
.unwrap();
println!("{}", String::from_utf8(res.stdout).unwrap());
let res = Command::new("esbuild")
.arg("js/vanilla/reveal.js")
.arg("js/vanilla/photos.ts")
.arg("js/search/dist/search.js")
.arg("--format=esm")
.arg("--bundle")
.arg("--splitting")
.arg("--minify")
.arg("--outdir=dist/js/")
.output()
.unwrap();
println!("{}", String::from_utf8(res.stderr).unwrap());
}
pub fn parse_frontmatter<T>(raw: &str) -> (T, String)
where
T: for<'de> Deserialize<'de>,
{
let matter = Matter::<YAML>::new();
let result = matter.parse(raw);
(
// Just the front matter
result.data.unwrap().deserialize::<T>().unwrap(),
// The rest of the content
result.content,
)
}
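// Illustrative sketch, not part of this commit: the helper splits the YAML
// header from the Markdown body and deserializes the header into the front
// matter type. The sample document below is made up.
#[cfg(test)]
mod parse_frontmatter_sketch {
	#[test]
	fn splits_header_from_body() {
		let raw = "---\ntitle: Hello\ndate: \"2024-07-05T13:59:07+02:00\"\n---\n# Body";
		let (fm, md) = crate::parse_frontmatter::<crate::html::Post>(raw);
		assert_eq!(fm.title, "Hello");
		assert!(md.contains("Body"));
	}
}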
fn as_index<T>(item: PipelineItem) -> PipelineItem
where
T: for<'de> Deserialize<'de> + Content + Clone + 'static,
{
let meta = match item {
PipelineItem::Skip(e) if matches!(e.kind, FileItemKind::Index) => e,
_ => return item,
};
let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap();
let dir = match meta.path.file_stem().unwrap() {
"index" => dir.to_owned(),
name => dir.join(name),
};
let path = dir.join("index.html");
match meta.path.extension() {
Some("md" | "mdx" | "lhs") => {
let data = fs::read_to_string(&meta.path).unwrap();
let (fm, md) = parse_frontmatter::<T>(&data);
let link = T::as_link(&fm, Utf8Path::new("/").join(dir));
Output {
kind: Asset {
kind: pipeline::AssetKind::html(move |sack| {
let lib = sack.get_library();
let (outline, parsed, bib) = T::parse(md.clone(), lib);
T::render(fm.clone(), sack, Raw(parsed), outline, bib)
.render()
.into()
}),
meta,
}
.into(),
path,
link,
}
.into()
}
_ => meta.into(),
}
}
fn to_bundle(item: PipelineItem) -> PipelineItem {
let meta = match item {
PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta,
_ => return item,
};
let path = meta.path.strip_prefix("content").unwrap().to_owned();
match meta.path.extension() {
// any image
Some("jpg" | "png" | "gif") => Output {
kind: Asset {
kind: AssetKind::Image,
meta,
}
.into(),
path,
link: None,
}
.into(),
// bibliography
Some("bib") => {
let data = fs::read_to_string(&meta.path).unwrap();
let data = hayagriva::io::from_biblatex_str(&data).unwrap();
Output {
kind: Asset {
kind: AssetKind::Bibtex(data),
meta,
}
.into(),
path,
link: None,
}
.into()
}
_ => meta.into(),
}
} }
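Registering another content directory under the new layout is just one more Source entry. A minimal sketch, assuming a hypothetical content/notes directory that reuses the existing Wiki front matter:

fn extra_sources() -> Vec<Source> {
	vec![Source {
		// Hypothetical glob; nothing in this commit creates this directory.
		path: "content/notes/**/*",
		exts: ["md"].into(),
		// Reuse an existing front matter type and the as_index pipeline step.
		func: as_index::<crate::html::Wiki>,
	}]
}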


@ -1,43 +0,0 @@
use gray_matter::{engine::YAML, Matter};
use serde::Deserialize;
pub fn preflight<T>(raw: &str) -> (T, String)
where
T: for<'de> Deserialize<'de>,
{
let matter = Matter::<YAML>::new();
let result = matter.parse(raw);
(
// Just the front matter
result.data.unwrap().deserialize::<T>().unwrap(),
// The actual markdown content
result.content,
)
}
mod isodate {
use chrono::{DateTime, Utc};
use serde::{self, Deserialize, Deserializer};
// pub fn serialize<S>(
// date: &DateTime<Utc>,
// serializer: S,
// ) -> Result<S::Ok, S::Error>
// where
// S: Serializer,
// {
// let s = date.to_rfc3339();
// serializer.serialize_str(&s)
// }
pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let dt = chrono::DateTime::parse_from_rfc3339(&s).map_err(serde::de::Error::custom)?;
Ok(dt.into())
}
}


@ -1,3 +0,0 @@
mod matter;
pub use matter::preflight;


@ -11,67 +11,75 @@ use hayagriva::Library;
use hypertext::Renderable; use hypertext::Renderable;
use crate::text::md::Outline; use crate::text::md::Outline;
use crate::{Link, LinkDate, Linkable}; use crate::{BuildContext, Link, LinkDate, Linkable};
/// Represents a piece of content that can be rendered as a page. This trait needs to be /// Represents a piece of content that can be rendered as a page. This trait needs to be
/// implemented for the front matter associated with some web page as that is what ultimately /// implemented for the front matter associated with some web page as that is what ultimately
/// matters when rendering the page. Each front matter *definition* maps to exactly one kind of /// matters when rendering the page. Each front matter *definition* maps to exactly one kind of
/// rendered page on the website. /// rendered page on the website.
pub(crate) trait Content { pub(crate) trait Content {
 	/// Parse the document. Pass an optional library for bibliography.
-	fn parse(document: &str, library: Option<&Library>) -> (Outline, String, Option<Vec<String>>);
+	/// This generates the initial HTML markup from content.
+	fn parse(document: String, library: Option<&Library>)
+		-> (Outline, String, Option<Vec<String>>);

-	fn transform<'fm, 'md, 'sack, 'html, T>(
-		&'fm self,
-		content: T,
-		outline: Outline,
-		sack: &'sack Sack,
-		bib: Option<Vec<String>>,
-	) -> impl Renderable + 'html
-	where
-		'fm: 'html,
-		'md: 'html,
-		'sack: 'html,
-		T: Renderable + 'md;
+	/// Render the full page from parsed content.
+	fn render<'s, 'p, 'html>(
+		self,
+		sack: &'s Sack,
+		parsed: impl Renderable + 'p,
+		outline: Outline,
+		bib: Option<Vec<String>>,
+	) -> impl Renderable + 'html
+	where
+		's: 'html,
+		'p: 'html;

+	/// Get link for this content
 	fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable>;
} }
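// A minimal sketch of an implementor under the reworked trait; the `Note`
// type is made up for illustration (the real implementors in this commit are
// Post, Slideshow and Wiki).
struct Note {
	title: String,
}

impl Content for Note {
	fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
		// Markdown comes in by value and leaves as ready-made HTML.
		crate::text::md::parse(data, lib)
	}

	fn render<'s, 'p, 'html>(
		self,
		sack: &'s Sack,
		parsed: impl Renderable + 'p,
		_outline: Outline,
		_bib: Option<Vec<String>>,
	) -> impl Renderable + 'html
	where
		's: 'html,
		'p: 'html,
	{
		// Rendering consumes the front matter and borrows the Sack for
		// site-wide context such as build metadata and other pages.
		crate::html::page(sack, parsed, self.title)
	}

	fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
		Some(Linkable::Link(Link {
			path,
			name: self.title.clone(),
			desc: None,
		}))
	}
}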
/// Marks whether the item should be treated as a content page, converted into a standalone HTML /// Marks whether the item should be treated as a content page, converted into a standalone HTML
/// page, or as a bundled asset. /// page, or as a bundled asset.
#[derive(Debug)] #[derive(Debug)]
pub(crate) enum FileItemKind { pub(crate) enum FileItemKind {
/// Marks items converted to `index.html`. /// Marks items converted to `index.html`.
Index, Index,
/// Marks items from bundle. /// Marks items from bundle.
Bundle, Bundle,
} }
/// Metadata for a single item consumed by SSG. /// Metadata for a single item consumed by SSG.
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct FileItem { pub(crate) struct FileItem {
/// The kind of an item from disk. /// The kind of an item from disk.
pub kind: FileItemKind, pub kind: FileItemKind,
/// Original source file location. /// Original source file location.
pub path: Utf8PathBuf, pub path: Utf8PathBuf,
} }
/// Marks how the asset should be processed by the SSG. /// Marks how the asset should be processed by the SSG.
pub(crate) enum AssetKind { pub(crate) enum AssetKind {
/// Data renderable to HTML. In order to process the data, a closure should be called. /// Data renderable to HTML. In order to process the data, a closure should be called.
Html(Box<dyn Fn(&Sack) -> String>), Html(Box<dyn Fn(&Sack) -> String>),
/// Bibliographical data. /// Bibliographical data.
Bibtex(Library), Bibtex(Library),
/// Image. For now they are simply cloned to the `dist` directory. /// Image. For now they are simply cloned to the `dist` directory.
Image, Image,
}
impl AssetKind {
pub fn html(f: impl Fn(&Sack) -> String + 'static) -> Self {
Self::Html(Box::new(f))
}
} }
/// Asset corresponding to a file on disk. /// Asset corresponding to a file on disk.
pub(crate) struct Asset { pub(crate) struct Asset {
/// The kind of a processed asset. /// The kind of a processed asset.
pub kind: AssetKind, pub kind: AssetKind,
/// File metadata /// File metadata
pub meta: FileItem, pub meta: FileItem,
} }
/// Dynamically generated asset not corresponding to any file on disk. This is useful when the /// Dynamically generated asset not corresponding to any file on disk. This is useful when the
@ -79,223 +87,228 @@ pub(crate) struct Asset {
pub(crate) struct Virtual(Box<dyn Fn(&Sack) -> String>); pub(crate) struct Virtual(Box<dyn Fn(&Sack) -> String>);
impl Virtual { impl Virtual {
pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self { pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self {
Self(Box::new(call)) Self(Box::new(call))
} }
} }
/// The kind of an output item. /// The kind of an output item.
pub(crate) enum OutputKind { pub(crate) enum OutputKind {
/// Marks an output item which corresponds to a file on disk. /// Marks an output item which corresponds to a file on disk.
Asset(Asset), Asset(Asset),
/// Marks an output item which doesn't correspond to any file. /// Marks an output item which doesn't correspond to any file.
Virtual(Virtual), Virtual(Virtual),
} }
impl From<Asset> for OutputKind { impl From<Asset> for OutputKind {
fn from(value: Asset) -> Self { fn from(value: Asset) -> Self {
OutputKind::Asset(value) OutputKind::Asset(value)
} }
} }
impl From<Virtual> for OutputKind { impl From<Virtual> for OutputKind {
fn from(value: Virtual) -> Self { fn from(value: Virtual) -> Self {
OutputKind::Virtual(value) OutputKind::Virtual(value)
} }
} }
/// Renderable output /// Renderable output
pub(crate) struct Output { pub(crate) struct Output {
/// The kind of an output item /// The kind of an output item
pub(crate) kind: OutputKind, pub(crate) kind: OutputKind,
/// Path for the output in dist /// Path for the output in dist
pub(crate) path: Utf8PathBuf, pub(crate) path: Utf8PathBuf,
/// Optional URL data for outputted page. /// Optional URL data for outputted page.
pub(crate) link: Option<Linkable>, pub(crate) link: Option<Linkable>,
} }
/// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as /// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as
/// `Take`, which means it needs to have an output location assigned to itself. /// `Take`, which means it needs to have an output location assigned to itself.
pub(crate) enum PipelineItem { pub(crate) enum PipelineItem {
/// Unclaimed file. /// Unclaimed file.
Skip(FileItem), Skip(FileItem),
/// Data ready to be processed. /// Data ready to be processed.
Take(Output), Take(Output),
} }
impl From<FileItem> for PipelineItem { impl From<FileItem> for PipelineItem {
fn from(value: FileItem) -> Self { fn from(value: FileItem) -> Self {
Self::Skip(value) Self::Skip(value)
} }
} }
impl From<Output> for PipelineItem { impl From<Output> for PipelineItem {
fn from(value: Output) -> Self { fn from(value: Output) -> Self {
Self::Take(value) Self::Take(value)
} }
} }
/// This struct allows for querying the website hierarchy. It is passed to each rendered website /// This struct allows for querying the website hierarchy. It is passed to each rendered website
/// page, so that it can easily access the website metadata. /// page, so that it can easily access the website metadata.
 pub(crate) struct Sack<'a> {
+	pub ctx: &'a BuildContext,
 	/// Literally all of the content
 	hole: &'a [Output],
 	/// Current path for the page being rendered
 	path: &'a Utf8PathBuf,
 	/// Original file location for this page
 	file: Option<&'a Utf8PathBuf>,
 }

 impl<'a> Sack<'a> {
-	pub fn new(hole: &'a [Output], path: &'a Utf8PathBuf, file: Option<&'a Utf8PathBuf>) -> Self {
-		Self { hole, path, file }
-	}
-
 	pub fn get_links(&self, path: &str) -> Vec<LinkDate> {
 		let pattern = glob::Pattern::new(path).expect("Bad glob pattern");
 		self.hole
 			.iter()
 			.filter(|item| pattern.matches_path(item.path.as_ref()))
 			.filter_map(|item| match &item.link {
 				Some(Linkable::Date(link)) => Some(link.clone()),
 				_ => None,
 			})
 			.collect()
 	}

 	pub fn get_tree(&self, path: &str) -> TreePage {
 		let glob = glob::Pattern::new(path).expect("Bad glob pattern");
 		let list = self
 			.hole
 			.iter()
 			.filter(|item| glob.matches_path(item.path.as_ref()))
 			.filter_map(|item| match &item.link {
 				Some(Linkable::Link(link)) => Some(link.clone()),
 				_ => None,
 			});

 		let mut tree = TreePage::new();
 		for link in list {
 			tree.add_link(&link);
 		}

 		tree
 	}

 	pub fn get_library(&self) -> Option<&Library> {
 		let glob = format!("{}/*.bib", self.path.parent()?);
 		let glob = glob::Pattern::new(&glob).expect("Bad glob pattern");
 		let opts = glob::MatchOptions {
 			case_sensitive: true,
 			require_literal_separator: true,
 			require_literal_leading_dot: false,
 		};

 		self.hole
 			.iter()
 			.filter(|item| glob.matches_path_with(item.path.as_ref(), opts))
 			.filter_map(|asset| match asset.kind {
 				OutputKind::Asset(ref real) => Some(real),
 				_ => None,
 			})
 			.find_map(|asset| match asset.kind {
 				AssetKind::Bibtex(ref lib) => Some(lib),
 				_ => None,
 			})
 	}

 	/// Get the path for original file location
 	pub fn get_file(&self) -> Option<&'a Utf8Path> {
 		self.file.map(Utf8PathBuf::as_ref)
 	}
 }
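// Sketch, not part of this commit: any closure handed a &Sack can combine
// content queries with the build metadata now exposed through `ctx`.
fn example_virtual() -> Virtual {
	Virtual::new(|sack| {
		let posts = sack.get_links("posts/**/*.html");
		format!("{} posts, built {} ({})", posts.len(), sack.ctx.date, sack.ctx.hash)
	})
}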
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct TreePage { pub(crate) struct TreePage {
pub link: Option<Link>, pub link: Option<Link>,
pub subs: HashMap<String, TreePage>, pub subs: HashMap<String, TreePage>,
} }
impl TreePage { impl TreePage {
fn new() -> Self { fn new() -> Self {
TreePage { TreePage {
link: None, link: None,
subs: HashMap::new(), subs: HashMap::new(),
} }
} }
fn add_link(&mut self, link: &Link) { fn add_link(&mut self, link: &Link) {
let mut ptr = self; let mut ptr = self;
for part in link.path.iter().skip(1) { for part in link.path.iter().skip(1) {
ptr = ptr.subs.entry(part.to_string()).or_insert(TreePage::new()); ptr = ptr.subs.entry(part.to_string()).or_insert(TreePage::new());
} }
ptr.link = Some(link.clone()); ptr.link = Some(link.clone());
} }
} }
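// Sketch, not part of this commit: how TreePage::add_link nests links by path.
fn example_tree() -> TreePage {
	let mut tree = TreePage::new();
	tree.add_link(&Link {
		path: "/wiki/linux/".into(),
		name: "Linux".into(),
		desc: None,
	});
	tree.add_link(&Link {
		path: "/wiki/linux/nix/".into(),
		name: "Nix".into(),
		desc: None,
	});
	// tree.subs["wiki"].subs["linux"] now holds the "Linux" link and has a
	// "nix" child holding the "Nix" link.
	tree
}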
pub fn gather(pattern: &str, exts: &HashSet<&'static str>) -> Vec<PipelineItem> { pub fn gather(pattern: &str, exts: &HashSet<&'static str>) -> Vec<PipelineItem> {
glob(pattern) glob(pattern)
.expect("Invalid glob pattern") .expect("Invalid glob pattern")
.filter_map(|path| { .filter_map(|path| {
let path = path.unwrap(); let path = path.unwrap();
let path = Utf8PathBuf::from_path_buf(path).expect("Filename is not valid UTF8"); let path = Utf8PathBuf::from_path_buf(path).expect("Filename is not valid UTF8");
match path.is_dir() { match path.is_dir() {
true => None, true => None,
false => Some(to_source(path, exts)), false => Some(to_source(path, exts)),
} }
}) })
.map(Into::into) .map(Into::into)
.collect() .collect()
} }
fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem { fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
let hit = path.extension().map_or(false, |ext| exts.contains(ext)); let hit = path.extension().map_or(false, |ext| exts.contains(ext));
let kind = match hit { let kind = match hit {
true => FileItemKind::Index, true => FileItemKind::Index,
false => FileItemKind::Bundle, false => FileItemKind::Bundle,
}; };
FileItem { kind, path } FileItem { kind, path }
} }
pub fn render_all(items: &[Output]) { pub fn render_all(ctx: &BuildContext, items: &[Output]) {
for item in items { for item in items {
let file = match &item.kind { let file = match &item.kind {
OutputKind::Asset(a) => Some(&a.meta.path), OutputKind::Asset(a) => Some(&a.meta.path),
OutputKind::Virtual(_) => None, OutputKind::Virtual(_) => None,
}; };
-		render(item, &Sack::new(items, &item.path, file));
+		render(
+			item,
+			Sack {
+				ctx,
+				hole: items,
+				path: &item.path,
+				file,
+			},
+		);
 	}
} }
fn render(item: &Output, sack: &Sack) { fn render(item: &Output, sack: Sack) {
let o = Utf8Path::new("dist").join(&item.path); let o = Utf8Path::new("dist").join(&item.path);
fs::create_dir_all(o.parent().unwrap()).unwrap(); fs::create_dir_all(o.parent().unwrap()).unwrap();
match item.kind { match item.kind {
OutputKind::Asset(ref real) => { OutputKind::Asset(ref real) => {
let i = &real.meta.path; let i = &real.meta.path;
match &real.kind { match &real.kind {
AssetKind::Html(closure) => { AssetKind::Html(closure) => {
let mut file = File::create(&o).unwrap(); let mut file = File::create(&o).unwrap();
file.write_all(closure(sack).as_bytes()).unwrap(); file.write_all(closure(&sack).as_bytes()).unwrap();
println!("HTML: {} -> {}", i, o); println!("HTML: {} -> {}", i, o);
} }
AssetKind::Bibtex(_) => {} AssetKind::Bibtex(_) => {}
AssetKind::Image => { AssetKind::Image => {
fs::create_dir_all(o.parent().unwrap()).unwrap(); fs::create_dir_all(o.parent().unwrap()).unwrap();
fs::copy(i, &o).unwrap(); fs::copy(i, &o).unwrap();
println!("Image: {} -> {}", i, o); println!("Image: {} -> {}", i, o);
} }
}; };
} }
OutputKind::Virtual(Virtual(ref closure)) => { OutputKind::Virtual(Virtual(ref closure)) => {
let mut file = File::create(&o).unwrap(); let mut file = File::create(&o).unwrap();
file.write_all(closure(sack).as_bytes()).unwrap(); file.write_all(closure(&sack).as_bytes()).unwrap();
println!("Virtual: -> {}", o); println!("Virtual: -> {}", o);
} }
} }
} }


@ -1,6 +1,11 @@
use std::collections::HashMap; use std::collections::HashMap;
use hayagriva::{archive::ArchivedStyle, citationberg::{IndependentStyle, Locale, Style}, BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest, Library}; use hayagriva::{
archive::ArchivedStyle,
citationberg::{IndependentStyle, Locale, Style},
BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest,
Library,
};
use hypertext::Renderable; use hypertext::Renderable;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd, TextMergeStream}; use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd, TextMergeStream};
@ -8,315 +13,339 @@ use regex::Regex;
use crate::ts; use crate::ts;
static OPTS: Lazy<Options> = Lazy::new(|| {
Options::empty()
.union(Options::ENABLE_MATH)
.union(Options::ENABLE_TABLES)
.union(Options::ENABLE_TASKLISTS)
.union(Options::ENABLE_STRIKETHROUGH)
.union(Options::ENABLE_SMART_PUNCTUATION)
});
static OPTS: Lazy<Options> = Lazy::new(|| static KATEX_I: Lazy<katex::Opts> = Lazy::new(|| {
Options::empty() katex::opts::Opts::builder()
.union(Options::ENABLE_MATH) .output_type(katex::OutputType::Mathml)
.union(Options::ENABLE_TABLES) .build()
.union(Options::ENABLE_TASKLISTS) .unwrap()
.union(Options::ENABLE_STRIKETHROUGH) });
.union(Options::ENABLE_SMART_PUNCTUATION)
);
static KATEX_I: Lazy<katex::Opts> = Lazy::new(|| static KATEX_B: Lazy<katex::Opts> = Lazy::new(|| {
katex::opts::Opts::builder() katex::opts::Opts::builder()
.output_type(katex::OutputType::Mathml) .output_type(katex::OutputType::Mathml)
.build() .display_mode(true)
.unwrap() .build()
); .unwrap()
});
static KATEX_B: Lazy<katex::Opts> = Lazy::new(||
katex::opts::Opts::builder()
.output_type(katex::OutputType::Mathml)
.display_mode(true)
.build()
.unwrap()
);
static LOCALE: Lazy<Vec<Locale>> = Lazy::new(hayagriva::archive::locales); static LOCALE: Lazy<Vec<Locale>> = Lazy::new(hayagriva::archive::locales);
static STYLE: Lazy<IndependentStyle> = Lazy::new(|| static STYLE: Lazy<IndependentStyle> =
match ArchivedStyle::InstituteOfPhysicsNumeric.get() { Lazy::new(|| match ArchivedStyle::InstituteOfPhysicsNumeric.get() {
Style::Independent(style) => style, Style::Independent(style) => style,
Style::Dependent(_) => unreachable!(), Style::Dependent(_) => unreachable!(),
} });
);
pub struct Outline(pub Vec<(String, String)>); pub struct Outline(pub Vec<(String, String)>);
pub fn parse(text: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
let (outline, stream) = {
let stream = Parser::new_ext(&text, *OPTS);
let mut stream: Vec<_> = TextMergeStream::new(stream).collect();
let outline = set_heading_ids(&mut stream);
(outline, stream)
};
pub fn parse(text: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) { let stream = stream
let (outline, stream) = { .into_iter()
let stream = Parser::new_ext(text, *OPTS); .map(make_math)
let mut stream: Vec<_> = TextMergeStream::new(stream).collect(); .map(make_emoji)
let outline = set_heading_ids(&mut stream); .collect::<Vec<_>>();
(outline, stream)
};
let stream = stream.into_iter() let stream = make_code(stream)
.map(make_math) .into_iter()
.map(make_emoji) .flat_map(make_ruby)
.collect::<Vec<_>>(); .flat_map(make_cite)
.collect::<Vec<_>>();
let stream = make_code(stream) let (stream, bib) = match lib {
.into_iter() Some(lib) => make_bib(stream, lib),
.flat_map(make_ruby) None => (stream, None),
.flat_map(make_cite) };
.collect::<Vec<_>>();
let (stream, bib) = match lib { let mut html = String::new();
Some(lib) => make_bib(stream, lib), pulldown_cmark::html::push_html(&mut html, stream.into_iter());
None => (stream, None),
};
let mut html = String::new(); (outline, html, bib)
pulldown_cmark::html::push_html(&mut html, stream.into_iter());
(outline, html, bib)
} }
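A hedged sketch of a call site for the reworked parse signature (render_post is a hypothetical helper; the real callers live in the page-building code and also use the bibliography):

use hayagriva::Library;

use crate::text::md::parse;

/// Hypothetical helper: render a post body plus a flat table of contents.
fn render_post(source: String, bib: Option<&Library>) -> (String, String) {
    // parse returns (Outline, rendered HTML, optional bibliography entries).
    let (outline, html, _bibliography) = parse(source, bib);

    // Outline holds (heading text, anchor id) pairs, in document order.
    let toc = outline
        .0
        .iter()
        .map(|(text, id)| format!("<a href=\"#{id}\">{text}</a>"))
        .collect::<Vec<_>>()
        .join("\n");

    (toc, html)
}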
fn make_bib<'a, 'b>(stream: Vec<Event<'a>>, lib: &'b Library) -> (Vec<Event<'a>>, Option<Vec<String>>) { fn make_bib<'a, 'b>(
let mut driver = BibliographyDriver::new(); stream: Vec<Event<'a>>,
lib: &'b Library,
) -> (Vec<Event<'a>>, Option<Vec<String>>) {
let mut driver = BibliographyDriver::new();
for event in stream.iter() { for event in stream.iter() {
match event { match event {
Event::InlineMath(ref text) => match lib.get(text) { Event::InlineMath(ref text) => match lib.get(text) {
Some(entry) => driver.citation(CitationRequest::from_items(vec![CitationItem::with_entry(entry)], &STYLE, &LOCALE)), Some(entry) => driver.citation(CitationRequest::from_items(
None => (), vec![CitationItem::with_entry(entry)],
}, &STYLE,
_ => (), &LOCALE,
} )),
} None => (),
},
_ => (),
}
}
// add fake citation to make all entries show up // add fake citation to make all entries show up
driver.citation(CitationRequest::from_items(lib.iter().map(CitationItem::with_entry).collect(), &STYLE, &LOCALE)); driver.citation(CitationRequest::from_items(
lib.iter().map(CitationItem::with_entry).collect(),
&STYLE,
&LOCALE,
));
let res = driver.finish(BibliographyRequest { style: &STYLE, locale: None, locale_files: &LOCALE }); let res = driver.finish(BibliographyRequest {
style: &STYLE,
locale: None,
locale_files: &LOCALE,
});
let mut n = 0; let mut n = 0;
let stream = stream.into_iter() let stream = stream
.map(|event| match event { .into_iter()
Event::InlineMath(name) => { .map(|event| match event {
let mut buffer = String::from("<cite>"); Event::InlineMath(name) => {
match res.citations.get(n) { let mut buffer = String::from("<cite>");
Some(rf) => rf.citation.write_buf(&mut buffer, BufWriteFormat::Html).unwrap(), match res.citations.get(n) {
None => buffer.push_str(&name), Some(rf) => rf
}; .citation
buffer.push_str("</cite>"); .write_buf(&mut buffer, BufWriteFormat::Html)
n += 1; .unwrap(),
Event::InlineHtml(buffer.into()) None => buffer.push_str(&name),
}, };
_ => event buffer.push_str("</cite>");
}) n += 1;
.collect(); Event::InlineHtml(buffer.into())
}
_ => event,
})
.collect();
let bib = res.bibliography.map(|bib| let bib = res.bibliography.map(|bib| {
bib.items.iter() bib.items
.map(|x| { .iter()
let mut buffer = String::new(); .map(|x| {
x.content.write_buf(&mut buffer, BufWriteFormat::Html).unwrap(); let mut buffer = String::new();
buffer x.content
}) .write_buf(&mut buffer, BufWriteFormat::Html)
.collect::<Vec<_>>() .unwrap();
); buffer
})
.collect::<Vec<_>>()
});
(stream, bib) (stream, bib)
} }
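At a glance, the hayagriva flow above is: every citation key becomes a CitationRequest, one catch-all citation forces every library entry into the output, and finish() yields both the inline citations and the bibliography. A self-contained sketch of that flow, rebuilding the style and locales locally instead of reusing the module's private STYLE and LOCALE statics:

use hayagriva::{
    archive::ArchivedStyle,
    citationberg::Style,
    BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest,
    Library,
};

/// Sketch: format every entry of a library as an HTML bibliography item.
fn bibliography_html(lib: &Library) -> Vec<String> {
    let locales = hayagriva::archive::locales();
    let style = match ArchivedStyle::InstituteOfPhysicsNumeric.get() {
        Style::Independent(style) => style,
        Style::Dependent(_) => unreachable!(),
    };

    let mut driver = BibliographyDriver::new();

    // One citation that references everything, so all entries show up.
    driver.citation(CitationRequest::from_items(
        lib.iter().map(CitationItem::with_entry).collect(),
        &style,
        &locales,
    ));

    let rendered = driver.finish(BibliographyRequest {
        style: &style,
        locale: None,
        locale_files: &locales,
    });

    rendered
        .bibliography
        .map(|bib| {
            bib.items
                .iter()
                .map(|item| {
                    let mut buf = String::new();
                    item.content
                        .write_buf(&mut buf, BufWriteFormat::Html)
                        .unwrap();
                    buf
                })
                .collect()
        })
        .unwrap_or_default()
}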
static RE_CITE: Lazy<Regex> = Lazy::new(|| Regex::new(r":cite\[([^\]]+)\]").unwrap()); static RE_CITE: Lazy<Regex> = Lazy::new(|| Regex::new(r":cite\[([^\]]+)\]").unwrap());
#[derive(Debug)] #[derive(Debug)]
enum Annotated_<'a> { enum Annotated_<'a> {
Text(&'a str), Text(&'a str),
Cite(&'a str), Cite(&'a str),
} }
fn annotate_(input: &str) -> Vec<Annotated_> { fn annotate_(input: &str) -> Vec<Annotated_> {
let mut parts: Vec<Annotated_> = Vec::new(); let mut parts: Vec<Annotated_> = Vec::new();
let mut last_index = 0; let mut last_index = 0;
for cap in RE_CITE.captures_iter(input) { for cap in RE_CITE.captures_iter(input) {
let cite = cap.get(1).unwrap().as_str(); let cite = cap.get(1).unwrap().as_str();
let index = cap.get(0).unwrap().start(); let index = cap.get(0).unwrap().start();
if index > last_index { if index > last_index {
parts.push(Annotated_::Text(&input[last_index..index])); parts.push(Annotated_::Text(&input[last_index..index]));
} }
parts.push(Annotated_::Cite(cite)); parts.push(Annotated_::Cite(cite));
last_index = cap.get(0).unwrap().end(); last_index = cap.get(0).unwrap().end();
} }
if last_index < input.len() { if last_index < input.len() {
parts.push(Annotated_::Text(&input[last_index..])); parts.push(Annotated_::Text(&input[last_index..]));
} }
parts parts
} }
fn make_cite(event: Event) -> Vec<Event> { fn make_cite(event: Event) -> Vec<Event> {
match event { match event {
Event::Text(ref text) => { Event::Text(ref text) => annotate_(text)
annotate_(text) .into_iter()
.into_iter() .map(|e| match e {
.map(|e| match e { Annotated_::Text(text) => Event::Text(text.to_owned().into()),
Annotated_::Text(text) => Event::Text(text.to_owned().into()), Annotated_::Cite(cite) => Event::InlineMath(cite.to_owned().into()),
Annotated_::Cite(cite) => Event::InlineMath(cite.to_owned().into()), })
}) .collect(),
.collect() _ => vec![event],
}, }
_ => vec![event],
}
} }
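The author-facing syntax here is `:cite[key]`. Because make_math has already consumed genuine inline math earlier in the pipeline, make_cite is free to reuse Event::InlineMath as a carrier for citation keys that make_bib later replaces. A small test-style sketch of the splitting step (the key lamport1994 is made up):

#[cfg(test)]
mod cite_tests {
    use super::{annotate_, Annotated_};

    #[test]
    fn splits_cite_markers() {
        let parts = annotate_("See :cite[lamport1994] for details.");
        assert!(matches!(parts[0], Annotated_::Text("See ")));
        assert!(matches!(parts[1], Annotated_::Cite("lamport1994")));
        assert!(matches!(parts[2], Annotated_::Text(" for details.")));
    }
}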
fn set_heading_ids(events: &mut [Event]) -> Outline { fn set_heading_ids(events: &mut [Event]) -> Outline {
let mut cnt = HashMap::<String, i32>::new(); let mut cnt = HashMap::<String, i32>::new();
let mut out = Vec::new(); let mut out = Vec::new();
let mut buf = String::new(); let mut buf = String::new();
let mut ptr = None; let mut ptr = None;
for event in events { for event in events {
match event { match event {
Event::Start(ref mut tag @ Tag::Heading {..}) => { Event::Start(ref mut tag @ Tag::Heading { .. }) => {
ptr = Some(tag); ptr = Some(tag);
}, }
Event::Text(ref text) if ptr.is_some() => { Event::Text(ref text) if ptr.is_some() => buf.push_str(text),
buf.push_str(text) Event::End(TagEnd::Heading(..)) => {
}, let txt = std::mem::take(&mut buf);
Event::End(TagEnd::Heading(..)) => { let url = txt.to_lowercase().replace(' ', "-");
let txt = std::mem::take(&mut buf); let url = match cnt.get_mut(&url) {
let url = txt.to_lowercase().replace(' ', "-"); Some(ptr) => {
let url = match cnt.get_mut(&url) { *ptr += 1;
Some(ptr) => { *ptr += 1; format!("{url}-{ptr}") }, format!("{url}-{ptr}")
None => { cnt.insert(url.clone(), 0); url }, }
}; None => {
match ptr.take().unwrap() { cnt.insert(url.clone(), 0);
Tag::Heading { ref mut id, .. } => *id = Some(url.clone().into()), url
_ => unreachable!(), }
} };
out.push((txt, url)); match ptr.take().unwrap() {
}, Tag::Heading { ref mut id, .. } => *id = Some(url.clone().into()),
_ => (), _ => unreachable!(),
} }
}; out.push((txt, url));
}
_ => (),
}
}
Outline(out) Outline(out)
} }
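Heading anchors are built by lowercasing the heading and replacing spaces with hyphens, with a numeric suffix to disambiguate repeats, so two `Setup` headings yield `setup` and `setup-1`. The same rule in isolation, as a hypothetical standalone helper:

use std::collections::HashMap;

/// Hypothetical extraction of the slug rule used by set_heading_ids.
fn slug(counts: &mut HashMap<String, i32>, heading: &str) -> String {
    let base = heading.to_lowercase().replace(' ', "-");
    match counts.get_mut(&base) {
        Some(n) => {
            *n += 1;
            format!("{base}-{n}")
        }
        None => {
            counts.insert(base.clone(), 0);
            base
        }
    }
}

// slug(&mut counts, "Setup")  -> "setup"
// slug(&mut counts, "Setup")  -> "setup-1"
// slug(&mut counts, "Set up") -> "set-up"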
fn make_math(event: Event) -> Event { fn make_math(event: Event) -> Event {
match event { match event {
Event::InlineMath(math) => Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into()), Event::InlineMath(math) => {
Event::DisplayMath(math) => Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into()), Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into())
_ => event }
} Event::DisplayMath(math) => {
Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into())
}
_ => event,
}
} }
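Both math variants are rendered ahead of time to MathML through the katex crate, so no client-side renderer is shipped. A minimal sketch of the inline case, rebuilding the options rather than reusing the private KATEX_I static:

/// Sketch: render one inline formula the same way make_math does.
fn render_inline(tex: &str) -> String {
    let opts = katex::opts::Opts::builder()
        .output_type(katex::OutputType::Mathml)
        .build()
        .unwrap();
    // The returned markup can be inlined straight into the page.
    katex::render_with_opts(tex, &opts).unwrap()
}

// render_inline("E = mc^2") -> MathML markup for the formula.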
fn make_code(es: Vec<Event>) -> Vec<Event> { fn make_code(es: Vec<Event>) -> Vec<Event> {
let mut buff = Vec::new(); let mut buff = Vec::new();
let mut lang = None; let mut lang = None;
let mut code = String::new(); let mut code = String::new();
for event in es { for event in es {
match event { match event {
Event::Start(Tag::CodeBlock(kind)) => match kind { Event::Start(Tag::CodeBlock(kind)) => match kind {
CodeBlockKind::Indented => (), CodeBlockKind::Indented => (),
CodeBlockKind::Fenced(name) => lang = Some(name), CodeBlockKind::Fenced(name) => lang = Some(name),
}, },
Event::End(TagEnd::CodeBlock) => { Event::End(TagEnd::CodeBlock) => {
let lang = lang.take().unwrap_or("".into()); let lang = lang.take().unwrap_or("".into());
let html = ts::highlight(&lang, &code).render().as_str().to_owned(); let html = ts::highlight(&lang, &code).render().as_str().to_owned();
buff.push(Event::Html(html.into())); buff.push(Event::Html(html.into()));
code.clear(); code.clear();
}, }
Event::Text(text) => match lang { Event::Text(text) => match lang {
None => buff.push(Event::Text(text)), None => buff.push(Event::Text(text)),
Some(_) => code.push_str(&text), Some(_) => code.push_str(&text),
}, },
_ => buff.push(event) _ => buff.push(event),
} }
} }
buff buff
} }
static RE_RUBY: Lazy<Regex> = Lazy::new(|| static RE_RUBY: Lazy<Regex> = Lazy::new(|| Regex::new(r"\[([^\]]+)\]\{([^}]+)\}").unwrap());
Regex::new(r"\[([^\]]+)\]\{([^}]+)\}").unwrap()
);
#[derive(Debug)] #[derive(Debug)]
enum Annotated<'a> { enum Annotated<'a> {
Text(&'a str), Text(&'a str),
Ruby(&'a str, &'a str), Ruby(&'a str, &'a str),
} }
fn annotate(input: &str) -> Vec<Annotated> { fn annotate(input: &str) -> Vec<Annotated> {
let mut parts: Vec<Annotated> = Vec::new(); let mut parts: Vec<Annotated> = Vec::new();
let mut last_index = 0; let mut last_index = 0;
for cap in RE_RUBY.captures_iter(input) { for cap in RE_RUBY.captures_iter(input) {
let text = cap.get(1).unwrap().as_str(); let text = cap.get(1).unwrap().as_str();
let ruby = cap.get(2).unwrap().as_str(); let ruby = cap.get(2).unwrap().as_str();
let index = cap.get(0).unwrap().start(); let index = cap.get(0).unwrap().start();
if index > last_index { if index > last_index {
parts.push(Annotated::Text(&input[last_index..index])); parts.push(Annotated::Text(&input[last_index..index]));
} }
parts.push(Annotated::Ruby(text, ruby)); parts.push(Annotated::Ruby(text, ruby));
last_index = cap.get(0).unwrap().end(); last_index = cap.get(0).unwrap().end();
} }
if last_index < input.len() { if last_index < input.len() {
parts.push(Annotated::Text(&input[last_index..])); parts.push(Annotated::Text(&input[last_index..]));
} }
parts parts
} }
fn make_ruby(event: Event) -> Vec<Event> { fn make_ruby(event: Event) -> Vec<Event> {
match event { match event {
Event::Text(ref text) => annotate(text) Event::Text(ref text) => annotate(text)
.into_iter() .into_iter()
.map(|el| match el { .map(|el| match el {
Annotated::Text(text) => Event::Text(text.to_owned().into()), Annotated::Text(text) => Event::Text(text.to_owned().into()),
Annotated::Ruby(t, f) => Event::InlineHtml(format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into()), Annotated::Ruby(t, f) => Event::InlineHtml(
}) format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into(),
.collect(), ),
_ => vec![event], })
} .collect(),
_ => vec![event],
}
} }
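The `[text]{reading}` shorthand used in Japanese posts expands into standard `<ruby>` markup, with `<rp>` parentheses as a fallback for browsers without ruby support. A small test-style sketch (the sample string is arbitrary):

#[cfg(test)]
mod ruby_tests {
    use super::make_ruby;
    use pulldown_cmark::Event;

    #[test]
    fn expands_ruby_shorthand() {
        // "[漢字]{かんじ}" annotates the base text with its reading.
        let events = make_ruby(Event::Text("読み: [漢字]{かんじ}".into()));
        assert_eq!(events.len(), 2);
        assert!(matches!(events[0], Event::Text(_)));
        assert!(matches!(events[1], Event::InlineHtml(_)));
    }
}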
fn make_emoji(event: Event) -> Event { fn make_emoji(event: Event) -> Event {
match event { match event {
Event::Text(ref text) => { Event::Text(ref text) => {
let mut buf = None; let mut buf = None;
let mut top = 0; let mut top = 0;
let mut old = 0; let mut old = 0;
for (idx, _) in text.match_indices(':') { for (idx, _) in text.match_indices(':') {
let key = &text[old..idx]; let key = &text[old..idx];
if let Some(emoji) = emojis::get_by_shortcode(key) { if let Some(emoji) = emojis::get_by_shortcode(key) {
let buf = buf.get_or_insert_with(|| String::with_capacity(text.len())); let buf = buf.get_or_insert_with(|| String::with_capacity(text.len()));
buf.push_str(&text[top..old-1]); buf.push_str(&text[top..old - 1]);
buf.push_str(emoji.as_str()); buf.push_str(emoji.as_str());
top = idx + 1; top = idx + 1;
} }
old = idx + 1; old = idx + 1;
} }
if let Some(ref mut buf) = buf { if let Some(ref mut buf) = buf {
buf.push_str(&text[top..]); buf.push_str(&text[top..]);
} }
match buf { match buf {
None => event, None => event,
Some(buf) => Event::Text(buf.into()) Some(buf) => Event::Text(buf.into()),
} }
}, }
_ => event, _ => event,
} }
} }
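Emoji shortcodes written as `:name:` are swapped for the corresponding Unicode character via the emojis crate; text events with no match are returned unchanged. A tiny sketch of the underlying lookup (shortcode_to_char is a hypothetical helper):

/// Hypothetical helper wrapping the lookup used by make_emoji.
fn shortcode_to_char(code: &str) -> Option<&'static str> {
    emojis::get_by_shortcode(code).map(|emoji| emoji.as_str())
}

// shortcode_to_char("crab")    -> Some("🦀")
// shortcode_to_char("no-such") -> None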


@@ -1,80 +1,75 @@
use std::borrow::Cow;
use hypertext::{html_elements, maud_move, Raw, Renderable, GlobalAttributes};
use tree_sitter_highlight::{Highlighter, HighlightEvent};
mod captures; mod captures;
mod configs; mod configs;
use std::borrow::Cow;
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use tree_sitter_highlight::{HighlightEvent, Highlighter};
pub enum Event { pub enum Event {
Write(String), Write(String),
Enter(String), Enter(String),
Close, Close,
} }
pub fn highlight<'data, 'html>(lang: &'data str, code: &'data str) -> impl Renderable + 'html
pub fn highlight<'data, 'html>( where
lang: &'data str, 'data: 'html,
code: &'data str
) -> impl Renderable + 'html
where
'data: 'html
{ {
maud_move!( maud_move!(
figure .listing.kanagawa data-lang=(lang) { figure .listing.kanagawa data-lang=(lang) {
pre { pre {
code { code {
(Raw(to_html(lang, code))) (Raw(to_html(lang, code)))
} }
} }
} }
) )
} }
fn to_html(lang: &str, code: &str) -> String { fn to_html(lang: &str, code: &str) -> String {
get_events(lang, code) get_events(lang, code)
.into_iter() .into_iter()
.map(|event| match event { .map(|event| match event {
Event::Write(text) => Cow::from( Event::Write(text) => Cow::from(
text.replace('&', "&amp;").replace('<', "&lt;").replace('>', "&gt;") text.replace('&', "&amp;")
), .replace('<', "&lt;")
Event::Enter(class) => Cow::from( .replace('>', "&gt;"),
format!("<span class=\"{}\">", class.replace('.', "-")) ),
), Event::Enter(class) => {
Event::Close => Cow::from("</span>"), Cow::from(format!("<span class=\"{}\">", class.replace('.', "-")))
}) }
.collect() Event::Close => Cow::from("</span>"),
})
.collect()
} }
fn get_events(lang: &str, src: &str) -> Vec<Event> { fn get_events(lang: &str, src: &str) -> Vec<Event> {
let config = match configs::get_config(lang) { let config = match configs::get_config(lang) {
Some(c) => c, Some(c) => c,
None => return vec![Event::Write(src.into())] None => return vec![Event::Write(src.into())],
}; };
let mut hl = Highlighter::new();
let highlights = hl
.highlight(config, src.as_bytes(), None, |name| {
configs::get_config(name)
})
.unwrap();
let mut hl = Highlighter::new(); let mut out = vec![];
let highlights = hl.highlight( for event in highlights {
config, let event = event.unwrap();
src.as_bytes(), let obj = map_event(event, src);
None, out.push(obj);
|name| configs::get_config(name) }
).unwrap(); out
let mut out = vec![];
for event in highlights {
let event = event.unwrap();
let obj = map_event(event, src);
out.push(obj);
}
out
} }
fn map_event(event: HighlightEvent, src: &str) -> Event { fn map_event(event: HighlightEvent, src: &str) -> Event {
match event { match event {
HighlightEvent::Source {start, end} => Event::Write(src[start..end].into()), HighlightEvent::Source { start, end } => Event::Write(src[start..end].into()),
HighlightEvent::HighlightStart(s) => Event::Enter(captures::NAMES[s.0].into()), HighlightEvent::HighlightStart(s) => Event::Enter(captures::NAMES[s.0].into()),
HighlightEvent::HighlightEnd => Event::Close, HighlightEvent::HighlightEnd => Event::Close,
} }
} }
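End to end, ts::highlight escapes the source, wraps each tree-sitter capture in a span whose class is the capture name with dots turned into hyphens, and nests the result in a themed <figure>. A hedged usage sketch (listing_html is a hypothetical helper, and the commented output is only an approximation):

use hypertext::Renderable;

use crate::ts;

/// Hypothetical helper: render a highlighted listing to an owned HTML string.
fn listing_html(lang: &str, source: &str) -> String {
    ts::highlight(lang, source).render().as_str().to_owned()
}

// listing_html("rust", "fn main() {}") produces roughly:
// <figure class="listing kanagawa" data-lang="rust"><pre><code>
//   <span class="keyword-function">fn</span> ...
// </code></pre></figure>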