diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 0000000..6a34f55 --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,2 @@ +tab_spaces = 4 +hard_tabs = true diff --git a/src/html/home.rs b/src/html/home.rs index edd602f..34ded98 100644 --- a/src/html/home.rs +++ b/src/html/home.rs @@ -1,6 +1,6 @@ use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable}; -use crate::text::md::parse; +use crate::{pipeline::Sack, text::md::parse}; const INTRO: &str = r#" ## かもし @@ -15,59 +15,62 @@ const INTRO: &str = r#" "#; fn intro() -> impl Renderable { - let (_, html, _) = parse(INTRO, None); - maud!( - section .p-card.intro-jp lang="ja-JP" { - (Raw(html)) - } - ) + let (_, html, _) = parse(INTRO.into(), None); + maud!( + section .p-card.intro-jp lang="ja-JP" { + (Raw(html)) + } + ) } -fn kanji() -> impl Renderable { - maud!( - section .p-card { - h2 .p-card__heading { - "Kanji of the Day" - } - div { - // - } - } - ) -} +// fn kanji() -> impl Renderable { +// maud!( +// section .p-card { +// h2 .p-card__heading { +// "Kanji of the Day" +// } +// div { +// // +// } +// } +// ) +// } fn photo() -> impl Renderable { - maud!( - section .p-card.home-card-image { - h2 .p-card__heading { - "Image of the Month" - } - a .home-card-image__link href="/static/IMG_20231029_111650.jpg" { - img .home-card-image__image - src="/static/IMG_20231029_111650.jpg" - alt="Autumn park with colorful trees and fallen leaves"; - } - } - ) + maud!( + section .p-card.home-card-image { + h2 .p-card__heading { + "Image of the Month" + } + a .home-card-image__link href="/static/IMG_20231029_111650.jpg" { + img .home-card-image__image + src="/static/IMG_20231029_111650.jpg" + alt="Autumn park with colorful trees and fallen leaves"; + } + } + ) } -pub fn home<'data, 'home, R>(main: R) -> impl Renderable + 'home +pub(crate) fn home<'s, 'p, 'html>( + sack: &'s Sack, + main: impl Renderable + 'p, +) -> impl Renderable + 'html where - 'data: 'home, - R: Renderable + 'data, + 's: 
'html, + 'p: 'html, { - let main = maud_move!( - main .l-home { - article .l-home__article.markdown { - (main) - } - aside .l-home__aside { - (intro()) - // (kanji()) - (photo()) - } - } - ); + let main = maud_move!( + main .l-home { + article .l-home__article.markdown { + (main) + } + aside .l-home__aside { + (intro()) + // (kanji()) + (photo()) + } + } + ); - crate::html::page("Home", main, None) + crate::html::page(sack, main, "Home".into()) } diff --git a/src/html/list.rs b/src/html/list.rs index 1536d02..52eca77 100644 --- a/src/html/list.rs +++ b/src/html/list.rs @@ -1,62 +1,66 @@ -use crate::{html::page, LinkDate}; -use camino::Utf8PathBuf; -use chrono::{DateTime, Utc}; use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable}; -pub fn list<'data, 'list>( - title: &'data str, - groups: &'data [(i32, Vec)], -) -> impl Renderable + 'list +use crate::html::page; +use crate::pipeline::Sack; +use crate::LinkDate; + +pub fn list<'s, 'g, 'html>( + sack: &'s Sack, + groups: &'g [(i32, Vec)], + title: String, +) -> impl Renderable + 'html where - 'data: 'list, + 's: 'html, + 'g: 'html, { - let list = maud_move!( - main .page-list-main { - article .page-list { - header .markdown { - h1 { (title) } - } + let heading = title.clone(); + let list = maud_move!( + main .page-list-main { + article .page-list { + header .markdown { + h1 { (heading) } + } - @for (year, group) in groups { - (section(*year, group)) - } - } - } - ); + @for (year, group) in groups { + (section(*year, group)) + } + } + } + ); - page(title, list, None) + page(sack, list, title) } fn section(year: i32, group: &[LinkDate]) -> impl Renderable + '_ { - maud_move!( - section .page-list-year { - header .page-list-year__header { - h2 { (year) } - } - @for item in group.iter() { - (link(item)) - } - } - ) + maud_move!( + section .page-list-year { + header .page-list-year__header { + h2 { (year) } + } + @for item in group.iter() { + (link(item)) + } + } + ) } fn link(data: &LinkDate) -> impl 
Renderable + '_ { - let time = data.date.format("%m/%d"); - maud_move!( - a .page-item href=(data.link.path.as_str()) { - div .page-item__header { - h3 { - (&data.link.name) - } - time datetime=(data.date.to_rfc3339()) { - (time.to_string()) - } - } - @if let Some(ref desc) = data.link.desc { - div .page-item__desc { - (desc) - } - } - } - ) + let time = data.date.format("%m/%d"); + maud_move!( + a .page-item href=(data.link.path.as_str()) { + div .page-item__header { + h3 { + (&data.link.name) + } + time datetime=(data.date.to_rfc3339()) { + (time.to_string()) + } + } + @if let Some(ref desc) = data.link.desc { + div .page-item__desc { + (desc) + } + } + } + ) } diff --git a/src/html/misc.rs b/src/html/misc.rs index f0dd44b..f8758e6 100644 --- a/src/html/misc.rs +++ b/src/html/misc.rs @@ -3,93 +3,92 @@ use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable}; use crate::pipeline::{Sack, TreePage}; use crate::text::md::Outline; - /// Render the outline for a document pub(crate) fn show_outline(outline: Outline) -> impl Renderable { - maud_move!( - section .link-tree { - h2 .link-tree__heading { - a .link-tree__heading-text href="#top" { "Content" } - } - nav #table-of-contents .link-tree__nav { - ul .link-tree__nav-list { - @for (title, id) in outline.0 { - li .link-tree__nav-list-item { - a .link-tree__nav-list-text.link href=(format!("#{}", id)) { - (title) - } - } - } - } - } - } - ) + maud_move!( + section .link-tree { + h2 .link-tree__heading { + a .link-tree__heading-text href="#top" { "Content" } + } + nav #table-of-contents .link-tree__nav { + ul .link-tree__nav-list { + @for (title, id) in outline.0 { + li .link-tree__nav-list-item { + a .link-tree__nav-list-text.link href=(format!("#{}", id)) { + (title) + } + } + } + } + } + } + ) } /// Render the bibliography for a document pub(crate) fn show_bibliography(bib: Vec) -> impl Renderable { - maud_move!( - section .markdown { - h2 { - "Bibliography" - } - ol .bibliography { - @for item in 
bib { - li { - (Raw(item)) - } - } - } - } - ) + maud_move!( + section .markdown { + h2 { + "Bibliography" + } + ol .bibliography { + @for item in bib { + li { + (Raw(item)) + } + } + } + } + ) } /// Render the page tree pub(crate) fn show_page_tree(sack: &Sack, glob: &str) -> impl Renderable { - let tree = sack.get_tree(glob); + let tree = sack.get_tree(glob); - maud_move!( - h2 .link-tree__heading { - // {pages.chain(x => x.prefix) - // .map(pathify) - // .mapOrDefault(href => - // {heading}, - // {heading} - // )} - } - nav .link-tree__nav { - (show_page_tree_level(&tree)) - } - ) + maud_move!( + h2 .link-tree__heading { + // {pages.chain(x => x.prefix) + // .map(pathify) + // .mapOrDefault(href => + // {heading}, + // {heading} + // )} + } + nav .link-tree__nav { + (show_page_tree_level(&tree)) + } + ) } fn show_page_tree_level(tree: &TreePage) -> impl Renderable + '_ { - let subs = { - let mut subs: Vec<_> = tree.subs.iter().collect(); - subs.sort_by(|a, b| a.0.cmp(b.0)); - subs - }; + let subs = { + let mut subs: Vec<_> = tree.subs.iter().collect(); + subs.sort_by(|a, b| a.0.cmp(b.0)); + subs + }; - maud_move!( - ul .link-tree__nav-list { - @for (key, next) in subs { - li .link-tree__nav-list-item { - span .link-tree__nav-list-text { - @if let Some(ref link) = next.link { - a .link-tree__nav-list-text.link href=(link.path.as_str()) { - (&link.name) - } - } @else { - span .link-tree__nav-list-text { - (key) - } - } - } - @if !next.subs.is_empty() { - (show_page_tree_level(next)) - } - } - } - } - ) + maud_move!( + ul .link-tree__nav-list { + @for (key, next) in subs { + li .link-tree__nav-list-item { + span .link-tree__nav-list-text { + @if let Some(ref link) = next.link { + a .link-tree__nav-list-text.link href=(link.path.as_str()) { + (&link.name) + } + } @else { + span .link-tree__nav-list-text { + (key) + } + } + } + @if !next.subs.is_empty() { + (show_page_tree_level(next)) + } + } + } + } + ) } diff --git a/src/html/mod.rs b/src/html/mod.rs index 
d64cfc9..11bec4a 100644 --- a/src/html/mod.rs +++ b/src/html/mod.rs @@ -13,223 +13,240 @@ use camino::Utf8Path; use chrono::Datelike; use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable}; -use crate::REPO; - pub(crate) use home::home; pub(crate) use post::Post; pub(crate) use slideshow::Slideshow; pub(crate) use wiki::Wiki; +use crate::{pipeline::Sack, Mode}; + const JS_RELOAD: &str = r#" const socket = new WebSocket("ws://localhost:1337"); socket.addEventListener("message", (event) => { - console.log(event); - window.location.reload(); + console.log(event); + window.location.reload(); }); "#; const JS_IMPORTS: &str = r#" { - "imports": { - "reveal": "/js/vanilla/reveal.js", - "photos": "/js/vanilla/photos.js" - } + "imports": { + "reveal": "/js/vanilla/reveal.js", + "photos": "/js/vanilla/photos.js" + } } "#; -fn head(title: &str) -> impl Renderable + '_ { - let title = format!("{} | kamoshi.org", title); +fn head<'s, 'html>(sack: &'s Sack, title: String) -> impl Renderable + 'html +where + 's: 'html, +{ + let title = format!("{} | kamoshi.org", title); - maud_move!( - meta charset="utf-8"; - meta name="viewport" content="width=device-width, initial-scale=1"; - title { - (title) - } + maud_move!( + meta charset="utf-8"; + meta name="viewport" content="width=device-width, initial-scale=1"; + title { + (title) + } - // link rel="sitemap" href="/sitemap.xml"; + // link rel="sitemap" href="/sitemap.xml"; - link rel="stylesheet" href="/styles.css"; - link rel="stylesheet" href="/static/css/reveal.css"; - link rel="stylesheet" href="/static/css/leaflet.css"; - link rel="stylesheet" href="/static/css/MarkerCluster.css"; - link rel="stylesheet" href="/static/css/MarkerCluster.Default.css"; - link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png"; - link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png"; - link rel="icon" href="/favicon.ico" sizes="any"; + link rel="stylesheet" href="/styles.css"; + link 
rel="stylesheet" href="/static/css/reveal.css"; + link rel="stylesheet" href="/static/css/leaflet.css"; + link rel="stylesheet" href="/static/css/MarkerCluster.css"; + link rel="stylesheet" href="/static/css/MarkerCluster.Default.css"; + link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png"; + link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png"; + link rel="icon" href="/favicon.ico" sizes="any"; - script type="importmap" {(Raw(JS_IMPORTS))} + script type="importmap" {(Raw(JS_IMPORTS))} - script { (Raw(JS_RELOAD)) } - ) + @if matches!(sack.ctx.mode, Mode::Watch) { + script { (Raw(JS_RELOAD)) } + } + ) } fn navbar() -> impl Renderable { - static ITEMS: &[(&str, &str)] = &[ - ("Posts", "/posts/"), - ("Slides", "/slides/"), - ("Wiki", "/wiki/"), - ("Map", "/map/"), - ("About", "/about/"), - ("Search", "/search/"), - ]; + static ITEMS: &[(&str, &str)] = &[ + ("Posts", "/posts/"), + ("Slides", "/slides/"), + ("Wiki", "/wiki/"), + ("Map", "/map/"), + ("About", "/about/"), + ("Search", "/search/"), + ]; - maud!( - nav .p-nav { - input #p-nav-toggle type="checkbox" hidden; + maud!( + nav .p-nav { + input #p-nav-toggle type="checkbox" hidden; - div .p-nav__bar { - a .p-nav__logo href="/" { - img .p-nav__logo-icon height="48px" width="51px" src="/static/svg/aya.svg" alt=""; - div .p-nav__logo-text { - div .p-nav__logo-main { - (Raw(include_str!("logotype.svg"))) - } - div #p-nav-splash .p-nav__logo-sub { - "夢現の遥か彼方" - } - } - } + div .p-nav__bar { + a .p-nav__logo href="/" { + img .p-nav__logo-icon height="48px" width="51px" src="/static/svg/aya.svg" alt=""; + div .p-nav__logo-text { + div .p-nav__logo-main { + (Raw(include_str!("logotype.svg"))) + } + div #p-nav-splash .p-nav__logo-sub { + "夢現の遥か彼方" + } + } + } - label .p-nav__burger for="p-nav-toggle" tabindex="0" { - span .p-nav__burger-icon {} - } - } + label .p-nav__burger for="p-nav-toggle" tabindex="0" { + span .p-nav__burger-icon {} + } + } - menu .p-nav__menu { - @for (name, 
url) in ITEMS { - li .p-nav__menu-item { - a .p-nav__menu-link href=(*url) { - (*name) - } - } - } - } - } - ) + menu .p-nav__menu { + @for (name, url) in ITEMS { + li .p-nav__menu-item { + a .p-nav__menu-link href=(*url) { + (*name) + } + } + } + } + } + ) } -pub fn footer(path: Option<&Utf8Path>) -> impl Renderable { - let copy = format!("Copyright © {} Maciej Jur", &REPO.year); - let mail = "maciej@kamoshi.org"; - let href = format!("mailto:{}", mail); - let link = Utf8Path::new(&REPO.link) - .join("src/commit") - .join(&REPO.hash); - let link = match path { - Some(path) => link.join(path), - None => link, - }; - - maud_move!( - footer .footer { - div .left { - div { - (Raw(copy)) - } - a href=(href) { - (mail) - } - } - div .repo { - a href=(link.as_str()) { - (&REPO.hash) - } - div { - (&REPO.date) - } - } - a .right.footer__cc-wrap rel="license" href="http://creativecommons.org/licenses/by/4.0/" { - img .footer__cc-stamp alt="Creative Commons License" width="88" height="31" src="/static/svg/by.svg"; - } - } - ) -} - -fn bare<'data, 'html, R>(title: &'data str, main: R) -> impl Renderable + 'html - where - 'data : 'html, - R: Renderable + 'data +pub fn footer<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html +where + 's: 'html, { - maud_move!( - (Raw("")) - html lang="en" { - (head(title)) + let copy = format!("Copyright © {} Maciej Jur", &sack.ctx.year); + let mail = "maciej@kamoshi.org"; + let href = format!("mailto:{}", mail); + let link = Utf8Path::new(&sack.ctx.link) + .join("src/commit") + .join(&sack.ctx.hash); + let link = match sack.get_file() { + Some(path) => link.join(path), + None => link, + }; - body { - (main) - } - } - ) + maud_move!( + footer .footer { + div .left { + div { + (Raw(copy)) + } + a href=(href) { + (mail) + } + } + div .repo { + a href=(link.as_str()) { + (&sack.ctx.hash) + } + div { + (&sack.ctx.date) + } + } + a .right.footer__cc-wrap rel="license" href="http://creativecommons.org/licenses/by/4.0/" { + img .footer__cc-stamp 
alt="Creative Commons License" width="88" height="31" src="/static/svg/by.svg"; + } + } + ) } -fn page<'data, 'main, 'html, T>( - title: &'data str, - main: T, - path: Option<&'data Utf8Path>, +fn bare<'s, 'p, 'html>( + sack: &'s Sack, + main: impl Renderable + 'p, + title: String, ) -> impl Renderable + 'html - where - 'main : 'html, - 'data : 'html, - T: Renderable + 'main +where + 's: 'html, + 'p: 'html, { - maud_move!( - (Raw("")) - html lang="en" { - (head(title)) + maud_move!( + (Raw("")) + html lang="en" { + (head(sack, title)) - body { - (navbar()) - (main) - (footer(path)) - } - } - ) + body { + (main) + } + } + ) } -pub(crate) fn to_list(list: Vec) -> String { - let mut groups = HashMap::>::new(); +fn page<'s, 'p, 'html>( + sack: &'s Sack, + main: impl Renderable + 'p, + title: String, +) -> impl Renderable + 'html +where + 's: 'html, + 'p: 'html, +{ + maud_move!( + (Raw("")) + html lang="en" { + (head(sack, title)) - for page in list { - groups.entry(page.date.year()).or_default().push(page); - } - - let mut groups: Vec<_> = groups - .into_iter() - .map(|(k, mut v)| { - v.sort_by(|a, b| b.date.cmp(&a.date)); - (k, v) - }) - .collect(); - - groups.sort_by(|a, b| b.0.cmp(&a.0)); - - list::list("", &groups).render().into() + body { + (navbar()) + (main) + (footer(sack)) + } + } + ) } -pub(crate) fn map() -> impl Renderable { - page( - "Map", - maud!( - main { - div #map style="height: 100%; width: 100%" {} +pub(crate) fn to_list(sack: &Sack, list: Vec, title: String) -> String { + let mut groups = HashMap::>::new(); - script type="module" { - (Raw("import 'photos';")) - } - } - ), - None, - ) + for page in list { + groups.entry(page.date.year()).or_default().push(page); + } + + let mut groups: Vec<_> = groups + .into_iter() + .map(|(k, mut v)| { + v.sort_by(|a, b| b.date.cmp(&a.date)); + (k, v) + }) + .collect(); + + groups.sort_by(|a, b| b.0.cmp(&a.0)); + + list::list(sack, &groups, title).render().into() } -pub(crate) fn search() -> impl Renderable { - 
page( - "Search", - maud!( - main #app {} - script type="module" src="/js/search/dist/search.js" {} - ), - None, - ) +pub(crate) fn map<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html +where + 's: 'html, +{ + page( + sack, + maud!( + main { + div #map style="height: 100%; width: 100%" {} + + script type="module" { + (Raw("import 'photos';")) + } + } + ), + String::from("Map"), + ) +} + +pub(crate) fn search<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html +where + 's: 'html, +{ + page( + sack, + maud!( + main #app {} + script type="module" src="/js/search/dist/search.js" {} + ), + String::from("Search"), + ) } diff --git a/src/html/post.rs b/src/html/post.rs index 29dfa9d..3cff100 100644 --- a/src/html/post.rs +++ b/src/html/post.rs @@ -6,91 +6,88 @@ use serde::Deserialize; use crate::pipeline::{Content, Sack}; use crate::text::md::Outline; -use crate::{Linkable, LinkDate}; +use crate::{LinkDate, Linkable}; /// Represents a simple post. #[derive(Deserialize, Debug, Clone)] pub(crate) struct Post { - pub(crate) title: String, - #[serde(with = "super::isodate")] - pub(crate) date: DateTime, - pub(crate) desc: Option, + pub(crate) title: String, + #[serde(with = "super::isodate")] + pub(crate) date: DateTime, + pub(crate) desc: Option, } impl Content for Post { - fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option>) { - crate::text::md::parse(data, lib) - } + fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option>) { + crate::text::md::parse(data, lib) + } - fn transform<'f, 'm, 's, 'html, T>( - &'f self, - content: T, - outline: Outline, - sack: &'s Sack, - bib: Option>, - ) -> impl Renderable + 'html - where - 'f: 'html, - 'm: 'html, - 's: 'html, - T: Renderable + 'm, - { - post(self, content, outline, bib, sack) - } + fn render<'s, 'p, 'html>( + self, + sack: &'s Sack, + parsed: impl Renderable + 'p, + outline: Outline, + bib: Option>, + ) -> impl Renderable + 'html + where + 's: 'html, + 'p: 'html, + { + post(self, 
sack, parsed, outline, bib) + } - fn as_link(&self, path: Utf8PathBuf) -> Option { - Some(Linkable::Date(LinkDate { - link: crate::Link { - path, - name: self.title.to_owned(), - desc: self.desc.to_owned(), - }, - date: self.date.to_owned(), - })) - } + fn as_link(&self, path: Utf8PathBuf) -> Option { + Some(Linkable::Date(LinkDate { + link: crate::Link { + path, + name: self.title.to_owned(), + desc: self.desc.to_owned(), + }, + date: self.date.to_owned(), + })) + } } -pub fn post<'f, 'm, 's, 'html, T>( - fm: &'f Post, - content: T, - outline: Outline, - bib: Option>, - sack: &'s Sack, +pub fn post<'s, 'p, 'html>( + fm: Post, + sack: &'s Sack, + content: impl Renderable + 'p, + outline: Outline, + bib: Option>, ) -> impl Renderable + 'html - where - 'f: 'html, - 'm: 'html, - 's: 'html, - T: Renderable + 'm +where + 's: 'html, + 'p: 'html, { - let main = maud_move!( - main .wiki-main { + let heading = fm.title.clone(); + let main = maud_move!( + main .wiki-main { - // Slide in/out for mobile - input #wiki-aside-shown type="checkbox" hidden; + // Slide in/out for mobile + input #wiki-aside-shown type="checkbox" hidden; - aside .wiki-aside { - // Slide button - label .wiki-aside__slider for="wiki-aside-shown" { - img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; - } - (crate::html::misc::show_outline(outline)) - } + aside .wiki-aside { + // Slide button + label .wiki-aside__slider for="wiki-aside-shown" { + img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; + } + (crate::html::misc::show_outline(outline)) + } - article .wiki-article /*class:list={classlist)*/ { - header class="markdown" { - h1 #top { (fm.title.clone()) } - } - section .wiki-article__markdown.markdown { - (content) - } + article .wiki-article /*class:list={classlist)*/ { + header class="markdown" { + h1 #top { (heading) } + } + section .wiki-article__markdown.markdown { + (content) + } - @if let Some(bib) = bib { - 
(crate::html::misc::show_bibliography(bib)) - } - } - } - ); + @if let Some(bib) = bib { + (crate::html::misc::show_bibliography(bib)) + } + } + } + ); - crate::html::page(&fm.title, main, sack.get_file()) + crate::html::page(sack, main, fm.title.clone()) } diff --git a/src/html/slideshow.rs b/src/html/slideshow.rs index effae9c..ebdd920 100644 --- a/src/html/slideshow.rs +++ b/src/html/slideshow.rs @@ -1,87 +1,105 @@ use camino::Utf8PathBuf; use chrono::{DateTime, Utc}; use hayagriva::Library; -use hypertext::{html_elements, maud_move, Renderable, GlobalAttributes, Raw}; +use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable}; use serde::Deserialize; use crate::pipeline::{Content, Sack}; use crate::text::md::Outline; use crate::{Link, LinkDate, Linkable}; +const CSS: &str = r#" +.slides img { + margin-left: auto; + margin-right: auto; + max-height: 60vh; +} +"#; /// Represents a slideshow #[derive(Deserialize, Debug, Clone)] pub(crate) struct Slideshow { - pub title: String, - #[serde(with = "super::isodate")] - pub date: DateTime, - pub desc: Option, + pub title: String, + #[serde(with = "super::isodate")] + pub date: DateTime, + pub desc: Option, } impl Content for Slideshow { - fn transform<'f, 'm, 's, 'html, T>( - &'f self, - content: T, - _: Outline, - _: &'s Sack, - _bib: Option>, - ) -> impl Renderable + 'html - where - 'f: 'html, - 'm: 'html, - 's: 'html, - T: Renderable + 'm { - show(self, content) - } + fn parse(data: String, _: Option<&Library>) -> (Outline, String, Option>) { + let html = data + .split("\n-----\n") + .map(|chunk| { + chunk + .split("\n---\n") + .map(|s| crate::text::md::parse(s.to_owned(), None)) + .map(|e| e.1) + .collect::>() + }) + .map(|stack| match stack.len() > 1 { + true => format!( + "
{}
", + stack + .into_iter() + .map(|slide| format!("
{slide}
")) + .collect::() + ), + false => format!("
{}
", stack[0]), + }) + .collect::(); + (Outline(vec![]), html, None) + } - fn as_link(&self, path: Utf8PathBuf) -> Option { - Some(Linkable::Date(LinkDate { - link: Link { - path, - name: self.title.to_owned(), - desc: self.desc.to_owned(), - }, - date: self.date.to_owned(), - })) - } + fn render<'s, 'p, 'html>( + self, + sack: &'s Sack, + parsed: impl Renderable + 'p, + _: Outline, + _: Option>, + ) -> impl Renderable + 'html + where + 's: 'html, + 'p: 'html, + { + show(self, sack, parsed) + } - fn parse(data: &str, _: Option<&Library>) -> (Outline, String, Option>) { - let html = data - .split("\n-----\n") - .map(|chunk| chunk.split("\n---\n").map(|s| crate::text::md::parse(s, None)).map(|e| e.1).collect::>()) - .map(|stack| match stack.len() > 1 { - true => format!("
{}
", stack.into_iter().map(|slide| format!("
{slide}
")).collect::()), - false => format!("
{}
", stack[0]) - }) - .collect::(); - (Outline(vec![]), html, None) - } + fn as_link(&self, path: Utf8PathBuf) -> Option { + Some(Linkable::Date(LinkDate { + link: Link { + path, + name: self.title.to_owned(), + desc: self.desc.to_owned(), + }, + date: self.date.to_owned(), + })) + } } -pub fn show<'data, 'show>( - fm: &'data Slideshow, - slides: impl Renderable + 'data -) -> impl Renderable + 'show - where - 'data: 'show +pub fn show<'s, 'p, 'html>( + fm: Slideshow, + sack: &'s Sack, + slides: impl Renderable + 'p, +) -> impl Renderable + 'html +where + 's: 'html, + 'p: 'html, { - crate::html::bare(&fm.title, maud_move!( - div .reveal { - div .slides { - (slides) - } - } + crate::html::bare( + sack, + maud_move!( + div .reveal { + div .slides { + (slides) + } + } - script type="module" { - (Raw("import 'reveal';")) - } + script type="module" { + (Raw("import 'reveal';")) + } - style {r#" - .slides img { - margin-left: auto; - margin-right: auto; - max-height: 60vh; - } - "#} - )) + style { (Raw(CSS)) } + ), + fm.title.clone(), + ) } diff --git a/src/html/wiki.rs b/src/html/wiki.rs index f11869e..762ac83 100644 --- a/src/html/wiki.rs +++ b/src/html/wiki.rs @@ -10,83 +10,82 @@ use crate::{Link, Linkable}; /// Represents a wiki page #[derive(Deserialize, Debug, Clone)] pub struct Wiki { - pub title: String, + pub title: String, } impl Content for Wiki { - fn transform<'f, 'm, 's, 'html, T>( - &'f self, - content: T, - outline: Outline, - sack: &'s Sack, - bib: Option>, - ) -> impl Renderable + 'html - where - 'f: 'html, - 'm: 'html, - 's: 'html, - T: Renderable + 'm { - wiki(self, content, outline, sack, bib) - } + fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option>) { + crate::text::md::parse(data, lib) + } - fn as_link(&self, path: Utf8PathBuf) -> Option { - Some(Linkable::Link(Link { - path, - name: self.title.to_owned(), - desc: None, - })) - } + fn render<'s, 'p, 'html>( + self, + sack: &'s Sack, + parsed: impl Renderable + 'p, + outline: 
Outline, + bib: Option>, + ) -> impl Renderable + 'html + where + 's: 'html, + 'p: 'html, + { + wiki(self, sack, parsed, outline, bib) + } - fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option>) { - crate::text::md::parse(data, lib) - } + fn as_link(&self, path: Utf8PathBuf) -> Option { + Some(Linkable::Link(Link { + path, + name: self.title.to_owned(), + desc: None, + })) + } } -fn wiki<'data, 'html, 'sack, T>( - fm: &'data Wiki, - content: T, - _: Outline, - sack: &'sack Sack, - bib: Option>, +fn wiki<'s, 'p, 'html>( + matter: Wiki, + sack: &'s Sack, + parsed: impl Renderable + 'p, + _: Outline, + bib: Option>, ) -> impl Renderable + 'html - where - 'sack: 'html, - 'data: 'html, - T: Renderable + 'data +where + 's: 'html, + 'p: 'html, { - let main = maud_move!( - main .wiki-main { + let heading = matter.title.clone(); + let main = maud_move!( + main .wiki-main { - // Slide in/out for mobile - input #wiki-aside-shown type="checkbox" hidden; + // Slide in/out for mobile + input #wiki-aside-shown type="checkbox" hidden; - aside .wiki-aside { - // Slide button - label .wiki-aside__slider for="wiki-aside-shown" { - img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; - } - // Navigation tree - section .link-tree { - div { - (crate::html::misc::show_page_tree(sack, "wiki/**/*.html")) - } - } - } + aside .wiki-aside { + // Slide button + label .wiki-aside__slider for="wiki-aside-shown" { + img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; + } + // Navigation tree + section .link-tree { + div { + (crate::html::misc::show_page_tree(sack, "wiki/**/*.html")) + } + } + } - article .wiki-article /*class:list={classlist)*/ { - header class="markdown" { - h1 #top { (fm.title.clone()) } - } - section .wiki-article__markdown.markdown { - (content) - } + article .wiki-article /*class:list={classlist)*/ { + header class="markdown" { + h1 #top { (heading) } + } + section .wiki-article__markdown.markdown { + (parsed) + 
} - @if let Some(bib) = bib { - (crate::html::misc::show_bibliography(bib)) - } - } - } - ); + @if let Some(bib) = bib { + (crate::html::misc::show_bibliography(bib)) + } + } + } + ); - crate::html::page(&fm.title, main, sack.get_file()) + crate::html::page(sack, main, matter.title.to_owned()) } diff --git a/src/main.rs b/src/main.rs index fd86cb7..5373a2c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,286 +1,339 @@ mod build; mod html; -mod md; mod pipeline; mod text; mod ts; mod utils; mod watch; +use std::collections::HashSet; use std::fs; use std::process::Command; use camino::{Utf8Path, Utf8PathBuf}; use chrono::{DateTime, Datelike, Utc}; use clap::{Parser, ValueEnum}; -use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem, Sack}; +use gray_matter::engine::YAML; +use gray_matter::Matter; use hypertext::{Raw, Renderable}; -use once_cell::sync::Lazy; +use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem}; use serde::Deserialize; -use crate::pipeline::Virtual; use crate::build::build_styles; +use crate::pipeline::Virtual; #[derive(Parser, Debug, Clone)] struct Args { - #[clap(value_enum, index = 1, default_value = "build")] - mode: Mode, + #[clap(value_enum, index = 1, default_value = "build")] + mode: Mode, } #[derive(ValueEnum, Debug, Clone, Copy)] enum Mode { - Build, - Watch, + Build, + Watch, } #[derive(Debug)] -struct BuildInfo { - pub year: i32, - pub date: String, - pub link: String, - pub hash: String, +struct BuildContext { + pub mode: Mode, + pub year: i32, + pub date: String, + pub link: String, + pub hash: String, } - -static REPO: Lazy = Lazy::new(|| { - let time = chrono::Utc::now(); - - BuildInfo { - year: time.year(), - date: time.format("%Y/%m/%d %H:%M").to_string(), - link: "https://git.kamoshi.org/kamov/website".into(), - hash: String::from_utf8( - Command::new("git") - .args(["rev-parse", "--short", "HEAD"]) - .output() - .unwrap() - .stdout - ) - .unwrap() - .trim() - .into() - } -}); - - 
#[derive(Debug, Clone)] pub struct Link { - pub path: Utf8PathBuf, - pub name: String, - pub desc: Option, + pub path: Utf8PathBuf, + pub name: String, + pub desc: Option, } #[derive(Debug, Clone)] pub struct LinkDate { - pub link: Link, - pub date: DateTime, + pub link: Link, + pub date: DateTime, } #[derive(Debug, Clone)] pub enum Linkable { - Link(Link), - Date(LinkDate), -} - - -fn to_index(item: PipelineItem) -> PipelineItem - where - T: for<'de> Deserialize<'de> + Content + 'static, -{ - let meta = match item { - PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Index) => meta, - _ => return item, - }; - - let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap(); - let dir = match meta.path.file_stem().unwrap() { - "index" => dir.to_owned(), - name => dir.join(name), - }; - let path = dir.join("index.html"); - - match meta.path.extension() { - Some("md" | "mdx" | "lhs") => { - let data = fs::read_to_string(&meta.path).unwrap(); - let (fm, md) = md::preflight::(&data); - let link = T::as_link(&fm, Utf8Path::new("/").join(dir)); - - let call = move |sack: &Sack| { - let lib = sack.get_library(); - let (outline, html, bib) = T::parse(&md, lib); - T::transform(&fm, Raw(html), outline, sack, bib).render().into() - }; - - Output { - kind: Asset { - kind: pipeline::AssetKind::Html(Box::new(call)), - meta, - }.into(), - path, - link, - }.into() - }, - _ => meta.into(), - } -} - -fn to_bundle(item: PipelineItem) -> PipelineItem { - let meta = match item { - PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta, - _ => return item, - }; - - let path = meta.path.strip_prefix("content").unwrap().to_owned(); - - match meta.path.extension() { - // any image - Some("jpg" | "png" | "gif") => { - Output { - kind: Asset { - kind: AssetKind::Image, - meta, - }.into(), - path, - link: None, - }.into() - }, - // bibliography - Some("bib") => { - let data = fs::read_to_string(&meta.path).unwrap(); - let data = 
hayagriva::io::from_biblatex_str(&data).unwrap(); - - Output { - kind: Asset { - kind: AssetKind::Bibtex(data), - meta, - }.into(), - path, - link: None, - }.into() - }, - _ => meta.into(), - } -} - - -fn build() { - if fs::metadata("dist").is_ok() { - println!("Cleaning dist"); - fs::remove_dir_all("dist").unwrap(); - } - - fs::create_dir("dist").unwrap(); - - let assets: Vec = [ - pipeline::gather("content/about.md", &["md"].into()) - .into_iter() - .map(to_index:: as fn(PipelineItem) -> PipelineItem), - pipeline::gather("content/posts/**/*", &["md", "mdx"].into()) - .into_iter() - .map(to_index::), - pipeline::gather("content/slides/**/*", &["md", "lhs"].into()) - .into_iter() - .map(to_index::), - pipeline::gather("content/wiki/**/*", &["md"].into()) - .into_iter() - .map(to_index::), - ] - .into_iter() - .flatten() - .map(to_bundle) - .filter_map(|item| match item { - PipelineItem::Skip(skip) => { - println!("Skipping {}", skip.path); - None - }, - PipelineItem::Take(take) => Some(take), - }) - .collect(); - - let assets: Vec = vec![ - assets, - vec![ - Output { - kind: Virtual::new(|_| crate::html::map().render().to_owned().into()).into(), - path: "map/index.html".into(), - link: None, - }, - Output { - kind: Virtual::new(|_| crate::html::search().render().to_owned().into()).into(), - path: "search/index.html".into(), - link: None, - }, - Output { - kind: Asset { - kind: pipeline::AssetKind::Html(Box::new(|_| { - let data = std::fs::read_to_string("content/index.md").unwrap(); - let (_, html, _) = text::md::parse(&data, None); - crate::html::home(Raw(html)).render().to_owned().into() - })), - meta: pipeline::FileItem { - kind: pipeline::FileItemKind::Index, - path: "content/index.md".into() - } - }.into(), - path: "index.html".into(), - link: None, - }, - Output { - kind: Virtual::new(|sack| crate::html::to_list(sack.get_links("posts/**/*.html"))).into(), - path: "posts/index.html".into(), - link: None, - }, - Output { - kind: Virtual::new(|sack| 
crate::html::to_list(sack.get_links("slides/**/*.html"))).into(), - path: "slides/index.html".into(), - link: None, - }, - ], - ] - .into_iter() - .flatten() - .collect(); - - { - let now = std::time::Instant::now(); - pipeline::render_all(&assets); - println!("Elapsed: {:.2?}", now.elapsed()); - } - - utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap(); - - build_styles(); - - let res = Command::new("pagefind") - .args(["--site", "dist"]) - .output() - .unwrap(); - - println!("{}", String::from_utf8(res.stdout).unwrap()); - - let res = Command::new("esbuild") - .arg("js/vanilla/reveal.js") - .arg("js/vanilla/photos.ts") - .arg("js/search/dist/search.js") - .arg("--format=esm") - .arg("--bundle") - .arg("--splitting") - .arg("--minify") - .arg("--outdir=dist/js/") - .output() - .unwrap(); - - println!("{}", String::from_utf8(res.stderr).unwrap()); + Link(Link), + Date(LinkDate), } fn main() { - let args = Args::parse(); + let args = Args::parse(); + let time = chrono::Utc::now(); - match args.mode { - Mode::Build => build(), - Mode::Watch => { - build(); - watch::watch().unwrap() - }, - } + let ctx = BuildContext { + mode: args.mode, + year: time.year(), + date: time.format("%Y/%m/%d %H:%M").to_string(), + link: "https://git.kamoshi.org/kamov/website".into(), + hash: String::from_utf8( + Command::new("git") + .args(["rev-parse", "--short", "HEAD"]) + .output() + .expect("Couldn't load git revision") + .stdout, + ) + .expect("Invalid UTF8") + .trim() + .into(), + }; + + match args.mode { + Mode::Build => { + build(&ctx); + } + Mode::Watch => { + build(&ctx); + watch::watch().unwrap() + } + } +} + +struct Source { + path: &'static str, + exts: HashSet<&'static str>, + func: fn(PipelineItem) -> PipelineItem, +} + +impl Source { + fn get(&self) -> Vec { + pipeline::gather(self.path, &self.exts) + .into_iter() + .map(self.func) + .collect() + } +} + +fn build(ctx: &BuildContext) { + if fs::metadata("dist").is_ok() { + 
println!("Cleaning dist"); + fs::remove_dir_all("dist").unwrap(); + } + + fs::create_dir("dist").unwrap(); + + let sources = vec![ + Source { + path: "content/about.md", + exts: ["md"].into(), + func: as_index::, + }, + Source { + path: "content/posts/**/*", + exts: ["md", "mdx"].into(), + func: as_index::, + }, + Source { + path: "content/slides/**/*", + exts: ["md", "lhs"].into(), + func: as_index::, + }, + Source { + path: "content/wiki/**/*", + exts: ["md"].into(), + func: as_index::, + }, + ]; + + let assets: Vec = sources + .iter() + .flat_map(Source::get) + .map(to_bundle) + .filter_map(|item| match item { + PipelineItem::Skip(skip) => { + println!("Skipping {}", skip.path); + None + } + PipelineItem::Take(take) => Some(take), + }) + .collect(); + + let assets: Vec = vec![ + assets, + vec![ + Output { + kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(), + path: "map/index.html".into(), + link: None, + }, + Output { + kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into()) + .into(), + path: "search/index.html".into(), + link: None, + }, + Output { + kind: Asset { + kind: pipeline::AssetKind::html(|sack| { + let data = std::fs::read_to_string("content/index.md").unwrap(); + let (_, html, _) = text::md::parse(data, None); + crate::html::home(sack, Raw(html)) + .render() + .to_owned() + .into() + }), + meta: pipeline::FileItem { + kind: pipeline::FileItemKind::Index, + path: "content/index.md".into(), + }, + } + .into(), + path: "index.html".into(), + link: None, + }, + Output { + kind: Virtual::new(|sack| { + crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into()) + }) + .into(), + path: "posts/index.html".into(), + link: None, + }, + Output { + kind: Virtual::new(|sack| { + crate::html::to_list(sack, sack.get_links("slides/**/*.html"), "Slideshows".into()) + }) + .into(), + path: "slides/index.html".into(), + link: None, + }, + ], + ] + .into_iter() + .flatten() + .collect(); + + 
{ + let now = std::time::Instant::now(); + pipeline::render_all(ctx, &assets); + println!("Elapsed: {:.2?}", now.elapsed()); + } + + utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap(); + + build_styles(); + + let res = Command::new("pagefind") + .args(["--site", "dist"]) + .output() + .unwrap(); + + println!("{}", String::from_utf8(res.stdout).unwrap()); + + let res = Command::new("esbuild") + .arg("js/vanilla/reveal.js") + .arg("js/vanilla/photos.ts") + .arg("js/search/dist/search.js") + .arg("--format=esm") + .arg("--bundle") + .arg("--splitting") + .arg("--minify") + .arg("--outdir=dist/js/") + .output() + .unwrap(); + + println!("{}", String::from_utf8(res.stderr).unwrap()); +} + +pub fn parse_frontmatter(raw: &str) -> (T, String) +where + T: for<'de> Deserialize<'de>, +{ + let matter = Matter::::new(); + let result = matter.parse(raw); + + ( + // Just the front matter + result.data.unwrap().deserialize::().unwrap(), + // The rest of the content + result.content, + ) +} + +fn as_index(item: PipelineItem) -> PipelineItem +where + T: for<'de> Deserialize<'de> + Content + Clone + 'static, +{ + let meta = match item { + PipelineItem::Skip(e) if matches!(e.kind, FileItemKind::Index) => e, + _ => return item, + }; + + let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap(); + let dir = match meta.path.file_stem().unwrap() { + "index" => dir.to_owned(), + name => dir.join(name), + }; + let path = dir.join("index.html"); + + match meta.path.extension() { + Some("md" | "mdx" | "lhs") => { + let data = fs::read_to_string(&meta.path).unwrap(); + let (fm, md) = parse_frontmatter::(&data); + let link = T::as_link(&fm, Utf8Path::new("/").join(dir)); + + Output { + kind: Asset { + kind: pipeline::AssetKind::html(move |sack| { + let lib = sack.get_library(); + let (outline, parsed, bib) = T::parse(md.clone(), lib); + T::render(fm.clone(), sack, Raw(parsed), outline, bib) + .render() + .into() + }), + meta, + } + 
.into(), + path, + link, + } + .into() + } + _ => meta.into(), + } +} + +fn to_bundle(item: PipelineItem) -> PipelineItem { + let meta = match item { + PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta, + _ => return item, + }; + + let path = meta.path.strip_prefix("content").unwrap().to_owned(); + + match meta.path.extension() { + // any image + Some("jpg" | "png" | "gif") => Output { + kind: Asset { + kind: AssetKind::Image, + meta, + } + .into(), + path, + link: None, + } + .into(), + // bibliography + Some("bib") => { + let data = fs::read_to_string(&meta.path).unwrap(); + let data = hayagriva::io::from_biblatex_str(&data).unwrap(); + + Output { + kind: Asset { + kind: AssetKind::Bibtex(data), + meta, + } + .into(), + path, + link: None, + } + .into() + } + _ => meta.into(), + } } diff --git a/src/md/matter.rs b/src/md/matter.rs deleted file mode 100644 index 6c6434a..0000000 --- a/src/md/matter.rs +++ /dev/null @@ -1,43 +0,0 @@ -use gray_matter::{engine::YAML, Matter}; -use serde::Deserialize; - - -pub fn preflight(raw: &str) -> (T, String) -where - T: for<'de> Deserialize<'de>, -{ - let matter = Matter::::new(); - let result = matter.parse(raw); - - ( - // Just the front matter - result.data.unwrap().deserialize::().unwrap(), - // The actual markdown content - result.content, - ) -} - -mod isodate { - use chrono::{DateTime, Utc}; - use serde::{self, Deserialize, Deserializer}; - - // pub fn serialize( - // date: &DateTime, - // serializer: S, - // ) -> Result - // where - // S: Serializer, - // { - // let s = date.to_rfc3339(); - // serializer.serialize_str(&s) - // } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - let dt = chrono::DateTime::parse_from_rfc3339(&s).map_err(serde::de::Error::custom)?; - Ok(dt.into()) - } -} diff --git a/src/md/mod.rs b/src/md/mod.rs deleted file mode 100644 index 936f793..0000000 --- 
a/src/md/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod matter; - -pub use matter::preflight; diff --git a/src/pipeline.rs b/src/pipeline.rs index 3866b24..58ea7d2 100644 --- a/src/pipeline.rs +++ b/src/pipeline.rs @@ -11,67 +11,75 @@ use hayagriva::Library; use hypertext::Renderable; use crate::text::md::Outline; -use crate::{Link, LinkDate, Linkable}; +use crate::{BuildContext, Link, LinkDate, Linkable}; /// Represents a piece of content that can be rendered as a page. This trait needs to be /// implemented for the front matter associated with some web page as that is what ultimately /// matters when rendering the page. Each front matter *definition* maps to exactly one kind of /// rendered page on the website. pub(crate) trait Content { - /// Parse the document. Pass an optional library for bibliography. - fn parse(document: &str, library: Option<&Library>) -> (Outline, String, Option>); + /// Parse the document. Pass an optional library for bibliography. + /// This generates the initial HTML markup from content. + fn parse(document: String, library: Option<&Library>) + -> (Outline, String, Option>); - fn transform<'fm, 'md, 'sack, 'html, T>( - &'fm self, - content: T, - outline: Outline, - sack: &'sack Sack, - bib: Option>, - ) -> impl Renderable + 'html - where - 'fm: 'html, - 'md: 'html, - 'sack: 'html, - T: Renderable + 'md; + /// Render the full page from parsed content. + fn render<'s, 'p, 'html>( + self, + sack: &'s Sack, + parsed: impl Renderable + 'p, + outline: Outline, + bib: Option>, + ) -> impl Renderable + 'html + where + 's: 'html, + 'p: 'html; - fn as_link(&self, path: Utf8PathBuf) -> Option; + /// Get link for this content + fn as_link(&self, path: Utf8PathBuf) -> Option; } /// Marks whether the item should be treated as a content page, converted into a standalone HTML /// page, or as a bundled asset. #[derive(Debug)] pub(crate) enum FileItemKind { - /// Marks items converted to `index.html`. - Index, - /// Marks items from bundle. 
- Bundle, + /// Marks items converted to `index.html`. + Index, + /// Marks items from bundle. + Bundle, } /// Metadata for a single item consumed by SSG. #[derive(Debug)] pub(crate) struct FileItem { - /// The kind of an item from disk. - pub kind: FileItemKind, - /// Original source file location. - pub path: Utf8PathBuf, + /// The kind of an item from disk. + pub kind: FileItemKind, + /// Original source file location. + pub path: Utf8PathBuf, } /// Marks how the asset should be processed by the SSG. pub(crate) enum AssetKind { - /// Data renderable to HTML. In order to process the data, a closure should be called. - Html(Box String>), - /// Bibliographical data. - Bibtex(Library), - /// Image. For now they are simply cloned to the `dist` director. - Image, + /// Data renderable to HTML. In order to process the data, a closure should be called. + Html(Box String>), + /// Bibliographical data. + Bibtex(Library), + /// Image. For now they are simply cloned to the `dist` director. + Image, +} + +impl AssetKind { + pub fn html(f: impl Fn(&Sack) -> String + 'static) -> Self { + Self::Html(Box::new(f)) + } } /// Asset corresponding to a file on disk. pub(crate) struct Asset { - /// The kind of a processed asset. - pub kind: AssetKind, - /// File metadata - pub meta: FileItem, + /// The kind of a processed asset. + pub kind: AssetKind, + /// File metadata + pub meta: FileItem, } /// Dynamically generated asset not corresponding to any file on disk. This is useful when the @@ -79,223 +87,228 @@ pub(crate) struct Asset { pub(crate) struct Virtual(Box String>); impl Virtual { - pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self { - Self(Box::new(call)) - } + pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self { + Self(Box::new(call)) + } } /// The kind of an output item. pub(crate) enum OutputKind { - /// Marks an output item which corresponds to a file on disk. - Asset(Asset), - /// Marks an output item which doesn't correspond to any file. 
- Virtual(Virtual), + /// Marks an output item which corresponds to a file on disk. + Asset(Asset), + /// Marks an output item which doesn't correspond to any file. + Virtual(Virtual), } impl From for OutputKind { - fn from(value: Asset) -> Self { - OutputKind::Asset(value) - } + fn from(value: Asset) -> Self { + OutputKind::Asset(value) + } } impl From for OutputKind { - fn from(value: Virtual) -> Self { - OutputKind::Virtual(value) - } + fn from(value: Virtual) -> Self { + OutputKind::Virtual(value) + } } /// Renderable output pub(crate) struct Output { - /// The kind of an output item - pub(crate) kind: OutputKind, - /// Path for the output in dist - pub(crate) path: Utf8PathBuf, - /// Optional URL data for outputted page. - pub(crate) link: Option, + /// The kind of an output item + pub(crate) kind: OutputKind, + /// Path for the output in dist + pub(crate) path: Utf8PathBuf, + /// Optional URL data for outputted page. + pub(crate) link: Option, } /// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as /// `Take`, which means it needs to have an output location assigned to itself. pub(crate) enum PipelineItem { - /// Unclaimed file. - Skip(FileItem), - /// Data ready to be processed. - Take(Output), + /// Unclaimed file. + Skip(FileItem), + /// Data ready to be processed. + Take(Output), } impl From for PipelineItem { - fn from(value: FileItem) -> Self { - Self::Skip(value) - } + fn from(value: FileItem) -> Self { + Self::Skip(value) + } } impl From for PipelineItem { - fn from(value: Output) -> Self { - Self::Take(value) - } + fn from(value: Output) -> Self { + Self::Take(value) + } } /// This struct allows for querying the website hierarchy. It is passed to each rendered website /// page, so that it can easily access the website metadata. 
pub(crate) struct Sack<'a> { - /// Literally all of the content - hole: &'a [Output], - /// Current path for the page being rendered - path: &'a Utf8PathBuf, - /// Original file location for this page - file: Option<&'a Utf8PathBuf>, + pub ctx: &'a BuildContext, + /// Literally all of the content + hole: &'a [Output], + /// Current path for the page being rendered + path: &'a Utf8PathBuf, + /// Original file location for this page + file: Option<&'a Utf8PathBuf>, } impl<'a> Sack<'a> { - pub fn new(hole: &'a [Output], path: &'a Utf8PathBuf, file: Option<&'a Utf8PathBuf>) -> Self { - Self { hole, path, file } - } + pub fn get_links(&self, path: &str) -> Vec { + let pattern = glob::Pattern::new(path).expect("Bad glob pattern"); + self.hole + .iter() + .filter(|item| pattern.matches_path(item.path.as_ref())) + .filter_map(|item| match &item.link { + Some(Linkable::Date(link)) => Some(link.clone()), + _ => None, + }) + .collect() + } - pub fn get_links(&self, path: &str) -> Vec { - let pattern = glob::Pattern::new(path).expect("Bad glob pattern"); - self.hole - .iter() - .filter(|item| pattern.matches_path(item.path.as_ref())) - .filter_map(|item| match &item.link { - Some(Linkable::Date(link)) => Some(link.clone()), - _ => None, - }) - .collect() - } + pub fn get_tree(&self, path: &str) -> TreePage { + let glob = glob::Pattern::new(path).expect("Bad glob pattern"); + let list = self + .hole + .iter() + .filter(|item| glob.matches_path(item.path.as_ref())) + .filter_map(|item| match &item.link { + Some(Linkable::Link(link)) => Some(link.clone()), + _ => None, + }); - pub fn get_tree(&self, path: &str) -> TreePage { - let glob = glob::Pattern::new(path).expect("Bad glob pattern"); - let list = self - .hole - .iter() - .filter(|item| glob.matches_path(item.path.as_ref())) - .filter_map(|item| match &item.link { - Some(Linkable::Link(link)) => Some(link.clone()), - _ => None, - }); + let mut tree = TreePage::new(); + for link in list { + tree.add_link(&link); + } - let mut 
tree = TreePage::new(); - for link in list { - tree.add_link(&link); - } + tree + } - tree - } + pub fn get_library(&self) -> Option<&Library> { + let glob = format!("{}/*.bib", self.path.parent()?); + let glob = glob::Pattern::new(&glob).expect("Bad glob pattern"); + let opts = glob::MatchOptions { + case_sensitive: true, + require_literal_separator: true, + require_literal_leading_dot: false, + }; - pub fn get_library(&self) -> Option<&Library> { - let glob = format!("{}/*.bib", self.path.parent()?); - let glob = glob::Pattern::new(&glob).expect("Bad glob pattern"); - let opts = glob::MatchOptions { - case_sensitive: true, - require_literal_separator: true, - require_literal_leading_dot: false, - }; + self.hole + .iter() + .filter(|item| glob.matches_path_with(item.path.as_ref(), opts)) + .filter_map(|asset| match asset.kind { + OutputKind::Asset(ref real) => Some(real), + _ => None, + }) + .find_map(|asset| match asset.kind { + AssetKind::Bibtex(ref lib) => Some(lib), + _ => None, + }) + } - self.hole - .iter() - .filter(|item| glob.matches_path_with(item.path.as_ref(), opts)) - .filter_map(|asset| match asset.kind { - OutputKind::Asset(ref real) => Some(real), - _ => None, - }) - .find_map(|asset| match asset.kind { - AssetKind::Bibtex(ref lib) => Some(lib), - _ => None, - }) - } - - /// Get the path for original file location - pub fn get_file(&self) -> Option<&'a Utf8Path> { - self.file.map(Utf8PathBuf::as_ref) - } + /// Get the path for original file location + pub fn get_file(&self) -> Option<&'a Utf8Path> { + self.file.map(Utf8PathBuf::as_ref) + } } #[derive(Debug)] pub(crate) struct TreePage { - pub link: Option, - pub subs: HashMap, + pub link: Option, + pub subs: HashMap, } impl TreePage { - fn new() -> Self { - TreePage { - link: None, - subs: HashMap::new(), - } - } + fn new() -> Self { + TreePage { + link: None, + subs: HashMap::new(), + } + } - fn add_link(&mut self, link: &Link) { - let mut ptr = self; - for part in link.path.iter().skip(1) { - ptr 
= ptr.subs.entry(part.to_string()).or_insert(TreePage::new()); - } - ptr.link = Some(link.clone()); - } + fn add_link(&mut self, link: &Link) { + let mut ptr = self; + for part in link.path.iter().skip(1) { + ptr = ptr.subs.entry(part.to_string()).or_insert(TreePage::new()); + } + ptr.link = Some(link.clone()); + } } pub fn gather(pattern: &str, exts: &HashSet<&'static str>) -> Vec { - glob(pattern) - .expect("Invalid glob pattern") - .filter_map(|path| { - let path = path.unwrap(); - let path = Utf8PathBuf::from_path_buf(path).expect("Filename is not valid UTF8"); + glob(pattern) + .expect("Invalid glob pattern") + .filter_map(|path| { + let path = path.unwrap(); + let path = Utf8PathBuf::from_path_buf(path).expect("Filename is not valid UTF8"); - match path.is_dir() { - true => None, - false => Some(to_source(path, exts)), - } - }) - .map(Into::into) - .collect() + match path.is_dir() { + true => None, + false => Some(to_source(path, exts)), + } + }) + .map(Into::into) + .collect() } fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem { - let hit = path.extension().map_or(false, |ext| exts.contains(ext)); + let hit = path.extension().map_or(false, |ext| exts.contains(ext)); - let kind = match hit { - true => FileItemKind::Index, - false => FileItemKind::Bundle, - }; + let kind = match hit { + true => FileItemKind::Index, + false => FileItemKind::Bundle, + }; - FileItem { kind, path } + FileItem { kind, path } } -pub fn render_all(items: &[Output]) { - for item in items { - let file = match &item.kind { - OutputKind::Asset(a) => Some(&a.meta.path), - OutputKind::Virtual(_) => None, - }; - render(item, &Sack::new(items, &item.path, file)); - } +pub fn render_all(ctx: &BuildContext, items: &[Output]) { + for item in items { + let file = match &item.kind { + OutputKind::Asset(a) => Some(&a.meta.path), + OutputKind::Virtual(_) => None, + }; + render( + item, + Sack { + ctx, + hole: items, + path: &item.path, + file, + }, + ); + } } -fn 
render(item: &Output, sack: &Sack) { - let o = Utf8Path::new("dist").join(&item.path); - fs::create_dir_all(o.parent().unwrap()).unwrap(); +fn render(item: &Output, sack: Sack) { + let o = Utf8Path::new("dist").join(&item.path); + fs::create_dir_all(o.parent().unwrap()).unwrap(); - match item.kind { - OutputKind::Asset(ref real) => { - let i = &real.meta.path; + match item.kind { + OutputKind::Asset(ref real) => { + let i = &real.meta.path; - match &real.kind { - AssetKind::Html(closure) => { - let mut file = File::create(&o).unwrap(); - file.write_all(closure(sack).as_bytes()).unwrap(); - println!("HTML: {} -> {}", i, o); - } - AssetKind::Bibtex(_) => {} - AssetKind::Image => { - fs::create_dir_all(o.parent().unwrap()).unwrap(); - fs::copy(i, &o).unwrap(); - println!("Image: {} -> {}", i, o); - } - }; - } - OutputKind::Virtual(Virtual(ref closure)) => { - let mut file = File::create(&o).unwrap(); - file.write_all(closure(sack).as_bytes()).unwrap(); - println!("Virtual: -> {}", o); - } - } + match &real.kind { + AssetKind::Html(closure) => { + let mut file = File::create(&o).unwrap(); + file.write_all(closure(&sack).as_bytes()).unwrap(); + println!("HTML: {} -> {}", i, o); + } + AssetKind::Bibtex(_) => {} + AssetKind::Image => { + fs::create_dir_all(o.parent().unwrap()).unwrap(); + fs::copy(i, &o).unwrap(); + println!("Image: {} -> {}", i, o); + } + }; + } + OutputKind::Virtual(Virtual(ref closure)) => { + let mut file = File::create(&o).unwrap(); + file.write_all(closure(&sack).as_bytes()).unwrap(); + println!("Virtual: -> {}", o); + } + } } diff --git a/src/text/md.rs b/src/text/md.rs index 6e39801..bd9bf45 100644 --- a/src/text/md.rs +++ b/src/text/md.rs @@ -1,6 +1,11 @@ use std::collections::HashMap; -use hayagriva::{archive::ArchivedStyle, citationberg::{IndependentStyle, Locale, Style}, BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest, Library}; +use hayagriva::{ + archive::ArchivedStyle, + 
citationberg::{IndependentStyle, Locale, Style}, + BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest, + Library, +}; use hypertext::Renderable; use once_cell::sync::Lazy; use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd, TextMergeStream}; @@ -8,315 +13,339 @@ use regex::Regex; use crate::ts; +static OPTS: Lazy = Lazy::new(|| { + Options::empty() + .union(Options::ENABLE_MATH) + .union(Options::ENABLE_TABLES) + .union(Options::ENABLE_TASKLISTS) + .union(Options::ENABLE_STRIKETHROUGH) + .union(Options::ENABLE_SMART_PUNCTUATION) +}); -static OPTS: Lazy = Lazy::new(|| - Options::empty() - .union(Options::ENABLE_MATH) - .union(Options::ENABLE_TABLES) - .union(Options::ENABLE_TASKLISTS) - .union(Options::ENABLE_STRIKETHROUGH) - .union(Options::ENABLE_SMART_PUNCTUATION) -); +static KATEX_I: Lazy = Lazy::new(|| { + katex::opts::Opts::builder() + .output_type(katex::OutputType::Mathml) + .build() + .unwrap() +}); -static KATEX_I: Lazy = Lazy::new(|| - katex::opts::Opts::builder() - .output_type(katex::OutputType::Mathml) - .build() - .unwrap() -); - -static KATEX_B: Lazy = Lazy::new(|| - katex::opts::Opts::builder() - .output_type(katex::OutputType::Mathml) - .display_mode(true) - .build() - .unwrap() -); +static KATEX_B: Lazy = Lazy::new(|| { + katex::opts::Opts::builder() + .output_type(katex::OutputType::Mathml) + .display_mode(true) + .build() + .unwrap() +}); static LOCALE: Lazy> = Lazy::new(hayagriva::archive::locales); -static STYLE: Lazy = Lazy::new(|| - match ArchivedStyle::InstituteOfPhysicsNumeric.get() { - Style::Independent(style) => style, - Style::Dependent(_) => unreachable!(), - } -); +static STYLE: Lazy = + Lazy::new(|| match ArchivedStyle::InstituteOfPhysicsNumeric.get() { + Style::Independent(style) => style, + Style::Dependent(_) => unreachable!(), + }); pub struct Outline(pub Vec<(String, String)>); +pub fn parse(text: String, lib: Option<&Library>) -> (Outline, String, Option>) { + let 
(outline, stream) = { + let stream = Parser::new_ext(&text, *OPTS); + let mut stream: Vec<_> = TextMergeStream::new(stream).collect(); + let outline = set_heading_ids(&mut stream); + (outline, stream) + }; -pub fn parse(text: &str, lib: Option<&Library>) -> (Outline, String, Option>) { - let (outline, stream) = { - let stream = Parser::new_ext(text, *OPTS); - let mut stream: Vec<_> = TextMergeStream::new(stream).collect(); - let outline = set_heading_ids(&mut stream); - (outline, stream) - }; + let stream = stream + .into_iter() + .map(make_math) + .map(make_emoji) + .collect::>(); - let stream = stream.into_iter() - .map(make_math) - .map(make_emoji) - .collect::>(); + let stream = make_code(stream) + .into_iter() + .flat_map(make_ruby) + .flat_map(make_cite) + .collect::>(); - let stream = make_code(stream) - .into_iter() - .flat_map(make_ruby) - .flat_map(make_cite) - .collect::>(); + let (stream, bib) = match lib { + Some(lib) => make_bib(stream, lib), + None => (stream, None), + }; - let (stream, bib) = match lib { - Some(lib) => make_bib(stream, lib), - None => (stream, None), - }; + let mut html = String::new(); + pulldown_cmark::html::push_html(&mut html, stream.into_iter()); - let mut html = String::new(); - pulldown_cmark::html::push_html(&mut html, stream.into_iter()); - - (outline, html, bib) + (outline, html, bib) } -fn make_bib<'a, 'b>(stream: Vec>, lib: &'b Library) -> (Vec>, Option>) { - let mut driver = BibliographyDriver::new(); +fn make_bib<'a, 'b>( + stream: Vec>, + lib: &'b Library, +) -> (Vec>, Option>) { + let mut driver = BibliographyDriver::new(); - for event in stream.iter() { - match event { - Event::InlineMath(ref text) => match lib.get(text) { - Some(entry) => driver.citation(CitationRequest::from_items(vec![CitationItem::with_entry(entry)], &STYLE, &LOCALE)), - None => (), - }, - _ => (), - } - } + for event in stream.iter() { + match event { + Event::InlineMath(ref text) => match lib.get(text) { + Some(entry) => 
driver.citation(CitationRequest::from_items( + vec![CitationItem::with_entry(entry)], + &STYLE, + &LOCALE, + )), + None => (), + }, + _ => (), + } + } - // add fake citation to make all entries show up - driver.citation(CitationRequest::from_items(lib.iter().map(CitationItem::with_entry).collect(), &STYLE, &LOCALE)); + // add fake citation to make all entries show up + driver.citation(CitationRequest::from_items( + lib.iter().map(CitationItem::with_entry).collect(), + &STYLE, + &LOCALE, + )); - let res = driver.finish(BibliographyRequest { style: &STYLE, locale: None, locale_files: &LOCALE }); + let res = driver.finish(BibliographyRequest { + style: &STYLE, + locale: None, + locale_files: &LOCALE, + }); - let mut n = 0; - let stream = stream.into_iter() - .map(|event| match event { - Event::InlineMath(name) => { - let mut buffer = String::from(""); - match res.citations.get(n) { - Some(rf) => rf.citation.write_buf(&mut buffer, BufWriteFormat::Html).unwrap(), - None => buffer.push_str(&name), - }; - buffer.push_str(""); - n += 1; - Event::InlineHtml(buffer.into()) - }, - _ => event - }) - .collect(); + let mut n = 0; + let stream = stream + .into_iter() + .map(|event| match event { + Event::InlineMath(name) => { + let mut buffer = String::from(""); + match res.citations.get(n) { + Some(rf) => rf + .citation + .write_buf(&mut buffer, BufWriteFormat::Html) + .unwrap(), + None => buffer.push_str(&name), + }; + buffer.push_str(""); + n += 1; + Event::InlineHtml(buffer.into()) + } + _ => event, + }) + .collect(); - let bib = res.bibliography.map(|bib| - bib.items.iter() - .map(|x| { - let mut buffer = String::new(); - x.content.write_buf(&mut buffer, BufWriteFormat::Html).unwrap(); - buffer - }) - .collect::>() - ); + let bib = res.bibliography.map(|bib| { + bib.items + .iter() + .map(|x| { + let mut buffer = String::new(); + x.content + .write_buf(&mut buffer, BufWriteFormat::Html) + .unwrap(); + buffer + }) + .collect::>() + }); - (stream, bib) + (stream, bib) } static 
RE_CITE: Lazy = Lazy::new(|| Regex::new(r":cite\[([^\]]+)\]").unwrap()); #[derive(Debug)] enum Annotated_<'a> { - Text(&'a str), - Cite(&'a str), + Text(&'a str), + Cite(&'a str), } fn annotate_(input: &str) -> Vec { - let mut parts: Vec = Vec::new(); - let mut last_index = 0; + let mut parts: Vec = Vec::new(); + let mut last_index = 0; - for cap in RE_CITE.captures_iter(input) { - let cite = cap.get(1).unwrap().as_str(); - let index = cap.get(0).unwrap().start(); + for cap in RE_CITE.captures_iter(input) { + let cite = cap.get(1).unwrap().as_str(); + let index = cap.get(0).unwrap().start(); - if index > last_index { - parts.push(Annotated_::Text(&input[last_index..index])); - } + if index > last_index { + parts.push(Annotated_::Text(&input[last_index..index])); + } - parts.push(Annotated_::Cite(cite)); - last_index = cap.get(0).unwrap().end(); - } + parts.push(Annotated_::Cite(cite)); + last_index = cap.get(0).unwrap().end(); + } - if last_index < input.len() { - parts.push(Annotated_::Text(&input[last_index..])); - } + if last_index < input.len() { + parts.push(Annotated_::Text(&input[last_index..])); + } - parts + parts } fn make_cite(event: Event) -> Vec { - match event { - Event::Text(ref text) => { - annotate_(text) - .into_iter() - .map(|e| match e { - Annotated_::Text(text) => Event::Text(text.to_owned().into()), - Annotated_::Cite(cite) => Event::InlineMath(cite.to_owned().into()), - }) - .collect() - }, - _ => vec![event], - } + match event { + Event::Text(ref text) => annotate_(text) + .into_iter() + .map(|e| match e { + Annotated_::Text(text) => Event::Text(text.to_owned().into()), + Annotated_::Cite(cite) => Event::InlineMath(cite.to_owned().into()), + }) + .collect(), + _ => vec![event], + } } fn set_heading_ids(events: &mut [Event]) -> Outline { - let mut cnt = HashMap::::new(); - let mut out = Vec::new(); - let mut buf = String::new(); - let mut ptr = None; + let mut cnt = HashMap::::new(); + let mut out = Vec::new(); + let mut buf = String::new(); 
+ let mut ptr = None; - for event in events { - match event { - Event::Start(ref mut tag @ Tag::Heading {..}) => { - ptr = Some(tag); - }, - Event::Text(ref text) if ptr.is_some() => { - buf.push_str(text) - }, - Event::End(TagEnd::Heading(..)) => { - let txt = std::mem::take(&mut buf); - let url = txt.to_lowercase().replace(' ', "-"); - let url = match cnt.get_mut(&url) { - Some(ptr) => { *ptr += 1; format!("{url}-{ptr}") }, - None => { cnt.insert(url.clone(), 0); url }, - }; - match ptr.take().unwrap() { - Tag::Heading { ref mut id, .. } => *id = Some(url.clone().into()), - _ => unreachable!(), - } - out.push((txt, url)); - }, - _ => (), - } - }; + for event in events { + match event { + Event::Start(ref mut tag @ Tag::Heading { .. }) => { + ptr = Some(tag); + } + Event::Text(ref text) if ptr.is_some() => buf.push_str(text), + Event::End(TagEnd::Heading(..)) => { + let txt = std::mem::take(&mut buf); + let url = txt.to_lowercase().replace(' ', "-"); + let url = match cnt.get_mut(&url) { + Some(ptr) => { + *ptr += 1; + format!("{url}-{ptr}") + } + None => { + cnt.insert(url.clone(), 0); + url + } + }; + match ptr.take().unwrap() { + Tag::Heading { ref mut id, .. 
} => *id = Some(url.clone().into()), + _ => unreachable!(), + } + out.push((txt, url)); + } + _ => (), + } + } - Outline(out) + Outline(out) } - fn make_math(event: Event) -> Event { - match event { - Event::InlineMath(math) => Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into()), - Event::DisplayMath(math) => Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into()), - _ => event - } + match event { + Event::InlineMath(math) => { + Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into()) + } + Event::DisplayMath(math) => { + Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into()) + } + _ => event, + } } fn make_code(es: Vec) -> Vec { - let mut buff = Vec::new(); - let mut lang = None; - let mut code = String::new(); + let mut buff = Vec::new(); + let mut lang = None; + let mut code = String::new(); - for event in es { - match event { - Event::Start(Tag::CodeBlock(kind)) => match kind { - CodeBlockKind::Indented => (), - CodeBlockKind::Fenced(name) => lang = Some(name), - }, - Event::End(TagEnd::CodeBlock) => { - let lang = lang.take().unwrap_or("".into()); - let html = ts::highlight(&lang, &code).render().as_str().to_owned(); - buff.push(Event::Html(html.into())); - code.clear(); - }, - Event::Text(text) => match lang { - None => buff.push(Event::Text(text)), - Some(_) => code.push_str(&text), - }, - _ => buff.push(event) - } - } + for event in es { + match event { + Event::Start(Tag::CodeBlock(kind)) => match kind { + CodeBlockKind::Indented => (), + CodeBlockKind::Fenced(name) => lang = Some(name), + }, + Event::End(TagEnd::CodeBlock) => { + let lang = lang.take().unwrap_or("".into()); + let html = ts::highlight(&lang, &code).render().as_str().to_owned(); + buff.push(Event::Html(html.into())); + code.clear(); + } + Event::Text(text) => match lang { + None => buff.push(Event::Text(text)), + Some(_) => code.push_str(&text), + }, + _ => buff.push(event), + } + } - buff + buff } -static RE_RUBY: 
Lazy<Regex> = Lazy::new(|| - Regex::new(r"\[([^\]]+)\]\{([^}]+)\}").unwrap() -); +static RE_RUBY: Lazy<Regex> = Lazy::new(|| Regex::new(r"\[([^\]]+)\]\{([^}]+)\}").unwrap()); #[derive(Debug)] enum Annotated<'a> { - Text(&'a str), - Ruby(&'a str, &'a str), + Text(&'a str), + Ruby(&'a str, &'a str), } - fn annotate(input: &str) -> Vec<Annotated> { - let mut parts: Vec<Annotated> = Vec::new(); - let mut last_index = 0; + let mut parts: Vec<Annotated> = Vec::new(); + let mut last_index = 0; - for cap in RE_RUBY.captures_iter(input) { - let text = cap.get(1).unwrap().as_str(); - let ruby = cap.get(2).unwrap().as_str(); - let index = cap.get(0).unwrap().start(); + for cap in RE_RUBY.captures_iter(input) { + let text = cap.get(1).unwrap().as_str(); + let ruby = cap.get(2).unwrap().as_str(); + let index = cap.get(0).unwrap().start(); - if index > last_index { - parts.push(Annotated::Text(&input[last_index..index])); - } + if index > last_index { + parts.push(Annotated::Text(&input[last_index..index])); + } - parts.push(Annotated::Ruby(text, ruby)); - last_index = cap.get(0).unwrap().end(); - } + parts.push(Annotated::Ruby(text, ruby)); + last_index = cap.get(0).unwrap().end(); + } - if last_index < input.len() { - parts.push(Annotated::Text(&input[last_index..])); - } + if last_index < input.len() { + parts.push(Annotated::Text(&input[last_index..])); + } - parts + parts } fn make_ruby(event: Event) -> Vec<Event> { - match event { - Event::Text(ref text) => annotate(text) - .into_iter() - .map(|el| match el { - Annotated::Text(text) => Event::Text(text.to_owned().into()), - Annotated::Ruby(t, f) => Event::InlineHtml(format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into()), - }) - .collect(), - _ => vec![event], - } + match event { + Event::Text(ref text) => annotate(text) - .into_iter() + .into_iter() + .map(|el| match el { + Annotated::Text(text) => Event::Text(text.to_owned().into()), + Annotated::Ruby(t, f) => Event::InlineHtml( + format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into(), + ), + }) + .collect(), + _ => vec![event], + } } fn make_emoji(event: Event) -> Event { - match event { - 
Event::Text(ref text) => { - let mut buf = None; - let mut top = 0; - let mut old = 0; + match event { + Event::Text(ref text) => { + let mut buf = None; + let mut top = 0; + let mut old = 0; - for (idx, _) in text.match_indices(':') { - let key = &text[old..idx]; + for (idx, _) in text.match_indices(':') { + let key = &text[old..idx]; - if let Some(emoji) = emojis::get_by_shortcode(key) { - let buf = buf.get_or_insert_with(|| String::with_capacity(text.len())); - buf.push_str(&text[top..old-1]); - buf.push_str(emoji.as_str()); - top = idx + 1; - } + if let Some(emoji) = emojis::get_by_shortcode(key) { + let buf = buf.get_or_insert_with(|| String::with_capacity(text.len())); + buf.push_str(&text[top..old - 1]); + buf.push_str(emoji.as_str()); + top = idx + 1; + } - old = idx + 1; - } + old = idx + 1; + } - if let Some(ref mut buf) = buf { - buf.push_str(&text[top..]); - } + if let Some(ref mut buf) = buf { + buf.push_str(&text[top..]); + } - match buf { - None => event, - Some(buf) => Event::Text(buf.into()) - } - }, - _ => event, - } + match buf { + None => event, + Some(buf) => Event::Text(buf.into()), + } + } + _ => event, + } } diff --git a/src/ts/mod.rs b/src/ts/mod.rs index 6adc5f1..5270c83 100644 --- a/src/ts/mod.rs +++ b/src/ts/mod.rs @@ -1,80 +1,75 @@ -use std::borrow::Cow; - -use hypertext::{html_elements, maud_move, Raw, Renderable, GlobalAttributes}; -use tree_sitter_highlight::{Highlighter, HighlightEvent}; - mod captures; mod configs; +use std::borrow::Cow; + +use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable}; +use tree_sitter_highlight::{HighlightEvent, Highlighter}; pub enum Event { - Write(String), - Enter(String), - Close, + Write(String), + Enter(String), + Close, } - -pub fn highlight<'data, 'html>( - lang: &'data str, - code: &'data str -) -> impl Renderable + 'html - where - 'data: 'html +pub fn highlight<'data, 'html>(lang: &'data str, code: &'data str) -> impl Renderable + 'html +where + 'data: 'html, { - 
maud_move!( - figure .listing.kanagawa data-lang=(lang) { - pre { - code { - (Raw(to_html(lang, code))) - } - } - } - ) + maud_move!( + figure .listing.kanagawa data-lang=(lang) { + pre { + code { + (Raw(to_html(lang, code))) + } + } + } + ) } fn to_html(lang: &str, code: &str) -> String { - get_events(lang, code) - .into_iter() - .map(|event| match event { - Event::Write(text) => Cow::from( - text.replace('&', "&amp;").replace('<', "&lt;").replace('>', "&gt;") - ), - Event::Enter(class) => Cow::from( - format!("<span class=\"{}\">", class.replace('.', "-")) - ), - Event::Close => Cow::from("</span>"), - }) - .collect() + get_events(lang, code) + .into_iter() + .map(|event| match event { + Event::Write(text) => Cow::from( + text.replace('&', "&amp;") + .replace('<', "&lt;") + .replace('>', "&gt;"), + ), + Event::Enter(class) => { + Cow::from(format!("<span class=\"{}\">", class.replace('.', "-"))) + } + Event::Close => Cow::from("</span>"), + }) + .collect() } fn get_events(lang: &str, src: &str) -> Vec<Event> { - let config = match configs::get_config(lang) { - Some(c) => c, - None => return vec![Event::Write(src.into())] - }; + let config = match configs::get_config(lang) { + Some(c) => c, + None => return vec![Event::Write(src.into())], + }; + let mut hl = Highlighter::new(); + let highlights = hl + .highlight(config, src.as_bytes(), None, |name| { + configs::get_config(name) + }) + .unwrap(); - let mut hl = Highlighter::new(); - let highlights = hl.highlight( - config, - src.as_bytes(), - None, - |name| configs::get_config(name) - ).unwrap(); - - let mut out = vec![]; - for event in highlights { - let event = event.unwrap(); - let obj = map_event(event, src); - out.push(obj); - } - out + let mut out = vec![]; + for event in highlights { + let event = event.unwrap(); + let obj = map_event(event, src); + out.push(obj); + } + out } fn map_event(event: HighlightEvent, src: &str) -> Event { - match event { - HighlightEvent::Source {start, end} => Event::Write(src[start..end].into()), - HighlightEvent::HighlightStart(s) => 
Event::Enter(captures::NAMES[s.0].into()), - HighlightEvent::HighlightEnd => Event::Close, - } + match event { + HighlightEvent::Source { start, end } => Event::Write(src[start..end].into()), + HighlightEvent::HighlightStart(s) => Event::Enter(captures::NAMES[s.0].into()), + HighlightEvent::HighlightEnd => Event::Close, + } }