Compare commits

..

No commits in common. "1c2e87aa2509d06a3cf74da01b08143bd3381f41" and "46705d707f144764f90d5f0082c4df63b07f8b1e" have entirely different histories.

15 changed files with 1307 additions and 1411 deletions

View file

@ -1,2 +0,0 @@
tab_spaces = 4
hard_tabs = true

View file

@ -1,6 +1,6 @@
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable}; use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
use crate::{pipeline::Sack, text::md::parse, LinkDate, Linkable}; use crate::text::md::parse;
const INTRO: &str = r#" const INTRO: &str = r#"
## ##
@ -15,75 +15,59 @@ const INTRO: &str = r#"
"#; "#;
fn intro() -> impl Renderable { fn intro() -> impl Renderable {
let (_, html, _) = parse(INTRO.into(), None); let (_, html, _) = parse(INTRO, None);
maud!( maud!(
section .p-card.intro-jp lang="ja-JP" { section .p-card.intro-jp lang="ja-JP" {
(Raw(html)) (Raw(html))
} }
) )
}
fn kanji() -> impl Renderable {
maud!(
section .p-card {
h2 .p-card__heading {
"Kanji of the Day"
}
div {
// <Widget client:load/>
}
}
)
} }
fn photo() -> impl Renderable { fn photo() -> impl Renderable {
maud!( maud!(
section .p-card.home-card-image { section .p-card.home-card-image {
h2 .p-card__heading { h2 .p-card__heading {
"Image of the Month" "Image of the Month"
} }
a .home-card-image__link href="/static/IMG_20231029_111650.jpg" { a .home-card-image__link href="/static/IMG_20231029_111650.jpg" {
img .home-card-image__image img .home-card-image__image
src="/static/IMG_20231029_111650.jpg" src="/static/IMG_20231029_111650.jpg"
alt="Autumn park with colorful trees and fallen leaves"; alt="Autumn park with colorful trees and fallen leaves";
} }
} }
) )
} }
fn latest(sack: &Sack) -> impl Renderable { pub fn home<'data, 'home, R>(main: R) -> impl Renderable + 'home
let links = {
let mut links = sack.get_links("**");
links.sort_by(|a, b| b.date.cmp(&a.date));
links
};
maud_move!(
section .p-card {
h2 .p-card__heading {
"Latest"
}
ol .p-card__latest {
@for link in links.iter().take(5) {
li {
a href=(link.link.path.as_str()) {
(&link.link.name)
}
}
}
}
}
)
}
pub(crate) fn home<'s, 'p, 'html>(
sack: &'s Sack,
main: impl Renderable + 'p,
) -> impl Renderable + 'html
where where
's: 'html, 'data: 'home,
'p: 'html, R: Renderable + 'data,
{ {
let main = maud_move!( let main = maud_move!(
main .l-home { main .l-home {
article .l-home__article.markdown { article .l-home__article.markdown {
(main) (main)
} }
aside .l-home__aside { aside .l-home__aside {
(intro()) (intro())
// (kanji()) // (kanji())
(photo()) (photo())
(latest(sack)) }
} }
} );
);
crate::html::page(sack, main, "Home".into()) crate::html::page("Home", main, None)
} }

View file

@ -1,66 +1,62 @@
use crate::{html::page, LinkDate};
use camino::Utf8PathBuf;
use chrono::{DateTime, Utc};
use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable}; use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable};
use crate::html::page; pub fn list<'data, 'list>(
use crate::pipeline::Sack; title: &'data str,
use crate::LinkDate; groups: &'data [(i32, Vec<LinkDate>)],
) -> impl Renderable + 'list
pub fn list<'s, 'g, 'html>(
sack: &'s Sack,
groups: &'g [(i32, Vec<LinkDate>)],
title: String,
) -> impl Renderable + 'html
where where
's: 'html, 'data: 'list,
'g: 'html,
{ {
let heading = title.clone(); let list = maud_move!(
let list = maud_move!( main .page-list-main {
main .page-list-main { article .page-list {
article .page-list { header .markdown {
header .markdown { h1 { (title) }
h1 { (heading) } }
}
@for (year, group) in groups { @for (year, group) in groups {
(section(*year, group)) (section(*year, group))
} }
} }
} }
); );
page(sack, list, title) page(title, list, None)
} }
fn section(year: i32, group: &[LinkDate]) -> impl Renderable + '_ { fn section(year: i32, group: &[LinkDate]) -> impl Renderable + '_ {
maud_move!( maud_move!(
section .page-list-year { section .page-list-year {
header .page-list-year__header { header .page-list-year__header {
h2 { (year) } h2 { (year) }
} }
@for item in group.iter() { @for item in group.iter() {
(link(item)) (link(item))
} }
} }
) )
} }
fn link(data: &LinkDate) -> impl Renderable + '_ { fn link(data: &LinkDate) -> impl Renderable + '_ {
let time = data.date.format("%m/%d"); let time = data.date.format("%m/%d");
maud_move!( maud_move!(
a .page-item href=(data.link.path.as_str()) { a .page-item href=(data.link.path.as_str()) {
div .page-item__header { div .page-item__header {
h3 { h3 {
(&data.link.name) (&data.link.name)
} }
time datetime=(data.date.to_rfc3339()) { time datetime=(data.date.to_rfc3339()) {
(time.to_string()) (time.to_string())
} }
} }
@if let Some(ref desc) = data.link.desc { @if let Some(ref desc) = data.link.desc {
div .page-item__desc { div .page-item__desc {
(desc) (desc)
} }
} }
} }
) )
} }

View file

@ -3,92 +3,93 @@ use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use crate::pipeline::{Sack, TreePage}; use crate::pipeline::{Sack, TreePage};
use crate::text::md::Outline; use crate::text::md::Outline;
/// Render the outline for a document /// Render the outline for a document
pub(crate) fn show_outline(outline: Outline) -> impl Renderable { pub(crate) fn show_outline(outline: Outline) -> impl Renderable {
maud_move!( maud_move!(
section .link-tree { section .link-tree {
h2 .link-tree__heading { h2 .link-tree__heading {
a .link-tree__heading-text href="#top" { "Content" } a .link-tree__heading-text href="#top" { "Content" }
} }
nav #table-of-contents .link-tree__nav { nav #table-of-contents .link-tree__nav {
ul .link-tree__nav-list { ul .link-tree__nav-list {
@for (title, id) in outline.0 { @for (title, id) in outline.0 {
li .link-tree__nav-list-item { li .link-tree__nav-list-item {
a .link-tree__nav-list-text.link href=(format!("#{}", id)) { a .link-tree__nav-list-text.link href=(format!("#{}", id)) {
(title) (title)
} }
} }
} }
} }
} }
} }
) )
} }
/// Render the bibliography for a document /// Render the bibliography for a document
pub(crate) fn show_bibliography(bib: Vec<String>) -> impl Renderable { pub(crate) fn show_bibliography(bib: Vec<String>) -> impl Renderable {
maud_move!( maud_move!(
section .markdown { section .markdown {
h2 { h2 {
"Bibliography" "Bibliography"
} }
ol .bibliography { ol .bibliography {
@for item in bib { @for item in bib {
li { li {
(Raw(item)) (Raw(item))
} }
} }
} }
} }
) )
} }
/// Render the page tree /// Render the page tree
pub(crate) fn show_page_tree(sack: &Sack, glob: &str) -> impl Renderable { pub(crate) fn show_page_tree(sack: &Sack, glob: &str) -> impl Renderable {
let tree = sack.get_tree(glob); let tree = sack.get_tree(glob);
maud_move!( maud_move!(
h2 .link-tree__heading { h2 .link-tree__heading {
// {pages.chain(x => x.prefix) // {pages.chain(x => x.prefix)
// .map(pathify) // .map(pathify)
// .mapOrDefault(href => // .mapOrDefault(href =>
// <a class="link-tree__heading-text" href={href}>{heading}</a>, // <a class="link-tree__heading-text" href={href}>{heading}</a>,
// <span class="link-tree__heading-text">{heading}</span> // <span class="link-tree__heading-text">{heading}</span>
// )} // )}
} }
nav .link-tree__nav { nav .link-tree__nav {
(show_page_tree_level(&tree)) (show_page_tree_level(&tree))
} }
) )
} }
fn show_page_tree_level(tree: &TreePage) -> impl Renderable + '_ { fn show_page_tree_level(tree: &TreePage) -> impl Renderable + '_ {
let subs = { let subs = {
let mut subs: Vec<_> = tree.subs.iter().collect(); let mut subs: Vec<_> = tree.subs.iter().collect();
subs.sort_by(|a, b| a.0.cmp(b.0)); subs.sort_by(|a, b| a.0.cmp(b.0));
subs subs
}; };
maud_move!( maud_move!(
ul .link-tree__nav-list { ul .link-tree__nav-list {
@for (key, next) in subs { @for (key, next) in subs {
li .link-tree__nav-list-item { li .link-tree__nav-list-item {
span .link-tree__nav-list-text { span .link-tree__nav-list-text {
@if let Some(ref link) = next.link { @if let Some(ref link) = next.link {
a .link-tree__nav-list-text.link href=(link.path.as_str()) { a .link-tree__nav-list-text.link href=(link.path.as_str()) {
(&link.name) (&link.name)
} }
} @else { } @else {
span .link-tree__nav-list-text { span .link-tree__nav-list-text {
(key) (key)
} }
} }
} }
@if !next.subs.is_empty() { @if !next.subs.is_empty() {
(show_page_tree_level(next)) (show_page_tree_level(next))
} }
} }
} }
} }
) )
} }

View file

@ -13,240 +13,223 @@ use camino::Utf8Path;
use chrono::Datelike; use chrono::Datelike;
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable}; use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
use crate::REPO;
pub(crate) use home::home; pub(crate) use home::home;
pub(crate) use post::Post; pub(crate) use post::Post;
pub(crate) use slideshow::Slideshow; pub(crate) use slideshow::Slideshow;
pub(crate) use wiki::Wiki; pub(crate) use wiki::Wiki;
use crate::{pipeline::Sack, Mode};
const JS_RELOAD: &str = r#" const JS_RELOAD: &str = r#"
const socket = new WebSocket("ws://localhost:1337"); const socket = new WebSocket("ws://localhost:1337");
socket.addEventListener("message", (event) => { socket.addEventListener("message", (event) => {
console.log(event); console.log(event);
window.location.reload(); window.location.reload();
}); });
"#; "#;
const JS_IMPORTS: &str = r#" const JS_IMPORTS: &str = r#"
{ {
"imports": { "imports": {
"reveal": "/js/vanilla/reveal.js", "reveal": "/js/vanilla/reveal.js",
"photos": "/js/vanilla/photos.js" "photos": "/js/vanilla/photos.js"
} }
} }
"#; "#;
fn head<'s, 'html>(sack: &'s Sack, title: String) -> impl Renderable + 'html fn head(title: &str) -> impl Renderable + '_ {
where let title = format!("{} | kamoshi.org", title);
's: 'html,
{
let title = format!("{} | kamoshi.org", title);
maud_move!( maud_move!(
meta charset="utf-8"; meta charset="utf-8";
meta name="viewport" content="width=device-width, initial-scale=1"; meta name="viewport" content="width=device-width, initial-scale=1";
title { title {
(title) (title)
} }
// link rel="sitemap" href="/sitemap.xml"; // link rel="sitemap" href="/sitemap.xml";
link rel="stylesheet" href="/styles.css"; link rel="stylesheet" href="/styles.css";
link rel="stylesheet" href="/static/css/reveal.css"; link rel="stylesheet" href="/static/css/reveal.css";
link rel="stylesheet" href="/static/css/leaflet.css"; link rel="stylesheet" href="/static/css/leaflet.css";
link rel="stylesheet" href="/static/css/MarkerCluster.css"; link rel="stylesheet" href="/static/css/MarkerCluster.css";
link rel="stylesheet" href="/static/css/MarkerCluster.Default.css"; link rel="stylesheet" href="/static/css/MarkerCluster.Default.css";
link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png"; link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png";
link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png"; link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png";
link rel="icon" href="/favicon.ico" sizes="any"; link rel="icon" href="/favicon.ico" sizes="any";
script type="importmap" {(Raw(JS_IMPORTS))} script type="importmap" {(Raw(JS_IMPORTS))}
@if matches!(sack.ctx.mode, Mode::Watch) { script { (Raw(JS_RELOAD)) }
script { (Raw(JS_RELOAD)) } )
}
)
} }
fn navbar() -> impl Renderable { fn navbar() -> impl Renderable {
static ITEMS: &[(&str, &str)] = &[ static ITEMS: &[(&str, &str)] = &[
("Posts", "/posts/"), ("Posts", "/posts/"),
("Slides", "/slides/"), ("Slides", "/slides/"),
("Wiki", "/wiki/"), ("Wiki", "/wiki/"),
("Map", "/map/"), ("Map", "/map/"),
("About", "/about/"), ("About", "/about/"),
("Search", "/search/"), ("Search", "/search/"),
]; ];
maud!( maud!(
nav .p-nav { nav .p-nav {
input #p-nav-toggle type="checkbox" hidden; input #p-nav-toggle type="checkbox" hidden;
div .p-nav__bar { div .p-nav__bar {
a .p-nav__logo href="/" { a .p-nav__logo href="/" {
img .p-nav__logo-icon height="48px" width="51px" src="/static/svg/aya.svg" alt=""; img .p-nav__logo-icon height="48px" width="51px" src="/static/svg/aya.svg" alt="";
div .p-nav__logo-text { div .p-nav__logo-text {
div .p-nav__logo-main { div .p-nav__logo-main {
(Raw(include_str!("logotype.svg"))) (Raw(include_str!("logotype.svg")))
} }
div #p-nav-splash .p-nav__logo-sub { div #p-nav-splash .p-nav__logo-sub {
"夢現の遥か彼方" "夢現の遥か彼方"
} }
} }
} }
label .p-nav__burger for="p-nav-toggle" tabindex="0" { label .p-nav__burger for="p-nav-toggle" tabindex="0" {
span .p-nav__burger-icon {} span .p-nav__burger-icon {}
} }
} }
menu .p-nav__menu { menu .p-nav__menu {
@for (name, url) in ITEMS { @for (name, url) in ITEMS {
li .p-nav__menu-item { li .p-nav__menu-item {
a .p-nav__menu-link href=(*url) { a .p-nav__menu-link href=(*url) {
(*name) (*name)
} }
} }
} }
} }
} }
) )
} }
pub fn footer<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html pub fn footer(path: Option<&Utf8Path>) -> impl Renderable {
where let copy = format!("Copyright &copy; {} Maciej Jur", &REPO.year);
's: 'html, let mail = "maciej@kamoshi.org";
let href = format!("mailto:{}", mail);
let link = Utf8Path::new(&REPO.link)
.join("src/commit")
.join(&REPO.hash);
let link = match path {
Some(path) => link.join(path),
None => link,
};
maud_move!(
footer .footer {
div .left {
div {
(Raw(copy))
}
a href=(href) {
(mail)
}
}
div .repo {
a href=(link.as_str()) {
(&REPO.hash)
}
div {
(&REPO.date)
}
}
a .right.footer__cc-wrap rel="license" href="http://creativecommons.org/licenses/by/4.0/" {
img .footer__cc-stamp alt="Creative Commons License" width="88" height="31" src="/static/svg/by.svg";
}
}
)
}
fn bare<'data, 'html, R>(title: &'data str, main: R) -> impl Renderable + 'html
where
'data : 'html,
R: Renderable + 'data
{ {
let copy = format!("Copyright &copy; {} Maciej Jur", &sack.ctx.year); maud_move!(
let mail = "maciej@kamoshi.org"; (Raw("<!DOCTYPE html>"))
let href = format!("mailto:{}", mail); html lang="en" {
let link = Utf8Path::new(&sack.ctx.link) (head(title))
.join("src/commit")
.join(&sack.ctx.hash);
let link = match sack.get_file() {
Some(path) => link.join(path),
None => link,
};
maud_move!( body {
footer .footer { (main)
div .left { }
div { }
(Raw(copy)) )
}
a href=(href) {
(mail)
}
}
div .repo {
a href=(link.as_str()) {
(&sack.ctx.hash)
}
div {
(&sack.ctx.date)
}
}
a .right.footer__cc-wrap rel="license" href="http://creativecommons.org/licenses/by/4.0/" {
img .footer__cc-stamp alt="Creative Commons License" width="88" height="31" src="/static/svg/by.svg";
}
}
)
} }
fn bare<'s, 'p, 'html>( fn page<'data, 'main, 'html, T>(
sack: &'s Sack, title: &'data str,
main: impl Renderable + 'p, main: T,
title: String, path: Option<&'data Utf8Path>,
) -> impl Renderable + 'html ) -> impl Renderable + 'html
where where
's: 'html, 'main : 'html,
'p: 'html, 'data : 'html,
T: Renderable + 'main
{ {
maud_move!( maud_move!(
(Raw("<!DOCTYPE html>")) (Raw("<!DOCTYPE html>"))
html lang="en" { html lang="en" {
(head(sack, title)) (head(title))
body { body {
(main) (navbar())
} (main)
} (footer(path))
) }
}
)
} }
fn page<'s, 'p, 'html>( pub(crate) fn to_list(list: Vec<crate::LinkDate>) -> String {
sack: &'s Sack, let mut groups = HashMap::<i32, Vec<_>>::new();
main: impl Renderable + 'p,
title: String,
) -> impl Renderable + 'html
where
's: 'html,
'p: 'html,
{
maud_move!(
(Raw("<!DOCTYPE html>"))
html lang="en" {
(head(sack, title))
body { for page in list {
(navbar()) groups.entry(page.date.year()).or_default().push(page);
(main) }
(footer(sack))
} let mut groups: Vec<_> = groups
} .into_iter()
) .map(|(k, mut v)| {
v.sort_by(|a, b| b.date.cmp(&a.date));
(k, v)
})
.collect();
groups.sort_by(|a, b| b.0.cmp(&a.0));
list::list("", &groups).render().into()
} }
pub(crate) fn to_list(sack: &Sack, list: Vec<crate::LinkDate>, title: String) -> String { pub(crate) fn map() -> impl Renderable {
let mut groups = HashMap::<i32, Vec<_>>::new(); page(
"Map",
maud!(
main {
div #map style="height: 100%; width: 100%" {}
for page in list { script type="module" {
groups.entry(page.date.year()).or_default().push(page); (Raw("import 'photos';"))
} }
}
let mut groups: Vec<_> = groups ),
.into_iter() None,
.map(|(k, mut v)| { )
v.sort_by(|a, b| b.date.cmp(&a.date));
(k, v)
})
.collect();
groups.sort_by(|a, b| b.0.cmp(&a.0));
list::list(sack, &groups, title).render().into()
} }
pub(crate) fn map<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html pub(crate) fn search() -> impl Renderable {
where page(
's: 'html, "Search",
{ maud!(
page( main #app {}
sack, script type="module" src="/js/search/dist/search.js" {}
maud!( ),
main { None,
div #map style="height: 100%; width: 100%" {} )
script type="module" {
(Raw("import 'photos';"))
}
}
),
String::from("Map"),
)
}
pub(crate) fn search<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
where
's: 'html,
{
page(
sack,
maud!(
main #app {}
script type="module" src="/js/search/dist/search.js" {}
),
String::from("Search"),
)
} }

View file

@ -6,88 +6,91 @@ use serde::Deserialize;
use crate::pipeline::{Content, Sack}; use crate::pipeline::{Content, Sack};
use crate::text::md::Outline; use crate::text::md::Outline;
use crate::{LinkDate, Linkable}; use crate::{Linkable, LinkDate};
/// Represents a simple post. /// Represents a simple post.
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
pub(crate) struct Post { pub(crate) struct Post {
pub(crate) title: String, pub(crate) title: String,
#[serde(with = "super::isodate")] #[serde(with = "super::isodate")]
pub(crate) date: DateTime<Utc>, pub(crate) date: DateTime<Utc>,
pub(crate) desc: Option<String>, pub(crate) desc: Option<String>,
} }
impl Content for Post { impl Content for Post {
fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) { fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
crate::text::md::parse(data, lib) crate::text::md::parse(data, lib)
} }
fn render<'s, 'p, 'html>( fn transform<'f, 'm, 's, 'html, T>(
self, &'f self,
sack: &'s Sack, content: T,
parsed: impl Renderable + 'p, outline: Outline,
outline: Outline, sack: &'s Sack,
bib: Option<Vec<String>>, bib: Option<Vec<String>>,
) -> impl Renderable + 'html ) -> impl Renderable + 'html
where where
's: 'html, 'f: 'html,
'p: 'html, 'm: 'html,
{ 's: 'html,
post(self, sack, parsed, outline, bib) T: Renderable + 'm,
} {
post(self, content, outline, bib, sack)
}
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> { fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
Some(Linkable::Date(LinkDate { Some(Linkable::Date(LinkDate {
link: crate::Link { link: crate::Link {
path, path,
name: self.title.to_owned(), name: self.title.to_owned(),
desc: self.desc.to_owned(), desc: self.desc.to_owned(),
}, },
date: self.date.to_owned(), date: self.date.to_owned(),
})) }))
} }
} }
pub fn post<'s, 'p, 'html>( pub fn post<'f, 'm, 's, 'html, T>(
fm: Post, fm: &'f Post,
sack: &'s Sack, content: T,
content: impl Renderable + 'p, outline: Outline,
outline: Outline, bib: Option<Vec<String>>,
bib: Option<Vec<String>>, sack: &'s Sack,
) -> impl Renderable + 'html ) -> impl Renderable + 'html
where where
's: 'html, 'f: 'html,
'p: 'html, 'm: 'html,
's: 'html,
T: Renderable + 'm
{ {
let heading = fm.title.clone(); let main = maud_move!(
let main = maud_move!( main .wiki-main {
main .wiki-main {
// Slide in/out for mobile // Slide in/out for mobile
input #wiki-aside-shown type="checkbox" hidden; input #wiki-aside-shown type="checkbox" hidden;
aside .wiki-aside { aside .wiki-aside {
// Slide button // Slide button
label .wiki-aside__slider for="wiki-aside-shown" { label .wiki-aside__slider for="wiki-aside-shown" {
img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24";
} }
(crate::html::misc::show_outline(outline)) (crate::html::misc::show_outline(outline))
} }
article .wiki-article /*class:list={classlist)*/ { article .wiki-article /*class:list={classlist)*/ {
header class="markdown" { header class="markdown" {
h1 #top { (heading) } h1 #top { (fm.title.clone()) }
} }
section .wiki-article__markdown.markdown { section .wiki-article__markdown.markdown {
(content) (content)
} }
@if let Some(bib) = bib { @if let Some(bib) = bib {
(crate::html::misc::show_bibliography(bib)) (crate::html::misc::show_bibliography(bib))
} }
} }
} }
); );
crate::html::page(sack, main, fm.title.clone()) crate::html::page(&fm.title, main, sack.get_file())
} }

View file

@ -1,105 +1,87 @@
use camino::Utf8PathBuf; use camino::Utf8PathBuf;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use hayagriva::Library; use hayagriva::Library;
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable}; use hypertext::{html_elements, maud_move, Renderable, GlobalAttributes, Raw};
use serde::Deserialize; use serde::Deserialize;
use crate::pipeline::{Content, Sack}; use crate::pipeline::{Content, Sack};
use crate::text::md::Outline; use crate::text::md::Outline;
use crate::{Link, LinkDate, Linkable}; use crate::{Link, LinkDate, Linkable};
const CSS: &str = r#"
.slides img {
margin-left: auto;
margin-right: auto;
max-height: 60vh;
}
"#;
/// Represents a slideshow /// Represents a slideshow
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
pub(crate) struct Slideshow { pub(crate) struct Slideshow {
pub title: String, pub title: String,
#[serde(with = "super::isodate")] #[serde(with = "super::isodate")]
pub date: DateTime<Utc>, pub date: DateTime<Utc>,
pub desc: Option<String>, pub desc: Option<String>,
} }
impl Content for Slideshow { impl Content for Slideshow {
fn parse(data: String, _: Option<&Library>) -> (Outline, String, Option<Vec<String>>) { fn transform<'f, 'm, 's, 'html, T>(
let html = data &'f self,
.split("\n-----\n") content: T,
.map(|chunk| { _: Outline,
chunk _: &'s Sack,
.split("\n---\n") _bib: Option<Vec<String>>,
.map(|s| crate::text::md::parse(s.to_owned(), None)) ) -> impl Renderable + 'html
.map(|e| e.1) where
.collect::<Vec<_>>() 'f: 'html,
}) 'm: 'html,
.map(|stack| match stack.len() > 1 { 's: 'html,
true => format!( T: Renderable + 'm {
"<section>{}</section>", show(self, content)
stack }
.into_iter()
.map(|slide| format!("<section>{slide}</section>"))
.collect::<String>()
),
false => format!("<section>{}</section>", stack[0]),
})
.collect::<String>();
(Outline(vec![]), html, None)
}
fn render<'s, 'p, 'html>( fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
self, Some(Linkable::Date(LinkDate {
sack: &'s Sack, link: Link {
parsed: impl Renderable + 'p, path,
_: Outline, name: self.title.to_owned(),
_: Option<Vec<String>>, desc: self.desc.to_owned(),
) -> impl Renderable + 'html },
where date: self.date.to_owned(),
's: 'html, }))
'p: 'html, }
{
show(self, sack, parsed)
}
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> { fn parse(data: &str, _: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
Some(Linkable::Date(LinkDate { let html = data
link: Link { .split("\n-----\n")
path, .map(|chunk| chunk.split("\n---\n").map(|s| crate::text::md::parse(s, None)).map(|e| e.1).collect::<Vec<_>>())
name: self.title.to_owned(), .map(|stack| match stack.len() > 1 {
desc: self.desc.to_owned(), true => format!("<section>{}</section>", stack.into_iter().map(|slide| format!("<section>{slide}</section>")).collect::<String>()),
}, false => format!("<section>{}</section>", stack[0])
date: self.date.to_owned(), })
})) .collect::<String>();
} (Outline(vec![]), html, None)
}
} }
pub fn show<'s, 'p, 'html>( pub fn show<'data, 'show>(
fm: Slideshow, fm: &'data Slideshow,
sack: &'s Sack, slides: impl Renderable + 'data
slides: impl Renderable + 'p, ) -> impl Renderable + 'show
) -> impl Renderable + 'html where
where 'data: 'show
's: 'html,
'p: 'html,
{ {
crate::html::bare( crate::html::bare(&fm.title, maud_move!(
sack, div .reveal {
maud_move!( div .slides {
div .reveal { (slides)
div .slides { }
(slides) }
}
}
script type="module" { script type="module" {
(Raw("import 'reveal';")) (Raw("import 'reveal';"))
} }
style { (Raw(CSS)) } style {r#"
), .slides img {
fm.title.clone(), margin-left: auto;
) margin-right: auto;
max-height: 60vh;
}
"#}
))
} }

View file

@ -10,82 +10,83 @@ use crate::{Link, Linkable};
/// Represents a wiki page /// Represents a wiki page
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
pub struct Wiki { pub struct Wiki {
pub title: String, pub title: String,
} }
impl Content for Wiki { impl Content for Wiki {
fn parse(data: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) { fn transform<'f, 'm, 's, 'html, T>(
crate::text::md::parse(data, lib) &'f self,
} content: T,
outline: Outline,
sack: &'s Sack,
bib: Option<Vec<String>>,
) -> impl Renderable + 'html
where
'f: 'html,
'm: 'html,
's: 'html,
T: Renderable + 'm {
wiki(self, content, outline, sack, bib)
}
fn render<'s, 'p, 'html>( fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> {
self, Some(Linkable::Link(Link {
sack: &'s Sack, path,
parsed: impl Renderable + 'p, name: self.title.to_owned(),
outline: Outline, desc: None,
bib: Option<Vec<String>>, }))
) -> impl Renderable + 'html }
where
's: 'html,
'p: 'html,
{
wiki(self, sack, parsed, outline, bib)
}
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable> { fn parse(data: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
Some(Linkable::Link(Link { crate::text::md::parse(data, lib)
path, }
name: self.title.to_owned(),
desc: None,
}))
}
} }
fn wiki<'s, 'p, 'html>( fn wiki<'data, 'html, 'sack, T>(
matter: Wiki, fm: &'data Wiki,
sack: &'s Sack, content: T,
parsed: impl Renderable + 'p, _: Outline,
_: Outline, sack: &'sack Sack,
bib: Option<Vec<String>>, bib: Option<Vec<String>>,
) -> impl Renderable + 'html ) -> impl Renderable + 'html
where where
's: 'html, 'sack: 'html,
'p: 'html, 'data: 'html,
T: Renderable + 'data
{ {
let heading = matter.title.clone(); let main = maud_move!(
let main = maud_move!( main .wiki-main {
main .wiki-main {
// Slide in/out for mobile // Slide in/out for mobile
input #wiki-aside-shown type="checkbox" hidden; input #wiki-aside-shown type="checkbox" hidden;
aside .wiki-aside { aside .wiki-aside {
// Slide button // Slide button
label .wiki-aside__slider for="wiki-aside-shown" { label .wiki-aside__slider for="wiki-aside-shown" {
img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24"; img .wiki-icon src="/static/svg/double-arrow.svg" width="24" height="24";
} }
// Navigation tree // Navigation tree
section .link-tree { section .link-tree {
div { div {
(crate::html::misc::show_page_tree(sack, "wiki/**/*.html")) (crate::html::misc::show_page_tree(sack, "wiki/**/*.html"))
} }
} }
} }
article .wiki-article /*class:list={classlist)*/ { article .wiki-article /*class:list={classlist)*/ {
header class="markdown" { header class="markdown" {
h1 #top { (heading) } h1 #top { (fm.title.clone()) }
} }
section .wiki-article__markdown.markdown { section .wiki-article__markdown.markdown {
(parsed) (content)
} }
@if let Some(bib) = bib { @if let Some(bib) = bib {
(crate::html::misc::show_bibliography(bib)) (crate::html::misc::show_bibliography(bib))
} }
} }
} }
); );
crate::html::page(sack, main, matter.title.to_owned()) crate::html::page(&fm.title, main, sack.get_file())
} }

View file

@ -1,339 +1,286 @@
mod build; mod build;
mod html; mod html;
mod md;
mod pipeline; mod pipeline;
mod text; mod text;
mod ts; mod ts;
mod utils; mod utils;
mod watch; mod watch;
use std::collections::HashSet;
use std::fs; use std::fs;
use std::process::Command; use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf}; use camino::{Utf8Path, Utf8PathBuf};
use chrono::{DateTime, Datelike, Utc}; use chrono::{DateTime, Datelike, Utc};
use clap::{Parser, ValueEnum}; use clap::{Parser, ValueEnum};
use gray_matter::engine::YAML; use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem, Sack};
use gray_matter::Matter;
use hypertext::{Raw, Renderable}; use hypertext::{Raw, Renderable};
use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem}; use once_cell::sync::Lazy;
use serde::Deserialize; use serde::Deserialize;
use crate::build::build_styles;
use crate::pipeline::Virtual; use crate::pipeline::Virtual;
use crate::build::build_styles;
#[derive(Parser, Debug, Clone)] #[derive(Parser, Debug, Clone)]
struct Args { struct Args {
#[clap(value_enum, index = 1, default_value = "build")] #[clap(value_enum, index = 1, default_value = "build")]
mode: Mode, mode: Mode,
} }
#[derive(ValueEnum, Debug, Clone, Copy)] #[derive(ValueEnum, Debug, Clone, Copy)]
enum Mode { enum Mode {
Build, Build,
Watch, Watch,
} }
#[derive(Debug)] #[derive(Debug)]
struct BuildContext { struct BuildInfo {
pub mode: Mode, pub year: i32,
pub year: i32, pub date: String,
pub date: String, pub link: String,
pub link: String, pub hash: String,
pub hash: String,
} }
static REPO: Lazy<BuildInfo> = Lazy::new(|| {
let time = chrono::Utc::now();
BuildInfo {
year: time.year(),
date: time.format("%Y/%m/%d %H:%M").to_string(),
link: "https://git.kamoshi.org/kamov/website".into(),
hash: String::from_utf8(
Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.unwrap()
.stdout
)
.unwrap()
.trim()
.into()
}
});
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Link { pub struct Link {
pub path: Utf8PathBuf, pub path: Utf8PathBuf,
pub name: String, pub name: String,
pub desc: Option<String>, pub desc: Option<String>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct LinkDate { pub struct LinkDate {
pub link: Link, pub link: Link,
pub date: DateTime<Utc>, pub date: DateTime<Utc>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Linkable { pub enum Linkable {
Link(Link), Link(Link),
Date(LinkDate), Date(LinkDate),
} }
fn main() {
let args = Args::parse();
let time = chrono::Utc::now();
let ctx = BuildContext { fn to_index<T>(item: PipelineItem) -> PipelineItem
mode: args.mode, where
year: time.year(), T: for<'de> Deserialize<'de> + Content + 'static,
date: time.format("%Y/%m/%d %H:%M").to_string(),
link: "https://git.kamoshi.org/kamov/website".into(),
hash: String::from_utf8(
Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.expect("Couldn't load git revision")
.stdout,
)
.expect("Invalid UTF8")
.trim()
.into(),
};
match args.mode {
Mode::Build => {
build(&ctx);
}
Mode::Watch => {
build(&ctx);
watch::watch().unwrap()
}
}
}
struct Source {
path: &'static str,
exts: HashSet<&'static str>,
func: fn(PipelineItem) -> PipelineItem,
}
impl Source {
fn get(&self) -> Vec<PipelineItem> {
pipeline::gather(self.path, &self.exts)
.into_iter()
.map(self.func)
.collect()
}
}
/// Run a full site build: wipe and recreate `dist`, gather and process all
/// content sources, render every output page, copy static files, and invoke
/// the external tools (pagefind, esbuild) that post-process assets.
fn build(ctx: &BuildContext) {
	// Start from a clean slate so stale files never survive a rebuild.
	if fs::metadata("dist").is_ok() {
		println!("Cleaning dist");
		fs::remove_dir_all("dist").unwrap();
	}
	fs::create_dir("dist").unwrap();
	// Every content source on disk, with the front matter type used to
	// render pages for that section of the site.
	let sources = vec![
		Source {
			path: "content/about.md",
			exts: ["md"].into(),
			func: as_index::<crate::html::Post>,
		},
		Source {
			path: "content/posts/**/*",
			exts: ["md", "mdx"].into(),
			func: as_index::<crate::html::Post>,
		},
		Source {
			path: "content/slides/**/*",
			exts: ["md", "lhs"].into(),
			func: as_index::<crate::html::Slideshow>,
		},
		Source {
			path: "content/wiki/**/*",
			exts: ["md"].into(),
			func: as_index::<crate::html::Wiki>,
		},
	];
	// Gather and process all sources; items still unclaimed after the bundle
	// stage are reported and dropped.
	let assets: Vec<Output> = sources
		.iter()
		.flat_map(Source::get)
		.map(to_bundle)
		.filter_map(|item| match item {
			PipelineItem::Skip(skip) => {
				println!("Skipping {}", skip.path);
				None
			}
			PipelineItem::Take(take) => Some(take),
		})
		.collect();
	// Append the synthetic pages (map, search, home, listings) that don't
	// correspond 1:1 to a file on disk, then flatten into one output list.
	let assets: Vec<Output> = vec![
		assets,
		vec![
			Output {
				kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(),
				path: "map/index.html".into(),
				link: None,
			},
			Output {
				kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into())
					.into(),
				path: "search/index.html".into(),
				link: None,
			},
			Output {
				// The home page is backed by `content/index.md` but rendered
				// through a dedicated template, hence the explicit Asset.
				kind: Asset {
					kind: pipeline::AssetKind::html(|sack| {
						let data = std::fs::read_to_string("content/index.md").unwrap();
						let (_, html, _) = text::md::parse(data, None);
						crate::html::home(sack, Raw(html))
							.render()
							.to_owned()
							.into()
					}),
					meta: pipeline::FileItem {
						kind: pipeline::FileItemKind::Index,
						path: "content/index.md".into(),
					},
				}
				.into(),
				path: "index.html".into(),
				link: None,
			},
			Output {
				kind: Virtual::new(|sack| {
					crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into())
				})
				.into(),
				path: "posts/index.html".into(),
				link: None,
			},
			Output {
				kind: Virtual::new(|sack| {
					crate::html::to_list(sack, sack.get_links("slides/**/*.html"), "Slideshows".into())
				})
				.into(),
				path: "slides/index.html".into(),
				link: None,
			},
		],
	]
	.into_iter()
	.flatten()
	.collect();
	// Render everything, timing the hot path of the build.
	{
		let now = std::time::Instant::now();
		pipeline::render_all(ctx, &assets);
		println!("Elapsed: {:.2?}", now.elapsed());
	}
	utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
	build_styles();
	// Index the rendered site for client-side search.
	let res = Command::new("pagefind")
		.args(["--site", "dist"])
		.output()
		.unwrap();
	println!("{}", String::from_utf8(res.stdout).unwrap());
	// Bundle and minify the client-side JavaScript/TypeScript entry points.
	let res = Command::new("esbuild")
		.arg("js/vanilla/reveal.js")
		.arg("js/vanilla/photos.ts")
		.arg("js/search/dist/search.js")
		.arg("--format=esm")
		.arg("--bundle")
		.arg("--splitting")
		.arg("--minify")
		.arg("--outdir=dist/js/")
		.output()
		.unwrap();
	println!("{}", String::from_utf8(res.stderr).unwrap());
}
pub fn parse_frontmatter<T>(raw: &str) -> (T, String)
where
T: for<'de> Deserialize<'de>,
{ {
let matter = Matter::<YAML>::new(); let meta = match item {
let result = matter.parse(raw); PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Index) => meta,
_ => return item,
};
( let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap();
// Just the front matter let dir = match meta.path.file_stem().unwrap() {
result.data.unwrap().deserialize::<T>().unwrap(), "index" => dir.to_owned(),
// The rest of the content name => dir.join(name),
result.content, };
) let path = dir.join("index.html");
}
fn as_index<T>(item: PipelineItem) -> PipelineItem match meta.path.extension() {
where Some("md" | "mdx" | "lhs") => {
T: for<'de> Deserialize<'de> + Content + Clone + 'static, let data = fs::read_to_string(&meta.path).unwrap();
{ let (fm, md) = md::preflight::<T>(&data);
let meta = match item { let link = T::as_link(&fm, Utf8Path::new("/").join(dir));
PipelineItem::Skip(e) if matches!(e.kind, FileItemKind::Index) => e,
_ => return item,
};
let dir = meta.path.parent().unwrap().strip_prefix("content").unwrap(); let call = move |sack: &Sack| {
let dir = match meta.path.file_stem().unwrap() { let lib = sack.get_library();
"index" => dir.to_owned(), let (outline, html, bib) = T::parse(&md, lib);
name => dir.join(name), T::transform(&fm, Raw(html), outline, sack, bib).render().into()
}; };
let path = dir.join("index.html");
match meta.path.extension() { Output {
Some("md" | "mdx" | "lhs") => { kind: Asset {
let data = fs::read_to_string(&meta.path).unwrap(); kind: pipeline::AssetKind::Html(Box::new(call)),
let (fm, md) = parse_frontmatter::<T>(&data); meta,
let link = T::as_link(&fm, Utf8Path::new("/").join(dir)); }.into(),
path,
Output { link,
kind: Asset { }.into()
kind: pipeline::AssetKind::html(move |sack| { },
let lib = sack.get_library(); _ => meta.into(),
let (outline, parsed, bib) = T::parse(md.clone(), lib); }
T::render(fm.clone(), sack, Raw(parsed), outline, bib)
.render()
.into()
}),
meta,
}
.into(),
path,
link,
}
.into()
}
_ => meta.into(),
}
} }
fn to_bundle(item: PipelineItem) -> PipelineItem { fn to_bundle(item: PipelineItem) -> PipelineItem {
let meta = match item { let meta = match item {
PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta, PipelineItem::Skip(meta) if matches!(meta.kind, FileItemKind::Bundle) => meta,
_ => return item, _ => return item,
}; };
let path = meta.path.strip_prefix("content").unwrap().to_owned(); let path = meta.path.strip_prefix("content").unwrap().to_owned();
match meta.path.extension() { match meta.path.extension() {
// any image // any image
Some("jpg" | "png" | "gif") => Output { Some("jpg" | "png" | "gif") => {
kind: Asset { Output {
kind: AssetKind::Image, kind: Asset {
meta, kind: AssetKind::Image,
} meta,
.into(), }.into(),
path, path,
link: None, link: None,
} }.into()
.into(), },
// bibliography // bibliography
Some("bib") => { Some("bib") => {
let data = fs::read_to_string(&meta.path).unwrap(); let data = fs::read_to_string(&meta.path).unwrap();
let data = hayagriva::io::from_biblatex_str(&data).unwrap(); let data = hayagriva::io::from_biblatex_str(&data).unwrap();
Output { Output {
kind: Asset { kind: Asset {
kind: AssetKind::Bibtex(data), kind: AssetKind::Bibtex(data),
meta, meta,
} }.into(),
.into(), path,
path, link: None,
link: None, }.into()
} },
.into() _ => meta.into(),
} }
_ => meta.into(), }
}
/// Run a full site build: wipe and recreate `dist`, gather and process every
/// content source, render all outputs, copy static files, and invoke the
/// external post-processing tools (pagefind, esbuild).
fn build() {
	// Start from a clean slate so stale files never survive a rebuild.
	if fs::metadata("dist").is_ok() {
		println!("Cleaning dist");
		fs::remove_dir_all("dist").unwrap();
	}
	fs::create_dir("dist").unwrap();
	// Gather each content source and process it with the matching front
	// matter type; leftover unclaimed items are reported and dropped.
	let assets: Vec<Output> = [
		pipeline::gather("content/about.md", &["md"].into())
			.into_iter()
			// The explicit fn-pointer cast unifies the iterator item types
			// so the array literal below type-checks.
			.map(to_index::<crate::html::Post> as fn(PipelineItem) -> PipelineItem),
		pipeline::gather("content/posts/**/*", &["md", "mdx"].into())
			.into_iter()
			.map(to_index::<crate::html::Post>),
		pipeline::gather("content/slides/**/*", &["md", "lhs"].into())
			.into_iter()
			.map(to_index::<crate::html::Slideshow>),
		pipeline::gather("content/wiki/**/*", &["md"].into())
			.into_iter()
			.map(to_index::<crate::html::Wiki>),
	]
	.into_iter()
	.flatten()
	.map(to_bundle)
	.filter_map(|item| match item {
		PipelineItem::Skip(skip) => {
			println!("Skipping {}", skip.path);
			None
		},
		PipelineItem::Take(take) => Some(take),
	})
	.collect();
	// Append the synthetic pages (map, search, home, listings) that don't
	// correspond 1:1 to a file on disk, then flatten into one output list.
	let assets: Vec<Output> = vec![
		assets,
		vec![
			Output {
				kind: Virtual::new(|_| crate::html::map().render().to_owned().into()).into(),
				path: "map/index.html".into(),
				link: None,
			},
			Output {
				kind: Virtual::new(|_| crate::html::search().render().to_owned().into()).into(),
				path: "search/index.html".into(),
				link: None,
			},
			Output {
				// The home page is backed by `content/index.md` but rendered
				// through a dedicated template, hence the explicit Asset.
				kind: Asset {
					kind: pipeline::AssetKind::Html(Box::new(|_| {
						let data = std::fs::read_to_string("content/index.md").unwrap();
						let (_, html, _) = text::md::parse(&data, None);
						crate::html::home(Raw(html)).render().to_owned().into()
					})),
					meta: pipeline::FileItem {
						kind: pipeline::FileItemKind::Index,
						path: "content/index.md".into()
					}
				}.into(),
				path: "index.html".into(),
				link: None,
			},
			Output {
				kind: Virtual::new(|sack| crate::html::to_list(sack.get_links("posts/**/*.html"))).into(),
				path: "posts/index.html".into(),
				link: None,
			},
			Output {
				kind: Virtual::new(|sack| crate::html::to_list(sack.get_links("slides/**/*.html"))).into(),
				path: "slides/index.html".into(),
				link: None,
			},
		],
	]
	.into_iter()
	.flatten()
	.collect();
	// Render everything, timing the hot path of the build.
	{
		let now = std::time::Instant::now();
		pipeline::render_all(&assets);
		println!("Elapsed: {:.2?}", now.elapsed());
	}
	utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
	build_styles();
	// Index the rendered site for client-side search.
	let res = Command::new("pagefind")
		.args(["--site", "dist"])
		.output()
		.unwrap();
	println!("{}", String::from_utf8(res.stdout).unwrap());
	// Bundle and minify the client-side JavaScript/TypeScript entry points.
	let res = Command::new("esbuild")
		.arg("js/vanilla/reveal.js")
		.arg("js/vanilla/photos.ts")
		.arg("js/search/dist/search.js")
		.arg("--format=esm")
		.arg("--bundle")
		.arg("--splitting")
		.arg("--minify")
		.arg("--outdir=dist/js/")
		.output()
		.unwrap();
	println!("{}", String::from_utf8(res.stderr).unwrap());
}
fn main() {
let args = Args::parse();
match args.mode {
Mode::Build => build(),
Mode::Watch => {
build();
watch::watch().unwrap()
},
}
} }

43
src/md/matter.rs Normal file
View file

@ -0,0 +1,43 @@
use gray_matter::{engine::YAML, Matter};
use serde::Deserialize;
/// Split a raw document into its deserialized front matter and the remaining
/// Markdown content.
///
/// # Panics
/// Panics when the document has no front matter block, or when the front
/// matter cannot be deserialized into `T`. The `expect` messages identify
/// which of the two steps failed, so a bad content file is easy to locate.
pub fn preflight<T>(raw: &str) -> (T, String)
where
	T: for<'de> Deserialize<'de>,
{
	let matter = Matter::<YAML>::new();
	let result = matter.parse(raw);
	// Just the front matter; absent or malformed front matter is a content
	// authoring error, surfaced with a descriptive panic instead of a bare
	// `Option::unwrap` message.
	let data = result
		.data
		.expect("Document is missing a front matter block");
	(
		data.deserialize::<T>()
			.expect("Front matter doesn't match the expected schema"),
		// The actual markdown content
		result.content,
	)
}
mod isodate {
	use chrono::{DateTime, Utc};
	use serde::{self, Deserialize, Deserializer};

	// NOTE: only deserialization is needed for front matter at the moment;
	// a matching `serialize` can be added here when the reverse direction
	// becomes necessary.

	/// Deserialize an RFC 3339 timestamp string into a UTC datetime,
	/// converting any parse failure into a serde error.
	pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
	where
		D: Deserializer<'de>,
	{
		let raw = String::deserialize(deserializer)?;
		chrono::DateTime::parse_from_rfc3339(&raw)
			.map(Into::into)
			.map_err(serde::de::Error::custom)
	}
}

3
src/md/mod.rs Normal file
View file

@ -0,0 +1,3 @@
mod matter;
pub use matter::preflight;

View file

@ -11,75 +11,67 @@ use hayagriva::Library;
use hypertext::Renderable; use hypertext::Renderable;
use crate::text::md::Outline; use crate::text::md::Outline;
use crate::{BuildContext, Link, LinkDate, Linkable}; use crate::{Link, LinkDate, Linkable};
/// Represents a piece of content that can be rendered as a page. This trait needs to be /// Represents a piece of content that can be rendered as a page. This trait needs to be
/// implemented for the front matter associated with some web page as that is what ultimately /// implemented for the front matter associated with some web page as that is what ultimately
/// matters when rendering the page. Each front matter *definition* maps to exactly one kind of /// matters when rendering the page. Each front matter *definition* maps to exactly one kind of
/// rendered page on the website. /// rendered page on the website.
pub(crate) trait Content { pub(crate) trait Content {
/// Parse the document. Pass an optional library for bibliography. /// Parse the document. Pass an optional library for bibliography.
/// This generates the initial HTML markup from content. fn parse(document: &str, library: Option<&Library>) -> (Outline, String, Option<Vec<String>>);
fn parse(document: String, library: Option<&Library>)
-> (Outline, String, Option<Vec<String>>);
/// Render the full page from parsed content. fn transform<'fm, 'md, 'sack, 'html, T>(
fn render<'s, 'p, 'html>( &'fm self,
self, content: T,
sack: &'s Sack, outline: Outline,
parsed: impl Renderable + 'p, sack: &'sack Sack,
outline: Outline, bib: Option<Vec<String>>,
bib: Option<Vec<String>>, ) -> impl Renderable + 'html
) -> impl Renderable + 'html where
where 'fm: 'html,
's: 'html, 'md: 'html,
'p: 'html; 'sack: 'html,
T: Renderable + 'md;
/// Get link for this content fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable>;
fn as_link(&self, path: Utf8PathBuf) -> Option<Linkable>;
} }
/// Marks whether the item should be treated as a content page, converted into a standalone HTML /// Marks whether the item should be treated as a content page, converted into a standalone HTML
/// page, or as a bundled asset. /// page, or as a bundled asset.
#[derive(Debug)] #[derive(Debug)]
pub(crate) enum FileItemKind { pub(crate) enum FileItemKind {
/// Marks items converted to `index.html`. /// Marks items converted to `index.html`.
Index, Index,
/// Marks items from bundle. /// Marks items from bundle.
Bundle, Bundle,
} }
/// Metadata for a single item consumed by SSG. /// Metadata for a single item consumed by SSG.
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct FileItem { pub(crate) struct FileItem {
/// The kind of an item from disk. /// The kind of an item from disk.
pub kind: FileItemKind, pub kind: FileItemKind,
/// Original source file location. /// Original source file location.
pub path: Utf8PathBuf, pub path: Utf8PathBuf,
} }
/// Marks how the asset should be processed by the SSG. /// Marks how the asset should be processed by the SSG.
pub(crate) enum AssetKind { pub(crate) enum AssetKind {
/// Data renderable to HTML. In order to process the data, a closure should be called. /// Data renderable to HTML. In order to process the data, a closure should be called.
Html(Box<dyn Fn(&Sack) -> String>), Html(Box<dyn Fn(&Sack) -> String>),
/// Bibliographical data. /// Bibliographical data.
Bibtex(Library), Bibtex(Library),
/// Image. For now they are simply cloned to the `dist` director. /// Image. For now they are simply cloned to the `dist` director.
Image, Image,
}
impl AssetKind {
pub fn html(f: impl Fn(&Sack) -> String + 'static) -> Self {
Self::Html(Box::new(f))
}
} }
/// Asset corresponding to a file on disk. /// Asset corresponding to a file on disk.
pub(crate) struct Asset { pub(crate) struct Asset {
/// The kind of a processed asset. /// The kind of a processed asset.
pub kind: AssetKind, pub kind: AssetKind,
/// File metadata /// File metadata
pub meta: FileItem, pub meta: FileItem,
} }
/// Dynamically generated asset not corresponding to any file on disk. This is useful when the /// Dynamically generated asset not corresponding to any file on disk. This is useful when the
@ -87,228 +79,223 @@ pub(crate) struct Asset {
pub(crate) struct Virtual(Box<dyn Fn(&Sack) -> String>); pub(crate) struct Virtual(Box<dyn Fn(&Sack) -> String>);
impl Virtual { impl Virtual {
pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self { pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self {
Self(Box::new(call)) Self(Box::new(call))
} }
} }
/// The kind of an output item. /// The kind of an output item.
pub(crate) enum OutputKind { pub(crate) enum OutputKind {
/// Marks an output item which corresponds to a file on disk. /// Marks an output item which corresponds to a file on disk.
Asset(Asset), Asset(Asset),
/// Marks an output item which doesn't correspond to any file. /// Marks an output item which doesn't correspond to any file.
Virtual(Virtual), Virtual(Virtual),
} }
impl From<Asset> for OutputKind { impl From<Asset> for OutputKind {
fn from(value: Asset) -> Self { fn from(value: Asset) -> Self {
OutputKind::Asset(value) OutputKind::Asset(value)
} }
} }
impl From<Virtual> for OutputKind { impl From<Virtual> for OutputKind {
fn from(value: Virtual) -> Self { fn from(value: Virtual) -> Self {
OutputKind::Virtual(value) OutputKind::Virtual(value)
} }
} }
/// Renderable output /// Renderable output
pub(crate) struct Output { pub(crate) struct Output {
/// The kind of an output item /// The kind of an output item
pub(crate) kind: OutputKind, pub(crate) kind: OutputKind,
/// Path for the output in dist /// Path for the output in dist
pub(crate) path: Utf8PathBuf, pub(crate) path: Utf8PathBuf,
/// Optional URL data for outputted page. /// Optional URL data for outputted page.
pub(crate) link: Option<Linkable>, pub(crate) link: Option<Linkable>,
} }
/// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as /// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as
/// `Take`, which means it needs to have an output location assigned to itself. /// `Take`, which means it needs to have an output location assigned to itself.
pub(crate) enum PipelineItem { pub(crate) enum PipelineItem {
/// Unclaimed file. /// Unclaimed file.
Skip(FileItem), Skip(FileItem),
/// Data ready to be processed. /// Data ready to be processed.
Take(Output), Take(Output),
} }
impl From<FileItem> for PipelineItem { impl From<FileItem> for PipelineItem {
fn from(value: FileItem) -> Self { fn from(value: FileItem) -> Self {
Self::Skip(value) Self::Skip(value)
} }
} }
impl From<Output> for PipelineItem { impl From<Output> for PipelineItem {
fn from(value: Output) -> Self { fn from(value: Output) -> Self {
Self::Take(value) Self::Take(value)
} }
} }
/// This struct allows for querying the website hierarchy. It is passed to each rendered website /// This struct allows for querying the website hierarchy. It is passed to each rendered website
/// page, so that it can easily access the website metadata. /// page, so that it can easily access the website metadata.
pub(crate) struct Sack<'a> { pub(crate) struct Sack<'a> {
pub ctx: &'a BuildContext, /// Literally all of the content
/// Literally all of the content hole: &'a [Output],
hole: &'a [Output], /// Current path for the page being rendered
/// Current path for the page being rendered path: &'a Utf8PathBuf,
path: &'a Utf8PathBuf, /// Original file location for this page
/// Original file location for this page file: Option<&'a Utf8PathBuf>,
file: Option<&'a Utf8PathBuf>,
} }
impl<'a> Sack<'a> { impl<'a> Sack<'a> {
pub fn get_links(&self, path: &str) -> Vec<LinkDate> { pub fn new(hole: &'a [Output], path: &'a Utf8PathBuf, file: Option<&'a Utf8PathBuf>) -> Self {
let pattern = glob::Pattern::new(path).expect("Bad glob pattern"); Self { hole, path, file }
self.hole }
.iter()
.filter(|item| pattern.matches_path(item.path.as_ref()))
.filter_map(|item| match &item.link {
Some(Linkable::Date(link)) => Some(link.clone()),
_ => None,
})
.collect()
}
pub fn get_tree(&self, path: &str) -> TreePage { pub fn get_links(&self, path: &str) -> Vec<LinkDate> {
let glob = glob::Pattern::new(path).expect("Bad glob pattern"); let pattern = glob::Pattern::new(path).expect("Bad glob pattern");
let list = self self.hole
.hole .iter()
.iter() .filter(|item| pattern.matches_path(item.path.as_ref()))
.filter(|item| glob.matches_path(item.path.as_ref())) .filter_map(|item| match &item.link {
.filter_map(|item| match &item.link { Some(Linkable::Date(link)) => Some(link.clone()),
Some(Linkable::Link(link)) => Some(link.clone()), _ => None,
_ => None, })
}); .collect()
}
let mut tree = TreePage::new(); pub fn get_tree(&self, path: &str) -> TreePage {
for link in list { let glob = glob::Pattern::new(path).expect("Bad glob pattern");
tree.add_link(&link); let list = self
} .hole
.iter()
.filter(|item| glob.matches_path(item.path.as_ref()))
.filter_map(|item| match &item.link {
Some(Linkable::Link(link)) => Some(link.clone()),
_ => None,
});
tree let mut tree = TreePage::new();
} for link in list {
tree.add_link(&link);
}
pub fn get_library(&self) -> Option<&Library> { tree
let glob = format!("{}/*.bib", self.path.parent()?); }
let glob = glob::Pattern::new(&glob).expect("Bad glob pattern");
let opts = glob::MatchOptions {
case_sensitive: true,
require_literal_separator: true,
require_literal_leading_dot: false,
};
self.hole pub fn get_library(&self) -> Option<&Library> {
.iter() let glob = format!("{}/*.bib", self.path.parent()?);
.filter(|item| glob.matches_path_with(item.path.as_ref(), opts)) let glob = glob::Pattern::new(&glob).expect("Bad glob pattern");
.filter_map(|asset| match asset.kind { let opts = glob::MatchOptions {
OutputKind::Asset(ref real) => Some(real), case_sensitive: true,
_ => None, require_literal_separator: true,
}) require_literal_leading_dot: false,
.find_map(|asset| match asset.kind { };
AssetKind::Bibtex(ref lib) => Some(lib),
_ => None,
})
}
/// Get the path for original file location self.hole
pub fn get_file(&self) -> Option<&'a Utf8Path> { .iter()
self.file.map(Utf8PathBuf::as_ref) .filter(|item| glob.matches_path_with(item.path.as_ref(), opts))
} .filter_map(|asset| match asset.kind {
OutputKind::Asset(ref real) => Some(real),
_ => None,
})
.find_map(|asset| match asset.kind {
AssetKind::Bibtex(ref lib) => Some(lib),
_ => None,
})
}
/// Get the path for original file location
pub fn get_file(&self) -> Option<&'a Utf8Path> {
self.file.map(Utf8PathBuf::as_ref)
}
} }
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct TreePage { pub(crate) struct TreePage {
pub link: Option<Link>, pub link: Option<Link>,
pub subs: HashMap<String, TreePage>, pub subs: HashMap<String, TreePage>,
} }
impl TreePage { impl TreePage {
fn new() -> Self { fn new() -> Self {
TreePage { TreePage {
link: None, link: None,
subs: HashMap::new(), subs: HashMap::new(),
} }
} }
fn add_link(&mut self, link: &Link) { fn add_link(&mut self, link: &Link) {
let mut ptr = self; let mut ptr = self;
for part in link.path.iter().skip(1) { for part in link.path.iter().skip(1) {
ptr = ptr.subs.entry(part.to_string()).or_insert(TreePage::new()); ptr = ptr.subs.entry(part.to_string()).or_insert(TreePage::new());
} }
ptr.link = Some(link.clone()); ptr.link = Some(link.clone());
} }
} }
pub fn gather(pattern: &str, exts: &HashSet<&'static str>) -> Vec<PipelineItem> { pub fn gather(pattern: &str, exts: &HashSet<&'static str>) -> Vec<PipelineItem> {
glob(pattern) glob(pattern)
.expect("Invalid glob pattern") .expect("Invalid glob pattern")
.filter_map(|path| { .filter_map(|path| {
let path = path.unwrap(); let path = path.unwrap();
let path = Utf8PathBuf::from_path_buf(path).expect("Filename is not valid UTF8"); let path = Utf8PathBuf::from_path_buf(path).expect("Filename is not valid UTF8");
match path.is_dir() { match path.is_dir() {
true => None, true => None,
false => Some(to_source(path, exts)), false => Some(to_source(path, exts)),
} }
}) })
.map(Into::into) .map(Into::into)
.collect() .collect()
} }
fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem { fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
let hit = path.extension().map_or(false, |ext| exts.contains(ext)); let hit = path.extension().map_or(false, |ext| exts.contains(ext));
let kind = match hit { let kind = match hit {
true => FileItemKind::Index, true => FileItemKind::Index,
false => FileItemKind::Bundle, false => FileItemKind::Bundle,
}; };
FileItem { kind, path } FileItem { kind, path }
} }
pub fn render_all(ctx: &BuildContext, items: &[Output]) { pub fn render_all(items: &[Output]) {
for item in items { for item in items {
let file = match &item.kind { let file = match &item.kind {
OutputKind::Asset(a) => Some(&a.meta.path), OutputKind::Asset(a) => Some(&a.meta.path),
OutputKind::Virtual(_) => None, OutputKind::Virtual(_) => None,
}; };
render( render(item, &Sack::new(items, &item.path, file));
item, }
Sack {
ctx,
hole: items,
path: &item.path,
file,
},
);
}
} }
fn render(item: &Output, sack: Sack) { fn render(item: &Output, sack: &Sack) {
let o = Utf8Path::new("dist").join(&item.path); let o = Utf8Path::new("dist").join(&item.path);
fs::create_dir_all(o.parent().unwrap()).unwrap(); fs::create_dir_all(o.parent().unwrap()).unwrap();
match item.kind { match item.kind {
OutputKind::Asset(ref real) => { OutputKind::Asset(ref real) => {
let i = &real.meta.path; let i = &real.meta.path;
match &real.kind { match &real.kind {
AssetKind::Html(closure) => { AssetKind::Html(closure) => {
let mut file = File::create(&o).unwrap(); let mut file = File::create(&o).unwrap();
file.write_all(closure(&sack).as_bytes()).unwrap(); file.write_all(closure(sack).as_bytes()).unwrap();
println!("HTML: {} -> {}", i, o); println!("HTML: {} -> {}", i, o);
} }
AssetKind::Bibtex(_) => {} AssetKind::Bibtex(_) => {}
AssetKind::Image => { AssetKind::Image => {
fs::create_dir_all(o.parent().unwrap()).unwrap(); fs::create_dir_all(o.parent().unwrap()).unwrap();
fs::copy(i, &o).unwrap(); fs::copy(i, &o).unwrap();
println!("Image: {} -> {}", i, o); println!("Image: {} -> {}", i, o);
} }
}; };
} }
OutputKind::Virtual(Virtual(ref closure)) => { OutputKind::Virtual(Virtual(ref closure)) => {
let mut file = File::create(&o).unwrap(); let mut file = File::create(&o).unwrap();
file.write_all(closure(&sack).as_bytes()).unwrap(); file.write_all(closure(sack).as_bytes()).unwrap();
println!("Virtual: -> {}", o); println!("Virtual: -> {}", o);
} }
} }
} }

View file

@ -1,11 +1,6 @@
use std::collections::HashMap; use std::collections::HashMap;
use hayagriva::{ use hayagriva::{archive::ArchivedStyle, citationberg::{IndependentStyle, Locale, Style}, BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest, Library};
archive::ArchivedStyle,
citationberg::{IndependentStyle, Locale, Style},
BibliographyDriver, BibliographyRequest, BufWriteFormat, CitationItem, CitationRequest,
Library,
};
use hypertext::Renderable; use hypertext::Renderable;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd, TextMergeStream}; use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd, TextMergeStream};
@ -13,339 +8,315 @@ use regex::Regex;
use crate::ts; use crate::ts;
static OPTS: Lazy<Options> = Lazy::new(|| {
Options::empty()
.union(Options::ENABLE_MATH)
.union(Options::ENABLE_TABLES)
.union(Options::ENABLE_TASKLISTS)
.union(Options::ENABLE_STRIKETHROUGH)
.union(Options::ENABLE_SMART_PUNCTUATION)
});
static KATEX_I: Lazy<katex::Opts> = Lazy::new(|| { static OPTS: Lazy<Options> = Lazy::new(||
katex::opts::Opts::builder() Options::empty()
.output_type(katex::OutputType::Mathml) .union(Options::ENABLE_MATH)
.build() .union(Options::ENABLE_TABLES)
.unwrap() .union(Options::ENABLE_TASKLISTS)
}); .union(Options::ENABLE_STRIKETHROUGH)
.union(Options::ENABLE_SMART_PUNCTUATION)
);
static KATEX_B: Lazy<katex::Opts> = Lazy::new(|| { static KATEX_I: Lazy<katex::Opts> = Lazy::new(||
katex::opts::Opts::builder() katex::opts::Opts::builder()
.output_type(katex::OutputType::Mathml) .output_type(katex::OutputType::Mathml)
.display_mode(true) .build()
.build() .unwrap()
.unwrap() );
});
static KATEX_B: Lazy<katex::Opts> = Lazy::new(||
katex::opts::Opts::builder()
.output_type(katex::OutputType::Mathml)
.display_mode(true)
.build()
.unwrap()
);
static LOCALE: Lazy<Vec<Locale>> = Lazy::new(hayagriva::archive::locales); static LOCALE: Lazy<Vec<Locale>> = Lazy::new(hayagriva::archive::locales);
static STYLE: Lazy<IndependentStyle> = static STYLE: Lazy<IndependentStyle> = Lazy::new(||
Lazy::new(|| match ArchivedStyle::InstituteOfPhysicsNumeric.get() { match ArchivedStyle::InstituteOfPhysicsNumeric.get() {
Style::Independent(style) => style, Style::Independent(style) => style,
Style::Dependent(_) => unreachable!(), Style::Dependent(_) => unreachable!(),
}); }
);
pub struct Outline(pub Vec<(String, String)>); pub struct Outline(pub Vec<(String, String)>);
pub fn parse(text: String, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
let (outline, stream) = {
let stream = Parser::new_ext(&text, *OPTS);
let mut stream: Vec<_> = TextMergeStream::new(stream).collect();
let outline = set_heading_ids(&mut stream);
(outline, stream)
};
let stream = stream pub fn parse(text: &str, lib: Option<&Library>) -> (Outline, String, Option<Vec<String>>) {
.into_iter() let (outline, stream) = {
.map(make_math) let stream = Parser::new_ext(text, *OPTS);
.map(make_emoji) let mut stream: Vec<_> = TextMergeStream::new(stream).collect();
.collect::<Vec<_>>(); let outline = set_heading_ids(&mut stream);
(outline, stream)
};
let stream = make_code(stream) let stream = stream.into_iter()
.into_iter() .map(make_math)
.flat_map(make_ruby) .map(make_emoji)
.flat_map(make_cite) .collect::<Vec<_>>();
.collect::<Vec<_>>();
let (stream, bib) = match lib { let stream = make_code(stream)
Some(lib) => make_bib(stream, lib), .into_iter()
None => (stream, None), .flat_map(make_ruby)
}; .flat_map(make_cite)
.collect::<Vec<_>>();
let mut html = String::new(); let (stream, bib) = match lib {
pulldown_cmark::html::push_html(&mut html, stream.into_iter()); Some(lib) => make_bib(stream, lib),
None => (stream, None),
};
(outline, html, bib) let mut html = String::new();
pulldown_cmark::html::push_html(&mut html, stream.into_iter());
(outline, html, bib)
} }
fn make_bib<'a, 'b>( fn make_bib<'a, 'b>(stream: Vec<Event<'a>>, lib: &'b Library) -> (Vec<Event<'a>>, Option<Vec<String>>) {
stream: Vec<Event<'a>>, let mut driver = BibliographyDriver::new();
lib: &'b Library,
) -> (Vec<Event<'a>>, Option<Vec<String>>) {
let mut driver = BibliographyDriver::new();
for event in stream.iter() { for event in stream.iter() {
match event { match event {
Event::InlineMath(ref text) => match lib.get(text) { Event::InlineMath(ref text) => match lib.get(text) {
Some(entry) => driver.citation(CitationRequest::from_items( Some(entry) => driver.citation(CitationRequest::from_items(vec![CitationItem::with_entry(entry)], &STYLE, &LOCALE)),
vec![CitationItem::with_entry(entry)], None => (),
&STYLE, },
&LOCALE, _ => (),
)), }
None => (), }
},
_ => (),
}
}
// add fake citation to make all entries show up // add fake citation to make all entries show up
driver.citation(CitationRequest::from_items( driver.citation(CitationRequest::from_items(lib.iter().map(CitationItem::with_entry).collect(), &STYLE, &LOCALE));
lib.iter().map(CitationItem::with_entry).collect(),
&STYLE,
&LOCALE,
));
let res = driver.finish(BibliographyRequest { let res = driver.finish(BibliographyRequest { style: &STYLE, locale: None, locale_files: &LOCALE });
style: &STYLE,
locale: None,
locale_files: &LOCALE,
});
let mut n = 0; let mut n = 0;
let stream = stream let stream = stream.into_iter()
.into_iter() .map(|event| match event {
.map(|event| match event { Event::InlineMath(name) => {
Event::InlineMath(name) => { let mut buffer = String::from("<cite>");
let mut buffer = String::from("<cite>"); match res.citations.get(n) {
match res.citations.get(n) { Some(rf) => rf.citation.write_buf(&mut buffer, BufWriteFormat::Html).unwrap(),
Some(rf) => rf None => buffer.push_str(&name),
.citation };
.write_buf(&mut buffer, BufWriteFormat::Html) buffer.push_str("</cite>");
.unwrap(), n += 1;
None => buffer.push_str(&name), Event::InlineHtml(buffer.into())
}; },
buffer.push_str("</cite>"); _ => event
n += 1; })
Event::InlineHtml(buffer.into()) .collect();
}
_ => event,
})
.collect();
let bib = res.bibliography.map(|bib| { let bib = res.bibliography.map(|bib|
bib.items bib.items.iter()
.iter() .map(|x| {
.map(|x| { let mut buffer = String::new();
let mut buffer = String::new(); x.content.write_buf(&mut buffer, BufWriteFormat::Html).unwrap();
x.content buffer
.write_buf(&mut buffer, BufWriteFormat::Html) })
.unwrap(); .collect::<Vec<_>>()
buffer );
})
.collect::<Vec<_>>()
});
(stream, bib) (stream, bib)
} }
static RE_CITE: Lazy<Regex> = Lazy::new(|| Regex::new(r":cite\[([^\]]+)\]").unwrap()); static RE_CITE: Lazy<Regex> = Lazy::new(|| Regex::new(r":cite\[([^\]]+)\]").unwrap());
#[derive(Debug)] #[derive(Debug)]
enum Annotated_<'a> { enum Annotated_<'a> {
Text(&'a str), Text(&'a str),
Cite(&'a str), Cite(&'a str),
} }
fn annotate_(input: &str) -> Vec<Annotated_> { fn annotate_(input: &str) -> Vec<Annotated_> {
let mut parts: Vec<Annotated_> = Vec::new(); let mut parts: Vec<Annotated_> = Vec::new();
let mut last_index = 0; let mut last_index = 0;
for cap in RE_CITE.captures_iter(input) { for cap in RE_CITE.captures_iter(input) {
let cite = cap.get(1).unwrap().as_str(); let cite = cap.get(1).unwrap().as_str();
let index = cap.get(0).unwrap().start(); let index = cap.get(0).unwrap().start();
if index > last_index { if index > last_index {
parts.push(Annotated_::Text(&input[last_index..index])); parts.push(Annotated_::Text(&input[last_index..index]));
} }
parts.push(Annotated_::Cite(cite)); parts.push(Annotated_::Cite(cite));
last_index = cap.get(0).unwrap().end(); last_index = cap.get(0).unwrap().end();
} }
if last_index < input.len() { if last_index < input.len() {
parts.push(Annotated_::Text(&input[last_index..])); parts.push(Annotated_::Text(&input[last_index..]));
} }
parts parts
} }
fn make_cite(event: Event) -> Vec<Event> { fn make_cite(event: Event) -> Vec<Event> {
match event { match event {
Event::Text(ref text) => annotate_(text) Event::Text(ref text) => {
.into_iter() annotate_(text)
.map(|e| match e { .into_iter()
Annotated_::Text(text) => Event::Text(text.to_owned().into()), .map(|e| match e {
Annotated_::Cite(cite) => Event::InlineMath(cite.to_owned().into()), Annotated_::Text(text) => Event::Text(text.to_owned().into()),
}) Annotated_::Cite(cite) => Event::InlineMath(cite.to_owned().into()),
.collect(), })
_ => vec![event], .collect()
} },
_ => vec![event],
}
} }
/// Assign slug-style `id`s to all headings in the event stream, in place,
/// and collect a document outline of `(heading text, slug)` pairs.
///
/// Duplicate slugs get a numeric suffix (`intro`, `intro-1`, `intro-2`, …)
/// so anchors stay unique within the document.
fn set_heading_ids(events: &mut [Event]) -> Outline {
	// Occurrence count per base slug, used to disambiguate duplicates.
	let mut cnt = HashMap::<String, i32>::new();
	// Accumulated outline entries.
	let mut out = Vec::new();
	// Text of the heading currently being scanned.
	let mut buf = String::new();
	// Borrow of the open heading's start tag, so its `id` can be patched
	// once the heading text is fully known (at the matching end tag).
	let mut ptr = None;
	for event in events {
		match event {
			Event::Start(ref mut tag @ Tag::Heading { .. }) => {
				ptr = Some(tag);
			}
			// Only collect text while inside a heading.
			Event::Text(ref text) if ptr.is_some() => buf.push_str(text),
			Event::End(TagEnd::Heading(..)) => {
				let txt = std::mem::take(&mut buf);
				// Rudimentary slugify: lowercase, spaces to hyphens.
				// NOTE(review): other characters (punctuation, etc.) are
				// kept verbatim — confirm that is acceptable for anchors.
				let url = txt.to_lowercase().replace(' ', "-");
				let url = match cnt.get_mut(&url) {
					Some(ptr) => {
						// Seen before: bump the counter and suffix it.
						*ptr += 1;
						format!("{url}-{ptr}")
					}
					None => {
						// First occurrence keeps the bare slug.
						cnt.insert(url.clone(), 0);
						url
					}
				};
				// Write the slug back into the heading's start tag.
				match ptr.take().unwrap() {
					Tag::Heading { ref mut id, .. } => *id = Some(url.clone().into()),
					_ => unreachable!(),
				}
				out.push((txt, url));
			}
			_ => (),
		}
	}
	Outline(out)
}
fn make_math(event: Event) -> Event { fn make_math(event: Event) -> Event {
match event { match event {
Event::InlineMath(math) => { Event::InlineMath(math) => Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into()),
Event::InlineHtml(katex::render_with_opts(&math, &*KATEX_I).unwrap().into()) Event::DisplayMath(math) => Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into()),
} _ => event
Event::DisplayMath(math) => { }
Event::Html(katex::render_with_opts(&math, &*KATEX_B).unwrap().into())
}
_ => event,
}
} }
fn make_code(es: Vec<Event>) -> Vec<Event> { fn make_code(es: Vec<Event>) -> Vec<Event> {
let mut buff = Vec::new(); let mut buff = Vec::new();
let mut lang = None; let mut lang = None;
let mut code = String::new(); let mut code = String::new();
for event in es { for event in es {
match event { match event {
Event::Start(Tag::CodeBlock(kind)) => match kind { Event::Start(Tag::CodeBlock(kind)) => match kind {
CodeBlockKind::Indented => (), CodeBlockKind::Indented => (),
CodeBlockKind::Fenced(name) => lang = Some(name), CodeBlockKind::Fenced(name) => lang = Some(name),
}, },
Event::End(TagEnd::CodeBlock) => { Event::End(TagEnd::CodeBlock) => {
let lang = lang.take().unwrap_or("".into()); let lang = lang.take().unwrap_or("".into());
let html = ts::highlight(&lang, &code).render().as_str().to_owned(); let html = ts::highlight(&lang, &code).render().as_str().to_owned();
buff.push(Event::Html(html.into())); buff.push(Event::Html(html.into()));
code.clear(); code.clear();
} },
Event::Text(text) => match lang { Event::Text(text) => match lang {
None => buff.push(Event::Text(text)), None => buff.push(Event::Text(text)),
Some(_) => code.push_str(&text), Some(_) => code.push_str(&text),
}, },
_ => buff.push(event), _ => buff.push(event)
} }
} }
buff buff
} }
/// Matches `[text]{ruby}` annotations: capture group 1 is the base text,
/// group 2 the ruby (furigana) reading.
static RE_RUBY: Lazy<Regex> = Lazy::new(|| Regex::new(r"\[([^\]]+)\]\{([^}]+)\}").unwrap());
/// One fragment of a text run after ruby-annotation parsing.
#[derive(Debug)]
enum Annotated<'a> {
	// Plain text, copied through verbatim.
	Text(&'a str),
	// Base text plus its ruby (furigana) reading.
	Ruby(&'a str, &'a str),
}
/// Split `input` into plain-text runs and `[text]{ruby}` annotations,
/// preserving the original order. Text with no annotations yields a
/// single `Annotated::Text` covering the whole input.
fn annotate(input: &str) -> Vec<Annotated> {
	let mut out: Vec<Annotated> = Vec::new();
	// Byte offset of the first character not yet emitted.
	let mut cursor = 0;
	for cap in RE_RUBY.captures_iter(input) {
		let whole = cap.get(0).unwrap();
		// Emit any plain text sitting before this match.
		if whole.start() > cursor {
			out.push(Annotated::Text(&input[cursor..whole.start()]));
		}
		out.push(Annotated::Ruby(
			cap.get(1).unwrap().as_str(),
			cap.get(2).unwrap().as_str(),
		));
		cursor = whole.end();
	}
	// Trailing plain text after the last match.
	if cursor < input.len() {
		out.push(Annotated::Text(&input[cursor..]));
	}
	out
}
fn make_ruby(event: Event) -> Vec<Event> { fn make_ruby(event: Event) -> Vec<Event> {
match event { match event {
Event::Text(ref text) => annotate(text) Event::Text(ref text) => annotate(text)
.into_iter() .into_iter()
.map(|el| match el { .map(|el| match el {
Annotated::Text(text) => Event::Text(text.to_owned().into()), Annotated::Text(text) => Event::Text(text.to_owned().into()),
Annotated::Ruby(t, f) => Event::InlineHtml( Annotated::Ruby(t, f) => Event::InlineHtml(format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into()),
format!("<ruby>{t}<rp>(</rp><rt>{f}</rt><rp>)</rp></ruby>").into(), })
), .collect(),
}) _ => vec![event],
.collect(), }
_ => vec![event],
}
} }
fn make_emoji(event: Event) -> Event { fn make_emoji(event: Event) -> Event {
match event { match event {
Event::Text(ref text) => { Event::Text(ref text) => {
let mut buf = None; let mut buf = None;
let mut top = 0; let mut top = 0;
let mut old = 0; let mut old = 0;
for (idx, _) in text.match_indices(':') { for (idx, _) in text.match_indices(':') {
let key = &text[old..idx]; let key = &text[old..idx];
if let Some(emoji) = emojis::get_by_shortcode(key) { if let Some(emoji) = emojis::get_by_shortcode(key) {
let buf = buf.get_or_insert_with(|| String::with_capacity(text.len())); let buf = buf.get_or_insert_with(|| String::with_capacity(text.len()));
buf.push_str(&text[top..old - 1]); buf.push_str(&text[top..old-1]);
buf.push_str(emoji.as_str()); buf.push_str(emoji.as_str());
top = idx + 1; top = idx + 1;
} }
old = idx + 1; old = idx + 1;
} }
if let Some(ref mut buf) = buf { if let Some(ref mut buf) = buf {
buf.push_str(&text[top..]); buf.push_str(&text[top..]);
} }
match buf { match buf {
None => event, None => event,
Some(buf) => Event::Text(buf.into()), Some(buf) => Event::Text(buf.into())
} }
} },
_ => event, _ => event,
} }
} }

View file

@ -1,75 +1,80 @@
use std::borrow::Cow;
use hypertext::{html_elements, maud_move, Raw, Renderable, GlobalAttributes};
use tree_sitter_highlight::{Highlighter, HighlightEvent};
mod captures; mod captures;
mod configs; mod configs;
use std::borrow::Cow;
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use tree_sitter_highlight::{HighlightEvent, Highlighter};
/// Flattened highlighting events emitted while walking tree-sitter's
/// output; rendered into escaped HTML by `to_html`.
pub enum Event {
	// Emit this source text verbatim (escaped by the renderer).
	Write(String),
	// Open a `<span>` for the named capture class.
	Enter(String),
	// Close the most recently opened `<span>`.
	Close,
}
/// Render `code` as a syntax-highlighted `<figure>` listing for `lang`.
///
/// The returned renderable borrows both inputs, hence the `'data: 'html`
/// bound tying the template's lifetime to the borrowed data.
pub fn highlight<'data, 'html>(lang: &'data str, code: &'data str) -> impl Renderable + 'html
where
	'data: 'html,
{
	maud_move!(
		figure .listing.kanagawa data-lang=(lang) {
			pre {
				code {
					(Raw(to_html(lang, code)))
				}
			}
		}
	)
}
fn to_html(lang: &str, code: &str) -> String { fn to_html(lang: &str, code: &str) -> String {
get_events(lang, code) get_events(lang, code)
.into_iter() .into_iter()
.map(|event| match event { .map(|event| match event {
Event::Write(text) => Cow::from( Event::Write(text) => Cow::from(
text.replace('&', "&amp;") text.replace('&', "&amp;").replace('<', "&lt;").replace('>', "&gt;")
.replace('<', "&lt;") ),
.replace('>', "&gt;"), Event::Enter(class) => Cow::from(
), format!("<span class=\"{}\">", class.replace('.', "-"))
Event::Enter(class) => { ),
Cow::from(format!("<span class=\"{}\">", class.replace('.', "-"))) Event::Close => Cow::from("</span>"),
} })
Event::Close => Cow::from("</span>"), .collect()
})
.collect()
} }
fn get_events(lang: &str, src: &str) -> Vec<Event> { fn get_events(lang: &str, src: &str) -> Vec<Event> {
let config = match configs::get_config(lang) { let config = match configs::get_config(lang) {
Some(c) => c, Some(c) => c,
None => return vec![Event::Write(src.into())], None => return vec![Event::Write(src.into())]
}; };
let mut hl = Highlighter::new();
let highlights = hl
.highlight(config, src.as_bytes(), None, |name| {
configs::get_config(name)
})
.unwrap();
let mut out = vec![]; let mut hl = Highlighter::new();
for event in highlights { let highlights = hl.highlight(
let event = event.unwrap(); config,
let obj = map_event(event, src); src.as_bytes(),
out.push(obj); None,
} |name| configs::get_config(name)
out ).unwrap();
let mut out = vec![];
for event in highlights {
let event = event.unwrap();
let obj = map_event(event, src);
out.push(obj);
}
out
} }
fn map_event(event: HighlightEvent, src: &str) -> Event { fn map_event(event: HighlightEvent, src: &str) -> Event {
match event { match event {
HighlightEvent::Source { start, end } => Event::Write(src[start..end].into()), HighlightEvent::Source {start, end} => Event::Write(src[start..end].into()),
HighlightEvent::HighlightStart(s) => Event::Enter(captures::NAMES[s.0].into()), HighlightEvent::HighlightStart(s) => Event::Enter(captures::NAMES[s.0].into()),
HighlightEvent::HighlightEnd => Event::Close, HighlightEvent::HighlightEnd => Event::Close,
} }
} }

View file

@ -45,14 +45,6 @@
font-family: var(--serif); font-family: var(--serif);
font-weight: 500; font-weight: 500;
} }
&__latest {
padding-left: 1em;
a {
text-decoration: none;
color: var(--c-primary);
}
}
} }
.home-card-image { .home-card-image {