Maciej Jur 2024-09-17 21:43:32 +02:00
parent 730b9aa4f8
commit dafc42a615
Signed by: kamov
GPG key ID: 191CBFF5F72ECAFD
11 changed files with 107 additions and 68 deletions

Cargo.lock (generated)
View file

@@ -659,9 +659,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "grass"
version = "0.13.3"
version = "0.13.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a46def7216d331efa51a6aa796ef777bfdfe9605378382827a553344b7e5eefc"
checksum = "f7a68216437ef68f0738e48d6c7bb9e6e6a92237e001b03d838314b068f33c94"
dependencies = [
"getrandom",
"grass_compiler",
@@ -669,9 +669,9 @@ dependencies = [
[[package]]
name = "grass_compiler"
version = "0.13.3"
version = "0.13.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f39216c1843182f78541276fec96f88406861f16aa19cc9f8add70f8e67b7577"
checksum = "2d9e3df7f0222ce5184154973d247c591d9aadc28ce7a73c6cd31100c9facff6"
dependencies = [
"codemap",
"indexmap",

View file

@@ -1,6 +1,8 @@
use hauchiwa::{HashedStyle, Mode, Sack};
use hypertext::{html_elements, maud_move, Raw, Renderable};
use crate::MyData;
const JS_RELOAD: &str = r#"
const socket = new WebSocket("ws://localhost:1337");
socket.addEventListener("message", (event) => {
@@ -10,7 +12,7 @@ socket.addEventListener("message", (event) => {
"#;
pub(crate) fn render_head<'s, 'r>(
sack: &'s Sack,
sack: &'s Sack<MyData>,
title: String,
_styles: &'s [&str],
scripts: Option<&'s [String]>,
@@ -64,7 +66,7 @@ fn render_style(style: &HashedStyle) -> impl Renderable + '_ {
}
fn emit_tags_script<'a>(
sack: &'a Sack,
sack: &'a Sack<MyData>,
scripts: &'a [String],
) -> Result<impl Renderable + 'a, String> {
let tags = scripts
@@ -79,7 +81,10 @@ fn emit_tags_script<'a>(
))
}
fn emit_tag_script<'a>(sack: &'a Sack, script: &'a str) -> Result<impl Renderable + 'a, String> {
fn emit_tag_script<'a>(
sack: &'a Sack<MyData>,
script: &'a str,
) -> Result<impl Renderable + 'a, String> {
let src = sack
.get_script(script)
.ok_or(format!("Missing script {script}"))?;

View file

@@ -2,7 +2,7 @@ use camino::Utf8Path;
use hauchiwa::Sack;
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
use crate::{html::Post, text::md::parse, Link, LinkDate};
use crate::{html::Post, text::md::parse, Link, LinkDate, MyData};
const INTRO: &str = r#"
##
@@ -16,7 +16,7 @@ const INTRO: &str = r#"
"#;
fn intro(sack: &Sack) -> impl Renderable {
fn intro(sack: &Sack<MyData>) -> impl Renderable {
let (parsed, _, _) = parse(INTRO, sack, "".into(), None);
maud!(
section .p-card.intro-jp lang="ja-JP" {
@@ -40,7 +40,7 @@ fn photo() -> impl Renderable {
)
}
fn latest(sack: &Sack) -> impl Renderable {
fn latest(sack: &Sack<MyData>) -> impl Renderable {
let links = {
let mut list = sack
.get_meta::<Post>("**")
@@ -76,7 +76,7 @@ fn latest(sack: &Sack) -> impl Renderable {
)
}
pub(crate) fn home(sack: &Sack, main: &str) -> String {
pub(crate) fn home(sack: &Sack<MyData>, main: &str) -> String {
let main = maud!(
main .l-home {
article .l-home__article.markdown {

View file

@@ -1,10 +1,9 @@
use hauchiwa::Sack;
use hypertext::{html_elements, maud_move, GlobalAttributes, Renderable};
use crate::{html::page, LinkDate};
use crate::{html::page, LinkDate, MySack};
pub fn list<'s, 'g, 'html>(
sack: &'s Sack,
sack: &'s MySack,
groups: &'g [(i32, Vec<LinkDate>)],
title: String,
) -> Result<impl Renderable + 'html, String>

View file

@@ -1,10 +1,10 @@
use std::collections::HashMap;
use camino::Utf8Path;
use hauchiwa::{Outline, Sack};
use hauchiwa::Outline;
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use crate::{html::Wiki, Link};
use crate::{html::Wiki, Link, MySack};
/// Render the outline for a document
pub(crate) fn show_outline(outline: Outline) -> impl Renderable {
@@ -78,7 +78,7 @@ impl TreePage {
}
/// Render the page tree
pub(crate) fn show_page_tree(sack: &Sack, glob: &str) -> impl Renderable {
pub(crate) fn show_page_tree(sack: &MySack, glob: &str) -> impl Renderable {
let tree =
TreePage::from_iter(
sack.get_meta::<Wiki>(glob)

View file

@@ -11,7 +11,7 @@ use std::collections::HashMap;
use camino::Utf8Path;
use chrono::Datelike;
use hauchiwa::{Bibliography, Outline, Sack};
use hauchiwa::{Bibliography, Outline};
use hypertext::{html_elements, maud, maud_move, GlobalAttributes, Raw, Renderable};
pub(crate) use home::home;
@@ -20,7 +20,7 @@ pub(crate) use post::Post;
pub(crate) use slideshow::Slideshow;
pub(crate) use wiki::Wiki;
use crate::LinkDate;
use crate::{LinkDate, MySack};
fn navbar() -> impl Renderable {
static ITEMS: &[(&str, &str)] = &[
@@ -68,16 +68,16 @@ fn navbar() -> impl Renderable {
)
}
pub fn footer<'s, 'html>(sack: &'s Sack) -> impl Renderable + 'html
pub fn footer<'s, 'html>(sack: &'s MySack) -> impl Renderable + 'html
where
's: 'html,
{
let copy = format!("Copyright &copy; {} Maciej Jur", &sack.ctx.year);
let copy = format!("Copyright &copy; {} Maciej Jur", &sack.ctx.data.year);
let mail = "maciej@kamoshi.org";
let href = format!("mailto:{}", mail);
let link = Utf8Path::new(&sack.ctx.link)
let link = Utf8Path::new(&sack.ctx.data.link)
.join("src/commit")
.join(&sack.ctx.hash);
.join(&sack.ctx.data.hash);
let link = match sack.get_file() {
Some(path) => link.join(path),
None => link,
@@ -95,10 +95,10 @@ where
}
div .repo {
a href=(link.as_str()) {
(&sack.ctx.hash)
(&sack.ctx.data.hash)
}
div {
(&sack.ctx.date)
(&sack.ctx.data.date)
}
}
a .right.footer__cc-wrap rel="license" href="http://creativecommons.org/licenses/by/4.0/" {
@@ -109,7 +109,7 @@ where
}
fn bare<'s, 'p, 'html>(
sack: &'s Sack,
sack: &'s MySack,
main: impl Renderable + 'p,
title: String,
js: Option<&'s [String]>,
@@ -133,7 +133,7 @@ where
}
fn full<'s, 'p, 'html>(
sack: &'s Sack,
sack: &'s MySack,
main: impl Renderable + 'p,
title: String,
) -> Result<impl Renderable + 'html, String>
@@ -147,7 +147,7 @@ where
}
fn page<'s, 'p, 'html>(
sack: &'s Sack,
sack: &'s MySack,
main: impl Renderable + 'p,
title: String,
js: Option<&'s [String]>,
@@ -161,7 +161,7 @@ where
bare(sack, main, title, js)
}
pub(crate) fn to_list(sack: &Sack, list: Vec<LinkDate>, title: String) -> String {
pub(crate) fn to_list(sack: &MySack, list: Vec<LinkDate>, title: String) -> String {
let mut groups = HashMap::<i32, Vec<_>>::new();
for page in list {
@@ -181,7 +181,7 @@ pub(crate) fn to_list(sack: &Sack, list: Vec<LinkDate>, title: String) -> String
list::list(sack, &groups, title).unwrap().render().into()
}
pub(crate) fn map<'s, 'html>(sack: &'s Sack) -> Result<impl Renderable + 'html, String>
pub(crate) fn map<'s, 'html>(sack: &'s MySack) -> Result<impl Renderable + 'html, String>
where
's: 'html,
{
@@ -198,7 +198,7 @@ where
)
}
pub(crate) fn search(sack: &Sack) -> String {
pub(crate) fn search(sack: &MySack) -> String {
page(
sack,
maud!(
@@ -215,7 +215,7 @@ pub(crate) fn search(sack: &Sack) -> String {
pub fn as_html(
meta: &Post,
parsed: &str,
sack: &Sack,
sack: &MySack,
outline: Outline,
bibliography: Bibliography,
) -> String {
@@ -225,7 +225,7 @@ pub fn as_html(
pub(crate) fn flox(
title: &str,
parsed: &str,
sack: &Sack,
sack: &MySack,
outline: Outline,
bibliography: Bibliography,
) -> String {

View file

@@ -1,10 +1,12 @@
use camino::Utf8Path;
use chrono::{DateTime, Utc};
use hauchiwa::{Bibliography, Outline, Sack};
use hauchiwa::{Bibliography, Outline};
use hayagriva::Library;
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use serde::Deserialize;
use crate::MySack;
/// Represents a simple post.
#[derive(Deserialize, Debug, Clone)]
pub struct Post {
@@ -17,7 +19,7 @@ pub struct Post {
pub fn parse_content(
content: &str,
sack: &Sack,
sack: &MySack,
path: &Utf8Path,
library: Option<&Library>,
) -> (String, Outline, Bibliography) {
@@ -27,7 +29,7 @@ pub fn as_html(
pub fn as_html(
meta: &Post,
parsed: &str,
sack: &Sack,
sack: &MySack,
outline: Outline,
bibliography: Bibliography,
) -> String {
@@ -40,7 +42,7 @@ pub fn as_html(
pub fn post<'s, 'p, 'html>(
meta: &'p Post,
parsed: &'p str,
sack: &'s Sack,
sack: &'s MySack,
outline: Outline,
bibliography: Bibliography,
) -> Result<impl Renderable + 'html, String>
@@ -60,7 +62,7 @@ where
pub fn article<'p, 's, 'html>(
title: &'p str,
parsed: &'p str,
_: &'s Sack,
_: &'s MySack,
outline: Outline,
bibliography: Bibliography,
) -> impl Renderable + 'html

View file

@@ -2,11 +2,13 @@ use std::fmt::Write;
use camino::Utf8Path;
use chrono::{DateTime, Utc};
use hauchiwa::{Bibliography, Outline, Sack};
use hauchiwa::{Bibliography, Outline};
use hayagriva::Library;
use hypertext::{html_elements, maud, GlobalAttributes, Raw, Renderable};
use serde::Deserialize;
use crate::MySack;
const CSS: &str = r#"
.slides img {
margin-left: auto;
@@ -26,7 +28,7 @@ pub(crate) struct Slideshow {
pub fn parse_content(
content: &str,
sack: &Sack,
sack: &MySack,
path: &Utf8Path,
library: Option<&Library>,
) -> (String, Outline, Bibliography) {
@@ -57,14 +59,14 @@ pub fn parse_content(
pub fn as_html(
slides: &Slideshow,
parsed: &str,
sack: &Sack,
sack: &MySack,
_: Outline,
_: Bibliography,
) -> String {
show(slides, sack, parsed)
}
pub fn show(fm: &Slideshow, sack: &Sack, slides: &str) -> String {
pub fn show(fm: &Slideshow, sack: &MySack, slides: &str) -> String {
crate::html::bare(
sack,
maud!(

View file

@@ -1,9 +1,11 @@
use camino::Utf8Path;
use hauchiwa::{Bibliography, Outline, Sack};
use hauchiwa::{Bibliography, Outline};
use hayagriva::Library;
use hypertext::{html_elements, maud_move, GlobalAttributes, Raw, Renderable};
use serde::Deserialize;
use crate::MySack;
/// Represents a wiki page
#[derive(Deserialize, Debug, Clone)]
pub struct Wiki {
@@ -12,7 +14,7 @@ pub struct Wiki {
pub fn parse_content(
content: &str,
sack: &Sack,
sack: &MySack,
path: &Utf8Path,
library: Option<&Library>,
) -> (String, Outline, Bibliography) {
@@ -22,7 +24,7 @@ pub fn parse_content(
pub fn as_html(
meta: &Wiki,
parsed: &str,
sack: &Sack,
sack: &MySack,
outline: Outline,
bibliography: Bibliography,
) -> String {
@@ -32,7 +34,7 @@ pub fn as_html(
fn wiki(
matter: &Wiki,
parsed: &str,
sack: &Sack,
sack: &MySack,
_: Outline,
bibliography: Bibliography,
) -> String {

View file

@@ -2,10 +2,12 @@ mod html;
mod text;
mod ts;
use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf};
use chrono::{DateTime, Utc};
use chrono::{DateTime, Datelike, Utc};
use clap::{Parser, ValueEnum};
use hauchiwa::{Collection, Processor, Website};
use hauchiwa::{Collection, Processor, Sack, Website};
use html::{Post, Slideshow, Wiki};
use hypertext::Renderable;
@@ -22,29 +24,54 @@ enum Mode {
}
#[derive(Debug, Clone)]
pub struct Link {
struct MyData {
pub year: i32,
pub date: String,
pub link: String,
pub hash: String,
}
impl MyData {
fn new() -> Self {
let time = chrono::Utc::now();
Self {
year: time.year(),
date: time.format("%Y/%m/%d %H:%M").to_string(),
link: "https://git.kamoshi.org/kamov/website".into(),
hash: String::from_utf8(
Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.expect("Couldn't load git revision")
.stdout,
)
.expect("Invalid UTF8")
.trim()
.into(),
}
}
}
type MySack<'a> = Sack<'a, MyData>;
#[derive(Debug, Clone)]
struct Link {
pub path: Utf8PathBuf,
pub name: String,
pub desc: Option<String>,
}
#[derive(Debug, Clone)]
pub struct LinkDate {
struct LinkDate {
pub link: Link,
pub date: DateTime<Utc>,
}
#[derive(Debug, Clone)]
pub enum Linkable {
Link(Link),
Date(LinkDate),
}
fn main() {
let args = Args::parse();
let website = Website::design()
.add_loaders(vec![
.add_collections(vec![
Collection::glob_with::<Post>(
"content",
"about.md",
@@ -91,11 +118,13 @@ fn main() {
},
),
])
.js("search", "./js/search/dist/search.js")
.js("photos", "./js/vanilla/photos.js")
.js("reveal", "./js/vanilla/reveal.js")
.js("editor", "./js/flox/main.ts")
.js("lambda", "./js/flox/lambda.ts")
.add_scripts(vec![
("search", "./js/search/dist/search.js"),
("photos", "./js/vanilla/photos.js"),
("reveal", "./js/vanilla/reveal.js"),
("editor", "./js/flox/main.ts"),
("lambda", "./js/flox/lambda.ts"),
])
.add_virtual(
|sack| crate::html::map(sack).unwrap().render().to_owned().into(),
"map/index.html".into(),
@@ -172,7 +201,7 @@ fn main() {
.finish();
match args.mode {
Mode::Build => website.build(),
Mode::Watch => website.watch(),
Mode::Build => website.build(MyData::new()),
Mode::Watch => website.watch(MyData::new()),
}
}
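The new MyData struct carries the values templates previously read from sack.ctx directly, and the MySack alias fixes the generic parameter once for the whole crate. A minimal sketch of how a component would consume it, inferred only from usages visible in this commit (e.g. sack.ctx.data.year in the footer); the exact field layout of hauchiwa's Sack is otherwise an assumption.

// Sketch: a page component reading the shared context through the alias.
// That the user data sits at `ctx.data` is taken from the footer change in
// this commit, not from hauchiwa's documentation.
fn copyright(sack: &MySack) -> String {
    format!("Copyright &copy; {} Maciej Jur", sack.ctx.data.year)
}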

View file

@@ -1,7 +1,7 @@
use std::collections::HashMap;
use camino::Utf8Path;
use hauchiwa::{Bibliography, Outline, Sack};
use hauchiwa::{Bibliography, Outline};
use hayagriva::{
archive::ArchivedStyle,
citationberg::{IndependentStyle, Locale, Style},
@@ -13,7 +13,7 @@ use once_cell::sync::Lazy;
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd, TextMergeStream};
use regex::Regex;
use crate::ts;
use crate::{ts, MySack};
static OPTS: Lazy<Options> = Lazy::new(|| {
Options::empty()
@@ -51,7 +51,7 @@ static STYLE: Lazy<IndependentStyle> =
pub fn parse(
content: &str,
sack: &Sack,
sack: &MySack,
path: &Utf8Path,
library: Option<&Library>,
) -> (String, Outline, Bibliography) {
@@ -353,7 +353,7 @@ fn make_emoji(event: Event) -> Event {
}
}
fn swap_hashed_image<'a>(dir: &'a Utf8Path, sack: &'a Sack) -> impl Fn(Event) -> Event + 'a {
fn swap_hashed_image<'a>(dir: &'a Utf8Path, sack: &'a MySack) -> impl Fn(Event) -> Event + 'a {
move |event| match event {
Event::Start(start) => match start {
Tag::Image {