Compare commits

...

3 commits

Author | SHA1 | Message | Date
Yann Esposito (Yogsototh) | 4b8a5d3c89 | Merge branch 'shake' of gitea.esy.fun:yogsototh/her.esy.fun into shake | 2020-06-23 19:29:20 +02:00
Yann Esposito (Yogsototh) | aef6f81274 | Fixes | 2020-06-23 19:28:41 +02:00
Yann Esposito (Yogsototh) | 3a13435710 | updated with niv | 2020-06-23 16:55:38 +02:00
7 changed files with 311 additions and 81 deletions

@@ -6,33 +6,28 @@ import Protolude
import Development.Shake
import Development.Shake.Command
import Development.Shake.FilePath
import Development.Shake.Util
import Control.Monad.Fail
import Data.Aeson
import Text.Megaparsec
-- import qualified Text.Megaparsec as Megaparsec
import Data.Default ( Default(def) )
import qualified Data.Set as Set
import qualified Data.Text as T
import Text.Mustache
import Text.Pandoc.Class ( PandocPure, PandocMonad)
import Text.Pandoc.Class (PandocMonad)
import qualified Text.Pandoc.Class as Pandoc
import Text.Pandoc.Definition ( Pandoc(..)
, Block(..)
, Inline
, nullMeta
, docTitle
, lookupMeta
, docDate
, docAuthors
)
import Text.Pandoc.Extensions ( getDefaultExtensions )
import Text.Pandoc.Options ( ReaderOptions(..)
, WriterOptions(..)
, TrackChanges(RejectChanges)
, ObfuscationMethod(..)
)
import qualified Text.Pandoc.Readers as Readers
import qualified Text.Pandoc.Templates as PandocTemplates
import qualified Text.Pandoc.Writers as Writers
main :: IO ()
@@ -47,6 +42,9 @@ main = shakeArgs shOpts buildRules
-- Configuration
-- Should probably go in a Reader Monad
srcDir :: FilePath
srcDir = "src"
siteDir :: FilePath
siteDir = "_site"
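The comment above suggests moving this configuration into a Reader monad. Purely as an illustration (none of this is in the commit; SiteConfig, cfgSrcDir, cfgSiteDir, and buildWith are invented names), such a configuration record could look like:

import Development.Shake.FilePath

-- Illustrative only: bundle the paths in one record and pass it explicitly
-- (or wrap Action in a ReaderT SiteConfig) instead of top-level constants.
data SiteConfig = SiteConfig
  { cfgSrcDir  :: FilePath   -- where the .org sources live
  , cfgSiteDir :: FilePath   -- where the generated site is written
  }

defaultConfig :: SiteConfig
defaultConfig = SiteConfig { cfgSrcDir = "src", cfgSiteDir = "_site" }

-- The `build` helper from below, rewritten to take the config as an argument.
buildWith :: SiteConfig -> FilePath -> FilePath
buildWith cfg out = cfgSiteDir cfg </> out
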
@@ -82,57 +80,115 @@ getBlogpostFromMetas path toc pandoc@(Pandoc meta _) = do
sortByPostDate :: [BlogPost] -> [BlogPost]
sortByPostDate =
sortBy (\b a -> compare (Down (postDate a)) (Down (postDate b)))
sortBy (\a b-> compare (Down (postDate a)) (Down (postDate b)))
build :: FilePath -> FilePath
build = (</>) siteDir
genAllDeps :: [FilePattern] -> Action [FilePath]
genAllDeps patterns = do
allMatchedFiles <- getDirectoryFiles srcDir patterns
allMatchedFiles &
filter ((/= "html") . takeExtension) &
filter (null . takeExtension) &
map (siteDir </>) &
return
buildRules :: Rules ()
buildRules = do
cleanRule
-- build "//*" %> copy
allRule
getPost <- mkGetPost
getPosts <- mkGetPosts getPost
getTemplate <- mkGetTemplate
let cssDeps = map (siteDir </>) <$> getDirectoryFiles "src" ["css/*.css"]
-- templateDeps = getDirectoryFiles "templates" ["*.mustache"]
build "//*.html" %> \out -> do
css <- cssDeps
-- templates <- templateDeps
template <- getTemplate ("templates" </> "main.mustache")
let srcFile = "src" </> (dropDirectory1 (replaceExtension out "org"))
liftIO $ putText $ "need: " <> (toS srcFile) <> " <- " <> (toS out)
need $ css <> [srcFile]
bp <- getPost srcFile
eitherHtml <- liftIO $ Pandoc.runIO $ Writers.writeHtml5String (def { writerTableOfContents = (postToc bp) }) (postBody bp)
alternatives $ do
-- build "articles.html" %> \out -> do
-- css <- genAllDeps ["//*.css"]
-- posts <- getPosts ()
-- need $ css <> map postUrl (sortByPostDate posts)
-- let titles = toS $ T.intercalate "\n" $ map postTitle posts
-- writeFile' out titles
build "//*.html" %> genHtmlAction getPost getTemplate
-- build "//*.org" %> copy
-- build "//*.jpg" %> copy
copy :: FilePath -> Action ()
copy out = do
let src = srcDir </> (dropDirectory1 out)
copyFileChanged src out
genHtml :: (MonadIO m, MonadFail m) => BlogPost -> m Text
genHtml bp = do
eitherHtml <- liftIO $
Pandoc.runIO $
Writers.writeHtml5String
(def { writerTableOfContents = (postToc bp)
, writerEmailObfuscation = ReferenceObfuscation
})
(postBody bp)
case eitherHtml of
Left _ -> fail "BAD"
Right innerHtml ->
let htmlContent = renderMustache template $ object [ "title" .= postTitle bp
Right innerHtml -> return innerHtml
genHtmlAction
:: (FilePath -> Action BlogPost)
-> (FilePath -> Action Template) -> [Char] -> Action ()
genHtmlAction getPost getTemplate out = do
template <- getTemplate ("templates" </> "main.mustache")
let srcFile = srcDir </> (dropDirectory1 (out -<.> "org"))
liftIO $ putText $ "need: " <> (toS srcFile) <> " -> " <> (toS out)
need [srcFile]
bp <- getPost srcFile
innerHtml <- genHtml bp
let htmlContent =
renderMustache template $ object [ "title" .= postTitle bp
, "authors" .= postAuthors bp
, "date" .= postDate bp
, "body" .= innerHtml
]
in writeFile' out (toS htmlContent)
build "articles.html" %> \out -> do
css <- cssDeps
posts <- getPosts ()
need $ css <> map postUrl (sortByPostDate posts)
let titles = toS $ T.intercalate "\n" $ map postTitle posts
writeFile' out titles
build "css/*.css" %> \out -> do
let src = "src" </> (dropDirectory1 out)
dst = out
liftIO $ putText $ toS $ "src:" <> src <> " => dst: " <> dst
copyFile' src dst
writeFile' out (toS htmlContent)
allHtmlAction :: Action ()
allHtmlAction = do
allOrgFiles <- getDirectoryFiles srcDir ["//*.org"]
let allHtmlFiles = map (-<.> "html") allOrgFiles
need (map build (allHtmlFiles
-- <> ["articles.html"]
))
compressImage :: CmdResult b => FilePath -> Action b
compressImage img = do
let src = srcDir </> img
dst = siteDir </> img
need [src]
let dir = takeDirectory dst
dirExists <- doesDirectoryExist dir
when (not dirExists) $
command [] "mkdir" ["-p", dir]
command [] "convert" [src
, "-strip"
, "-resize","320x320>"
, "-interlace","Plane"
, "-quality","85"
, "-define","filter:blur=0.75"
, "-filter","Gaussian"
, "-ordered-dither","o4x4,4"
, dst ]
allRule :: Rules ()
allRule =
phony "all" $ do
allOrgFiles <- getDirectoryFiles "src" ["//*.org"]
let allHtmlFiles = map (flip replaceExtension "html") allOrgFiles
need (map build (allHtmlFiles <> ["index.html", "articles.html"]))
allAssets <- filter (/= ".DS_Store") <$> getDirectoryFiles srcDir ["//*.*"]
forM_ allAssets $ \asset ->
case (takeExtension asset) of
".jpg" -> compressImage asset
".jpeg" -> compressImage asset
".gif" -> compressImage asset
".png" -> compressImage asset
_ -> copyFileChanged (srcDir </> asset) (siteDir </> asset)
allHtmlAction
cleanRule :: Rules ()
cleanRule =
@@ -161,7 +217,7 @@ mkGetPost :: Rules (FilePath -> Action BlogPost)
mkGetPost = newCache $ \path -> do
fileContent <- readFile' path
let toc = tocRequested (toS fileContent)
eitherResult <- liftIO $ Pandoc.runIO $ Readers.readOrg def (toS fileContent)
eitherResult <- liftIO $ Pandoc.runIO $ Readers.readOrg (def { readerStandalone = True }) (toS fileContent)
case eitherResult of
Left _ -> fail "BAD"
Right pandoc -> getBlogpostFromMetas path toc pandoc
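
mkGetPost and mkGetTemplate above are built on Shake's newCache, which memoises an Action result per key within a single build, so every rule that asks for the same source file shares one parse. A minimal sketch of that getter pattern, with the Pandoc parsing replaced by a stub (mkGetTitle and its body are invented for illustration, not code from this commit):

{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
import Protolude
import Development.Shake
import qualified Data.Text as T

-- Illustrative only: the newCache body runs at most once per path and per
-- build; every rule that calls the returned getter shares the cached result.
mkGetTitle :: Rules (FilePath -> Action Text)
mkGetTitle = newCache $ \path -> do
  contents <- readFile' path   -- readFile' also records the dependency on path
  -- stand-in for the real Pandoc.runIO / Readers.readOrg parsing step
  pure (fromMaybe "untitled" (head (T.lines (toS contents))))

Wired into buildRules the same way as getPost (getTitle <- mkGetTitle), any number of rules can then call getTitle on the same source file and the file is read and parsed only once per build.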

nix/sources.json Normal file

@@ -0,0 +1,38 @@
{
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "f73bf8d584148677b01859677a63191c31911eae",
"sha256": "0jlmrx633jvqrqlyhlzpvdrnim128gc81q5psz2lpp2af8p8q9qs",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/f73bf8d584148677b01859677a63191c31911eae.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "nixpkgs-unstable",
"description": "A read-only mirror of NixOS/nixpkgs tracking the released channels. Send issues and PRs to",
"homepage": "https://github.com/NixOS/nixpkgs",
"owner": "NixOS",
"repo": "nixpkgs-channels",
"rev": "a84cbb60f0296210be03c08d243670dd18a3f6eb",
"sha256": "04j07c98iy66hpzha7brz867dcl9lkflck43xvz09dfmlvqyzmiz",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs-channels/archive/a84cbb60f0296210be03c08d243670dd18a3f6eb.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"shake": {
"branch": "master",
"description": "Shake build system",
"homepage": "http://shakebuild.com",
"owner": "ndmitchell",
"repo": "shake",
"rev": "4536d9ce5cef0e56395fd61ccef9816c9b420fd1",
"sha256": "1s7hjhcc09l026jaca3ndbb103s9d7qlx4vqzx2s6j4rr751nd70",
"type": "tarball",
"url": "https://github.com/ndmitchell/shake/archive/4536d9ce5cef0e56395fd61ccef9816c9b420fd1.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

nix/sources.nix Normal file

@@ -0,0 +1,134 @@
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: spec:
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; }
else
pkgs.fetchurl { inherit (spec) url sha256; };
fetch_tarball = pkgs: spec:
if spec.builtin or true then
builtins_fetchTarball { inherit (spec) url sha256; }
else
pkgs.fetchzip { inherit (spec) url sha256; };
fetch_git = spec:
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
fetch_builtin-tarball = spec:
builtins.trace
''
WARNING:
The niv type "builtin-tarball" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=tarball -a builtin=true
''
builtins_fetchTarball { inherit (spec) url sha256; };
fetch_builtin-url = spec:
builtins.trace
''
WARNING:
The niv type "builtin-url" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=file -a builtin=true
''
(builtins_fetchurl { inherit (spec) url sha256; });
#
# Various helpers
#
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs spec
else if spec.type == "tarball" then fetch_tarball pkgs spec
else if spec.type == "git" then fetch_git spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
else if spec.type == "builtin-url" then fetch_builtin-url spec
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball { inherit url; }
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl { inherit url; }
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = fetch config.pkgs name spec; }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? ./sources.json
, sources ? builtins.fromJSON (builtins.readFile sourcesFile)
, pkgs ? mkPkgs sources
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }

@@ -1,5 +1,6 @@
{ pkgs ? import (fetchTarball https://github.com/NixOS/nixpkgs/archive/20.03.tar.gz) {} }:
let
sources = import ./nix/sources.nix;
pkgs = import sources.nixpkgs {};
pkgs1909 = import (fetchTarball https://github.com/NixOS/nixpkgs/archive/19.09.tar.gz) {};
haskellDeps = ps : with ps; [
shake

@@ -556,9 +556,10 @@ a,a:visited { color: var(--hl); }
/* ---- SYNTAX HIGHLIGHTING ---- */
#table-of-contents { text-align: left; }
.org-rainbow-delimiters-depth-1, .org-rainbow-delimiters-depth-9,
.org-css-selector, .org-builtin,
.IN_REVIEW {
.IN_REVIEW, .ex {
color:var(--c);
}
@@ -573,7 +574,7 @@ a,a:visited { color: var(--hl); }
}
.org-rainbow-delimiters-depth-4, .org-diff-hunk-header, .org-sh-quoted-exec,
.CANCELED {
.CANCELED, .bu {
color:var(--m);
}
.org-rainbow-delimiters-depth-5, .org-diff-removed, .TODO {

@@ -44,9 +44,9 @@ goblins.
Those costume looks very bad and cheap.
So much you can only find them not terrorizing but funny and ridiculous.
#+ATTR_HTML: A goblin
#+CAPTION: One goblin during the introduction scene of Troll 2
#+NAME: fig:troll-2-intro
#+ATTR_HTML: A goblin
[[./Troll-2-intro.jpg]]
Soon after that, you realize the acting of all actors is extremely bad.
@@ -55,9 +55,9 @@ To give you an idea, the only equal bad acting I ever witnessed was while
looking at amateurs first Youtube movies trying to follow a scenario.
Apparently most actors were amateurs, it was their first and last movie.
#+ATTR_HTML: A bad acting demonstration
#+CAPTION: One particularly terrible acting scene
#+NAME: fig:bad-acting
#+ATTR_HTML: A bad acting demonstration
[[file:bad-acting.png]]
The dialog are, really something...
@@ -83,9 +83,9 @@ They win against the monsters with, what I believe was a failed attempt at
humor.
It misses the point so bad, that the irony still make it funny.
#+ATTR_HTML: Eliott prevents his family to eat the food by urinating on the table
#+CAPTION: Our hero save the day by urinating on the table. His family is frozen for 30s said grandpa, they were for 70s.
#+NAME: fig:prevent-eating
#+ATTR_HTML: Eliott prevents his family to eat the food by urinating on the table
[[./prevent-eating-scene.jpg]]
Of course, the very last scene is a classical so terrible cliché.

@@ -1492,7 +1492,7 @@ The only way to work around this problem is to use some meta-programming
trick, for example using the pre-processor.
In C++ there is a better way, C++ templates:
#+BEGIN_SRC c++
#+BEGIN_SRC cpp
#include <iostream>
#include <complex>
using namespace std;
@@ -3880,9 +3880,7 @@ I will not argue much, but mainly, semantic versionning and Haskell
versionning are just a "right to break things to your users".
I don't want to talk a lot more about this, but, it would be nice if more
people would watch this talk[fn:9] related to versionning.
[fn:9]: [[https://www.youtube.com/watch?v=oyLBGkS5ICk][Spec-ulation Keynote - Rich Hickey]]
people would watch this talk[fn:8] related to versionning.
If you want to know more about Haskell versionning convention:
https://pvp.haskell.org
@@ -4109,3 +4107,5 @@ Libraries - parts of hslibs/lang.
[fn:7] Well, you'll certainly need to practice a bit to get used to them
and to understand when you can use them and create your own. But
you already made a big step in this direction.
[fn:8] [[https://www.youtube.com/watch?v=oyLBGkS5ICk][Spec-ulation Keynote - Rich Hickey]]