Compare commits

...

3 commits

Author                     SHA1        Message                                                                  Date
Yann Esposito (Yogsototh)  4b8a5d3c89  Merge branch 'shake' of gitea.esy.fun:yogsototh/her.esy.fun into shake  2020-06-23 19:29:20 +02:00
Yann Esposito (Yogsototh)  aef6f81274  Fixes                                                                   2020-06-23 19:28:41 +02:00
Yann Esposito (Yogsototh)  3a13435710  updated with niv                                                        2020-06-23 16:55:38 +02:00
7 changed files with 311 additions and 81 deletions


@@ -6,33 +6,28 @@ import Protolude
 import Development.Shake
 import Development.Shake.Command
 import Development.Shake.FilePath
-import Development.Shake.Util
 import Control.Monad.Fail
 import Data.Aeson
-import Text.Megaparsec
+-- import qualified Text.Megaparsec as Megaparsec
 import Data.Default ( Default(def) )
-import qualified Data.Set as Set
 import qualified Data.Text as T
 import Text.Mustache
-import Text.Pandoc.Class ( PandocPure, PandocMonad)
+import Text.Pandoc.Class (PandocMonad)
 import qualified Text.Pandoc.Class as Pandoc
 import Text.Pandoc.Definition ( Pandoc(..)
                               , Block(..)
                               , Inline
                               , nullMeta
                               , docTitle
-                              , lookupMeta
                               , docDate
                               , docAuthors
                               )
-import Text.Pandoc.Extensions ( getDefaultExtensions )
 import Text.Pandoc.Options ( ReaderOptions(..)
                            , WriterOptions(..)
-                           , TrackChanges(RejectChanges)
+                           , ObfuscationMethod(..)
                            )
 import qualified Text.Pandoc.Readers as Readers
-import qualified Text.Pandoc.Templates as PandocTemplates
 import qualified Text.Pandoc.Writers as Writers
 
 main :: IO ()
@@ -47,6 +42,9 @@ main = shakeArgs shOpts buildRules
 -- Configuration
 -- Should probably go in a Reader Monad
+srcDir :: FilePath
+srcDir = "src"
+
 siteDir :: FilePath
 siteDir = "_site"
@@ -82,57 +80,115 @@ getBlogpostFromMetas path toc pandoc@(Pandoc meta _) = do
 sortByPostDate :: [BlogPost] -> [BlogPost]
 sortByPostDate =
-  sortBy (\b a -> compare (Down (postDate a)) (Down (postDate b)))
+  sortBy (\a b -> compare (Down (postDate a)) (Down (postDate b)))
 
 build :: FilePath -> FilePath
 build = (</>) siteDir
 
+genAllDeps :: [FilePattern] -> Action [FilePath]
+genAllDeps patterns = do
+  allMatchedFiles <- getDirectoryFiles srcDir patterns
+  allMatchedFiles &
+    filter ((/= "html") . takeExtension) &
+    filter (null . takeExtension) &
+    map (siteDir </>) &
+    return
+
 buildRules :: Rules ()
 buildRules = do
   cleanRule
+  -- build "//*" %> copy
   allRule
   getPost <- mkGetPost
   getPosts <- mkGetPosts getPost
   getTemplate <- mkGetTemplate
-  let cssDeps = map (siteDir </>) <$> getDirectoryFiles "src" ["css/*.css"]
-  -- templateDeps = getDirectoryFiles "templates" ["*.mustache"]
-  build "//*.html" %> \out -> do
-    css <- cssDeps
-    -- templates <- templateDeps
-    template <- getTemplate ("templates" </> "main.mustache")
-    let srcFile = "src" </> (dropDirectory1 (replaceExtension out "org"))
-    liftIO $ putText $ "need: " <> (toS srcFile) <> " <- " <> (toS out)
-    need $ css <> [srcFile]
-    bp <- getPost srcFile
-    eitherHtml <- liftIO $ Pandoc.runIO $ Writers.writeHtml5String (def { writerTableOfContents = (postToc bp) }) (postBody bp)
-    case eitherHtml of
-      Left _ -> fail "BAD"
-      Right innerHtml ->
-        let htmlContent = renderMustache template $ object [ "title" .= postTitle bp
-                                                           , "authors" .= postAuthors bp
-                                                           , "date" .= postDate bp
-                                                           , "body" .= innerHtml
-                                                           ]
-        in writeFile' out (toS htmlContent)
-  build "articles.html" %> \out -> do
-    css <- cssDeps
-    posts <- getPosts ()
-    need $ css <> map postUrl (sortByPostDate posts)
-    let titles = toS $ T.intercalate "\n" $ map postTitle posts
-    writeFile' out titles
-  build "css/*.css" %> \out -> do
-    let src = "src" </> (dropDirectory1 out)
-        dst = out
-    liftIO $ putText $ toS $ "src:" <> src <> " => dst: " <> dst
-    copyFile' src dst
+  alternatives $ do
+    -- build "articles.html" %> \out -> do
+    -- css <- genAllDeps ["//*.css"]
+    -- posts <- getPosts ()
+    -- need $ css <> map postUrl (sortByPostDate posts)
+    -- let titles = toS $ T.intercalate "\n" $ map postTitle posts
+    -- writeFile' out titles
+    build "//*.html" %> genHtmlAction getPost getTemplate
+    -- build "//*.org" %> copy
+    -- build "//*.jpg" %> copy
+
+copy :: FilePath -> Action ()
+copy out = do
+  let src = srcDir </> (dropDirectory1 out)
+  copyFileChanged src out
+
+genHtml :: (MonadIO m, MonadFail m) => BlogPost -> m Text
+genHtml bp = do
+  eitherHtml <- liftIO $
+    Pandoc.runIO $
+      Writers.writeHtml5String
+        (def { writerTableOfContents = (postToc bp)
+             , writerEmailObfuscation = ReferenceObfuscation
+             })
+        (postBody bp)
+  case eitherHtml of
+    Left _ -> fail "BAD"
+    Right innerHtml -> return innerHtml
+
+genHtmlAction
+  :: (FilePath -> Action BlogPost)
+  -> (FilePath -> Action Template) -> [Char] -> Action ()
+genHtmlAction getPost getTemplate out = do
+  template <- getTemplate ("templates" </> "main.mustache")
+  let srcFile = srcDir </> (dropDirectory1 (out -<.> "org"))
+  liftIO $ putText $ "need: " <> (toS srcFile) <> " -> " <> (toS out)
+  need [srcFile]
+  bp <- getPost srcFile
+  innerHtml <- genHtml bp
+  let htmlContent =
+        renderMustache template $ object [ "title" .= postTitle bp
+                                         , "authors" .= postAuthors bp
+                                         , "date" .= postDate bp
+                                         , "body" .= innerHtml
+                                         ]
+  writeFile' out (toS htmlContent)
+
+allHtmlAction :: Action ()
+allHtmlAction = do
+  allOrgFiles <- getDirectoryFiles srcDir ["//*.org"]
+  let allHtmlFiles = map (-<.> "html") allOrgFiles
+  need (map build (allHtmlFiles
+                  -- <> ["articles.html"]
+                  ))
+
+compressImage :: CmdResult b => FilePath -> Action b
+compressImage img = do
+  let src = srcDir </> img
+      dst = siteDir </> img
+  need [src]
+  let dir = takeDirectory dst
+  dirExists <- doesDirectoryExist dir
+  when (not dirExists) $
+    command [] "mkdir" ["-p", dir]
+  command [] "convert" [src
+                       , "-strip"
+                       , "-resize","320x320>"
+                       , "-interlace","Plane"
+                       , "-quality","85"
+                       , "-define","filter:blur=0.75"
+                       , "-filter","Gaussian"
+                       , "-ordered-dither","o4x4,4"
+                       , dst ]
 
 allRule :: Rules ()
 allRule =
   phony "all" $ do
-    allOrgFiles <- getDirectoryFiles "src" ["//*.org"]
-    let allHtmlFiles = map (flip replaceExtension "html") allOrgFiles
-    need (map build (allHtmlFiles <> ["index.html", "articles.html"]))
+    allAssets <- filter (/= ".DS_Store") <$> getDirectoryFiles srcDir ["//*.*"]
+    forM_ allAssets $ \asset ->
+      case (takeExtension asset) of
+        ".jpg" -> compressImage asset
+        ".jpeg" -> compressImage asset
+        ".gif" -> compressImage asset
+        ".png" -> compressImage asset
+        _ -> copyFileChanged (srcDir </> asset) (siteDir </> asset)
+    allHtmlAction
 
 cleanRule :: Rules ()
 cleanRule =
@@ -161,7 +217,7 @@ mkGetPost :: Rules (FilePath -> Action BlogPost)
 mkGetPost = newCache $ \path -> do
   fileContent <- readFile' path
   let toc = tocRequested (toS fileContent)
-  eitherResult <- liftIO $ Pandoc.runIO $ Readers.readOrg def (toS fileContent)
+  eitherResult <- liftIO $ Pandoc.runIO $ Readers.readOrg (def { readerStandalone = True }) (toS fileContent)
   case eitherResult of
     Left _ -> fail "BAD"
     Right pandoc -> getBlogpostFromMetas path toc pandoc
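
A side note on the sortByPostDate change above: wrapping the sort key in Down and comparing a to b (instead of the flipped b and a of the old code) yields a newest-first ordering. The same ordering can be written with sortOn, as sketched below; BlogPost and postDate are the names from the diffed file, everything else is plain base, and this is only an illustrative equivalent, not code from the repository.

    import Data.List (sortOn)
    import Data.Ord (Down(..))

    -- Same ordering as sortBy (\a b -> compare (Down (postDate a)) (Down (postDate b))):
    -- Down reverses the comparison on the date key, so newer posts sort first.
    sortByPostDate :: [BlogPost] -> [BlogPost]
    sortByPostDate = sortOn (Down . postDate)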

nix/sources.json (new file, +38 lines)

@@ -0,0 +1,38 @@
{
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "f73bf8d584148677b01859677a63191c31911eae",
"sha256": "0jlmrx633jvqrqlyhlzpvdrnim128gc81q5psz2lpp2af8p8q9qs",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/f73bf8d584148677b01859677a63191c31911eae.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "nixpkgs-unstable",
"description": "A read-only mirror of NixOS/nixpkgs tracking the released channels. Send issues and PRs to",
"homepage": "https://github.com/NixOS/nixpkgs",
"owner": "NixOS",
"repo": "nixpkgs-channels",
"rev": "a84cbb60f0296210be03c08d243670dd18a3f6eb",
"sha256": "04j07c98iy66hpzha7brz867dcl9lkflck43xvz09dfmlvqyzmiz",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs-channels/archive/a84cbb60f0296210be03c08d243670dd18a3f6eb.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"shake": {
"branch": "master",
"description": "Shake build system",
"homepage": "http://shakebuild.com",
"owner": "ndmitchell",
"repo": "shake",
"rev": "4536d9ce5cef0e56395fd61ccef9816c9b420fd1",
"sha256": "1s7hjhcc09l026jaca3ndbb103s9d7qlx4vqzx2s6j4rr751nd70",
"type": "tarball",
"url": "https://github.com/ndmitchell/shake/archive/4536d9ce5cef0e56395fd61ccef9816c9b420fd1.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

nix/sources.nix (new file, +134 lines)

@@ -0,0 +1,134 @@
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: spec:
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; }
else
pkgs.fetchurl { inherit (spec) url sha256; };
fetch_tarball = pkgs: spec:
if spec.builtin or true then
builtins_fetchTarball { inherit (spec) url sha256; }
else
pkgs.fetchzip { inherit (spec) url sha256; };
fetch_git = spec:
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
fetch_builtin-tarball = spec:
builtins.trace
''
WARNING:
The niv type "builtin-tarball" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=tarball -a builtin=true
''
builtins_fetchTarball { inherit (spec) url sha256; };
fetch_builtin-url = spec:
builtins.trace
''
WARNING:
The niv type "builtin-url" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=file -a builtin=true
''
(builtins_fetchurl { inherit (spec) url sha256; });
#
# Various helpers
#
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs spec
else if spec.type == "tarball" then fetch_tarball pkgs spec
else if spec.type == "git" then fetch_git spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
else if spec.type == "builtin-url" then fetch_builtin-url spec
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball { inherit url; }
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl { inherit url; }
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = fetch config.pkgs name spec; }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? ./sources.json
, sources ? builtins.fromJSON (builtins.readFile sourcesFile)
, pkgs ? mkPkgs sources
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }


@@ -1,5 +1,6 @@
-{ pkgs ? import (fetchTarball https://github.com/NixOS/nixpkgs/archive/20.03.tar.gz) {} }:
 let
+  sources = import ./nix/sources.nix;
+  pkgs = import sources.nixpkgs {};
   pkgs1909 = import (fetchTarball https://github.com/NixOS/nixpkgs/archive/19.09.tar.gz) {};
   haskellDeps = ps : with ps; [
     shake


@@ -556,9 +556,10 @@ a,a:visited { color: var(--hl); }
 /* ---- SYNTAX HIGHLIGHTING ---- */
 
 #table-of-contents { text-align: left; }
 
 .org-rainbow-delimiters-depth-1, .org-rainbow-delimiters-depth-9,
 .org-css-selector, .org-builtin,
-.IN_REVIEW {
+.IN_REVIEW, .ex {
   color:var(--c);
 }
@@ -573,7 +574,7 @@ a,a:visited { color: var(--hl); }
 }
 
 .org-rainbow-delimiters-depth-4, .org-diff-hunk-header, .org-sh-quoted-exec,
-.CANCELED {
+.CANCELED, .bu {
   color:var(--m);
 }
 .org-rainbow-delimiters-depth-5, .org-diff-removed, .TODO {


@@ -44,9 +44,9 @@ goblins.
 Those costume looks very bad and cheap.
 So much you can only find them not terrorizing but funny and ridiculous.
 
-#+ATTR_HTML: A goblin
 #+CAPTION: One goblin during the introduction scene of Troll 2
 #+NAME: fig:troll-2-intro
+#+ATTR_HTML: A goblin
 [[./Troll-2-intro.jpg]]
 
 Soon after that, you realize the acting of all actors is extremely bad.
@@ -55,9 +55,9 @@ To give you an idea, the only equal bad acting I ever witnessed was while
 looking at amateurs first Youtube movies trying to follow a scenario.
 Apparently most actors were amateurs, it was their first and last movie.
 
-#+ATTR_HTML: A bad acting demonstration
 #+CAPTION: One particularly terrible acting scene
 #+NAME: fig:bad-acting
+#+ATTR_HTML: A bad acting demonstration
 [[file:bad-acting.png]]
 
 The dialog are, really something...
@@ -83,9 +83,9 @@ They win against the monsters with, what I believe was a failed attempt at
 humor.
 It misses the point so bad, that the irony still make it funny.
 
-#+ATTR_HTML: Eliott prevents his family to eat the food by urinating on the table
 #+CAPTION: Our hero save the day by urinating on the table. His family is frozen for 30s said grandpa, they were for 70s.
 #+NAME: fig:prevent-eating
+#+ATTR_HTML: Eliott prevents his family to eat the food by urinating on the table
 [[./prevent-eating-scene.jpg]]
 
 Of course, the very last scene is a classical so terrible cliché.


@@ -1492,7 +1492,7 @@ The only way to work around this problem is to use some meta-programming
 trick, for example using the pre-processor.
 In C++ there is a better way, C++ templates:
 
-#+BEGIN_SRC c++
+#+BEGIN_SRC cpp
 #include <iostream>
 #include <complex>
 using namespace std;
@@ -3880,9 +3880,7 @@ I will not argue much, but mainly, semantic versionning and Haskell
 versionning are just a "right to break things to your users".
 I don't want to talk a lot more about this, but, it would be nice if more
-people would watch this talk[fn:9] related to versionning.
+people would watch this talk[fn:8] related to versionning.
 
-[fn:9]: [[https://www.youtube.com/watch?v=oyLBGkS5ICk][Spec-ulation Keynote - Rich Hickey]]
-
 If you want to know more about Haskell versionning convention:
 https://pvp.haskell.org
@@ -4082,30 +4080,32 @@ Thank you man.
 As of today, the definition of =IO= is no more visible into =base=.
 We have the following explanation in [[http://hackage.haskell.org/package/base-4.12.0.0/docs/src/GHC.IO.html][=GHC.IO.hs=]]:
 
 #+begin_quote
 #+begin_src
 The IO Monad is just an instance of the ST monad, where the state is
 the real world. We use the exception mechanism (in GHC.Exception) to
 implement IO exceptions.
 
 NOTE: The IO representation is deeply wired in to various parts of the
 system. The following list may or may not be exhaustive:
 
 Compiler - types of various primitives in PrimOp.hs
 
 RTS - forceIO (StgStartup.cmm)
     - catchzh_fast, (un)?blockAsyncExceptionszh_fast, raisezh_fast
      (Exception.cmm)
     - raiseAsync (RaiseAsync.c)
 
 Prelude - GHC.IO.hs, and several other places including
          GHC.Exception.hs.
 
 Libraries - parts of hslibs/lang.
 
 --SDM
 #+end_src
 #+end_quote
 
 [fn:7] Well, you'll certainly need to practice a bit to get used to them
 and to understand when you can use them and create your own. But
 you already made a big step in this direction.
+
+[fn:8] [[https://www.youtube.com/watch?v=oyLBGkS5ICk][Spec-ulation Keynote - Rich Hickey]]
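
An aside on the GHC comment quoted in the hunk above: "the IO Monad is just an instance of the ST monad, where the state is the real world" means that IO threads a State# RealWorld token the same way ST threads a State# s token. The sketch below is an illustrative, from-memory rendering of the shape of those definitions, with renamed types so it compiles next to Prelude's IO; the real definitions live in GHC's sources, not in this repository.

    {-# LANGUAGE MagicHash, UnboxedTuples #-}
    import GHC.Exts (State#, RealWorld)

    -- Local, renamed copies of the shape of GHC's IO and ST types:
    newtype MyIO a   = MyIO (State# RealWorld -> (# State# RealWorld, a #))
    newtype MyST s a = MyST (State# s         -> (# State# s,         a #))
    -- MyIO a is MyST RealWorld a with the state type fixed to RealWorld,
    -- which is what "the state is the real world" refers to.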