site: wiki integration - render wiki pages within main site

The GitHub wiki pages are now rendered as part of the website's static
content, like the main site pages (and using the same pandoc markdown).
Building the site now requires a copy of the wiki to be checked out
under wiki/.
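
For context, here is a minimal sketch (assumed helper names, not the
commit's exact rules) of how a Shake-style build can discover the pages
in that wiki checkout and map each one to its rendered html under
site/_site/:

```haskell
-- Sketch only: enumerate wiki/*.md and derive the corresponding
-- site/_site/PAGE.html targets. Function names are hypothetical;
-- the real rules live in Shake.hs.
import Data.List (isSuffixOf, sort)
import System.Directory (getDirectoryContents)
import System.FilePath ((<.>), (</>), dropExtension)

-- | Page names of the markdown files in the wiki checkout.
wikiPageNames :: IO [String]
wikiPageNames = do
  files <- getDirectoryContents "wiki"
  return $ sort [dropExtension f | f <- files, ".md" `isSuffixOf` f]

-- | The html files the site build should produce for those pages.
wikiPageHtmlTargets :: IO [FilePath]
wikiPageHtmlTargets = map (\n -> "site/_site" </> n <.> "html") <$> wikiPageNames

main :: IO ()
main = wikiPageHtmlTargets >>= mapM_ putStrLn
```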

GitHub wiki links are converted to suitable relative links in all
pages (not just wiki pages).
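
Concretely, the two GitHub wikilink forms map to plain markdown links
relative to the rendered site. A simplified pure sketch of that mapping
(illustrative only; the commit's actual conversion is the regex-based
`wikify` added to Shake.hs below):

```haskell
-- Sketch only: the two wikilink forms and their relative-link equivalents.
-- [[Some Page]]       -> [Some Page](Some-Page.html)
-- [[label|Some Page]] -> [label](Some-Page.html)
import Data.List (intercalate)

-- | Wiki page name to the relative uri of its rendered html
-- (hyphenate the words, add .html).
pageNameToUri :: String -> String
pageNameToUri = (++ ".html") . intercalate "-" . words

-- | Render one wikilink, optionally labelled, as a markdown link.
renderWikilink :: Maybe String -> String -> String
renderWikilink mlabel name =
  "[" ++ maybe name id mlabel ++ "](" ++ pageNameToUri name ++ ")"

main :: IO ()
main = do
  putStrLn $ renderWikilink Nothing "Some Page"         -- [Some Page](Some-Page.html)
  putStrLn $ renderWikilink (Just "label") "Some Page"  -- [label](Some-Page.html)
```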

`make site/index.md-push`, which updated the wiki links on the website
home page and committed and pushed the result, is now
`./Shake site/index.md`, which does not commit or push.

Shake.hs now also depends on the `regex` package.

[ci skip]

Simon Michael, 2019-02-08 11:32:37 -08:00
commit d79bd77feb (parent ce2ef2497a)
2 changed files with 76 additions and 25 deletions

Makefile

@@ -162,6 +162,7 @@ DOCSOURCEFILES:= \
$(MANUALSOURCEFILES) \
$(COMMANDHELPFILES) \
site/*.md \
wiki/*.md \
# # file(s) which require recompilation for a build to have an up-to-date version string
# VERSIONSOURCEFILE=hledger/Hledger/Cli/Version.hs
@@ -626,21 +627,6 @@ quickheap-%: hledgerprof samplejournals \
###############################################################################
$(call def-help-subheading,DOCUMENTATION: (see also Shake.hs))
site/index.md-push: \
$(call def-help,site/index.md-push, update home page with ./wiki/_Sidebar content and push if changed )
(sed -ne '1,/<!-- WIKICONTENT -->/ p' site/index.md ; \
sed -ne '/^#.*Cookbook/,$$ p' wiki/_Sidebar.md \
| perl -p \
-e 's/\[\[([^\|]*)\|([^\]]*)\]\]/[\1](https:\/\/github.com\/simonmichael\/hledger\/wiki\/\2)/g;' \
-e 's/\[\[([^\]]*)\]\]/[\1](https:\/\/github.com\/simonmichael\/hledger\/wiki\/\1)/g;' \
-e 's/^# >/##/;' \
; \
sed -ne '/<!-- ENDWIKICONTENT -->/,$$ p' site/index.md ) \
> site/_index.md.$$$$ && \
mv site/_index.md.$$$$ site/index.md
git diff --quiet site/index.md || \
(git commit -q -m 'site: home: latest wiki links' -m '[ci skip]' site/index.md && git push)
site-liverender: Shake \
$(call def-help,site-liverender, update the website html when source files are saved )
ls $(DOCSOURCEFILES) | entr ./Shake website

Shake.hs

@@ -4,6 +4,7 @@
--package base-prelude
--package directory
--package extra
--package regex
--package safe
--package shake
--package time
@@ -41,7 +42,9 @@ not having to write :: Action ExitCode after a non-final cmd
-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
import Prelude ()
@@ -51,6 +54,8 @@ import "base" Control.Exception as C
import "directory" System.Directory as S (getDirectoryContents)
import "extra" Data.List.Extra
import "process" System.Process
import "regex" Text.RE.TDFA.String
import "regex" Text.RE.Replace
import "safe" Safe
import "shake" Development.Shake
import "shake" Development.Shake.FilePath
@@ -70,6 +75,7 @@ usage = unlines
,""
,"./Shake mainpages build the web pages from the main repo"
,"./Shake wikipages build the web pages from the wiki repo"
-- ,"./Shake site/index.md update wiki links on the website home page"
,"./Shake FILE build any individual file"
,"./Shake setversion update all packages from PKG/.version"
,"./Shake changelogs update the changelogs with any new commits"
@@ -368,28 +374,43 @@ main = do
"site/_site/files/README" : [ "site/_site//*" <.> ext | ext <- webassetexts ] |%> \out -> do
copyFile' ("site" </> dropDirectory2 out) out
-- embed the wiki's table of contents into the main site's home page
"site/index.md" %> \out -> do
wikicontent <- readFile' "wiki/_Sidebar.md"
old <- liftIO $ readFileStrictly "site/index.md"
let (startmarker, endmarker) = ("<!-- WIKICONTENT -->", "<!-- ENDWIKICONTENT -->")
(before, after') = break (startmarker `isPrefixOf`) $ lines old
(_, after) = break (endmarker `isPrefixOf`) $ after'
new = unlines $ concat [before, [startmarker], lines wikicontent, after]
liftIO $ writeFile out new
-- render all web pages from the main repo (manuals, home, download, relnotes etc) as html, saved in site/_site/
phony "mainpages" $ need mainpageshtml
-- render all pages from the wiki as html, saved in site/_site/.
-- We assume there are no path collisions with mainrepopages.
-- We assume there are no filename collisions with mainpages.
phony "wikipages" $ need wikipageshtml
-- render one website page as html, saved in sites/_site/
-- render one website page (main or wiki) as html, saved in sites/_site/.
-- In case it's a wiki page, we capture pandoc's output for final processing,
-- and hyperlink any github-style wikilinks.
"site/_site//*.html" %> \out -> do
let name = takeBaseName out
iswikipage = name `elem` wikipagenames
source
| name `elem` wikipagenames = "wiki" </> name <.> "md"
| otherwise = "site" </> name <.> "md"
| iswikipage = "wiki" </> name <.> "md"
| otherwise = "site" </> name <.> "md"
template = "site/site.tmpl"
siteRoot = if "site/_site/doc//*" ?== out then "../.." else "."
need [source, template]
cmd Shell pandoc fromsrcmd "-t html" source
"--template" template
("--metadata=siteRoot:" ++ siteRoot)
("--metadata=title:" ++ name)
"--lua-filter" "tools/pandoc-site.lua"
"--output" out
-- read markdown source, link any wikilinks, pipe it to pandoc, write html out
Stdin . wikify <$> (readFile' source) >>=
(cmd Shell pandoc fromsrcmd "-t html"
"--template" template
("--metadata=siteRoot:" ++ siteRoot)
("--metadata=title:" ++ name)
"--lua-filter=tools/pandoc-site.lua"
"-o" out)
-- render one wiki page as html, saved in site/_site/.
@@ -662,3 +683,47 @@ getCurrentDay :: IO Day
getCurrentDay = do
t <- getZonedTime
return $ localDay (zonedTimeToLocalTime t)
-- | Convert Github-style wikilinks to hledger website links.
wikify :: String -> String
wikify =
replaceBy wikilinkre wikilinkReplace .
replaceBy labelledwikilinkre labelledwikilinkReplace
-- couldn't figure out how to use match subgroups, so we don't
-- wikilinkre = [re|\[\[$([^]]+)]]|] -- [[A]]
-- labelledwikilinkre = [re|\[\[$([^(|)]+)\|$([^]]*)\]\]|] -- [[A|B]]
wikilinkre = [re|\[\[[^]]+]]|] -- [[A]]
labelledwikilinkre = [re|\[\[[^(|)]+\|[^]]*\]\]|] -- [[A|B]]. The | is parenthesised to avoid ending the quasiquoter
-- wikilinkReplace _ loc@RELocation{locationCapture} cap@Capture{capturedText} =
wikilinkReplace _ _ Capture{capturedText} =
-- trace (show (loc,cap)) $
Just $ "["++name++"]("++uri++")"
where
name = init $ init $ drop 2 capturedText
uri = nameToUri name
-- labelledwikilinkReplace _ loc@RELocation{locationCapture} cap@Capture{capturedText} =
labelledwikilinkReplace _ _ Capture{capturedText} =
Just $ "["++label++"]("++uri++")"
where
[label,name] = take 2 $ (splitOn "|" $ init $ init $ drop 2 capturedText) ++ [""]
uri = nameToUri name
nameToUri = (++".html") . intercalate "-" . words
-- | Easier regex replace helper. Replaces each occurrence of a
-- regular expression in src, by transforming each matched text with
-- the given function.
replaceBy re f src = replaceAllCaptures TOP f $ src *=~ re
-- not powerful enough, saved for reference:
-- wikify = (*=~/ wikilinkreplace) . (*=~/ labelledwikilinkreplace)
-- where
-- -- [[A]] -> [A](.../A)
-- wikilinkreplace :: SearchReplace RE String
-- wikilinkreplace = [ed|\[\[$([^]]+)]]///[$1]($1.html)|]
-- -- [[A|B]] -> [A](.../B)
-- labelledwikilinkreplace :: SearchReplace RE String
-- labelledwikilinkreplace = [ed|\[\[$([^(|)]+)\|$([^]]*)\]\]///[$1]($2.html)|]
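
For reference, the new conversion can also be exercised standalone. A
minimal sketch (not part of the commit, with a hypothetical
`linkWikilinks` name) using the same regex-package API that Shake.hs
now imports, covering only the unlabelled [[A]] form:

```haskell
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE QuasiQuotes #-}
-- Sketch only: rewrite [[Page Name]] wikilinks to relative markdown
-- links, following the replaceBy/wikilinkReplace pattern above.
import Data.List (intercalate)
import Text.RE.Replace
import Text.RE.TDFA.String

main :: IO ()
main =
  -- prints: see [Some Page](Some-Page.html) for more
  putStrLn $ linkWikilinks "see [[Some Page]] for more"

linkWikilinks :: String -> String
linkWikilinks src = replaceAllCaptures TOP link $ src *=~ [re|\[\[[^]]+]]|]
  where
    link _ _ Capture{capturedText} = Just $ "["++name++"]("++uri++")"
      where
        name = init $ init $ drop 2 capturedText        -- strip the [[ and ]]
        uri  = intercalate "-" (words name) ++ ".html"  -- Some Page -> Some-Page.html
```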