{-# LANGUAGE CPP #-}
{-# LANGUAGE NoOverloadedLists #-}
{-# LANGUAGE TypeFamilies #-}
module Blog
( main
) where
import Blog.Agda qualified as Agda
import Blog.Config
import Blog.MMark qualified as MMark
import Blog.Shake
import Blog.Template qualified as Template
import Blog.Type
import Blog.Util
import Control.Applicative
import Control.Arrow
import Control.Concurrent.MVar qualified as MVar
import Control.Monad
import Data.Aeson ((.=))
import Data.Aeson qualified as Aeson
import Data.Aeson.KeyMap qualified as Aeson
import Data.ByteString.Lazy qualified as LBS
import Data.List qualified as List
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.Text qualified as Text
import Data.Text.Encoding qualified as Text
import Data.Text.IO qualified as Text
import Data.Time.Clock qualified as Clock
import Data.Time.Format.ISO8601 qualified as Clock
import Development.Shake hiding (shakeOptions)
import Development.Shake qualified as Shake
import Development.Shake.Classes
import Development.Shake.FilePath ((<.>), (</>))
import Development.Shake.FilePath qualified as Shake
import GHC.Clock
import GHC.Conc (numCapabilities)
import Numeric (showFFloat)
import Options.Applicative qualified as A
import Prelude hiding (writeFile)
#if defined(ENABLE_WATCH)
import Control.Exception
import Data.Function (fix)
import Data.Maybe (catMaybes)
import Data.String (fromString)
import Development.Shake.Database qualified as Shake
import GHC.Conc (forkIO, threadDelay)
import Network.HTTP.Types.Status qualified as Status
import Network.Wai qualified as Wai
import Network.Wai.Application.Static qualified as Wai
import Network.Wai.Handler.Warp qualified as Warp
import System.Directory qualified as Dir
import System.FSNotify (Event (..))
import System.FSNotify qualified as FS
import WaiAppStatic.Types qualified as Wai
#endif
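-- Command-line options: the selected subcommand plus global verbosity and
-- job-count settings.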
data Options = Options
{ command :: Command
, verbosity :: Verbosity
, jobs :: Int
}
data BuildOptions = BuildOptions
{ watch :: Bool
, latex :: Bool
}
newtype CleanOptions
= CleanOptions { cache :: Bool }
data PreviewOptions = PreviewOptions
{ server :: Bool
, host :: String
, port :: Int
}
data Command
= Build BuildOptions
| Clean CleanOptions
| Preview PreviewOptions
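-- Derive Shake's options from the CLI options. The shake version is a hash of
-- this file, so edits to Blog.hs invalidate previously built results.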
shakeOptions :: Options -> IO ShakeOptions
shakeOptions Options{..} = do
version <- getHashedShakeVersion ["Blog.hs"]
pure $ Shake.shakeOptions
{ shakeColor = True
, shakeStaunch = False
, shakeThreads = jobs
, shakeVerbosity = verbosity
, shakeVersion = version
}
-- TODO: filter drafts based on published field
-- TODO: relativize urls
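-- Output path for a post: posts/<slug>/index.html.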
postURL :: Post -> FilePath
postURL Post{..} = "posts" </> Text.unpack slug </> "index.html"
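-- Read the publications JSON file and shape each entry for the templates.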
pubList :: FilePath -> Action [Aeson.Value]
pubList file = do
need [file]
liftIO (Aeson.decode <$> LBS.readFile file) >>= \case
Nothing -> fileError (Just file) "invalid json"
Just works -> forM works \Publication{..} ->
pure $ Aeson.object
[ "title" .= title
, "uri" .= uri
, "authors" .= mconcat (List.intersperse ", " authors)
]
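-- For Atom items, fold the published date into the updated field so items
-- without an explicit update still carry a date.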
atomFeedItem :: Post -> Post
atomFeedItem Post{..} = Post
{ updated = updated <> published
, ..
}
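-- Render the whole site as a JSON Feed (jsonfeed.org, version 1.1) document.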
jsonFeed :: Site -> Aeson.Value
jsonFeed Site{..} = Aeson.object
[ "version" .= ("https://jsonfeed.org/version/1.1" :: String)
, "title" .= title
, "home_page_url" .= url
, "feed_url" .= (url <> "/" <> "feed.json")
, "description" .= description
, "authors" .=
[ Aeson.object
[ "name" .= author
, "url" .= url
]
]
, "language" .= lang
, "items" .= (jsonFeedItem <$> posts)
]
where
jsonFeedItem :: Post -> Aeson.Value
jsonFeedItem Post{..} = Aeson.object
[ "id" .= (url <> "/" <> slug)
, "url" .= (url <> "/" <> slug)
, "content_html" .= body
, "title" .= title
, "date_published" .= published.iso8601
, "date_modified" .= (updated.iso8601 <|> published.iso8601)
, "tags" .= tags
]
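-- Template metadata for the tags page: one entry per tag, pairing the
-- '#'-prefixed tag name with the posts that carry it.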
tagsMeta :: Site -> Aeson.Value
tagsMeta = Aeson.toJSON . Map.foldrWithKey' f [] . tagMap
where
f (Tag k) v = (Aeson.object ["tag" .= ("#" <> k), "site" .= Aeson.Object ("posts" .= v)] :)
tagMap Site{..} =
foldr (\p posts -> Set.foldl' (flip (Map.adjust (p :))) posts p.tags)
(Map.fromAscList . map (, []) $ Set.toAscList tags)
posts
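-- All build rules: static assets, Agda HTML output, the resume (when --latex
-- is set), both feeds, standalone pages, and one rule per rendered post.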
build :: BuildOptions -> Rules ()
build b = do
buildDir <- shakeFiles <$> getShakeOptionsRules
writeFileChanged (buildDir </> ".debug-mode") (show b.watch)
gitHash <- gitHashOracle
buildTime <- liftIO Clock.getCurrentTime
getSite <- newCache \posts -> do
hash <- gitHash "master"
pure $ siteBuild b.watch hash posts
let getSiteEmpty = Aeson.toJSON <$> getSite []
template <- Template.compileDir "templates"
-- fetch and render posts, wrapping the underlying oracle
fetchPost <- fmap wrapPostQ . addOracleCache $ \(PostQ input) -> do
need [input]
(source, input, agda) <- case Shake.splitExtensions input of
(Shake.takeBaseName -> base, ".lagda.md") ->
let input = "agda" </> base <.> "md" in
need [input] *> liftIO (Text.readFile input)
>>= fmap (,input, True) . Agda.readAgda input
_ -> (, input, False) <$> liftIO (Text.readFile input)
MMark.renderMarkdown postExtensions input source >>= \page ->
case Aeson.fromJSON (Aeson.Object page.meta) of
Aeson.Error err ->
fileError (Just input) err
Aeson.Success v | Text.null v.title ->
fileError (Just input) "missing metadata field: title"
Aeson.Success Post{..} -> do
(Aeson.Object meta) <- pure . Aeson.toJSON $ Post
{ tags = linkifyTags tags, .. }
pure $ PostA
( Post { body = page.body, .. }
, Page (Aeson.insert "agda" (Aeson.toJSON agda) meta) page.body
)
-- copy static resources
staticFiles "css/*.css"
staticFiles "fonts//*"
staticFiles "static/*"
-- copy static agda files
route (Dynamic (`Shake.replaceDirectory` "static/agda") "agda/*.html") \input output -> do
putInfo $ unwords ["AGDA-STATIC", output]
let mkPage = Page ("title" .= Shake.takeBaseName input)
tPage <- template "agda-page.html"
siteMeta <- getSiteEmpty
liftIO (Text.readFile input)
>>= Agda.readAgdaNaked input
>>= (writePage siteMeta tPage output . mkPage)
-- compile literate agda files
agdaSrcMap <- liftAction $
Map.fromList . fmap (\x -> (agdaOut x, x)) <$> getDirectoryFiles "" agdaPostPatterns
(agdaOut <$> agdaPostPatterns) |%> \output -> do
putInfo $ unwords ["AGDA", output]
input <- (Map.! output) <$> agdaSrcMap
need [input]
command_ [Cwd $ Shake.takeDirectory input] "agda"
[ "--html"
, "--html-highlight=code"
, "--html-dir=../agda"
, "--css=agda.css"
, Shake.takeFileName input
]
-- build resume
when b.latex do
routeStatic "resume/resume.tex" "static/resume.pdf" \_ output -> do
putInfo $ unwords ["RESUME", output]
need ["resume/resume.sty", "resume/latexmkrc"]
buildDir <- shakeFiles <$> getShakeOptions
cmd_ @(String -> [String] -> _)
"latexmk -r resume/latexmkrc -quiet"
[ "-outdir=" ++ Shake.takeDirectory output
, "-auxdir=" ++ buildDir
]
-- write JSON feed
routeStatic1 "feed.json" \output -> do
putInfo $ unwords ["FEED", output]
feed <- getPostFiles
>>= mapP (fmap fst . fetchPost)
>>= fmap jsonFeed . getSite
liftIO $ Aeson.encodeFile output feed
-- write atom feed
routeStatic "pages/feed.xml" "feed.xml" \input output -> do
putInfo $ unwords ["FEED", output]
tItem <- template "atom-item.xml"
posts <- getPostFiles
>>= mapP (fmap (atomFeedItem . fst) . fetchPost)
siteMeta <- jsonInsert "updated" (Clock.iso8601Show buildTime)
. Aeson.toJSON <$> getSite posts
Template.preprocessFile siteMeta tItem input
>>= writeFile output . LBS.fromStrict . Text.encodeUtf8
-- index page
routePage "pages/index.md" \input output -> do
putInfo $ unwords ["PAGE", output]
posts <- getPostFiles
>>= mapP fetchPost
works <- pubList "pages/publications.json"
[tPostList, tPage] <- forP ["post-list.md", "page.html"] template
siteMeta <- jsonInsert "publications" works
. jsonInsert "hide-recent" (null posts)
. Aeson.toJSON <$> getSite (fst <$> take 5 posts)
Template.preprocessFile siteMeta tPostList input
>>= renderMarkdown input
>>= writePage siteMeta tPage output
-- post archive
routeStatic "pages/posts.md" "posts/index.html" \input output -> do
putInfo $ unwords ["PAGE", output]
posts <- getPostFiles
>>= mapP (fmap fst . fetchPost)
[tPostList, tPage] <- forP ["post-list.md", "page.html"] template
siteMeta <- Aeson.toJSON <$> getSite posts
Template.preprocessFile siteMeta tPostList input
>>= renderMarkdown input
>>= writePage siteMeta tPage output
-- post/tag mappings
routeStatic "pages/tags.md" "tags.html" \input output -> do
putInfo $ unwords ["PAGE", output]
posts <- getPostFiles
>>= mapP (fmap fst . fetchPost)
[tPostList, tPage] <- forP ["post-list.md", "page.html"] template
(tagsMeta, siteMeta) <- (tagsMeta &&& Aeson.toJSON) <$> getSite posts
Template.preprocessFile (Aeson.Object $ "tags" .= tagsMeta) tPostList input
>>= renderMarkdown input
>>= writePage siteMeta tPage output
-- notes page
routeStatic "pages/notes.md" "notes.html" \input output -> do
putInfo $ unwords ["PAGE", output]
tPage <- template "page.html"
siteMeta <- getSiteEmpty
MMark.renderMarkdownIO (noteEntry : defaultExtensions) input
>>= writePage siteMeta tPage output
-- 404 page
routePage "pages/404.md" \input output -> do
putInfo $ unwords ["PAGE", output]
t <- template "page.html"
siteMeta <- getSiteEmpty
renderMarkdownIO input
>>= writePage siteMeta t output
postsMap <- liftIO MVar.newEmptyMVar
-- need all posts and map outputs to inputs
action $ do
files <- getPostFiles
map <- Map.fromList <$> forP files \input -> do
(post, page) <- fetchPost input
pure (siteOutput </> postURL post, (input, page))
liftIO $ MVar.putMVar postsMap map
runAfter . void $ MVar.takeMVar postsMap
need $ Map.keys map
-- render each post
siteOutput </> "posts/*/index.html" %> \output -> do
putInfo $ unwords ["POST", output]
(input, page) <- (Map.! output) <$> liftIO (MVar.readMVar postsMap)
t <- template "post.html"
siteMeta <- getSiteEmpty
writePage siteMeta t output page
let
outDir = Shake.takeDirectory output
inDir = Shake.takeDirectory input
-- copy over associated static data
deps <- if length (Shake.splitPath inDir) == 1
then pure [input]
else do
files <- getDirectoryFiles inDir ["*"]
pure $ (inDir </>) <$> files
forP deps \file -> do
copyFileChanged file (Shake.replaceDirectory file outDir)
need deps
where
postFolders
| b.watch = ["drafts", "posts"]
| otherwise = ["posts"]
postExts = ["md", "lhs"]
postPattern = concat [
[ dir </> "*" <.> ext
, dir </> "*" </> "index" <.> ext
, dir </> "*" </> "Index" <.> ext
] | dir <- postFolders
, ext <- postExts
]
getPostFiles = do
buildDir <- shakeFiles <$> getShakeOptions
need [buildDir </> ".debug-mode"]
getDirectoryFiles "" postPattern
agdaOut x = Shake.replaceDirectory (Shake.replaceExtensions x "md") "agda"
agdaPostPatterns
| b.watch = ["drafts/*.lagda.md", "posts/*.lagda.md"]
| otherwise = ["posts/*.lagda.md"]
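-- Start a monotonic clock; the returned action yields the elapsed time
-- formatted as seconds, minutes, or hours.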
timerStart :: IO (IO String)
timerStart = do
start <- getMonotonicTime
pure $ do
end <- getMonotonicTime
pure . duration $ end - start
where
duration :: Double -> String
duration x
| x >= 3600 = f (x / 60) "h" "m"
| x >= 60 = f x "m" "s"
| otherwise = showFFloat (Just 2) x "s"
f ((`divMod` (60 :: Int)) . round -> (ms, ss)) m s =
show ms ++ m ++ ['0' | ss < 10] ++ show ss ++ s
#if defined(ENABLE_WATCH)
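-- Watch mode: run the build, then block until fsnotify reports a change to one
-- of the files the build depends on, and rebuild in a loop. On failure, also
-- watch the files of the failing rules so a fix triggers a rebuild.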
watch :: ShakeOptions -> Rules () -> IO ()
watch shakeOpts rules = do
Shake.shakeWithDatabase shakeOpts rules \db -> FS.withManager \mgr -> do
fix \loop -> do
timeElapsed <- timerStart
res <- try @ShakeException $ do
(_, after) <- Shake.shakeRunDatabase db []
Shake.shakeRunAfter shakeOpts after
elapsed <- timeElapsed
threadDelay 100000
liveFiles <- Shake.shakeLiveFilesDatabase db >>= mapM Dir.makeAbsolute
files <- case res of
Right () -> do
putStrLn (unwords ["Build completed in", elapsed])
pure liveFiles
Left shakeErr -> do
print shakeErr
-- NOTE: we ignore the files shake reports since they might be
-- something of the form 'OracleQ "file"' instead of a file path.
errs <- Shake.shakeErrorsDatabase db
failedDeps <- catMaybes <$> mapM (errFile . snd) errs
pure $ failedDeps ++ liveFiles
if null files then
putStrLn "No files to watch"
else do
sema <- MVar.newEmptyMVar
let
watchDirs = Map.fromListWith Set.union $
map (\file -> (Shake.takeDirectory file, Set.singleton file)) files
startWatchers = forM (Map.toList watchDirs) \(dir, liveFilesInDir) -> do
let isChangeToLiveFile (Modified path _ _) = path `Set.member` liveFilesInDir
isChangeToLiveFile _ = False
FS.watchDir mgr dir isChangeToLiveFile \e -> do
putStrLn $ unwords ["Change in", e.eventPath]
MVar.putMVar sema ()
bracket startWatchers sequence $ const do
putStrLn "Watching for changes..."
MVar.takeMVar sema
putChar '\n'
loop
where
errFile :: SomeException -> IO (Maybe FilePath)
errFile err = sequence do
shakeErr <- fromException @ShakeException err
FileError{..} <- fromException shakeErr.shakeExceptionInner
Dir.makeAbsolute <$> path
#endif
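-- Run a single build and report how long it took.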
timedShake :: ShakeOptions -> Rules () -> IO ()
timedShake shakeOpts rules = do
timeElapsed <- timerStart
shake shakeOpts rules
elapsed <- timeElapsed
putStrLn $ unwords ["Build completed in", elapsed]
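-- Dispatch the parsed command: clean build artifacts, run a (possibly
-- watching) build, or build and serve a live preview.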
run :: Options -> Command -> IO ()
run o (Clean CleanOptions{..}) = do
options <- shakeOptions o
shake options . action $ do
putInfo $ unwords ["Removing", siteOutput]
removeFilesAfter siteOutput ["//*"]
when cache $ do
buildDir <- shakeFiles <$> getShakeOptions
putInfo $ unwords ["Removing", buildDir]
removeFilesAfter buildDir ["//*"]
#if defined(ENABLE_WATCH)
run o (Build b) = do
shakeOpts <- shakeOptions o
(if b.watch then watch else timedShake)
shakeOpts
(build b)
run o (Preview w) = do
shakeOpts <- shakeOptions o
_ <- forkIO (watch shakeOpts (build BuildOptions{ watch = True, latex = True }))
let app = Wai.staticApp (staticSettings siteOutput)
Warp.runSettings warpSettings app
where
warpSettings = Warp.setHost (fromString w.host)
$ Warp.setPort w.port Warp.defaultSettings
nullExtension = not . Shake.hasExtension
staticSettings path = let s = Wai.defaultFileServerSettings path in s
{ Wai.ss404Handler = Just \_ respond ->
LBS.readFile (path </> "404.html")
>>= respond . Wai.responseLBS Status.status404 []
, Wai.ssLookupFile = \pieces ->
case splitAt (length pieces - 1) pieces of
(prefix, [Wai.fromPiece -> fileName])
| nullExtension (Text.unpack fileName) ->
s.ssLookupFile $ prefix <> [Wai.unsafeToPiece $ fileName <> ".html"]
_ -> s.ssLookupFile pieces
, Wai.ssGetMimeType = \file ->
let fileName = Text.unpack $ Wai.fromPiece file.fileName in
if nullExtension fileName then do
htmlExists <- Dir.doesFileExist $ path </> fileName <.> "html"
if htmlExists then pure "text/html" else s.ssGetMimeType file
else s.ssGetMimeType file
}
#else
run o (Build b) | not b.watch = do
shakeOpts <- shakeOptions o
timedShake shakeOpts (build b)
run _ _ = error "watch/preview disabled"
#endif
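-- Command-line interface, built with optparse-applicative.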
parseOptions :: A.Parser Options
parseOptions = do
command <- A.hsubparser (mconcat
[ A.command "build" (A.info pBuild (A.progDesc "Build the site"))
, A.command "clean" (A.info pClean (A.progDesc "Remove build files"))
, A.command "preview" (A.info pPreview (A.progDesc "Run a preview server"))
])
verbosity <- A.option A.auto (mconcat
[ A.long "verbosity"
, A.short 'v'
, A.metavar "VERBOSITY"
, A.completeWith $ map show [minBound :: Verbosity .. maxBound]
]) <|> pure Error
jobs <- A.option A.auto (mconcat
[ A.long "jobs"
, A.short 'j'
, A.metavar "N"
]) <|> pure (numCapabilities `div` 2)
pure Options{..}
where
pBuild = Build <$> do
watch <- A.switch (A.long "watch" <> A.short 'w' <> A.help "Watch for changes and rebuild automatically")
latex <- A.switch (A.long "latex" <> A.help "Build PDFs with latexmk")
pure BuildOptions {..}
pClean = Clean <$> do
cache <- A.switch (A.long "cache" <> A.short 'c' <> A.help "Clean cache")
pure CleanOptions {..}
pPreview = Preview <$> do
server <- A.switch (A.long "server" <> A.short 's' <> A.help "Run a preview server")
host <- A.option A.auto (mconcat
[ A.long "host"
, A.short 'h'
, A.metavar "HOST"
, A.value "127.0.0.1"
, A.showDefault
])
port <- A.option A.auto (mconcat
[ A.long "port"
, A.short 'p'
, A.metavar "PORT"
, A.value 8080
, A.showDefault
])
pure PreviewOptions {..}
main :: IO ()
main = do
options <- A.execParser (A.info (parseOptions A.<**> A.helper) mempty)
run options options.command