Merge branch 'master' into dev-metadata-route

This commit is contained in:
Jasper Van der Jeugt 2013-02-14 10:08:21 +01:00
commit 61dcb5f454
32 changed files with 309 additions and 218 deletions

View file

@ -1,5 +1,5 @@
Name: hakyll
Version: 4.1.2.0
Version: 4.1.4.0
Synopsis: A static website compiler library
Description:
@ -64,8 +64,10 @@ Data-files:
Extra-source-files:
tests/data/example.md
tests/data/example.md.metadata
tests/data/russian.md
tests/data/template.html
tests/data/template.html.out
tests/data/posts/2010-08-26-birthday.md
Source-Repository head
Type: git
@ -93,7 +95,7 @@ Library
directory >= 1.0 && < 1.3,
filepath >= 1.0 && < 1.4,
http-conduit >= 1.8 && < 1.9,
http-types >= 0.7 && < 0.8,
http-types >= 0.7 && < 0.9,
lrucache >= 1.1.1 && < 1.2,
mtl >= 1 && < 2.2,
old-locale >= 1.0 && < 1.1,
@ -149,7 +151,6 @@ Library
Hakyll.Core.Provider.Internal
Hakyll.Core.Provider.Metadata
Hakyll.Core.Provider.MetadataCache
Hakyll.Core.Provider.Modified
Hakyll.Core.Rules.Internal
Hakyll.Core.Runtime
Hakyll.Core.Store
@ -192,7 +193,7 @@ Test-suite hakyll-tests
directory >= 1.0 && < 1.3,
filepath >= 1.0 && < 1.4,
http-conduit >= 1.8 && < 1.9,
http-types >= 0.7 && < 0.8,
http-types >= 0.7 && < 0.9,
lrucache >= 1.1.1 && < 1.2,
mtl >= 1 && < 2.2,
old-locale >= 1.0 && < 1.1,

View file

@ -94,7 +94,8 @@ runChecker checker config verbosity check' = do
checkDestination :: Checker ()
checkDestination = do
config <- checkerConfig <$> ask
files <- liftIO $ getRecursiveContents (destinationDirectory config)
files <- liftIO $
getRecursiveContents (const False) (destinationDirectory config)
let htmls =
[ destinationDirectory config </> file

View file

@ -136,7 +136,7 @@ cached name compiler = do
id' <- compilerUnderlying <$> compilerAsk
store <- compilerStore <$> compilerAsk
provider <- compilerProvider <$> compilerAsk
modified <- compilerUnsafeIO $ resourceModified provider id'
let modified = resourceModified provider id'
if modified
then do
x <- compiler

View file

@ -37,7 +37,7 @@ data Configuration = Configuration
--
-- Note that the files in 'destinationDirectory' and 'storeDirectory' will
-- also be ignored. Note that this is the configuration parameter, if you
-- want to use the test, you should use 'shouldIgnoreFile'.
-- want to use the test, you should use 'shouldIgnoreFile'.
--
ignoreFile :: FilePath -> Bool
, -- | Here, you can plug in a system command to upload/deploy your site.
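
The `ignoreFile` predicate documented above can be overridden per site. A minimal sketch, assuming the `defaultConfiguration` record exported by `Hakyll.Core.Configuration`; the extra `.bak` rule is purely illustrative:

```haskell
import           Data.List (isSuffixOf)
import           Hakyll

-- Extend the default ignore rules to also skip "*.bak" files (illustrative).
config :: Configuration
config = defaultConfiguration
    { ignoreFile = \path ->
        ignoreFile defaultConfiguration path || ".bak" `isSuffixOf` path
    }
```

Such a configuration would then typically be passed to `hakyllWith`.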

View file

@ -3,44 +3,40 @@
-- caching.
module Hakyll.Core.Provider
( -- * Constructing resource providers
Provider
Internal.Provider
, newProvider
-- * Querying resource properties
, resourceList
, resourceExists
, resourceModified
, resourceModificationTime
, Internal.resourceList
, Internal.resourceExists
, Internal.resourceModified
, Internal.resourceModificationTime
-- * Access to raw resource content
, resourceString
, resourceLBS
, Internal.resourceString
, Internal.resourceLBS
-- * Access to metadata and body content
, resourceMetadata
, resourceBody
, Internal.resourceMetadata
, Internal.resourceBody
) where
--------------------------------------------------------------------------------
import Hakyll.Core.Identifier
import Hakyll.Core.Metadata
import Hakyll.Core.Provider.Internal
import qualified Hakyll.Core.Provider.Internal as Internal
import qualified Hakyll.Core.Provider.MetadataCache as Internal
import Hakyll.Core.Provider.Modified
import Hakyll.Core.Store (Store)
--------------------------------------------------------------------------------
-- | Wrapper to ensure metadata cache is invalidated if necessary
resourceMetadata :: Provider -> Identifier -> IO Metadata
resourceMetadata rp r = do
_ <- resourceModified rp r
Internal.resourceMetadata rp r
--------------------------------------------------------------------------------
-- | Wrapper to ensure metadata cache is invalidated if necessary
resourceBody :: Provider -> Identifier -> IO String
resourceBody rp r = do
_ <- resourceModified rp r
Internal.resourceBody rp r
-- | Create a resource provider
newProvider :: Store -- ^ Store to use
-> (FilePath -> Bool) -- ^ Should we ignore this file?
-> FilePath -- ^ Search directory
-> IO Internal.Provider -- ^ Resulting provider
newProvider store ignore directory = do
-- Delete metadata cache where necessary
p <- Internal.newProvider store ignore directory
mapM_ (Internal.resourceInvalidateMetadataCache p) $
filter (Internal.resourceModified p) $ Internal.resourceList p
return p
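
A sketch of how the wrapper above might be wired up, assuming `Store.new` takes the in-memory flag and a store directory as in this version of `Hakyll.Core.Store`; the `_cache` and `.` paths are placeholders:

```haskell
import           Hakyll.Core.Identifier (toFilePath)
import           Hakyll.Core.Provider
import qualified Hakyll.Core.Store      as Store

-- List every resource the provider finds under the current directory,
-- ignoring nothing (const False).
listResources :: IO ()
listResources = do
    store    <- Store.new True "_cache"
    provider <- newProvider store (const False) "."
    mapM_ (putStrLn . toFilePath) (resourceList provider)
```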

View file

@ -1,46 +1,103 @@
--------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Hakyll.Core.Provider.Internal
( Provider (..)
( ResourceInfo (..)
, Provider (..)
, newProvider
, resourceList
, resourceExists
, resourceMetadataResource
, resourceFilePath
, resourceString
, resourceLBS
, resourceModified
, resourceModificationTime
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Applicative ((<$>), (<*>))
import Control.DeepSeq (NFData (..), deepseq)
import Control.Monad (forM)
import Data.Binary (Binary (..))
import qualified Data.ByteString.Lazy as BL
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import Data.Monoid (mempty)
import Data.Set (Set)
import qualified Data.Set as S
import Data.Time (Day (..), UTCTime (..),
secondsToDiffTime)
import Data.Typeable (Typeable)
import System.Directory (getModificationTime)
import System.FilePath (addExtension, (</>))
--------------------------------------------------------------------------------
#if !MIN_VERSION_directory(1,2,0)
import Data.Time (readTime)
import System.Locale (defaultTimeLocale)
import System.Time (formatCalendarTime, toCalendarTime)
#endif
--------------------------------------------------------------------------------
import Hakyll.Core.Identifier
import Hakyll.Core.Store
import Hakyll.Core.Store (Store)
import qualified Hakyll.Core.Store as Store
import Hakyll.Core.Util.File
--------------------------------------------------------------------------------
-- | Because UTCTime doesn't have a Binary instance...
newtype BinaryTime = BinaryTime {unBinaryTime :: UTCTime}
deriving (Eq, NFData, Ord, Show, Typeable)
--------------------------------------------------------------------------------
instance Binary BinaryTime where
put (BinaryTime (UTCTime (ModifiedJulianDay d) dt)) =
put d >> put (floor dt :: Integer)
get = fmap BinaryTime $ UTCTime
<$> (ModifiedJulianDay <$> get)
<*> (secondsToDiffTime <$> get)
--------------------------------------------------------------------------------
data ResourceInfo = ResourceInfo
{ resourceInfoModified :: BinaryTime
, resourceInfoMetadata :: Maybe Identifier
} deriving (Show, Typeable)
--------------------------------------------------------------------------------
instance Binary ResourceInfo where
put (ResourceInfo mtime meta) = put mtime >> put meta
get = ResourceInfo <$> get <*> get
--------------------------------------------------------------------------------
instance NFData ResourceInfo where
rnf (ResourceInfo mtime meta) = rnf mtime `seq` rnf meta `seq` ()
--------------------------------------------------------------------------------
-- | Responsible for retrieving and listing resources
data Provider = Provider
{ -- Top of the provided directory
providerDirectory :: FilePath
providerDirectory :: FilePath
, -- | A list of all files found
providerSet :: Set Identifier
, -- | Cache keeping track of modified files
providerModifiedCache :: IORef (Map Identifier Bool)
providerFiles :: Map Identifier ResourceInfo
, -- | A list of the files from the previous run
providerOldFiles :: Map Identifier ResourceInfo
, -- | Underlying persistent store for caching
providerStore :: Store
providerStore :: Store
}
@ -51,30 +108,47 @@ newProvider :: Store -- ^ Store to use
-> FilePath -- ^ Search directory
-> IO Provider -- ^ Resulting provider
newProvider store ignore directory = do
list <- map fromFilePath . filter (not . ignore) <$>
getRecursiveContents directory
cache <- newIORef M.empty
return $ Provider directory (S.fromList list) cache store
list <- map fromFilePath <$> getRecursiveContents ignore directory
let universe = S.fromList list
files <- fmap (maxmtime . M.fromList) $ forM list $ \identifier -> do
rInfo <- getResourceInfo directory universe identifier
return (identifier, rInfo)
-- Get the old files from the store, and then immediately replace them by
-- the new files.
oldFiles <- fromMaybe mempty . Store.toMaybe <$> Store.get store oldKey
oldFiles `deepseq` Store.set store oldKey files
return $ Provider directory files oldFiles store
where
oldKey = ["Hakyll.Core.Provider.Internal.newProvider", "oldFiles"]
-- Update modified if metadata is modified
maxmtime files = flip M.map files $ \rInfo@(ResourceInfo mtime meta) ->
let metaMod = fmap resourceInfoModified $ meta >>= flip M.lookup files
in rInfo {resourceInfoModified = maybe mtime (max mtime) metaMod}
--------------------------------------------------------------------------------
getResourceInfo :: FilePath -> Set Identifier -> Identifier -> IO ResourceInfo
getResourceInfo directory universe identifier = do
mtime <- fileModificationTime $ directory </> toFilePath identifier
return $ ResourceInfo (BinaryTime mtime) $
if mdRsc `S.member` universe then Just mdRsc else Nothing
where
mdRsc = fromFilePath $ flip addExtension "metadata" $ toFilePath identifier
--------------------------------------------------------------------------------
resourceList :: Provider -> [Identifier]
resourceList = S.toList . providerSet
resourceList = M.keys . providerFiles
--------------------------------------------------------------------------------
-- | Check if a given resource exists
resourceExists :: Provider -> Identifier -> Bool
resourceExists provider =
(`S.member` providerSet provider) . setVersion Nothing
--------------------------------------------------------------------------------
-- | Each resource may have an associated metadata resource (with a @.metadata@
-- filename)
resourceMetadataResource :: Identifier -> Identifier
resourceMetadataResource =
fromFilePath . flip addExtension "metadata" . toFilePath
(`M.member` providerFiles provider) . setVersion Nothing
--------------------------------------------------------------------------------
@ -92,3 +166,38 @@ resourceString p i = readFile $ resourceFilePath p i
-- | Get the raw body of a resource of a lazy bytestring
resourceLBS :: Provider -> Identifier -> IO BL.ByteString
resourceLBS p i = BL.readFile $ resourceFilePath p i
--------------------------------------------------------------------------------
-- | A resource is modified if it or its metadata has changed
resourceModified :: Provider -> Identifier -> Bool
resourceModified p r = case (ri, oldRi) of
(Nothing, _) -> False
(Just _, Nothing) -> True
(Just n, Just o) -> resourceInfoModified n > resourceInfoModified o
where
normal = setVersion Nothing r
ri = M.lookup normal (providerFiles p)
oldRi = M.lookup normal (providerOldFiles p)
--------------------------------------------------------------------------------
resourceModificationTime :: Provider -> Identifier -> UTCTime
resourceModificationTime p i =
case M.lookup (setVersion Nothing i) (providerFiles p) of
Just ri -> unBinaryTime $ resourceInfoModified ri
Nothing -> error $
"Hakyll.Core.Provider.Internal.resourceModificationTime: " ++
"resource " ++ show i ++ " does not exist"
--------------------------------------------------------------------------------
fileModificationTime :: FilePath -> IO UTCTime
fileModificationTime fp = do
#if MIN_VERSION_directory(1,2,0)
getModificationTime fp
#else
ct <- toCalendarTime =<< getModificationTime fp
let str = formatCalendarTime defaultTimeLocale "%s" ct
return $ readTime defaultTimeLocale "%s" str
#endif
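
The `maxmtime` step in `newProvider` above propagates a metadata file's modification time to its parent resource. A simplified standalone model of that idea (not the commit's code):

```haskell
import qualified Data.Map  as M
import           Data.Time (UTCTime)

-- Each entry carries its own mtime plus an optional key of its metadata
-- entry; the effective mtime is bumped when the metadata file is newer.
bumpByMetadata :: Ord k => M.Map k (UTCTime, Maybe k) -> M.Map k UTCTime
bumpByMetadata files = flip M.map files $ \(mtime, meta) ->
    case meta >>= flip M.lookup files of
        Just (metaTime, _) -> max mtime metaTime
        Nothing            -> mtime
```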

View file

@ -31,13 +31,14 @@ loadMetadata p identifier = do
then second Just <$> loadMetadataHeader fp
else return (M.empty, Nothing)
emd <- if resourceExists p mi then loadMetadataFile mfp else return M.empty
emd <- case mi of
Nothing -> return M.empty
Just mi' -> loadMetadataFile $ resourceFilePath p mi'
return (M.union md emd, body)
where
fp = resourceFilePath p identifier
mi = resourceMetadataResource identifier
mfp = resourceFilePath p mi
mi = M.lookup identifier (providerFiles p) >>= resourceInfoMetadata
--------------------------------------------------------------------------------

View file

@ -23,6 +23,7 @@ resourceMetadata :: Provider -> Identifier -> IO Metadata
resourceMetadata p r
| not (resourceExists p r) = return M.empty
| otherwise = do
-- TODO keep time in md cache
load p r
Store.Found md <- Store.get (providerStore p)
[name, toFilePath r, "metadata"]

View file

@ -1,101 +0,0 @@
--------------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
module Hakyll.Core.Provider.Modified
( resourceModified
, resourceModificationTime
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>), (<*>))
import Control.Monad (when)
import qualified Crypto.Hash.MD5 as MD5
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.IORef
import qualified Data.Map as M
import Data.Time (UTCTime)
import System.Directory (getModificationTime)
--------------------------------------------------------------------------------
#if !MIN_VERSION_directory(1,2,0)
import Data.Time (readTime)
import System.Locale (defaultTimeLocale)
import System.Time (formatCalendarTime,
toCalendarTime)
#endif
--------------------------------------------------------------------------------
import Hakyll.Core.Identifier
import Hakyll.Core.Provider.Internal
import Hakyll.Core.Provider.MetadataCache
import Hakyll.Core.Store (Store)
import qualified Hakyll.Core.Store as Store
--------------------------------------------------------------------------------
-- | A resource is modified if it or its metadata has changed
resourceModified :: Provider -> Identifier -> IO Bool
resourceModified p r
| not exists = return False
| otherwise = do
cache <- readIORef cacheRef
case M.lookup normalized cache of
Just m -> return m
Nothing -> do
-- Check if the actual file was modified, and do a recursive
-- call to check if the metadata file was modified
m <- (||)
<$> fileDigestModified store filePath
<*> resourceModified p (resourceMetadataResource r)
modifyIORef cacheRef (M.insert normalized m)
-- Important! (But ugly)
when m $ resourceInvalidateMetadataCache p r
return m
where
normalized = setVersion Nothing r
exists = resourceExists p r
store = providerStore p
cacheRef = providerModifiedCache p
filePath = resourceFilePath p r
--------------------------------------------------------------------------------
-- | Utility: Check if a the digest of a file was modified
fileDigestModified :: Store -> FilePath -> IO Bool
fileDigestModified store fp = do
-- Get the latest seen digest from the store, and calculate the current
-- digest for the
lastDigest <- Store.get store key
newDigest <- fileDigest fp
if Store.Found newDigest == lastDigest
-- All is fine, not modified
then return False
-- Resource modified; store new digest
else do
Store.set store key newDigest
return True
where
key = ["Hakyll.Core.Resource.Provider.fileModified", fp]
--------------------------------------------------------------------------------
-- | Utility: Retrieve a digest for a given file
fileDigest :: FilePath -> IO B.ByteString
fileDigest = fmap MD5.hashlazy . BL.readFile
--------------------------------------------------------------------------------
resourceModificationTime :: Provider -> Identifier -> IO UTCTime
resourceModificationTime p i = do
#if MIN_VERSION_directory(1,2,0)
getModificationTime $ resourceFilePath p i
#else
ct <- toCalendarTime =<< getModificationTime (resourceFilePath p i)
let str = formatCalendarTime defaultTimeLocale "%s" ct
return $ readTime defaultTimeLocale "%s" str
#endif

View file

@ -16,9 +16,9 @@ import Control.Applicative (Applicative, (<$>))
import Control.Monad.Reader (ask)
import Control.Monad.RWS (RWST, runRWST)
import Control.Monad.Trans (liftIO)
import qualified Data.Map as M
import Data.Monoid (Monoid, mappend, mempty)
import Data.Set (Set)
import qualified Data.Set as S
--------------------------------------------------------------------------------
@ -92,7 +92,12 @@ instance MonadMetadata Rules where
runRules :: Rules a -> Provider -> IO RuleSet
runRules rules provider = do
(_, _, ruleSet) <- runRWST (unRules rules) env emptyRulesState
return $ nubCompilers ruleSet
case findDuplicate (map fst $ rulesCompilers ruleSet) of
Nothing -> return ruleSet
Just id' -> error $
"Hakyll.Core.Rules.Internal: two different rules for " ++
show id' ++ " exist, bailing out"
where
env = RulesRead
{ rulesProvider = provider
@ -102,9 +107,10 @@ runRules rules provider = do
--------------------------------------------------------------------------------
-- | Remove duplicate compilers from the 'RuleSet'. When two compilers match an
-- item, we prefer the first one
nubCompilers :: RuleSet -> RuleSet
nubCompilers set = set {rulesCompilers = nubCompilers' (rulesCompilers set)}
findDuplicate :: Ord a => [a] -> Maybe a
findDuplicate = go S.empty
where
nubCompilers' = M.toList . M.fromListWith (flip const)
go _ [] = Nothing
go s (x : xs)
| x `S.member` s = Just x
| otherwise = go (S.insert x s) xs
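
For site authors, the visible effect of this change is that two rules targeting the same identifier (and version) now abort with an error instead of one rule silently winning. A hypothetical `site.hs` fragment that would trigger it:

```haskell
{-# LANGUAGE OverloadedStrings #-}
import           Hakyll

main :: IO ()
main = hakyll $ do
    match "about.md" $ do
        route   idRoute
        compile copyFileCompiler
    match "about.md" $ do            -- second rule for the same identifier
        route   $ setExtension "html"
        compile pandocCompiler
```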

View file

@ -6,7 +6,7 @@ module Hakyll.Core.Runtime
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Monad (filterM, unless)
import Control.Monad (unless)
import Control.Monad.Error (ErrorT, runErrorT, throwError)
import Control.Monad.Reader (ask)
import Control.Monad.RWS (RWST, runRWST)
@ -140,8 +140,9 @@ scheduleOutOfDate = do
todo <- runtimeTodo <$> get
let identifiers = M.keys universe
modified <- fmap S.fromList $ flip filterM identifiers $
liftIO . resourceModified provider
modified = S.fromList $ flip filter identifiers $
resourceModified provider
let (ood, facts', msgs) = outOfDate identifiers modified facts
todo' = M.filterWithKey
(\id' _ -> id' `S.member` ood) universe

View file

@ -5,6 +5,7 @@
module Hakyll.Core.Store
( Store
, Result (..)
, toMaybe
, new
, set
, get
@ -53,6 +54,13 @@ data Result a
deriving (Show, Eq)
--------------------------------------------------------------------------------
-- | Convert result to 'Maybe'
toMaybe :: Result a -> Maybe a
toMaybe (Found x) = Just x
toMaybe _ = Nothing
--------------------------------------------------------------------------------
-- | Initialize the store
new :: Bool -- ^ Use in-memory caching
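
`toMaybe` mostly exists to pair with `Store.get`, as `newProvider` does earlier in this commit. A sketch of that pattern, with the constraints assumed to match what `Store.get` requires in this version:

```haskell
import           Control.Applicative ((<$>))
import           Data.Binary         (Binary)
import           Data.Maybe          (fromMaybe)
import           Data.Monoid         (Monoid, mempty)
import           Data.Typeable       (Typeable)
import qualified Hakyll.Core.Store   as Store

-- Fetch a cached value, falling back to mempty when the key is missing or
-- the stored type does not match.
getOrEmpty :: (Binary a, Monoid a, Typeable a)
           => Store.Store -> [String] -> IO a
getOrEmpty store key = fromMaybe mempty . Store.toMaybe <$> Store.get store key
```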

View file

@ -57,7 +57,7 @@ unixFilter = unixFilterWith writer reader
--
-- > match "music.wav" $ do
-- > route $ setExtension "ogg"
-- > compile $ getResourceLBS >>= withItemBody (unixFilter "oggenc" ["-"])
-- > compile $ getResourceLBS >>= withItemBody (unixFilterLBS "oggenc" ["-"])
unixFilterLBS :: String -- ^ Program name
-> [String] -- ^ Program args
-> ByteString -- ^ Program input

View file

@ -25,12 +25,13 @@ makeDirectories = createDirectoryIfMissing True . takeDirectory
--------------------------------------------------------------------------------
-- | Get all contents of a directory.
getRecursiveContents :: FilePath -- ^ Directory to search
-> IO [FilePath] -- ^ List of files found
getRecursiveContents top = go ""
getRecursiveContents :: (FilePath -> Bool) -- ^ Ignore this file/directory
-> FilePath -- ^ Directory to search
-> IO [FilePath] -- ^ List of files found
getRecursiveContents ignore top = go ""
where
isProper = (`notElem` [".", ".."])
go dir = do
isProper x = notElem x [".", ".."] && not (ignore x)
go dir = do
dirExists <- doesDirectoryExist (top </> dir)
if not dirExists
then return []
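
The new first argument is an ignore predicate applied to each directory entry as the walk descends; the `const False` callers elsewhere in this commit simply disable it. A sketch with an illustrative dot-file rule and a placeholder path:

```haskell
import           Hakyll.Core.Util.File (getRecursiveContents)

-- List everything under "provider", skipping entries whose name starts
-- with a dot.
listVisible :: IO [FilePath]
listVisible = getRecursiveContents (\name -> take 1 name == ".") "provider"
```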

View file

@ -23,7 +23,7 @@ main = do
progName <- getProgName
args <- getArgs
srcDir <- getDataFileName "example"
files <- getRecursiveContents srcDir
files <- getRecursiveContents (const False) srcDir
case args of
[dstDir] -> forM_ files $ \file -> do

View file

@ -8,7 +8,7 @@ module Hakyll.Web.Pandoc.FileType
--------------------------------------------------------------------------------
import System.FilePath (takeExtension)
import System.FilePath (splitExtension)
--------------------------------------------------------------------------------
@ -36,27 +36,31 @@ data FileType
--------------------------------------------------------------------------------
-- | Get the file type for a certain file. The type is determined by extension.
fileType :: FilePath -> FileType
fileType = fileType' . takeExtension
fileType = uncurry fileType' . splitExtension
where
fileType' ".css" = Css
fileType' ".htm" = Html
fileType' ".html" = Html
fileType' ".lhs" = LiterateHaskell Markdown
fileType' ".markdown" = Markdown
fileType' ".md" = Markdown
fileType' ".mdn" = Markdown
fileType' ".mdown" = Markdown
fileType' ".mdwn" = Markdown
fileType' ".mkd" = Markdown
fileType' ".mkdwn" = Markdown
fileType' ".org" = OrgMode
fileType' ".page" = Markdown
fileType' ".rst" = Rst
fileType' ".tex" = LaTeX
fileType' ".text" = PlainText
fileType' ".textile" = Textile
fileType' ".txt" = PlainText
fileType' _ = Binary -- Treat unknown files as binary
fileType' _ ".css" = Css
fileType' _ ".htm" = Html
fileType' _ ".html" = Html
fileType' f ".lhs" = LiterateHaskell $ case fileType f of
-- If no extension is given, default to Markdown + LiterateHaskell
Binary -> Markdown
-- Otherwise, LaTeX + LiterateHaskell or whatever the user specified
x -> x
fileType' _ ".markdown" = Markdown
fileType' _ ".md" = Markdown
fileType' _ ".mdn" = Markdown
fileType' _ ".mdown" = Markdown
fileType' _ ".mdwn" = Markdown
fileType' _ ".mkd" = Markdown
fileType' _ ".mkdwn" = Markdown
fileType' _ ".org" = OrgMode
fileType' _ ".page" = Markdown
fileType' _ ".rst" = Rst
fileType' _ ".tex" = LaTeX
fileType' _ ".text" = PlainText
fileType' _ ".textile" = Textile
fileType' _ ".txt" = PlainText
fileType' _ _ = Binary -- Treat unknown files as binary
--------------------------------------------------------------------------------
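
A tiny standalone check of the new two-extension behaviour; it mirrors the test cases added later in this commit:

```haskell
import           Control.Monad              (unless)
import           Hakyll.Web.Pandoc.FileType (FileType (..), fileType)

-- Fails loudly if the ".tex.lhs" / bare ".lhs" defaults change.
main :: IO ()
main = unless ok $ error "fileType sketch: unexpected result"
  where
    ok = fileType "posts/bananas.lhs"     == LiterateHaskell Markdown
      && fileType "posts/bananas.tex.lhs" == LiterateHaskell LaTeX
```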

View file

@ -41,12 +41,13 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Web.Tags
( Tags
( Tags (..)
, getTags
, buildTagsWith
, buildTags
, buildCategories
, tagsRules
, renderTags
, renderTagCloud
, renderTagList
, tagsField
@ -149,7 +150,7 @@ tagsRules tags rules =
--------------------------------------------------------------------------------
-- | Render tags in HTML
-- | Render tags in HTML (the flexible higher-order function)
renderTags :: (String -> String -> Int -> Int -> Int -> String)
-- ^ Produce a tag item: tag, url, count, min count, max count
-> ([String] -> String)
@ -218,13 +219,9 @@ renderTagList = renderTags makeLink (intercalate ", ")
--------------------------------------------------------------------------------
-- | Render tags with links with custom function to get tags. It is typically
-- together with 'getTags' like this:
--
-- > renderTagsFieldWith (customFunction . getTags)
-- > "tags" (fromCapture "tags/*")
-- | Render tags with links with custom function to get tags
tagsFieldWith :: (Identifier -> Compiler [String]) -- ^ Get the tags
-> String -- ^ Destination key
-> String -- ^ Destination field
-> Tags -- ^ Tags structure
-> Context a -- ^ Resulting context
tagsFieldWith getTags' key tags = field key $ \item -> do

View file

@ -207,8 +207,7 @@ modificationTimeFieldWith :: TimeLocale -- ^ Time output locale
-> Context a -- ^ Resulting context
modificationTimeFieldWith locale key fmt = field key $ \i -> do
provider <- compilerProvider <$> compilerAsk
mtime <- compilerUnsafeIO $
resourceModificationTime provider $ itemIdentifier i
let mtime = resourceModificationTime provider $ itemIdentifier i
return $ formatTime locale fmt mtime
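
Site code usually reaches this through `modificationTimeField`. A sketch of a context built on it; the field name and format string are arbitrary examples:

```haskell
import           Data.Monoid (mappend)
import           Hakyll

-- "updated" becomes available in templates as $updated$.
postCtx :: Context String
postCtx = modificationTimeField "updated" "%B %e, %Y" `mappend` defaultContext
```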

View file

@ -37,3 +37,4 @@ case01 = do
doesntExist <- resourceMetadata provider "doesntexist.md"
M.empty @=? doesntExist
cleanTestEnv

View file

@ -53,6 +53,7 @@ rulesTest = do
checkRoute "example.mv1" (sv "mv1" "example.md")
checkRoute "example.mv2" (sv "mv2" "example.md")
readIORef ioref >>= assert
cleanTestEnv
where
sv g = setVersion (Just g)
expected =

View file

@ -67,3 +67,4 @@ wrongType = do
e == typeOf (undefined :: Int) &&
t == typeOf (undefined :: String)
_ -> False
cleanTestEnv

View file

@ -0,0 +1,26 @@
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Web.Pandoc.FileType.Tests
( tests
) where
--------------------------------------------------------------------------------
import Test.Framework (Test, testGroup)
import Test.HUnit ((@=?))
--------------------------------------------------------------------------------
import Hakyll.Web.Pandoc.FileType
import TestSuite.Util
--------------------------------------------------------------------------------
tests :: Test
tests = testGroup "Hakyll.Web.Pandoc.FileType.Tests" $
fromAssertions "fileType"
[ Markdown @=? fileType "index.md"
, Rst @=? fileType "about/foo.rst"
, LiterateHaskell Markdown @=? fileType "posts/bananas.lhs"
, LiterateHaskell LaTeX @=? fileType "posts/bananas.tex.lhs"
]

View file

@ -44,6 +44,7 @@ case01 = do
pandocCompiler >>= applyTemplate (itemBody tpl) testContext
out @=? itemBody item
cleanTestEnv
--------------------------------------------------------------------------------
@ -63,6 +64,7 @@ testApplyJoinTemplateList = do
applyJoinTemplateList ", " tpl defaultContext [i1, i2]
str @?= "<b>Hello</b>, <b>World</b>"
cleanTestEnv
where
i1 = Item "item1" "Hello"
i2 = Item "item2" "World"

View file

@ -20,6 +20,7 @@ import qualified Hakyll.Core.UnixFilter.Tests
import qualified Hakyll.Core.Util.String.Tests
import qualified Hakyll.Web.Html.RelativizeUrls.Tests
import qualified Hakyll.Web.Html.Tests
import qualified Hakyll.Web.Pandoc.FileType.Tests
import qualified Hakyll.Web.Template.Context.Tests
import qualified Hakyll.Web.Template.Tests
@ -38,6 +39,7 @@ main = defaultMain
, Hakyll.Core.Util.String.Tests.tests
, Hakyll.Web.Html.RelativizeUrls.Tests.tests
, Hakyll.Web.Html.Tests.tests
, Hakyll.Web.Pandoc.FileType.Tests.tests
, Hakyll.Web.Template.Context.Tests.tests
, Hakyll.Web.Template.Tests.tests
]

View file

@ -38,3 +38,4 @@ who still maintains the package. Contributors:
- [seschwar](https://github.com/seschwar)
- [favonia](https://github.com/favonia)
- [Robin Windels](https://github.com/rwindelz)
- [Miikka Koskinen](http://miikka.me/)

View file

@ -17,6 +17,10 @@ this list. This list has no particular ordering.
[source](https://github.com/Keruspe/blog/)
- <http://blog.clement.delafargue.name/>,
[source](https://github.com/divarvel/blog)
- <http://bneijt.nl>,
[source](https://github.com/bneijt/bneijt.nl)
- <http://brianshourd.com/>,
[source](https://github.com/brianshourd/brianshourd.com)
## Hakyll 3.X

Binary file not shown (image, 40 KiB before and after)

View file

@ -4,6 +4,18 @@ title: Releases
# Releases
## Hakyll 4.1.4.0
*January 26, 2013*
- Export the flexible `renderTags` function
## Hakyll 4.1.3.0
*January 26, 2013*
- Export the constructor of the `Tags` datatype
## Hakyll 4.1.2.0
*January 20, 2013*

View file

@ -32,8 +32,9 @@ started:
$ hakyll-init my-site
If `hakyll-init` is not found, you should make sure `$HOME/.cabal/bin` is in
your `$PATH`.
This creates a folder `my-site` in the current directory, with some example
content and a generic configuration. If `hakyll-init` is not found, you should
make sure `$HOME/.cabal/bin` is in your `$PATH`.
The file `site.hs` holds the configuration of your site, as an executable
haskell program. We can compile and run it like this:

View file

@ -61,7 +61,7 @@ this will become clear soon. The real question here is why we use `create`
instead of `match`.
The answer is simple: there is no `archive.html` file in our project directory!
So if we were to use `match`, no a file would be matched, and hence, nothing
So if we were to use `match`, no file would be matched, and hence, nothing
would appear in the output directory. `create`, however, ensures the items
listed are always produced.
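
A minimal sketch of such a `create` rule (assuming the usual `OverloadedStrings` site setup; the template path and context are illustrative, not this tutorial's exact code):

```haskell
create ["archive.html"] $ do
    route idRoute
    compile $
        makeItem ""
            >>= loadAndApplyTemplate "templates/archive.html" defaultContext
            >>= relativizeUrls
```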

View file

@ -41,9 +41,11 @@ Templates
Let's have a look at a simple template:
<h1>$title$</h1>
<div class="info">Posted on $date$</div>
$body$
```html
<h1>$title$</h1>
<div class="info">Posted on $date$</div>
$body$
```
As you can probably guess, template files just contain text and only the `$`
character has special meaning: text between dollar signs ("fields") is replaced
@ -53,7 +55,9 @@ use `$$`.
You usually compile the templates from disk using the aptly named
`templateCompiler`:
match "templates/*" $ compile templateCompiler
```haskell
match "templates/*" $ compile templateCompiler
```
Notice the lack of `route` here: this is because we don't need to write the
templates to your `_site` folder, we just want to use them elsewhere.
@ -86,7 +90,7 @@ And `$title$` like this:
```haskell
titleContext :: Context a
titleContext :: field "title" $ \item -> do
titleContext = field "title" $ \item -> do
metadata <- getMetadata (itemIdentifier item)
return $ fromMaybe "No title" $ M.lookup "title" metadata
```
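
Since a `Context` is a `Monoid`, such a field is typically layered on top of the defaults; a sketch (`mappend` comes from `Data.Monoid`):

```haskell
postContext :: Context String
postContext = titleContext `mappend` defaultContext
```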

View file

@ -31,13 +31,25 @@ recent version of Pandoc (1.9 and onwards). Note that you also need to include
some CSS in order for this to work! This site, for example, uses the [default
Pandoc syntax CSS file][syntax-css].
To highlight a code block, you need to use Pandoc's fenced code block syntax to
set the block's language. For example, here's how you highlight Haskell code:
``` haskell
fac n = foldr (*) 1 [1..n]
```
For details, see Pandoc's user guide on [fenced code
blocks][pandoc-code-blocks] and [inline code][pandoc-inline-code].
[syntax-css]: https://github.com/jaspervdj/hakyll/blob/master/web/css/syntax.css
[pandoc-code-blocks]: http://johnmacfarlane.net/pandoc/README.html#fenced-code-blocks
[pandoc-inline-code]: http://johnmacfarlane.net/pandoc/README.html#verbatim
## When should I rebuild and when should I build?
If you execute a `./site build`, Hakyll will build your site incrementally.
However, we can not detect if you edited `site.hs`. In this case, you first want
to compile it again `site.hs` again, and then do a `./site rebuild`.
to compile `site.hs` again, and then do a `./site rebuild`.
After rebuilding your site, all files will look as "modified" to the filesystem.
This means that when you upload your site, it will usually transfer all files --