Commit 5d18e020 authored by Alice Brenon

* Add unit tests

* Add optparse-applicative parsers for input / output parameters
* Rework metadata types and the way they compose (splitting Article into
  PrimaryKey and Entry)
parent f7b5f829
Showing 615 additions and 101 deletions
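The commit splits the old Article type into PrimaryKey (the record's identity) and Entry (the split-specific fields), which are recombined with the (@) operator from GEODE.Metadata.Types. A minimal composition sketch, using only names exported by this commit and made-up values:

{-# LANGUAGE OverloadedStrings #-}
import GEODE.Metadata (Book(..), PrimaryKey(..), glue, tsvLines, uid)
import GEODE.Metadata.Entry (Entry(..))

-- Hypothetical values, for illustration only.
key :: PrimaryKey
key = PrimaryKey EDdA 1 42

entry :: Entry
entry = Entry "ABEILLE" "abeille-0" 52

main :: IO ()
main = do
  putStrLn (uid key)        -- "EDdA_1_42"
  tsvLines [glue key entry] -- one tab-separated record, fields in default-header order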
*.o
*.hi
Main
# Revision history for ghc-geode
## 0.2.0.0 -- 2023-07-17
* Add unit tests
* Add optparse-applicative parsers for input / output parameters
* Rework metadata types and the way they compose (splitting Article into
PrimaryKey and Entry)
## 0.1.0.0 -- 2023-05-23
* Exposing some basic stuff required by recent changes in [EDdA-Clinic](https://gitlab.huma-num.fr/alicebrenon/EDdAClinic)
cabal-version: 2.4
name: ghc-geode
name: geode
version: 0.1.0.0
synopsis:
Data structures and tooling used in project GEODE
@@ -22,10 +22,15 @@ extra-source-files: CHANGELOG.md
library
exposed-modules: GEODE.Metadata
, GEODE.Metadata.Entry
, GEODE.Metadata.SplitContext
, GEODE.Metadata.TSV.Header
, GEODE.Options
-- Modules included in this library but not exported.
other-modules: GEODE.Metadata.Article
, GEODE.Metadata.Projector
other-modules: GEODE.Metadata.Contrastive
, GEODE.Metadata.PrimaryKey
, GEODE.Metadata.TSV
, GEODE.Metadata.Types
-- LANGUAGE extensions used by modules in this package.
@@ -34,7 +39,29 @@ library
, bytestring >= 0.11.3 && <0.12
, containers >= 0.6.5.1 && <0.7
, cassava >= 0.5.3 && <0.6
, filepath >= 1.4.2.2 && <1.5
, mtl >= 2.2.2 && <2.3
, optparse-applicative >= 0.13.2 && <0.18
, text >= 1.2.5 && <1.3
, unordered-containers >= 0.2.19.1 && <0.3
, vector >= 0.12.3.1 && <0.13
hs-source-dirs: lib
default-language: Haskell2010
test-suite ghc-geode-test
default-language: Haskell2010
type: exitcode-stdio-1.0
hs-source-dirs: test
main-is: Main.hs
other-modules: GEODE.Metadata.TestEntry
, GEODE.Metadata.TestPrimaryKey
, Test.HUnit.Extra
build-depends: base
, bytestring
, containers
, cassava
, geode
, mtl
, HUnit >= 1.6.2.0 && <1.7
, text
, unordered-containers
(use-modules ((gnu packages haskell-xyz) #:select (ghc-cassava))
(use-modules ((gnu packages haskell-xyz) #:select (ghc-cassava
ghc-optparse-applicative))
((gnu packages haskell-check) #:select (ghc-hunit))
((guix build-system haskell) #:select (haskell-build-system))
((guix git-download) #:select (git-predicate))
((guix gexp) #:select (local-file))
@@ -15,7 +17,7 @@
#:recursive? #t
#:select? (git-predicate %source-dir)))
(build-system haskell-build-system)
(inputs (list ghc-cassava))
(inputs (list ghc-cassava ghc-optparse-applicative ghc-hunit))
(home-page "https://gitlab.liris.cnrs.fr/geode/ghc-geode")
(synopsis "Data structures and tooling used in project GEODE")
(description
{-# LANGUAGE NamedFieldPuns, OverloadedStrings #-}
module GEODE.Metadata (
Article(..)
, Authors(..)
, Book(..)
, Domains(..)
, FromBook(..)
, HasAuthors(..)
, HasDomains(..)
, InFile(..)
, TXMText
, Unique(..)
, groupBy
, list
, readTsv
, sortBy
, tsvFile
, tsvLines
) where
{-# LANGUAGE OverloadedStrings #-}
module GEODE.Metadata
( module Contrastive
, module Entry
, module PrimaryKey
, module SplitContext
, module TSV
, module TSV_Header
, module Types
, groupBy
, indexBy
, sortBy ) where
import Data.ByteString.Lazy as ByteString (putStr, readFile, writeFile)
import Data.ByteString.Char8 as StrictByteString (pack)
import Data.Csv
( DecodeOptions(..), EncodeOptions(..), FromRecord, HasHeader(..)
, ToNamedRecord, ToRecord, decodeWith, defaultEncodeOptions, encodeByNameWith
, encodeWith, header )
import Data.Foldable as Foldable (toList)
import Data.List (sortOn)
import Data.Map.Strict as Map (alter, empty, toList)
import Data.Text as Text (Text, intercalate, unpack)
import Data.Vector as Vector (Vector)
import GEODE.Metadata.Article
import GEODE.Metadata.Projector
(FromBook(..), HasAuthors(..), HasDomains(..), InFile(..), TXMText, Unique(..))
import GEODE.Metadata.Types (Authors(..), Book(..), Domains(..))
list :: [Text] -> String
list ts = Text.unpack $ ":" <> intercalate ":" ts <> ":"
readTsv :: FromRecord a => FilePath -> IO (Either String (Vector a))
readTsv source = decodeWith fromTsv HasHeader <$> ByteString.readFile source
where
fromTsv = DecodeOptions {decDelimiter = fromIntegral (fromEnum '\t')}
toTsv :: EncodeOptions
toTsv = defaultEncodeOptions
{ encDelimiter = fromIntegral (fromEnum '\t')
, encUseCrLf = False }
tsvFile :: ToNamedRecord a => FilePath -> [String] -> [a] -> IO ()
tsvFile target fields =
ByteString.writeFile target
. encodeByNameWith toTsv (header $ StrictByteString.pack <$> fields)
tsvLines :: ToRecord a => [a] -> IO ()
tsvLines = ByteString.putStr . encodeWith toTsv
import Data.Map.Strict as Map (Map, alter, empty, insert, toList)
import GEODE.Metadata.Contrastive as Contrastive
import GEODE.Metadata.Entry as Entry
( Entry(headWord, name, page), newEntry, normalize )
import GEODE.Metadata.PrimaryKey as PrimaryKey
import GEODE.Metadata.SplitContext as SplitContext hiding (get, page, rank)
import GEODE.Metadata.TSV as TSV
import GEODE.Metadata.TSV.Header as TSV_Header
( DefaultFields(..), HasDefaultHeader(..), WithDefaultHeader(..), glue )
import GEODE.Metadata.Types as Types
sortBy :: (Foldable t, Ord k) => (a -> k) -> t a -> [a]
sortBy field = sortOn field . Foldable.toList
@@ -62,3 +31,6 @@ groupBy :: (Foldable t, Ord k) => (v -> k) -> t v -> [(k, [v])]
groupBy field = Map.toList . foldr group Map.empty
where
group article = Map.alter (Just . maybe [article] (article:)) (field article)
indexBy :: (Foldable t, Ord k) => (a -> k) -> t a -> Map k a
indexBy f = foldr (\a -> Map.insert (f a) a) Map.empty
{-# LANGUAGE DeriveGeneric, OverloadedStrings #-}
module GEODE.Metadata.Contrastive
( Contrastive(..)
, MultiText(..)
, formatList ) where
import Data.Csv
( FromField(..), FromNamedRecord(..), ToNamedRecord(..), ToField(..) )
import Data.Text (Text, intercalate, splitOn, uncons, unsnoc)
import GEODE.Metadata.TSV.Header (DefaultFields(..), HasDefaultHeader(..))
import GHC.Generics (Generic)
newtype MultiText = MultiText
{ getList :: [Text] } deriving (Show)
formatList :: MultiText -> Text
formatList = colonFormat . getList
where
colonFormat ts = ":" <> intercalate ":" ts <> ":"
instance FromField MultiText where
parseField f = parseField f >>= checkAndSplit
where
popBoundaries t0 = do
(firstChar,t1) <- uncons t0
(middle,lastChar) <- unsnoc t1
pure (firstChar,middle,lastChar)
checkAndSplit t =
case popBoundaries t of
Just (':',"",':') -> pure $ MultiText []
Just (':',fields,':') -> pure.MultiText $ splitOn ":" fields
_ -> mempty
instance ToField MultiText where
toField = toField . formatList
data Contrastive = Contrastive
{ authors :: MultiText
, domains :: MultiText
, subCorpus :: MultiText } deriving (Generic, Show)
instance FromNamedRecord Contrastive
instance ToNamedRecord Contrastive
instance HasDefaultHeader Contrastive where
defaultFields = DefaultFields [ "authors", "domains", "subCorpus" ]
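A small sketch of the colon-delimited list format handled by MultiText, assuming a made-up author list (parseField and runParser are the cassava functions):

{-# LANGUAGE OverloadedStrings #-}
import Data.Csv (parseField, runParser)
import Data.Text (Text)
import GEODE.Metadata.Contrastive (MultiText(..), formatList)

rendered :: Text
rendered = formatList (MultiText ["Diderot", "d'Alembert"])
-- ":Diderot:d'Alembert:"

parsedBack :: Either String [Text]
parsedBack = getList <$> runParser (parseField ":Diderot:d'Alembert:")
-- Right ["Diderot","d'Alembert"]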
{-# LANGUAGE DeriveGeneric, FlexibleContexts, OverloadedStrings #-}
module GEODE.Metadata.Entry
( Entry(..)
, newEntry
, normalize ) where
import Data.Char (isAlphaNum, isSpace, isUpper, toLower)
import Data.Csv (FromNamedRecord(..), ToNamedRecord(..))
import Data.Text as Text (Text, concat, foldl', pack, snoc)
import GEODE.Metadata.SplitContext (Field(..), SplitContext(..), next)
import GEODE.Metadata.TSV.Header
(DefaultFields(..), HasDefaultHeader(..))
import GHC.Generics (Generic)
data Entry = Entry
{ headWord :: Text
, name :: Text
, page :: Int } deriving (Generic, Show)
instance FromNamedRecord Entry
instance ToNamedRecord Entry
instance HasDefaultHeader Entry where
defaultFields = DefaultFields [ "headWord", "name", "page" ]
normalize :: Text -> Text
normalize = Text.foldl' appendIf mempty
where
appendIf tmpText newChar
| isSpace newChar = tmpText
| isUpper newChar = tmpText `snoc` toLower newChar
| isAlphaNum newChar = tmpText `snoc` newChar
| otherwise = tmpText `snoc` '-'
newEntry :: SplitContext m => Text -> m Entry
newEntry headWord = do
count <- Text.pack . show <$> next (HeadWord prefix)
Entry headWord (Text.concat [prefix, "-", count]) <$> get Page
where
prefix = normalize headWord
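For reference, a hedged example of what normalize and newEntry produce, using the same headword as the test data further down:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import GEODE.Metadata.Entry (Entry(name), newEntry, normalize)
import GEODE.Metadata.SplitContext (evalSplit)

normalized :: Text
normalized = normalize "ALCALA DE HÉNARÈS"
-- "alcaladehénarès": spaces dropped, upper case folded to lower

firstName :: Text
firstName = evalSplit (name <$> newEntry "ALCALA DE HÉNARÈS")
-- "alcaladehénarès-0": the first entry for a given headword gets suffix 0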
{-# LANGUAGE DeriveGeneric, FlexibleContexts, FlexibleInstances, NamedFieldPuns, OverloadedStrings #-}
module GEODE.Metadata.PrimaryKey
( Book(..)
, PrimaryKey(..)
, relativePath
, uid ) where
import Data.Csv (FromField(..), FromNamedRecord(..), ToNamedRecord(..), ToField(..))
import GEODE.Metadata.Types (Has(..))
import GEODE.Metadata.TSV.Header (DefaultFields(..), HasDefaultHeader(..))
import GHC.Generics (Generic)
import System.FilePath ((</>), (<.>))
import Text.Printf (printf)
data Book = EDdA | LGE | Wikipedia deriving (Eq, Ord, Read, Show)
instance FromField Book where
parseField "EDdA" = pure EDdA
parseField "LGE" = pure LGE
parseField "Wikipedia" = pure Wikipedia
parseField _ = mempty
instance ToField Book where
toField = toField . show
data PrimaryKey = PrimaryKey
{ book :: Book
, tome :: Int
, rank :: Int } deriving (Eq, Ord, Generic, Show)
instance FromNamedRecord PrimaryKey
instance ToNamedRecord PrimaryKey
uid :: Has PrimaryKey a => a -> String
uid a = printf "%s_%d_%d" (show $ book) tome rank
where
PrimaryKey {book, tome, rank} = get a
relativePath :: Has PrimaryKey a => a -> String -> FilePath
relativePath a extension =
(show book) </> ("T" <> show tome) </> (show rank) <.> extension
where
PrimaryKey {book, tome, rank} = get a
instance HasDefaultHeader PrimaryKey where
defaultFields = DefaultFields [ "book", "tome", "rank" ]
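A quick illustration of the directory layout produced by relativePath, with a made-up key and extension:

import GEODE.Metadata.PrimaryKey (Book(..), PrimaryKey(..), relativePath)

examplePath :: FilePath
examplePath = relativePath (PrimaryKey EDdA 1 42) "txt"
-- "EDdA/T1/42.txt"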
{-# LANGUAGE ConstraintKinds #-}
module GEODE.Metadata.Projector
( FromBook(..)
, HasAuthors(..)
, HasDomains(..)
, InFile(..)
, TXMText
, Unique(..) ) where
import GEODE.Metadata.Types (Authors(..), Book, Domains(..))
--( FromBook(..)
--, FromTome(..)
( HasAuthors(..)
, HasDomains(..) ) where
--, InFile(..)
--, Named(..)
--, TXMText
--, Unique(..) ) where
import GEODE.Metadata.Types (Authors(..), Domains(..))
--import GEODE.Metadata.Types (Authors(..), Book, Domains(..))
import Data.Text (Text)
{-
class Unique a where
uid :: a -> String
class FromBook a where
book :: a -> Book
class FromTome a where
tome :: a -> Int
class Named a where
name :: a -> Text
-}
class HasAuthors a where
authors_ :: a -> Authors
@@ -28,7 +39,9 @@ class HasDomains a where
domains :: a -> [Text]
domains = getDomains . domains_
{-
class InFile a where
relativePath :: a -> FilePath
relativePath :: a -> String -> FilePath
-}
type TXMText a = (Unique a, FromBook a, HasAuthors a, HasDomains a)
--type TXMText a = (Unique a, FromBook a, HasAuthors a, HasDomains a)
{-# LANGUAGE DeriveFunctor, GADTs #-}
module GEODE.Metadata.SplitContext
( Current(..)
, Field(..)
, HeadWords
, SplitContext(..)
, SplitContextT(..)
, evalSplit
, evalSplitT
, next
, runSplit
, runSplitT ) where
import Control.Monad.Identity (Identity(..))
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans (MonadTrans(..))
import Data.Map as Map (Map, (!?), delete, empty, insert)
import Data.Text (Text)
type HeadWords = Map Text Int
data Current = Current { page :: Int, rank :: Int, headWords :: HeadWords }
data Field a where
Page :: Field Int
Rank :: Field Int
HeadWord :: Text -> Field (Maybe Int)
class Monad m => SplitContext m where
context :: m Current
update :: (Current -> Current) -> m ()
get :: Field a -> m a
get Page = page <$> context
get Rank = rank <$> context
get (HeadWord h) = ((!? h) . headWords) <$> context
set :: Field a -> a -> m ()
set Page p = update $ \c -> c { page = p }
set Rank r = update $ \c -> c { rank = r }
set (HeadWord h) Nothing = update $ \c -> c { headWords = delete h $ headWords c }
set (HeadWord h) (Just n) = update $ \c -> c { headWords = insert h n $ headWords c }
editLookup :: Field a -> (a -> a) -> m a
editLookup field f = get field >>= (\a -> set field a *> pure a) . f
lookupEdit :: Field a -> (a -> a) -> m a
lookupEdit field f = get field >>= \a -> set field (f a) *> pure a
edit :: Field a -> (a -> a) -> m ()
edit field = (() <$) . editLookup field
{-# MINIMAL context, update #-}
newtype SplitContextT m a =
SplitContextT { runWithContext :: Current -> m (a, Current) }
deriving (Functor)
instance Monad m => Applicative (SplitContextT m) where
pure a = SplitContextT $ \current -> pure (a, current)
sCF <*> sCA = SplitContextT $ \current0 -> do
(f, current1) <- runWithContext sCF current0
(a, current2) <- runWithContext sCA current1
pure (f a, current2)
instance Monad m => Monad (SplitContextT m) where
sCA >>= f = SplitContextT $ \current0 -> do
(a, current1) <- runWithContext sCA current0
runWithContext (f a) current1
instance Monad m => SplitContext (SplitContextT m) where
context = SplitContextT $ \current -> pure (current, current)
update f = SplitContextT $ \current -> pure ((), f current)
instance MonadTrans SplitContextT where
lift m = SplitContextT $ \current -> m >>= \a -> pure (a, current)
instance MonadIO m => MonadIO (SplitContextT m) where
liftIO = lift . liftIO
next :: SplitContext m => Field a -> m Int
next f@(HeadWord _) = lookupEdit f (Just . maybe 1 (+1)) >>= pure . maybe 0 id
next Page = lookupEdit Page (+1)
next Rank = lookupEdit Rank (+1)
runSplitT :: SplitContextT m a -> m (a, Current)
runSplitT = flip runWithContext $
Current { page = 1, rank = 1, headWords = Map.empty }
evalSplitT :: Functor m => SplitContextT m a -> m a
evalSplitT = fmap fst . runSplitT
runSplit :: SplitContextT Identity a -> (a, Current)
runSplit = runIdentity . runSplitT
evalSplit :: SplitContextT Identity a -> a
evalSplit = fst . runSplit
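A hedged usage sketch of the counters SplitContext maintains (the headword string is made up): Page and Rank start at 1, while each HeadWord counter starts at 0, which is what yields entry names like "a-0", "a-1" in the tests below.

{-# LANGUAGE OverloadedStrings #-}
import GEODE.Metadata.SplitContext (Field(..), evalSplit, next)

counters :: (Int, Int, Int, Int)
counters = evalSplit $ (,,,)
  <$> next Rank                 -- 1: returns the current rank, then bumps it
  <*> next Rank                 -- 2
  <*> next (HeadWord "abeille") -- 0: first occurrence of this headword
  <*> next (HeadWord "abeille") -- 1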
{-# LANGUAGE ScopedTypeVariables #-}
module GEODE.Metadata.TSV
( readNamedTsv
, readTsv
, toTsv
, tsvFile
, tsvLines ) where
import Data.ByteString.Lazy as ByteString (putStr, readFile, writeFile)
import Data.Csv
( DecodeOptions(..), EncodeOptions(..), FromNamedRecord(..), FromRecord
, HasHeader(..), ToNamedRecord(..), ToRecord(..), decodeByNameWith
, decodeWith, defaultEncodeOptions, encodeByNameWith, encodeWith )
import Data.Foldable (toList)
import Data.Vector (Vector)
import GEODE.Metadata.TSV.Header (HasDefaultHeader, getHeader, for)
readNamedTsv :: FromNamedRecord a => FilePath -> IO (Either String (Vector a))
readNamedTsv source =
(fmap snd . decodeByNameWith fromTsv) <$> ByteString.readFile source
where
fromTsv = DecodeOptions {decDelimiter = fromIntegral (fromEnum '\t')}
readTsv :: FromRecord a => FilePath -> IO (Either String (Vector a))
readTsv source = decodeWith fromTsv NoHeader <$> ByteString.readFile source
where
fromTsv = DecodeOptions {decDelimiter = fromIntegral (fromEnum '\t')}
toTsv :: EncodeOptions
toTsv = defaultEncodeOptions
{ encDelimiter = fromIntegral (fromEnum '\t')
, encUseCrLf = False }
tsvFile :: forall a t. (Foldable t, HasDefaultHeader a, ToNamedRecord a) =>
FilePath -> t a -> IO ()
tsvFile target = ByteString.writeFile target . encode . toList
where
encode = encodeByNameWith toTsv (getHeader (for :: a))
tsvLines :: (Foldable t, ToRecord a) => t a -> IO ()
tsvLines = ByteString.putStr . encodeWith toTsv . toList
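A round-trip sketch for the TSV helpers, assuming a made-up file name and records:

import Data.Vector (Vector)
import GEODE.Metadata.PrimaryKey (Book(..), PrimaryKey(..))
import GEODE.Metadata.TSV (readNamedTsv, tsvFile)

demo :: IO ()
demo = do
  tsvFile "metadata.tsv" [ PrimaryKey EDdA 1 42, PrimaryKey LGE 3 7 ]
  parsed <- readNamedTsv "metadata.tsv" :: IO (Either String (Vector PrimaryKey))
  print (length <$> parsed)   -- Right 2 on success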
{-# LANGUAGE ExplicitNamespaces, ScopedTypeVariables, TypeOperators #-}
module GEODE.Metadata.TSV.Header
( DefaultFields(..)
, HasDefaultHeader(..)
, WithDefaultHeader(..)
, for
, getHeader
, glue ) where
import Data.ByteString.Char8 as StrictByteString (pack)
import Data.Csv (Header, ToNamedRecord(..), ToRecord(..))
import Data.HashMap.Strict ((!))
import Data.Vector (fromList)
import GEODE.Metadata.Types (type (@)(..))
newtype WithDefaultHeader a = WithDefaultHeader a
newtype DefaultFields a = DefaultFields [String]
class HasDefaultHeader a where
defaultFields :: DefaultFields a
instance (HasDefaultHeader a, HasDefaultHeader b) => HasDefaultHeader (a @ b) where
defaultFields = DefaultFields (a ++ b)
where
DefaultFields a = (defaultFields :: DefaultFields a)
DefaultFields b = (defaultFields :: DefaultFields b)
getHeader :: forall a. HasDefaultHeader a => a -> Header
getHeader _ = StrictByteString.pack <$> fromList fields
where
DefaultFields fields = (defaultFields :: DefaultFields a)
for :: HasDefaultHeader a => a
for = undefined
instance (HasDefaultHeader a, ToNamedRecord a) => ToRecord (WithDefaultHeader a) where
toRecord (WithDefaultHeader a) = (toNamedRecord a !) <$> getHeader a
glue :: a -> b -> WithDefaultHeader (a @ b)
glue a b = WithDefaultHeader (a :@: b)
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExplicitNamespaces, FlexibleInstances, MultiParamTypeClasses, TypeOperators #-}
module GEODE.Metadata.Types
( Authors(..)
, Book(..)
, Domains(..) ) where
import Control.Applicative (empty)
import Data.Csv (Field, FromField(..), Parser, ToField(..))
import Data.Text (Text, intercalate, splitOn)
data Book = EDdA | LGE | Wikipedia deriving (Eq, Ord, Read, Show)
newtype Authors = Authors
{ getAuthors :: [Text] } deriving Show
newtype Domains = Domains
{ getDomains :: [Text] } deriving Show
instance FromField Book where
parseField "EDdA" = pure EDdA
parseField "LGE" = pure LGE
parseField "Wikipedia" = pure Wikipedia
parseField _ = empty
instance ToField Book where
toField = toField . show
sepBy :: Text -> Field -> Parser [Text]
sepBy s = fmap (splitOn s) . parseField
instance FromField Authors where
parseField = fmap Authors . sepBy " & "
instance ToField Authors where
toField = toField . intercalate " & ". getAuthors
instance FromField Domains where
parseField = fmap Domains . sepBy " | "
instance ToField Domains where
toField = toField . intercalate " | ". getDomains
( Has(..)
, type (@)(..) ) where
import Data.Csv (FromNamedRecord(..), ToNamedRecord(..))
import Data.HashMap.Strict (union)
infixr 9 @
infixr 9 :@:
data a @ b = a :@: b
class Has a b where
get :: b -> a
instance Has a a where
get = id
instance Has a c => Has a (b @ c) where
get (_ :@: c) = get c
instance {-# OVERLAPS #-} Has a (a @ b) where
get (a :@: _) = a
instance (ToNamedRecord a, ToNamedRecord b) => ToNamedRecord (a @ b) where
toNamedRecord (a :@: b) = union (toNamedRecord a) (toNamedRecord b)
instance (FromNamedRecord a, FromNamedRecord b) => FromNamedRecord (a @ b) where
parseNamedRecord nr = (:@:) <$> parseNamedRecord nr <*> parseNamedRecord nr
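A sketch of projecting components back out of a glued record with the Has class, assuming made-up values; the OVERLAPS instance makes get pick the leftmost matching component.

{-# LANGUAGE ExplicitNamespaces, TypeOperators #-}
import GEODE.Metadata.PrimaryKey (Book(..), PrimaryKey(..), uid)
import GEODE.Metadata.Types (Has(..), type (@)(..))

full :: PrimaryKey @ Int      -- Int stands in for any other component
full = PrimaryKey EDdA 1 42 :@: 7

key :: PrimaryKey
key = get full                -- the leftmost PrimaryKey

identifier :: String
identifier = uid full         -- uid works on anything that Has a PrimaryKey: "EDdA_1_42"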
{-# LANGUAGE NamedFieldPuns #-}
module GEODE.Options
( Input(..)
, IOConfig(..)
, Output(..)
, input
, ioConfig
, output
, xmlOutput ) where
import Control.Applicative ((<|>))
import Data.List.NonEmpty (NonEmpty(..))
import Options.Applicative
( Parser, argument, flag', help, long, metavar, short, str, strOption, value )
import Options.Applicative.NonEmpty (some1)
import Text.Printf (printf)
data Input = StdIn | File FilePath
input :: String -> Parser Input
input stdinSemantics =
argument
(File <$> str)
( value StdIn
<> metavar "INPUT_FILE"
<> help (printf "path of the file to process (%s)" stdinSemantics) )
data Output = Metadata | TextRoot FilePath | XMLRoot FilePath
xmlOutput :: Parser FilePath
xmlOutput =
strOption
( long "xml-root" <> metavar "DIRECTORY" <> short 'x'
<> help "Root path where to output XML files" )
output :: Parser Output
output =
flag' Metadata ( long "metadata"
<> short 'm'
<> help "Print metadata for splitted files on stdout" )
<|> (TextRoot <$> strOption
( long "text-root"
<> metavar "DIRECTORY"
<> short 't'
<> help "Root path where to output text files" ))
<|> (XMLRoot <$> xmlOutput)
data IOConfig = IOConfig
{ from :: Input
, to :: NonEmpty Output }
ioConfig :: String -> Parser IOConfig
ioConfig stdinSemantics = IOConfig
<$> input stdinSemantics
<*> some1 output
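A sketch of wiring ioConfig into a standard optparse-applicative entry point; the help strings below are made up:

{-# LANGUAGE NamedFieldPuns #-}
import GEODE.Options (IOConfig(..), Input(..), ioConfig)
import Options.Applicative (execParser, fullDesc, helper, info, (<**>))

main :: IO ()
main = do
  IOConfig { from, to } <- execParser $
    info (ioConfig "when no input file is given, the corpus is read from stdin"
           <**> helper)
         fullDesc
  case from of
    StdIn -> putStrLn "reading from stdin"
    File path -> putStrLn ("reading from " <> path)
  putStrLn (show (length to) <> " output(s) requested")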
{-# LANGUAGE OverloadedStrings #-}
module GEODE.Metadata.TestEntry (testEntry) where
import Data.Csv (ToNamedRecord(..))
import Data.Foldable (toList)
import Data.HashMap.Strict ((!?))
import Data.Text (dropEnd)
import GEODE.Metadata (evalSplit, newEntry, normalize)
import GEODE.Metadata.Entry (Entry(..))
import GEODE.Metadata.TSV.Header (getHeader, for)
import Test.HUnit (Test(..), (~?=))
import Test.HUnit.Extra (isJust)
testEntry :: Test
testEntry = TestLabel "Testing the Entry data type" $
TestList [ testToNamedRecord, testNormalize, testEntryConstructor ]
testToNamedRecord :: Test
testToNamedRecord = TestLabel "Testing ToNamedRecord instance" $
TestList [ has3Keys, validDefaultHeader ]
where
has3Keys = length aNamedRecord ~?= 3
validDefaultHeader = TestList . toList $
(isJust . (aNamedRecord !?)) <$> getHeader (for :: Entry)
aNamedRecord = toNamedRecord anEntry
testNormalize :: Test
testNormalize = TestLabel "Testing function normalize" . TestList $
check <$> [ ("", "")
, ("é", "é")
, (headWord anEntry, dropEnd 2 $ name anEntry) ]
where
check (a, b) = normalize a ~?= b
testEntryConstructor :: Test
testEntryConstructor = TestLabel "Testing the entry constructor" . TestList $
[ a0 ~?= "a-0"
, a1 ~?= "a-1"
, b0 ~?= "b-0" ]
where
(a0, a1, b0) = evalSplit $ (,,)
<$> (name <$> newEntry "A")
<*> (name <$> newEntry "A")
<*> (name <$> newEntry "B")
anEntry :: Entry
anEntry = Entry "ALCALA DE HÉNARÈS" "alcaladehénarès-0" 1212
{-# LANGUAGE OverloadedStrings #-}
module GEODE.Metadata.TestPrimaryKey (testPrimaryKey) where
import Data.Csv (ToNamedRecord(..))
import Data.Foldable (toList)
import Data.HashMap.Strict ((!?))
import GEODE.Metadata (Book(..), PrimaryKey(..))
import GEODE.Metadata.TSV.Header (getHeader, for)
import Test.HUnit (Test(..), (~?=))
import Test.HUnit.Extra (isJust)
testPrimaryKey :: Test
testPrimaryKey = TestLabel "Testing the PrimaryKey data type" $
TestList [ testToNamedRecord ]
testToNamedRecord :: Test
testToNamedRecord = TestLabel "Testing ToNamedRecord instance" $
TestList [ has3Keys, validDefaultHeader ]
where
has3Keys = length aNamedRecord ~?= 3
validDefaultHeader = TestList . toList $
(isJust . (aNamedRecord !?)) <$> getHeader (for :: PrimaryKey)
aNamedRecord = toNamedRecord aPrimaryKey
aPrimaryKey :: PrimaryKey
aPrimaryKey = PrimaryKey LGE 1 1212 -- ALCALA DE HÉNARÈS
module Main (main) where
import Test.HUnit (Test(..), runTestTTAndExit)
import GEODE.Metadata.TestPrimaryKey (testPrimaryKey)
import GEODE.Metadata.TestEntry (testEntry)
testMetadata :: Test
testMetadata = TestLabel "Metadata suite" $
TestList [ testPrimaryKey, testEntry ]
main :: IO ()
main = runTestTTAndExit testMetadata
module Test.HUnit.Extra
( isJust
, isNothing ) where
import Test.HUnit (Test(..), assertBool)
import qualified Data.Maybe as Maybe (isJust, isNothing)
isJust :: Maybe a -> Test
isJust = TestCase . assertBool "Expected a Just" . Maybe.isJust
isNothing :: Maybe a -> Test
isNothing = TestCase . assertBool "Expected a Nothing" . Maybe.isNothing