{-# LANGUAGE DeriveDataTypeable #-}
+-- | Definition of the 'Backend' type representing the database
+-- backends we support. We should be able to support anything that
+-- Groundhog does.
+--
module Backend (
Backend(..) )
where
deriving (Data, Eq, Read, Show, Typeable)
instance Default Backend where
+ -- | The default 'Backend' is 'Sqlite' because it is the simplest,
+ -- and can run in memory.
def = Sqlite
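For illustration, a self-contained sketch of the pattern this hunk documents: an enumeration of backends with a 'Default' instance that picks 'Sqlite'. Only the 'Sqlite' constructor comes from the diff; the 'Postgres' constructor and the standalone layout are assumptions.

{-# LANGUAGE DeriveDataTypeable #-}

import Data.Data ( Data )
import Data.Default ( Default(..) )
import Data.Typeable ( Typeable )

-- The set of supported backends ('Postgres' is assumed here).
data Backend = Sqlite | Postgres
  deriving (Data, Eq, Read, Show, Typeable)

-- Sqlite is the default: it is the simplest and can run in memory.
instance Default Backend where
  def = Sqlite

main :: IO ()
main = print (def :: Backend) -- prints: Sqlite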
-- | This is where most of the work happens. This function is called
-- on every file that we would like to import. It determines which
--- importer to use based on the DTD, processes the file, and then
--- returns whether or not any records were imported. If the file was
--- processed, the number of records imported is returned (wrapped in
--- a Just). Otherwise, if the file was not processed, 'Nothing' is
+-- importer to use based on the DTD, attempts to process the file,
+-- and then returns whether or not it was successful. If the file
+-- was processed, 'True' is returned. Otherwise, 'False' is
-- returned.
--
--- Since we are already in arrow world with HXT, the
--- 'import_with_dtd' function is lifted to an 'Arrow' as well with
--- 'arr'. This prevents us from having to do a bunch of unwrapping
--- and rewrapping with the associated error checking.
+-- The implementation is straightforward with one exception: since
+-- we are already in arrow world with HXT, the @import_with_dtd@
+-- function is lifted to an 'Arrow' as well with 'arr'. This
+-- prevents us from having to do a bunch of unwrapping and
+-- rewrapping with the associated error checking.
--
import_file :: Configuration -- ^ A configuration object needed for the
-- 'backend' and 'connection_string'.
-- | Entry point of the program. It twiddles some knobs for
--- configuration options and then calls 'import_file' on each XML file
--- given on the command-line.
+-- configuration options and then calls 'import_file' on each XML
+-- file given on the command-line.
--
--- Any file successfully processed is then removed, and we're done.
+-- Any file successfully processed is then optionally removed, and
+-- we're done.
--
main :: IO ()
main = do
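A minimal sketch of the flow described above, under the assumption that processing a file yields a 'Bool'; the stub import_file_stub stands in for the real 'import_file' (its Configuration argument is omitted), and only the command-line loop and the optional removal are shown.

import Control.Monad ( forM_, when )
import System.Directory ( removeFile )
import System.Environment ( getArgs )

-- Stand-in for the real import_file; pretends every file succeeds.
import_file_stub :: FilePath -> IO Bool
import_file_stub _ = return True

main_sketch :: IO ()
main_sketch = do
  paths <- getArgs
  forM_ paths $ \path -> do
    ok <- import_file_stub path
    -- Remove only the files that were successfully processed.
    when ok $ removeFile path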
+-- | Modifications to the code generation used in Groundhog.
+--
module TSN.Codegen (
tsn_codegen_config,
tsn_db_field_namer -- Used in a TSN.XML.News test.
)
where
-import Data.Char ( toLower, toUpper )
+import Data.Char ( toUpper )
import Data.List.Utils ( join, split )
import Database.Groundhog.TH (
CodegenConfig ( namingStyle ),
- NamingStyle ( mkDbConstrName, mkDbFieldName, mkExprFieldName ),
+ NamingStyle ( mkDbFieldName, mkExprFieldName ),
defaultCodegenConfig,
lowerCaseSuffixNamingStyle )
+
-- | The lowercase naming style for database entities, provided by
-- Groundhog. Makes a better starting point than the default.
--
lowercase_ns :: NamingStyle
lowercase_ns = lowerCaseSuffixNamingStyle
+
-- | A database field name creator. It takes the field name (from a
-- record type) and drops the first component determined by
-- underscores. So, foo_bar_baz would get mapped to bar_baz in the
-- database.
+--
+-- Examples:
+--
+-- >>> tsn_db_field_namer "herp" "derp" 0 "xml_player_name" 0
+-- "player_name"
+--
tsn_db_field_namer :: String -> String -> Int -> String -> Int -> String
tsn_db_field_namer _ _ _ fieldname _ =
(join "_") . tail . (split "_") $ fieldname
+-- | An expression field name creator. \"Expression\" in the context
+-- of Groundhog means a constructor/type that you can use in queries
+-- and update statements. We take the field name (from a record type)
+-- as an argument and capitalize the first letter of each word.
+--
+-- Examples:
+--
+-- >>> tsn_expr_field_namer "herp" "derp" 0 "foo_bar" 0
+-- "Foo_Bar"
+--
tsn_expr_field_namer :: String -> String -> Int -> String -> Int -> String
tsn_expr_field_namer _ _ _ fieldname _ =
(join "_") . (map capitalize) . (split "_") $ fieldname
capitalize [] = []
capitalize (c:cs) = (toUpper c : cs)
+
+-- | Combine the modifications above into a new naming style based on
+-- the 'lowercase_ns'.
+--
tsn_naming_style :: NamingStyle
tsn_naming_style = lowercase_ns { mkDbFieldName = tsn_db_field_namer,
mkExprFieldName = tsn_expr_field_namer }
+-- | Create a 'CodegenConfig' by replacing the default 'namingStyle'
+-- with our modified version.
+--
tsn_codegen_config :: CodegenConfig
tsn_codegen_config =
defaultCodegenConfig { namingStyle = tsn_naming_style }
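A hypothetical usage sketch showing how a TSN.XML.* module might hand 'tsn_codegen_config' to Groundhog's Template Haskell. The NewsTeam entity is invented for illustration; the real entities and their quasiquoted definitions live in the importer modules.

{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

import Database.Groundhog.TH ( groundhog, mkPersist )
import TSN.Codegen ( tsn_codegen_config )

-- An invented entity: with tsn_codegen_config, the "team_name" field
-- is stored in a column named "name", because tsn_db_field_namer
-- drops the leading underscore-delimited component.
data NewsTeam = NewsTeam { team_name :: String } deriving (Eq, Show)

mkPersist tsn_codegen_config [groundhog|
- entity: NewsTeam
|]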
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
+-- | Parse TSN XML for the DTD "Odds_XML.dtd". Each document contains
+-- a root element \<message\> that contains a bunch of other
+-- unorganized crap.
+--
module TSN.XML.Odds (
Odds,
Message,
odds_tests )
where
-
--- | Parse TSN XML for the DTD "Odds_XML.dtd". Each document contains
--- a root element \<message\> that contains a bunch of other
--- unorganized crap.
---
-
import Control.Monad ( forM_ )
import Data.Tuple.Curry ( uncurryN )
import Database.Groundhog (
yes )
--- | A typeclass for XML types that can be converted into an associated
--- database type. The story behind this is long, but basically, we need
--- to different types for each XML thingie we're going to import: a
--- database type and an XML type. Both Groundhog and HXT are very
--- particular about the types that they can use, and there's no way
--- to reuse e.g. a type that HXT can pickle in Groundhog. So this
--- typeclass gives us a way to get the database type from the XML
--- type that we have to define for HXT.
+-- | A typeclass for XML types that can be converted into an
+-- associated database type. The story behind this is long, but
+-- basically, we need two different types for most XML thingies we're
+-- going to import: a database type and an XML type.
+--
+-- Both Groundhog and HXT are very particular about the types that
+-- they can use, and there's no way to reuse e.g. a type that HXT
+-- can pickle in Groundhog. This typeclass gives us a standard way
+-- to get the database type from the XML type that we have to define
+-- for HXT.
--
class FromXml a where
- -- | Each instance a must declare its associated database type (Db a)
+ -- | Each instance @a@ must declare its associated database type @Db a@.
type Db a :: *
- -- | And provide a function for getting a (Db a) out of an "a".
+ -- | And provide a function for getting a @Db a@ out of an @a@.
from_xml :: a -> Db a
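A hypothetical instance, to make the shape of the class concrete; both record types are invented, and the real pairs are defined in the TSN.XML.* modules.

{-# LANGUAGE TypeFamilies #-}

-- The XML-side type, as HXT would pickle it...
data NewsXml = NewsXml { xml_headline :: String }

-- ...and its database-side counterpart for Groundhog.
data NewsDb = NewsDb { db_headline :: String }

instance FromXml NewsXml where
  type Db NewsXml = NewsDb
  from_xml (NewsXml h) = NewsDb h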
--- | Represents the DTD filename ("SYSTEM") part of the DOCTYPE
+-- | Represents the DTD filename (\"SYSTEM\") part of the DOCTYPE
-- definition.
newtype DtdName = DtdName String
-- | A list of options passed to 'readDocument' when we parse an XML
-- document. All cosmetic whitespace should be removed, otherwise we
--- have to parse it in each pickler.
+-- would have to parse whitespace in each (un)pickler.
--
parse_opts :: SysConfigList
parse_opts = [ withRemoveWS yes ]
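A small usage sketch, relying only on the standard HXT API: the options (inlined here so the snippet stands alone) are handed to 'readDocument', and the resulting trees feed the picklers.

import Text.XML.HXT.Core (
  XmlTree, readDocument, runX, withRemoveWS, yes )

load_trees :: FilePath -> IO [XmlTree]
load_trees path = runX $ readDocument [ withRemoveWS yes ] path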
-- | Given a root element name and a file path, return both the
--- original unpickled root "object" and the one that was constructed
--- by pickled and unpickling the original. This is used in a number
--- of XML tests which pickle/unpickle and then make sure that the
--- output is the same as the input.
+-- original unpickled root \"object\" and the one that was
+-- constructed by pickling and unpickling the original. This is used
+-- in a number of XML tests which pickle/unpickle and then make sure
+-- that the output is the same as the input.
--
-- We return the object instead of an XmlTree (which would save us
-- an unpickle call) because otherwise the type of @a@ in the call
-- would be ambiguous. By taking the unpickler as an argument, we allow
-- the caller to indirectly specify a concrete type.
--
+-- Apologies for the name; unpickleable means \"we can unpickle
+-- it\", not \"not pickleable.\"
+--
unpickleable :: XmlPickler a => FilePath -> PU a -> IO Bool
unpickleable filepath unpickler = do
xmldoc <- try_unpickle `catch` (\(SomeException _) -> return [])
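A minimal, self-contained sketch of the idea, not the diff's implementation: attempt the unpickle with HXT's 'xunpickleDocument', swallow any exception, and report whether anything came back.

import Control.Exception ( SomeException(..), catch )
import Text.XML.HXT.Core (
  PU, XmlPickler, runX, withRemoveWS, xunpickleDocument, yes )

unpickleable_sketch :: XmlPickler a => FilePath -> PU a -> IO Bool
unpickleable_sketch filepath unpickler = do
  results <- try_unpickle `catch` (\(SomeException _) -> return [])
  return $ (not . null) results
  where
    try_unpickle = runX $ xunpickleDocument unpickler
                                            [ withRemoveWS yes ]
                                            filepath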