-{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
-{-# LANGUAGE ScopedTypeVariables #-}
-{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
--- | Parse TSN XML for the DTD "newsxml.dtd". Each document contains a
--- root element \<message\> that contains an entire news item.
+-- | Parse TSN XML for the DTD \"newsxml.dtd\". Each document contains
+-- a root element \<message\> that contains an entire news item.
--
module TSN.XML.News (
- Message,
- news_tests )
+ dtd,
+ pickle_message,
+ -- * Tests
+ news_tests,
+ -- * WARNING: these are private but exported to silence warnings
+ News_NewsLocationConstructor(..),
+ News_NewsTeamConstructor(..),
+ NewsConstructor(..),
+ NewsLocationConstructor(..),
+ NewsTeamConstructor(..) )
where
+-- System imports.
import Data.Data ( Data, constrFields, dataTypeConstrs, dataTypeOf )
+import Data.Time.Clock ( UTCTime )
import Data.List.Utils ( join, split )
import Data.Tuple.Curry ( uncurryN )
import Data.Typeable ( Typeable )
import Database.Groundhog (
- defaultMigrationLogger,
- insert,
+ countAll,
+ deleteAll,
+ insert_,
migrate,
- runMigration )
+ runMigration,
+ silentMigrationLogger )
import Database.Groundhog.Core ( DefaultKey )
+import Database.Groundhog.Generic ( runDbConn )
+import Database.Groundhog.Sqlite ( withSqliteConn )
import Database.Groundhog.TH (
+ defaultCodegenConfig,
groundhog,
mkPersist )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( (@?=), testCase )
import Text.XML.HXT.Core (
PU,
- XmlPickler(..),
- unpickleDoc,
- xp12Tuple,
+ xp13Tuple,
xpAttr,
xpElem,
xpInt,
xpTriple,
xpWrap )
+-- Local imports.
import TSN.Codegen (
tsn_codegen_config,
tsn_db_field_namer ) -- Used in a test
-import TSN.DbImport ( DbImport(..), ImportResult(..) )
-import Xml ( ToFromXml(..), pickle_unpickle, unpickleable )
-
+import TSN.Database ( insert_or_select )
+import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
+import TSN.Picklers ( xp_time_stamp )
+import TSN.XmlImport ( XmlImport(..) )
+import Xml (
+ FromXml(..),
+ ToDb(..),
+ pickle_unpickle,
+ unpickleable,
+ unsafe_unpickle )
+
+
+-- | The DTD to which this module corresponds. Used to invoke dbimport.
+--
+dtd :: String
+dtd = "newsxml.dtd"
--- | The database type for teams as they show up in the news. We need
--- this separate from its XML representation because of the
--- DefaultKey pointing to a message. We don't know how to create one
--- of those unless we've just inserted a message into the database,
--- so it screws up pickling.
-data NewsTeam =
- NewsTeam {
- nt_news_id :: DefaultKey Message, -- ^ foreign key.
- db_team_name :: String }
-deriving instance Eq NewsTeam -- Standalone instances necessary for
-deriving instance Show NewsTeam -- Groundhog types with DefaultKeys
-
--- | The XML type for teams as they show up in the news. See
--- 'NewsTeam' for why there are two types.
-data NewsTeamXml =
- NewsTeamXml {
- xml_team_name :: String }
- deriving (Eq, Show)
-
--- | Specify how to convert between the two representations NewsTeam
--- (database) and NewsTeamXml (XML).
-instance ToFromXml NewsTeam where
- type Xml NewsTeam = NewsTeamXml
- type Container NewsTeam = Message
- -- Use a record wildcard here so GHC doesn't complain that we never
- -- used our named fields.
- to_xml (NewsTeam {..}) = NewsTeamXml db_team_name
- -- We can't create a DefaultKey Message...
- from_xml = error "Called from_xml on a NewsTeam."
- -- unless we're handed one.
- from_xml_fk key = (NewsTeam key) . xml_team_name
-
-
--- | The database type for locations as they show up in the news. We
--- need this separate from its XML representation because of the
--- DefaultKey pointing to a message. We don't know how to create one
--- of those unless we've just inserted a message into the database,
--- so it screws up pickling.
-data NewsLocation =
- NewsLocation {
- loc_news_id :: DefaultKey Message, -- ^ foreign key.
- db_city ::String,
- db_state :: String,
- db_country :: String }
-deriving instance Eq NewsLocation -- Standalone instances necessary for
-deriving instance Show NewsLocation -- Groundhog types with DefaultKeys
-
--- | The XML type for locations as they show up in the news. See
--- 'NewsLocation' for why there are two types.
-data NewsLocationXml =
- NewsLocationXml {
- xml_city :: String,
- xml_state :: String,
- xml_country :: String }
- deriving (Eq, Show)
+--
+-- DB/XML Data types
+--
+-- * News/Message
--- | Specify how to convert between the two representations
--- NewsLocation (database) and NewsLocationXml (XML).
-instance ToFromXml NewsLocation where
- type Xml NewsLocation = NewsLocationXml
- type Container NewsLocation = Message
- -- Use a record wildcard here so GHC doesn't complain that we never
- -- used our named fields.
- to_xml (NewsLocation {..}) = NewsLocationXml db_city db_state db_country
- -- We can't create a DefaultKey Message...
- from_xml = error "Called from_xml on a NewsLocation."
- -- unless we're given one.
- from_xml_fk key (NewsLocationXml x y z) = NewsLocation key x y z
-
-
--- | The msg_id child of <message> contains an event_id attribute; we
--- embed it into the 'Message' type. We (pointlessly) use the "db_"
--- prefix here so that the two names collide on "id" when Groundhog
--- is creating its fields using our field namer.
+-- | The msg_id child of \<message\> contains an event_id attribute; we
+-- embed it into the 'News' type. We (pointlessly) use the \"db_\"
+-- prefix here so that the two names don't collide on \"id\" when
+-- Groundhog is creating its fields using our field namer.
+--
data MsgId =
MsgId {
db_msg_id :: Int,
deriving (Data, Eq, Show, Typeable)
-data MessageXml =
- MessageXml {
+-- | The XML representation of a news item (\<message\>).
+--
+data Message =
+ Message {
xml_xml_file_id :: Int,
xml_heading :: String,
xml_mid :: MsgId,
xml_category :: String,
xml_sport :: String,
- xml_url :: String,
- xml_teams :: [NewsTeamXml],
- xml_locations :: [NewsLocationXml],
+ xml_url :: Maybe String,
+ xml_teams :: [NewsTeam],
+ xml_locations :: [NewsLocation],
xml_sms :: String,
- xml_text :: String,
- xml_continue :: String,
- xml_time_stamp :: String }
+ xml_editor :: Maybe String,
+ xml_text :: Maybe String, -- Text and continue seem to show up in pairs,
+ xml_continue :: Maybe String, -- either both present or both missing.
+ xml_time_stamp :: UTCTime }
deriving (Eq, Show)
-data Message =
- Message {
+
+-- | The database representation of a news item. We drop several
+-- uninteresting fields from 'Message', and omit the list fields which
+-- will be represented as join tables.
+--
+data News =
+ News {
+ db_xml_file_id :: Int,
db_mid :: MsgId,
db_sport :: String,
- db_url :: String,
+ db_url :: Maybe String,
db_sms :: String,
- db_text :: String,
- db_continue :: String }
+ db_editor :: Maybe String,
+ db_text :: Maybe String,
+ db_continue :: Maybe String,
+ db_time_stamp :: UTCTime }
deriving (Data, Eq, Show, Typeable)
-instance ToFromXml Message where
- type Xml Message = MessageXml
- type Container Message = ()
-
- -- Use a record wildcard here so GHC doesn't complain that we never
- -- used our named fields.
- to_xml (Message {..}) =
- MessageXml
- 0
- ""
- db_mid
- ""
- db_sport
- db_url
- []
- []
- db_sms
- db_text
- db_continue
- ""
-
- -- We don't need the key argument (from_xml_fk) since the XML type
- -- contains more information in this case.
- from_xml (MessageXml _ _ c _ e f _ _ g h i _) =
- Message c e f g h i
-mkPersist tsn_codegen_config [groundhog|
+instance ToDb Message where
+ -- | The database representation of 'Message' is 'News'.
+ type Db Message = News
+
+-- | Convert the XML representation 'Message' to the database
+-- representation 'News'.
+--
+instance FromXml Message where
+ -- | We use a record wildcard so GHC doesn't complain that we never
+ -- used the field names.
+ --
+ from_xml Message{..} = News { db_xml_file_id = xml_xml_file_id,
+ db_mid = xml_mid,
+ db_sport = xml_sport,
+ db_url = xml_url,
+ db_sms = xml_sms,
+ db_editor = xml_editor,
+ db_text = xml_text,
+ db_continue = xml_continue,
+ db_time_stamp = xml_time_stamp }
+
+-- | This lets us insert the XML representation 'Message' directly.
+--
+instance XmlImport Message
+
+
+-- * NewsTeam
+
+-- | The database type for teams as they show up in the news.
+--
+data NewsTeam =
+ NewsTeam { team_name :: String }
+ deriving (Eq, Show)
+
+
+
+-- * News_NewsTeam
+
+-- | Mapping between News records and NewsTeam records in the
+-- database. We don't name the fields because we don't use the names
+-- explicitly; that means we have to give them nice database names
+-- via groundhog.
+--
+data News_NewsTeam = News_NewsTeam
+ (DefaultKey News)
+ (DefaultKey NewsTeam)
+
+
+-- * NewsLocation
+
+-- | The database type for locations as they show up in the news.
+--
+data NewsLocation =
+ NewsLocation {
+ city :: Maybe String,
+ state :: Maybe String,
+ country :: String }
+ deriving (Eq, Show)
+
+
+-- * News_NewsLocation
+
+-- | Mapping between News records and NewsLocation records in the
+-- database. We don't name the fields because we don't use the names
+-- explicitly; that means we have to give them nice database names
+-- via groundhog.
+--
+data News_NewsLocation = News_NewsLocation
+ (DefaultKey News)
+ (DefaultKey NewsLocation)
+
+
+
+--
+-- Database code
+--
+
+-- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is
+-- slightly non-generic because of our 'News_NewsTeam' and
+-- 'News_NewsLocation' join tables.
+--
+instance DbImport Message where
+ dbmigrate _ =
+ run_dbmigrate $ do
+ migrate (undefined :: News)
+ migrate (undefined :: NewsTeam)
+ migrate (undefined :: News_NewsTeam)
+ migrate (undefined :: NewsLocation)
+ migrate (undefined :: News_NewsLocation)
+
+ dbimport message = do
+ -- Insert the message and acquire its primary key (unique ID)
+ news_id <- insert_xml message
+
+    -- And insert each one into its own table. We use insert_or_select
+ -- because we know that most teams will already exist, and we
+ -- want to get back the id for the existing team when
+ -- there's a collision.
+ nt_ids <- mapM insert_or_select (xml_teams message)
+
+ -- Now that the teams have been inserted, create
+    -- news__news_team records mapping between the two.
+ let news_news_teams = map (News_NewsTeam news_id) nt_ids
+ mapM_ insert_ news_news_teams
+
+ -- Do all of that over again for the NewsLocations.
+ loc_ids <- mapM insert_or_select (xml_locations message)
+ let news_news_locations = map (News_NewsLocation news_id) loc_ids
+ mapM_ insert_ news_news_locations
+
+ return ImportSucceeded
+
+
+-- These types don't have special XML representations or field name
+-- collisions so we use the defaultCodegenConfig and give their
+-- fields nice simple names.
+mkPersist defaultCodegenConfig [groundhog|
- entity: NewsTeam
dbName: news_teams
+ constructors:
+ - name: NewsTeam
+ uniques:
+ - name: unique_news_team
+ type: constraint
+ fields: [team_name]
- entity: NewsLocation
dbName: news_locations
+ constructors:
+ - name: NewsLocation
+ uniques:
+ - name: unique_news_location
+ type: constraint
+ fields: [city, state, country]
-- entity: Message
- dbName: news
+|]
+
+
+-- These types have fields with e.g. db_ and xml_ prefixes, so we
+-- use our own codegen to peel those off before naming the columns.
+mkPersist tsn_codegen_config [groundhog|
+- entity: News
constructors:
- - name: Message
+ - name: News
+ uniques:
+ - name: unique_news
+ type: constraint
+ # Prevent multiple imports of the same message.
+ fields: [db_xml_file_id]
fields:
- name: db_mid
embeddedType:
- {name: msg_id, dbName: msg_id}
- {name: event_id, dbName: event_id}
+
- embedded: MsgId
fields:
- name: db_msg_id
dbName: msg_id
- name: db_event_id
dbName: event_id
+
+- entity: News_NewsTeam
+ dbName: news__news_teams
+ constructors:
+ - name: News_NewsTeam
+ fields:
+ - name: news_NewsTeam0 # Default created by mkNormalFieldName
+ dbName: news_id
+ reference:
+ onDelete: cascade
+ - name: news_NewsTeam1 # Default created by mkNormalFieldName
+ dbName: news_teams_id
+ reference:
+ onDelete: cascade
+
+- entity: News_NewsLocation
+ dbName: news__news_locations
+ constructors:
+ - name: News_NewsLocation
+ fields:
+ - name: news_NewsLocation0 # Default created by mkNormalFieldName
+ dbName: news_id
+ reference:
+ onDelete: cascade
+ - name: news_NewsLocation1 # Default created by mkNormalFieldName
+ dbName: news_locations_id
+ reference:
+ onDelete: cascade
|]
-pickle_news_team :: PU NewsTeamXml
+
+--
+-- XML Picklers
+--
+
+-- | Convert a 'NewsTeam' to/from XML.
+--
+pickle_news_team :: PU NewsTeam
pickle_news_team =
xpElem "team" $
xpWrap (from_string, to_string) xpText
where
- to_string :: NewsTeamXml -> String
- to_string = xml_team_name
+ to_string :: NewsTeam -> String
+ to_string = team_name
- from_string :: String -> NewsTeamXml
- from_string = NewsTeamXml
+ from_string :: String -> NewsTeam
+ from_string = NewsTeam
-instance XmlPickler NewsTeamXml where
- xpickle = pickle_news_team
+-- | Convert a 'MsgId' to/from XML.
+--
pickle_msg_id :: PU MsgId
pickle_msg_id =
xpElem "msg_id" $
from_tuple = uncurryN MsgId
to_tuple m = (db_msg_id m, db_event_id m)
-instance XmlPickler MsgId where
- xpickle = pickle_msg_id
-pickle_location :: PU NewsLocationXml
+-- | Convert a 'NewsLocation' to/from XML.
+--
+pickle_location :: PU NewsLocation
pickle_location =
xpElem "location" $
xpWrap (from_tuple, to_tuple) $
- xpTriple (xpElem "city" xpText)
- (xpElem "state" xpText)
+ xpTriple (xpOption (xpElem "city" xpText))
+ (xpOption (xpElem "state" xpText))
(xpElem "country" xpText)
where
from_tuple =
- uncurryN NewsLocationXml
- to_tuple l = (xml_city l, xml_state l, xml_country l)
-
-instance XmlPickler NewsLocationXml where
- xpickle = pickle_location
+ uncurryN NewsLocation
+ to_tuple l = (city l, state l, country l)
-pickle_message :: PU MessageXml
+-- | Convert a 'Message' to/from XML.
+--
+pickle_message :: PU Message
pickle_message =
xpElem "message" $
xpWrap (from_tuple, to_tuple) $
- xp12Tuple (xpElem "XML_File_ID" xpInt)
+ xp13Tuple (xpElem "XML_File_ID" xpInt)
(xpElem "heading" xpText)
pickle_msg_id
(xpElem "category" xpText)
(xpElem "sport" xpText)
- (xpElem "url" xpText)
- (xpList $ pickle_news_team)
- (xpList $ pickle_location)
+ (xpElem "url" $ xpOption xpText)
+ (xpList pickle_news_team)
+ (xpList pickle_location)
(xpElem "SMS" xpText)
- (xpElem "text" xpText)
+ (xpOption (xpElem "Editor" xpText))
+ (xpOption (xpElem "text" xpText))
pickle_continue
- (xpElem "time_stamp" xpText)
+ (xpElem "time_stamp" xp_time_stamp)
where
- from_tuple = uncurryN MessageXml
- to_tuple m = (xml_xml_file_id m,
- xml_heading m,
- xml_mid m,
- xml_category m,
- xml_sport m,
- xml_url m,
- xml_teams m,
- xml_locations m,
+ from_tuple = uncurryN Message
+ to_tuple m = (xml_xml_file_id m, -- Verbose,
+ xml_heading m, -- but
+ xml_mid m, -- eliminates
+ xml_category m, -- GHC
+ xml_sport m, -- warnings
+ xml_url m, -- .
+ xml_teams m, -- .
+ xml_locations m, -- .
xml_sms m,
+ xml_editor m,
xml_text m,
xml_continue m,
xml_time_stamp m)
- pickle_continue :: PU String
+ -- | We combine all of the \<continue\> elements into one 'String'
+ -- while unpickling and do the reverse while pickling.
+ --
+ pickle_continue :: PU (Maybe String)
pickle_continue =
- xpWrap (to_string, from_string) $
- xpElem "continue" $
- (xpList $ xpElem "P" xpText)
+ xpOption $
+ xpWrap (to_string, from_string) $
+ xpElem "continue" $
+ xpList (xpElem "P" xpText)
where
from_string :: String -> [String]
from_string = split "\n"
to_string :: [String] -> String
to_string = join "\n"
-instance XmlPickler MessageXml where
- xpickle = pickle_message
-
+--
+-- Tasty Tests
+--
-instance DbImport Message where
- dbimport _ xml = do
- runMigration defaultMigrationLogger $ do
- migrate (undefined :: Message)
- migrate (undefined :: NewsTeam)
- migrate (undefined :: NewsLocation)
- let root_element = unpickleDoc xpickle xml :: Maybe MessageXml
- case root_element of
- Nothing -> do
- let errmsg = "Could not unpickle News message in dbimport."
- return $ ImportFailed errmsg
- Just message -> do
- news_id <- insert (from_xml message :: Message)
- let nts :: [NewsTeam] = map (from_xml_fk news_id)
- (xml_teams message)
- let nlocs :: [NewsLocation] = map (from_xml_fk news_id)
- (xml_locations message)
- nt_ids <- mapM insert nts
- loc_ids <- mapM insert nlocs
-
- return $ ImportSucceeded (1 + (length nt_ids) + (length loc_ids))
-
-
--- * Tasty Tests
+-- | A list of all tests for this module.
+--
news_tests :: TestTree
news_tests =
testGroup
"News tests"
[ test_news_fields_have_correct_names,
+ test_on_delete_cascade,
test_pickle_of_unpickle_is_identity,
test_unpickle_succeeds ]
+-- | Make sure our codegen is producing the correct database names.
+--
test_news_fields_have_correct_names :: TestTree
test_news_fields_have_correct_names =
- testCase "news fields get correct database names" $ do
+ testCase "news fields get correct database names" $
mapM_ check (zip actual expected)
where
-- This is cool, it uses the (derived) Data instance of
- -- News.Message to get its constructor names.
+ -- News.News to get its constructor names.
field_names :: [String]
field_names =
- constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: Message)
+ constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: News)
expected :: [String]
expected =
map (\x -> tsn_db_field_namer "herp" "derp" 8675309 x 90210) field_names
actual :: [String]
- actual = ["mid", "sport", "url", "sms", "text", "continue"]
+ actual = ["xml_file_id",
+ "mid",
+ "sport",
+ "url",
+ "sms",
+ "editor",
+ "text",
+ "continue"]
check (x,y) = (x @?= y)
--- | Warning, succeess of this test does not mean that unpickling
--- succeeded.
+-- | If we unpickle something and then pickle it, we should wind up
+-- with the same thing we started with. WARNING: success of this
+-- test does not mean that unpickling succeeded.
+--
test_pickle_of_unpickle_is_identity :: TestTree
-test_pickle_of_unpickle_is_identity =
- testCase "pickle composed with unpickle is the identity" $ do
- let path = "test/xml/newsxml.xml"
- (expected :: [MessageXml], actual) <- pickle_unpickle "message" path
- actual @?= expected
+test_pickle_of_unpickle_is_identity = testGroup "pickle-unpickle tests"
+ [ check "pickle composed with unpickle is the identity"
+ "test/xml/newsxml.xml",
+
+ check "pickle composed with unpickle is the identity (with Editor)"
+ "test/xml/newsxml-with-editor.xml" ]
+ where
+ check desc path = testCase desc $ do
+ (expected, actual) <- pickle_unpickle pickle_message path
+ actual @?= expected
+-- | Make sure we can actually unpickle these things.
+--
test_unpickle_succeeds :: TestTree
-test_unpickle_succeeds =
- testCase "unpickling succeeds" $ do
- let path = "test/xml/newsxml.xml"
- actual <- unpickleable path pickle_message
- let expected = True
- actual @?= expected
+test_unpickle_succeeds = testGroup "unpickle tests"
+ [ check "unpickling succeeds"
+ "test/xml/newsxml.xml",
+
+ check "unpickling succeeds (with Editor)"
+ "test/xml/newsxml-with-editor.xml" ]
+ where
+ check desc path = testCase desc $ do
+ actual <- unpickleable path pickle_message
+ let expected = True
+ actual @?= expected
+
+
+-- | Make sure everything gets deleted when we delete the top-level
+-- record.
+--
+test_on_delete_cascade :: TestTree
+test_on_delete_cascade = testGroup "cascading delete tests"
+ [ check "deleting news deletes its children"
+ "test/xml/newsxml.xml"
+ 4 -- 2 news_teams and 2 news_locations that should remain.
+ ]
+ where
+ check desc path expected = testCase desc $ do
+ news <- unsafe_unpickle path pickle_message
+ let a = undefined :: News
+ let b = undefined :: NewsTeam
+ let c = undefined :: News_NewsTeam
+ let d = undefined :: NewsLocation
+ let e = undefined :: News_NewsLocation
+ actual <- withSqliteConn ":memory:" $ runDbConn $ do
+ runMigration silentMigrationLogger $ do
+ migrate a
+ migrate b
+ migrate c
+ migrate d
+ migrate e
+ _ <- dbimport news
+ deleteAll a
+ count_a <- countAll a
+ count_b <- countAll b
+ count_c <- countAll c
+ count_d <- countAll d
+ count_e <- countAll e
+ return $ count_a + count_b + count_c + count_d + count_e
+ actual @?= expected