X-Git-Url: http://gitweb.michael.orlitzky.com/?a=blobdiff_plain;f=src%2FTSN%2FXML%2FNews.hs;h=86eed4a8682679abe61c5cc1751375fba1bc3316;hb=b830faafac5d720a8eecae369806d9142a186485;hp=78dc935ca6d3ab1e3e4947cc6a5ae0e23fbf97bb;hpb=84cf25190eeb5c2059cf6b9aea47e0f2bb073188;p=dead%2Fhtsn-import.git

diff --git a/src/TSN/XML/News.hs b/src/TSN/XML/News.hs
index 78dc935..86eed4a 100644
--- a/src/TSN/XML/News.hs
+++ b/src/TSN/XML/News.hs
@@ -7,8 +7,8 @@
 {-# LANGUAGE TemplateHaskell #-}
 {-# LANGUAGE TypeFamilies #-}

--- | Parse TSN XML for the DTD "newsxml.dtd". Each document contains a
---   root element \<message\> that contains an entire news item.
+-- | Parse TSN XML for the DTD \"newsxml.dtd\". Each document contains
+--   a root element \<message\> that contains an entire news item.
 --
 module TSN.XML.News (
   pickle_message,
@@ -29,9 +29,15 @@ import Data.List.Utils ( join, split )
 import Data.Tuple.Curry ( uncurryN )
 import Data.Typeable ( Typeable )
 import Database.Groundhog (
+  countAll,
+  executeRaw,
   insert_,
-  migrate )
+  migrate,
+  runMigration,
+  silentMigrationLogger )
 import Database.Groundhog.Core ( DefaultKey )
+import Database.Groundhog.Generic ( runDbConn )
+import Database.Groundhog.Sqlite ( withSqliteConn )
 import Database.Groundhog.TH (
   defaultCodegenConfig,
   groundhog,
@@ -55,10 +61,16 @@ import Text.XML.HXT.Core (
 import TSN.Codegen (
   tsn_codegen_config,
   tsn_db_field_namer ) -- Used in a test
+import TSN.Database ( insert_or_select )
 import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
 import TSN.Picklers ( xp_time_stamp )
 import TSN.XmlImport ( XmlImport(..) )
-import Xml ( FromXml(..), ToDb(..), pickle_unpickle, unpickleable )
+import Xml (
+  FromXml(..),
+  ToDb(..),
+  pickle_unpickle,
+  unpickleable,
+  unsafe_unpickle )


 --
@@ -71,6 +83,7 @@ import Xml ( FromXml(..), ToDb(..), pickle_unpickle, unpickleable )
 -- embed it into the 'News' type. We (pointlessly) use the "db_"
 -- prefix here so that the two names don't collide on "id" when
 -- Groundhog is creating its fields using our field namer.
+--
 data MsgId =
   MsgId {
     db_msg_id :: Int,
@@ -78,7 +91,7 @@ data MsgId =
   deriving (Data, Eq, Show, Typeable)


--- | The XML representation of a news item (message).
+-- | The XML representation of a news item (\<message\>).
 --
 data Message =
   Message {
@@ -118,6 +131,7 @@ data News =


 instance ToDb Message where
+  -- | The database representation of 'Message' is 'News'.
   type Db Message = News

 -- | Convert the XML representation 'Message' to the database
@@ -137,7 +151,7 @@ instance FromXml Message where
       db_continue = xml_continue,
       db_time_stamp = xml_time_stamp }

--- | This lets us call 'insert_xml' on a 'Message'.
+-- | This lets us insert the XML representation 'Message' directly.
 --
 instance XmlImport Message

@@ -151,22 +165,6 @@ data NewsTeam =
   deriving (Eq, Show)


-instance ToDb NewsTeam where
-  -- | The database representaion of a 'NewsTeam' is itself.
-  type Db NewsTeam = NewsTeam
-
--- | This is needed to define the XmlImport instance for NewsTeam.
---
-instance FromXml NewsTeam where
-  -- | How to we get a 'NewsTeam' from itself?
-  from_xml = id
-
--- | Allow us to call 'insert_xml' on the XML representation of
---   NewsTeams.
---
-instance XmlImport NewsTeam
-
-
 --
 -- * News_NewsTeam
 --
@@ -191,21 +189,6 @@ data NewsLocation =
     country :: String }
   deriving (Eq, Show)

-instance ToDb NewsLocation where
-  -- | The database representation of a 'NewsLocation' is itself.
-  type Db NewsLocation = NewsLocation
-
--- | This is needed to define the XmlImport instance for NewsLocation.
---
-instance FromXml NewsLocation where
-  -- | How to we get a 'NewsLocation' from itself?
-  from_xml = id
-
--- | Allow us to call 'insert_xml' on the XML representation of
---   NewsLocations.
---
-instance XmlImport NewsLocation
-
 --
 -- * News_NewsLocation
 --
@@ -231,10 +214,10 @@ data News_NewsLocation = News_NewsLocation
 instance DbImport Message where
   dbmigrate _ =
     run_dbmigrate $ do
-      migrate (undefined :: NewsTeam)
-      migrate (undefined :: NewsLocation)
       migrate (undefined :: News)
+      migrate (undefined :: NewsTeam)
       migrate (undefined :: News_NewsTeam)
+      migrate (undefined :: NewsLocation)
       migrate (undefined :: News_NewsLocation)

   dbimport message = do
@@ -245,7 +228,7 @@ instance DbImport Message where
      -- because we know that most teams will already exist, and we
      -- want to get back the id for the existing team when
      -- there's a collision.
-      nt_ids <- mapM insert_xml_or_select (xml_teams message)
+      nt_ids <- mapM insert_or_select (xml_teams message)

      -- Now that the teams have been inserted, create
      -- news__news_team records mapping beween the two.
@@ -253,7 +236,7 @@ instance DbImport Message where
      mapM_ insert_ news_news_teams

      -- Do all of that over again for the NewsLocations.
-      loc_ids <- mapM insert_xml_or_select (xml_locations message)
+      loc_ids <- mapM insert_or_select (xml_locations message)
      let news_news_locations = map (News_NewsLocation news_id) loc_ids
      mapM_ insert_ news_news_locations

@@ -289,7 +272,6 @@ mkPersist defaultCodegenConfig [groundhog|
 -- use our own codegen to peel those off before naming the columns.
 mkPersist tsn_codegen_config [groundhog|
 - entity: News
-  dbName: news
   constructors:
     - name: News
       uniques:
@@ -339,6 +321,7 @@ mkPersist tsn_codegen_config [groundhog|
           onDelete: cascade
 |]

+
 --
 -- XML Picklers
 --
@@ -447,6 +430,7 @@ news_tests =
   testGroup
     "News tests"
     [ test_news_fields_have_correct_names,
+      test_on_delete_cascade,
      test_pickle_of_unpickle_is_identity,
      test_unpickle_succeeds ]

@@ -512,3 +496,39 @@ test_unpickle_succeeds = testGroup "unpickle tests"
      actual <- unpickleable path pickle_message
      let expected = True
      actual @?= expected
+
+
+-- | Make sure everything gets deleted when we delete the top-level
+--   record.
+--
+test_on_delete_cascade :: TestTree
+test_on_delete_cascade = testGroup "cascading delete tests"
+  [ check "deleting news deletes its children"
+          "test/xml/newsxml.xml"
+          4 -- 2 news_teams and 2 news_locations that should remain.
+  ]
+  where
+    check desc path expected = testCase desc $ do
+      news <- unsafe_unpickle path pickle_message
+      let a = undefined :: News
+      let b = undefined :: NewsTeam
+      let c = undefined :: News_NewsTeam
+      let d = undefined :: NewsLocation
+      let e = undefined :: News_NewsLocation
+      actual <- withSqliteConn ":memory:" $ runDbConn $ do
+                  runMigration silentMigrationLogger $ do
+                    migrate a
+                    migrate b
+                    migrate c
+                    migrate d
+                    migrate e
+                  _ <- dbimport news
+                  -- No idea how 'delete' works, so do this instead.
+                  executeRaw False "DELETE FROM news;" []
+                  count_a <- countAll a
+                  count_b <- countAll b
+                  count_c <- countAll c
+                  count_d <- countAll d
+                  count_e <- countAll e
+                  return $ count_a + count_b + count_c + count_d + count_e
+      actual @?= expected