From: Michael Orlitzky Date: Sat, 5 Jul 2014 23:59:20 +0000 (-0400) Subject: Use TSN.Location for the News locations. X-Git-Tag: 0.0.6~25 X-Git-Url: https://gitweb.michael.orlitzky.com/?a=commitdiff_plain;h=02a9363651a96a2469567cb1fb303a9a3cf33dce;p=dead%2Fhtsn-import.git Use TSN.Location for the News locations. Update the newsxml dbschema diagram. Remove some TODOs. --- diff --git a/doc/TODO b/doc/TODO index f9d38e6..9087bed 100644 --- a/doc/TODO +++ b/doc/TODO @@ -66,8 +66,8 @@ 5. Consolidate all of the make_game_time functions which take a date/time and produce a combined time. -6. Move the News/Scores Locations into TSN.Locations. +6. Move the Scores Locations into TSN.Locations. -7. Regenerate the News/Scores dbschema diagrams. +7. Regenerate the Scores dbschema diagrams. 8. Re-test import of News/Scores samples. diff --git a/doc/dbschema/newsxml.png b/doc/dbschema/newsxml.png index 06be40f..970a9a0 100644 Binary files a/doc/dbschema/newsxml.png and b/doc/dbschema/newsxml.png differ diff --git a/src/TSN/XML/News.hs b/src/TSN/XML/News.hs index 9134771..50584be 100644 --- a/src/TSN/XML/News.hs +++ b/src/TSN/XML/News.hs @@ -15,10 +15,9 @@ module TSN.XML.News ( -- * Tests news_tests, -- * WARNING: these are private but exported to silence warnings - News_NewsLocationConstructor(..), + News_LocationConstructor(..), News_NewsTeamConstructor(..), NewsConstructor(..), - NewsLocationConstructor(..), NewsTeamConstructor(..) ) where @@ -64,6 +63,7 @@ import TSN.Codegen ( import TSN.Database ( insert_or_select ) import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate ) import TSN.Picklers ( xp_time_stamp ) +import TSN.Location ( Location(..) ) import TSN.XmlImport ( XmlImport(..) ) import Xml ( FromXml(..), @@ -108,7 +108,7 @@ data Message = xml_sport :: String, xml_url :: Maybe String, xml_teams :: [NewsTeam], - xml_locations :: [NewsLocation], + xml_locations :: [NewsLocationXml], xml_sms :: String, xml_editor :: Maybe String, xml_text :: Maybe String, -- Text and continue seem to show up in pairs, @@ -164,7 +164,13 @@ instance XmlImport Message -- * NewsTeam --- | The database type for teams as they show up in the news. +-- | The database/XML type for teams as they show up in the news. We +-- can't reuse the representation from "TSN.Team" because they +-- require a team id. We wouldn't want to make the team ID optional +-- and then insert a team with no id, only to find the same team +-- later with an id and be unable to update the record. (We could +-- add the update logic, but it would be more trouble than it's +-- worth.) -- data NewsTeam = NewsTeam { team_name :: String } @@ -184,28 +190,48 @@ data News_NewsTeam = News_NewsTeam (DefaultKey NewsTeam) --- * NewsLocation +-- * NewsLocationXml --- | The database type for locations as they show up in the news. +-- | The XML type for locations as they show up in the news. The +-- associated database type comes from "TSN.Location". -- -data NewsLocation = - NewsLocation { - city :: Maybe String, - state :: Maybe String, - country :: String } +data NewsLocationXml = + NewsLocationXml { + xml_city :: Maybe String, + xml_state :: Maybe String, + xml_country :: String } deriving (Eq, Show) --- * News_NewsLocation +instance ToDb NewsLocationXml where + -- | The database analogue of a NewsLocationXml is a Location. 
+ type Db NewsLocationXml = Location --- | Mapping between News records and NewsLocation records in the + +instance FromXml NewsLocationXml where + -- | To convert from the XML representation to the database one, we + -- don't have to do anything. Just copy the fields. + -- + from_xml NewsLocationXml{..} = + Location xml_city xml_state xml_country + + +-- | Allow us to import the XML representation directly into the +-- database, without having to perform the conversion manually first. +-- +instance XmlImport NewsLocationXml + + +-- * News_Location + +-- | Mapping between 'News' records and 'Location' records in the -- database. We don't name the fields because we don't use the names -- explicitly; that means we have to give them nice database names -- via groundhog. -- -data News_NewsLocation = News_NewsLocation - (DefaultKey News) - (DefaultKey NewsLocation) +data News_Location = News_Location + (DefaultKey News) + (DefaultKey Location) @@ -215,25 +241,24 @@ data News_NewsLocation = News_NewsLocation -- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is -- slightly non-generic because of our 'News_NewsTeam' and --- 'News_NewsLocation' join tables. +-- 'News_Location' join tables. -- instance DbImport Message where dbmigrate _ = run_dbmigrate $ do + migrate (undefined :: Location) migrate (undefined :: News) migrate (undefined :: NewsTeam) migrate (undefined :: News_NewsTeam) - migrate (undefined :: NewsLocation) - migrate (undefined :: News_NewsLocation) + migrate (undefined :: News_Location) dbimport message = do -- Insert the message and acquire its primary key (unique ID) news_id <- insert_xml message - -- And insert each one into its own table. We use insert_xml_or_select - -- because we know that most teams will already exist, and we - -- want to get back the id for the existing team when - -- there's a collision. + -- Now insert the teams. We use insert_or_select because we know + -- that most teams will already exist, and we want to get back the + -- id for the existing team when there's a collision. nt_ids <- mapM insert_or_select (xml_teams message) -- Now that the teams have been inserted, create @@ -241,9 +266,9 @@ instance DbImport Message where let news_news_teams = map (News_NewsTeam news_id) nt_ids mapM_ insert_ news_news_teams - -- Do all of that over again for the NewsLocations. - loc_ids <- mapM insert_or_select (xml_locations message) - let news_news_locations = map (News_NewsLocation news_id) loc_ids + -- Do all of that over again for the Locations. 
+ loc_ids <- mapM insert_xml_or_select (xml_locations message) + let news_news_locations = map (News_Location news_id) loc_ids mapM_ insert_ news_news_locations return ImportSucceeded @@ -262,15 +287,6 @@ mkPersist defaultCodegenConfig [groundhog| type: constraint fields: [team_name] -- entity: NewsLocation - dbName: news_locations - constructors: - - name: NewsLocation - uniques: - - name: unique_news_location - type: constraint - fields: [city, state, country] - |] @@ -312,17 +328,17 @@ mkPersist tsn_codegen_config [groundhog| reference: onDelete: cascade -- entity: News_NewsLocation - dbName: news__news_locations +- entity: News_Location + dbName: news__locations constructors: - - name: News_NewsLocation + - name: News_Location fields: - - name: news_NewsLocation0 # Default created by mkNormalFieldName + - name: news_Location0 # Default created by mkNormalFieldName dbName: news_id reference: onDelete: cascade - - name: news_NewsLocation1 # Default created by mkNormalFieldName - dbName: news_locations_id + - name: news_Location1 # Default created by mkNormalFieldName + dbName: locations_id reference: onDelete: cascade |] @@ -358,9 +374,9 @@ pickle_msg_id = to_tuple m = (db_msg_id m, db_event_id m) --- | Convert a 'NewsLocation' to/from XML. +-- | Convert a 'NewsLocationXml' to/from XML. -- -pickle_location :: PU NewsLocation +pickle_location :: PU NewsLocationXml pickle_location = xpElem "location" $ xpWrap (from_tuple, to_tuple) $ @@ -369,8 +385,8 @@ pickle_location = (xpElem "country" xpText) where from_tuple = - uncurryN NewsLocation - to_tuple l = (city l, state l, country l) + uncurryN NewsLocationXml + to_tuple l = (xml_city l, xml_state l, xml_country l) -- | Convert a 'Message' to/from XML. @@ -516,11 +532,11 @@ test_on_delete_cascade = testGroup "cascading delete tests" where check desc path expected = testCase desc $ do news <- unsafe_unpickle path pickle_message - let a = undefined :: News - let b = undefined :: NewsTeam - let c = undefined :: News_NewsTeam - let d = undefined :: NewsLocation - let e = undefined :: News_NewsLocation + let a = undefined :: Location + let b = undefined :: News + let c = undefined :: NewsTeam + let d = undefined :: News_NewsTeam + let e = undefined :: News_Location actual <- withSqliteConn ":memory:" $ runDbConn $ do runMigration silentMigrationLogger $ do migrate a @@ -529,7 +545,7 @@ test_on_delete_cascade = testGroup "cascading delete tests" migrate d migrate e _ <- dbimport news - deleteAll a + deleteAll b count_a <- countAll a count_b <- countAll b count_c <- countAll c
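Two illustrative sketches follow the patch. This first one models the XML-to-database conversion pattern that the patch applies to locations. The ToDb and FromXml classes below are simplified, hypothetical stand-ins for the ones exported by this project's Xml module, and the field layout of Location is only inferred from the call "Location xml_city xml_state xml_country" in the patch, so none of this should be read as the real TSN.Location or Xml code.

{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RecordWildCards #-}

-- Simplified stand-ins for the classes from the project's Xml
-- module (hypothetical definitions, for illustration only).
class ToDb a where
  type Db a

class ToDb a => FromXml a where
  from_xml :: a -> Db a

-- Database side (assumed layout of the Location type from TSN.Location).
data Location =
  Location {
    city    :: Maybe String,
    state   :: Maybe String,
    country :: String }
  deriving (Eq, Show)

-- XML side, as defined in the patch.
data NewsLocationXml =
  NewsLocationXml {
    xml_city    :: Maybe String,
    xml_state   :: Maybe String,
    xml_country :: String }
  deriving (Eq, Show)

instance ToDb NewsLocationXml where
  -- The database analogue of a NewsLocationXml is a Location.
  type Db NewsLocationXml = Location

instance FromXml NewsLocationXml where
  -- Converting is just a field-by-field copy.
  from_xml NewsLocationXml{..} = Location xml_city xml_state xml_country

main :: IO ()
main = print $ from_xml (NewsLocationXml (Just "Baltimore") (Just "MD") "USA")

The XmlImport instance added in the patch builds on this same conversion, which is presumably what lets dbimport call insert_xml_or_select directly on the NewsLocationXml values parsed from the feed.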
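The second sketch is a pure model of the insert-or-select step used for teams and locations in dbimport. It is not the real TSN.Database.insert_or_select, which goes through groundhog; it only illustrates the semantics the comment in the patch describes, namely that inserting a row whose natural key already exists hands back the existing id instead of creating a duplicate.

import qualified Data.Map.Strict as Map

-- The "table" maps a natural key (here, a city/country pair) to a
-- surrogate integer id standing in for the real primary key.
type Table k = Map.Map k Int

-- On a collision, return the id the key already has; otherwise
-- assign the next id and record it.
insert_or_select :: Ord k => k -> Table k -> (Int, Table k)
insert_or_select k tbl =
  case Map.lookup k tbl of
    Just existing -> (existing, tbl)
    Nothing       -> (next, Map.insert k next tbl)
  where
    next = Map.size tbl + 1

main :: IO ()
main = do
  let locations = [ ("Baltimore", "USA")
                  , ("Boston",    "USA")
                  , ("Baltimore", "USA") ]  -- duplicate on purpose
      step (ids, tbl) loc =
        let (i, tbl') = insert_or_select loc tbl in (ids ++ [i], tbl')
      (loc_ids, _) = foldl step ([], Map.empty) locations
  -- The duplicate location comes back with its original id: [1,2,1].
  print loc_ids

In the patch, ids obtained this way are paired with the news id to build the News_NewsTeam and News_Location join rows.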