{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

-- | Parse TSN XML for the DTD \"newsxml.dtd\". Each document contains
-- a root element \<message\> that contains an entire news item.
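--
-- An example document might look something like this (all values
-- hypothetical; see 'pickle_message' below for the authoritative
-- element order):
--
-- @
--   \<message\>
--     \<XML_File_ID\>42\</XML_File_ID\>
--     \<heading\>...\</heading\>
--     \<msg_id EventId=\"21\"\>4676\</msg_id\>
--     \<category\>...\</category\>
--     \<sport\>...\</sport\>
--     \<url\>...\</url\>
--     \<team\>...\</team\>
--     \<location\>...\</location\>
--     \<SMS\>...\</SMS\>
--     \<text\>...\</text\>
--     \<continue\>\<P\>...\</P\>\</continue\>
--     \<time_stamp\>...\</time_stamp\>
--   \</message\>
-- @
--
-- The \<url\>, \<Editor\> (omitted above), \<text\>, and \<continue\>
-- elements are optional.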
--
module TSN.XML.News (
  dtd,
  pickle_message,
  -- * Tests
  news_tests,
  -- * WARNING: these are private but exported to silence warnings
  News_LocationConstructor(..),
  News_NewsTeamConstructor(..),
  NewsConstructor(..),
  NewsTeamConstructor(..) )
where

-- System imports.
import Data.Data ( Data, constrFields, dataTypeConstrs, dataTypeOf )
import Data.Time.Clock ( UTCTime )
import Data.List.Utils ( join, split )
import Data.Tuple.Curry ( uncurryN )
import Data.Typeable ( Typeable )
import Database.Groundhog (
  countAll,
  deleteAll,
  insert_,
  migrate,
  runMigration,
  silentMigrationLogger )
import Database.Groundhog.Core ( DefaultKey )
import Database.Groundhog.Generic ( runDbConn )
import Database.Groundhog.Sqlite ( withSqliteConn )
import Database.Groundhog.TH (
  defaultCodegenConfig,
  groundhog,
  mkPersist )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( (@?=), testCase )
import Text.XML.HXT.Core (
  PU,
  xp13Tuple,
  xpAttr,
  xpElem,
  xpInt,
  xpList,
  xpOption,
  xpPair,
  xpText,
  xpWrap )

-- Local imports.
import TSN.Codegen (
  tsn_codegen_config,
  tsn_db_field_namer ) -- Used in a test
import TSN.Database ( insert_or_select )
import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
import TSN.Picklers ( xp_time_stamp )
import TSN.Location ( Location(..), pickle_location )
import TSN.XmlImport ( XmlImport(..) )
import Xml (
  FromXml(..),
  ToDb(..),
  pickle_unpickle,
  unpickleable,
  unsafe_unpickle )


-- | The DTD to which this module corresponds. Used to invoke dbimport.
--
dtd :: String
dtd = "newsxml.dtd"


--
-- DB/XML Data types
--

-- * News/Message

-- | The msg_id child of \<message\> contains an \"EventId\"
-- attribute; we embed it into the 'News' type. We (pointlessly) use
-- the \"db_\" prefix here so that the two names don't collide on
-- \"id\" when Groundhog is creating its fields using our field namer.
--
data MsgId =
  MsgId {
    db_msg_id :: Int,
    db_event_id :: Maybe Int }
  deriving (Data, Eq, Show, Typeable)


-- | The XML representation of a news item (\<message\>).
--
data Message =
  Message {
    xml_xml_file_id :: Int,
    xml_heading :: String,
    xml_mid :: MsgId,
    xml_category :: String,
    xml_sport :: String,
    xml_url :: Maybe String,
    xml_teams :: [NewsTeam],
    xml_locations :: [Location],
    xml_sms :: String,
    xml_editor :: Maybe String,
    xml_text :: Maybe String, -- Text and continue seem to show up in pairs,
    xml_continue :: Maybe String, -- either both present or both missing.
    xml_time_stamp :: UTCTime }
  deriving (Eq, Show)


-- | The database representation of a news item. We drop several
-- uninteresting fields from 'Message', and omit the list fields which
-- will be represented as join tables.
--
data News =
  News {
    db_xml_file_id :: Int,
    db_mid :: MsgId,
    db_sport :: String,
    db_url :: Maybe String,
    db_sms :: String,
    db_editor :: Maybe String,
    db_text :: Maybe String,
    db_continue :: Maybe String,
    db_time_stamp :: UTCTime }
  deriving (Data, Eq, Show, Typeable)



instance ToDb Message where
  -- | The database representation of 'Message' is 'News'.
  type Db Message = News

-- | Convert the XML representation 'Message' to the database
-- representation 'News'.
--
instance FromXml Message where
  -- | We use a record wildcard so GHC doesn't complain that we never
  -- used the field names.
  --
  from_xml Message{..} = News { db_xml_file_id = xml_xml_file_id,
                                db_mid = xml_mid,
                                db_sport = xml_sport,
                                db_url = xml_url,
                                db_sms = xml_sms,
                                db_editor = xml_editor,
                                db_text = xml_text,
                                db_continue = xml_continue,
                                db_time_stamp = xml_time_stamp }

-- | This lets us insert the XML representation 'Message' directly.
--
instance XmlImport Message


-- * NewsTeam

-- | The database/XML type for teams as they show up in the news. We
-- can't reuse the representation from "TSN.Team" because it requires
-- a team id. We wouldn't want to make the team id optional and then
-- insert a team with no id, only to find the same team later with an
-- id and be unable to update the record. (We could add the update
-- logic, but it would be more trouble than it's worth.)
--
data NewsTeam =
  NewsTeam { team_name :: String }
  deriving (Eq, Show)



-- * News_NewsTeam

-- | Mapping between News records and NewsTeam records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsTeam = News_NewsTeam
  (DefaultKey News)
  (DefaultKey NewsTeam)


-- * News_Location

-- | Mapping between 'News' records and 'Location' records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_Location = News_Location
  (DefaultKey News)
  (DefaultKey Location)



--
-- Database code
--

-- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is
-- slightly non-generic because of our 'News_NewsTeam' and
-- 'News_Location' join tables.
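--
-- A hypothetical end-to-end use, as a sketch only (the real driver
-- lives elsewhere in htsn-import), assuming @message@ was unpickled
-- with 'pickle_message':
--
-- @
--   withSqliteConn \":memory:\" $ runDbConn $ do
--     dbmigrate (undefined :: Message)
--     dbimport message
-- @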
--
instance DbImport Message where
  dbmigrate _ =
    run_dbmigrate $ do
      migrate (undefined :: Location)
      migrate (undefined :: News)
      migrate (undefined :: NewsTeam)
      migrate (undefined :: News_NewsTeam)
      migrate (undefined :: News_Location)

  dbimport message = do
    -- Insert the message and acquire its primary key (unique ID)
    news_id <- insert_xml message

    -- Now insert the teams. We use insert_or_select because we know
    -- that most teams will already exist, and we want to get back the
    -- id for the existing team when there's a collision.
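    -- (For example, inserting the hypothetical team "Red Sox" twice
    -- should yield the same key both times.)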
    nt_ids <- mapM insert_or_select (xml_teams message)

    -- Now that the teams have been inserted, create news__news_teams
    -- rows mapping between the two.
    let news_news_teams = map (News_NewsTeam news_id) nt_ids
    mapM_ insert_ news_news_teams

    -- Do all of that over again for the Locations.
    loc_ids <- mapM insert_or_select (xml_locations message)
    let news_news_locations = map (News_Location news_id) loc_ids
    mapM_ insert_ news_news_locations

    return ImportSucceeded


-- These types don't have special XML representations or field name
-- collisions, so we use the defaultCodegenConfig and give their
-- fields nice simple names.
mkPersist defaultCodegenConfig [groundhog|
- entity: NewsTeam
  dbName: news_teams
  constructors:
    - name: NewsTeam
      uniques:
        - name: unique_news_team
          type: constraint
          fields: [team_name]

|]


-- These types have fields with e.g. db_ and xml_ prefixes, so we
-- use our own codegen to peel those off before naming the columns.
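-- For example, the News field "db_sport" should wind up in a column
-- named just "sport"; test_news_fields_have_correct_names (below)
-- checks exactly this.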
mkPersist tsn_codegen_config [groundhog|
- entity: News
  constructors:
    - name: News
      uniques:
        - name: unique_news
          type: constraint
          # Prevent multiple imports of the same message.
          fields: [db_xml_file_id]
      fields:
        - name: db_mid
          embeddedType:
            - {name: msg_id, dbName: msg_id}
            - {name: event_id, dbName: event_id}

- embedded: MsgId
  fields:
    - name: db_msg_id
      dbName: msg_id
    - name: db_event_id
      dbName: event_id

- entity: News_NewsTeam
  dbName: news__news_teams
  constructors:
    - name: News_NewsTeam
      fields:
        - name: news_NewsTeam0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_NewsTeam1 # Default created by mkNormalFieldName
          dbName: news_teams_id
          reference:
            onDelete: cascade

- entity: News_Location
  dbName: news__locations
  constructors:
    - name: News_Location
      fields:
        - name: news_Location0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_Location1 # Default created by mkNormalFieldName
          dbName: locations_id
          reference:
            onDelete: cascade
|]


--
-- XML Picklers
--

-- | Convert a 'NewsTeam' to/from XML.
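--
-- For example (hypothetical team name),
--
-- @
--   \<team\>Red Sox\</team\>
-- @
--
-- corresponds to @NewsTeam { team_name = \"Red Sox\" }@.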
--
pickle_news_team :: PU NewsTeam
pickle_news_team =
  xpElem "team" $
  xpWrap (from_string, to_string) xpText
  where
    to_string :: NewsTeam -> String
    to_string = team_name

    from_string :: String -> NewsTeam
    from_string = NewsTeam


-- | Convert a 'MsgId' to/from XML.
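--
-- For example (hypothetical ids),
--
-- @
--   \<msg_id EventId=\"21\"\>4676\</msg_id\>
-- @
--
-- corresponds to @MsgId { db_msg_id = 4676, db_event_id = Just 21 }@;
-- a missing \"EventId\" attribute unpickles to 'Nothing'.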
--
pickle_msg_id :: PU MsgId
pickle_msg_id =
  xpElem "msg_id" $
  xpWrap (from_tuple, to_tuple) $
  xpPair xpInt (xpAttr "EventId" (xpOption xpInt))
  where
    from_tuple = uncurryN MsgId
    to_tuple m = (db_msg_id m, db_event_id m)



-- | Convert a 'Message' to/from XML.
--
pickle_message :: PU Message
pickle_message =
  xpElem "message" $
  xpWrap (from_tuple, to_tuple) $
  xp13Tuple (xpElem "XML_File_ID" xpInt)
            (xpElem "heading" xpText)
            pickle_msg_id
            (xpElem "category" xpText)
            (xpElem "sport" xpText)
            (xpElem "url" $ xpOption xpText)
            (xpList pickle_news_team)
            (xpList pickle_location)
            (xpElem "SMS" xpText)
            (xpOption (xpElem "Editor" xpText))
            (xpOption (xpElem "text" xpText))
            pickle_continue
            (xpElem "time_stamp" xp_time_stamp)
  where
    from_tuple = uncurryN Message
    to_tuple m = (xml_xml_file_id m, -- Verbose,
                  xml_heading m,     -- but
                  xml_mid m,         -- eliminates
                  xml_category m,    -- GHC
                  xml_sport m,       -- warnings
                  xml_url m,         -- .
                  xml_teams m,       -- .
                  xml_locations m,   -- .
                  xml_sms m,
                  xml_editor m,
                  xml_text m,
                  xml_continue m,
                  xml_time_stamp m)

-- | While unpickling, we combine all of the \<P\> paragraphs inside
-- a \<continue\> element into one newline-separated 'String', and we
-- do the reverse while pickling.
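--
-- For example (hypothetical paragraphs),
--
-- @
--   \<continue\>\<P\>one\</P\>\<P\>two\</P\>\</continue\>
-- @
--
-- unpickles to @Just \"one\\ntwo\"@ (the paragraphs joined by
-- newlines); a missing \<continue\> element unpickles to 'Nothing'.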
--
pickle_continue :: PU (Maybe String)
pickle_continue =
  xpOption $
  xpWrap (to_string, from_string) $
  xpElem "continue" $
  xpList (xpElem "P" xpText)
  where
    from_string :: String -> [String]
    from_string = split "\n"

    to_string :: [String] -> String
    to_string = join "\n"


--
-- Tasty Tests
--

-- | A list of all tests for this module.
--
news_tests :: TestTree
news_tests =
  testGroup
    "News tests"
    [ test_news_fields_have_correct_names,
      test_on_delete_cascade,
      test_pickle_of_unpickle_is_identity,
      test_unpickle_succeeds ]


-- | Make sure our codegen is producing the correct database names.
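--
-- For example, the 'News' field \"db_sport\" should produce a column
-- named \"sport\". The extra arguments to 'tsn_db_field_namer' don't
-- affect the resulting name, which is why the test below can pass it
-- junk values.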
--
test_news_fields_have_correct_names :: TestTree
test_news_fields_have_correct_names =
  testCase "news fields get correct database names" $
  mapM_ check (zip actual expected)
  where
    -- This is cool: it uses the (derived) Data instance of
    -- News.News to get the field names of its constructor.
    field_names :: [String]
    field_names =
      constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: News)

    expected :: [String]
    expected =
      map (\x -> tsn_db_field_namer "herp" "derp" 8675309 x 90210) field_names

    actual :: [String]
    actual = ["xml_file_id",
              "mid",
              "sport",
              "url",
              "sms",
              "editor",
              "text",
              "continue"]

    check (x,y) = (x @?= y)


-- | If we unpickle something and then pickle it, we should wind up
-- with the same thing we started with. WARNING: success of this
-- test does not mean that unpickling succeeded.
--
test_pickle_of_unpickle_is_identity :: TestTree
test_pickle_of_unpickle_is_identity = testGroup "pickle-unpickle tests"
  [ check "pickle composed with unpickle is the identity"
          "test/xml/newsxml.xml",

    check "pickle composed with unpickle is the identity (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      (expected, actual) <- pickle_unpickle pickle_message path
      actual @?= expected


-- | Make sure we can actually unpickle these things.
--
test_unpickle_succeeds :: TestTree
test_unpickle_succeeds = testGroup "unpickle tests"
  [ check "unpickling succeeds"
          "test/xml/newsxml.xml",

    check "unpickling succeeds (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      actual <- unpickleable path pickle_message
      let expected = True
      actual @?= expected


-- | Make sure everything gets deleted when we delete the top-level
-- record.
--
test_on_delete_cascade :: TestTree
test_on_delete_cascade = testGroup "cascading delete tests"
  [ check "deleting news deletes its children"
          "test/xml/newsxml.xml"
          4 -- the 2 news_teams and 2 news_locations should remain.
  ]
  where
    check desc path expected = testCase desc $ do
      news <- unsafe_unpickle path pickle_message
      let a = undefined :: Location
      let b = undefined :: News
      let c = undefined :: NewsTeam
      let d = undefined :: News_NewsTeam
      let e = undefined :: News_Location
      actual <- withSqliteConn ":memory:" $ runDbConn $ do
                  runMigration silentMigrationLogger $ do
                    migrate a
                    migrate b
                    migrate c
                    migrate d
                    migrate e
                  _ <- dbimport news
                  deleteAll b
                  count_a <- countAll a
                  count_b <- countAll b
                  count_c <- countAll c
                  count_d <- countAll d
                  count_e <- countAll e
                  return $ count_a + count_b + count_c + count_d + count_e
      actual @?= expected