{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

-- | Parse TSN XML for the DTD \"newsxml.dtd\". Each document contains
-- a root element \<message\> that contains an entire news item.
--
module TSN.XML.News (
  dtd,
  has_only_single_sms,
  pickle_message,
  -- * Tests
  news_tests,
  -- * WARNING: these are private but exported to silence warnings
  News_LocationConstructor(..),
  News_TeamConstructor(..),
  NewsConstructor(..) )
where

-- System imports.
import Data.Data ( Data, constrFields, dataTypeConstrs, dataTypeOf )
import Data.Time.Clock ( UTCTime )
import Data.List.Utils ( join, split )
import Data.Tuple.Curry ( uncurryN )
import Data.Typeable ( Typeable )
import qualified Data.Vector.HFixed as H ( HVector, convert )
import Database.Groundhog (
  countAll,
  deleteAll,
  insert_,
  migrate )
import Database.Groundhog.Core ( DefaultKey )
import Database.Groundhog.Generic ( runDbConn, runMigrationSilent )
import Database.Groundhog.Sqlite ( withSqliteConn )
import Database.Groundhog.TH (
  groundhog,
  mkPersist )
import qualified GHC.Generics as GHC ( Generic )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( (@?=), testCase )
import Text.XML.HXT.Core (
  PU,
  XmlTree,
  (/>),
  (>>>),
  addNav,
  descendantAxis,
  filterAxis,
  followingSiblingAxis,
  hasName,
  remNav,
  runLA,
  xp13Tuple,
  xpAttr,
  xpElem,
  xpInt,
  xpList,
  xpOption,
  xpPair,
  xpText,
  xpWrap )

-- Local imports.
import TSN.Codegen (
  tsn_codegen_config,
  tsn_db_field_namer ) -- Used in a test
import TSN.Database ( insert_or_select )
import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
import TSN.Location ( Location(..), pickle_location )
import TSN.Picklers ( xp_attr_option, xp_time_stamp )
import TSN.Team ( Team(..) )
import TSN.XmlImport ( XmlImport(..) )
import Xml (
  FromXml(..),
  ToDb(..),
  pickle_unpickle,
  unpickleable,
  unsafe_read_invalid_document,
  unsafe_unpickle )


-- | The DTD to which this module corresponds. Used to invoke dbimport.
--
dtd :: String
dtd = "newsxml.dtd"


--
-- DB/XML Data types
--

-- * News/Message

-- | The msg_id child of \<message\> contains an event_id attribute; we
-- embed it into the 'News' type. We (pointlessly) use the \"db_\"
-- prefix here so that the two names don't collide on \"id\" when
-- Groundhog is creating its fields using our field namer.
--
-- The leading underscores prevent unused field warnings.
--
data MsgId =
  MsgId {
    _db_msg_id :: Int,
    _db_event_id :: Maybe Int }
  deriving (Data, Eq, GHC.Generic, Show, Typeable)

-- | For 'H.convert'.
--
instance H.HVector MsgId

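-- A minimal illustrative sketch (not part of the original module): the
-- generic 'H.HVector' instance is what lets 'H.convert' translate
-- between 'MsgId' and the isomorphic tuple (Int, Maybe Int). The
-- picklers below rely on exactly this when flattening records for
-- 'xpWrap'. Assuming hvector's standard tuple instances, a GHCi
-- session should look roughly like:
--
--   >>> H.convert (MsgId 21231 (Just 657)) :: (Int, Maybe Int)
--   (21231,Just 657)
--   >>> H.convert ((21231 :: Int), (Just 657 :: Maybe Int)) :: MsgId
--   MsgId {_db_msg_id = 21231, _db_event_id = Just 657}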

-- | The XML representation of a news item (\<message\>).
--
data Message =
  Message {
    xml_xml_file_id :: Int,
    xml_heading :: String,
    xml_mid :: MsgId,
    xml_category :: String,
    xml_sport :: String,
    xml_url :: Maybe String,
    xml_teams :: [NewsTeamXml],
    xml_locations :: [Location],
    xml_sms :: Maybe String,
    xml_editor :: Maybe String,
    xml_text :: Maybe String, -- Text and continue seem to show up in pairs,
    xml_continue :: Maybe String, -- either both present or both missing.
    xml_time_stamp :: UTCTime }
  deriving (Eq, GHC.Generic, Show)


-- | For 'H.convert'.
--
instance H.HVector Message


-- | The database representation of a news item. We drop several
-- uninteresting fields from 'Message', and omit the list fields which
-- will be represented as join tables.
--
data News =
  News {
    db_xml_file_id :: Int,
    db_mid :: MsgId,
    db_sport :: String,
    db_url :: Maybe String,
    db_sms :: Maybe String,
    db_editor :: Maybe String,
    db_text :: Maybe String,
    db_continue :: Maybe String,
    db_time_stamp :: UTCTime }
  deriving (Data, Eq, Show, Typeable)



instance ToDb Message where
  -- | The database representation of 'Message' is 'News'.
  type Db Message = News

-- | Convert the XML representation 'Message' to the database
-- representation 'News'.
--
instance FromXml Message where
  -- | We use a record wildcard so GHC doesn't complain that we never
  -- used the field names.
  --
  -- To convert, we drop some fields.
  --
  from_xml Message{..} = News { db_xml_file_id = xml_xml_file_id,
                                db_mid = xml_mid,
                                db_sport = xml_sport,
                                db_url = xml_url,
                                db_sms = xml_sms,
                                db_editor = xml_editor,
                                db_text = xml_text,
                                db_continue = xml_continue,
                                db_time_stamp = xml_time_stamp }

-- | This lets us insert the XML representation 'Message' directly.
--
instance XmlImport Message


-- * NewsTeamXml

-- | The XML type for teams as they show up in the news. We can't
-- reuse the representation from "TSN.Team" because here the team
-- name is not optional and there is no abbreviation.
--
data NewsTeamXml =
  NewsTeamXml { xml_team_id :: String,
                xml_team_name :: String }
  deriving (Eq, GHC.Generic, Show)


-- | For 'H.convert'.
--
instance H.HVector NewsTeamXml


instance ToDb NewsTeamXml where
  -- | The database representation of 'NewsTeamXml' is 'Team'.
  type Db NewsTeamXml = Team

-- | Convert the XML representation 'NewsTeamXml' to the database
-- representation 'Team'.
--
instance FromXml NewsTeamXml where
  from_xml NewsTeamXml{..} =
    Team { team_id = xml_team_id,
           abbreviation = Nothing,
           name = Just xml_team_name }

-- | Allow us to import 'NewsTeamXml' directly.
--
instance XmlImport NewsTeamXml

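-- An illustrative example (not part of the original module) of the
-- conversion above; the team id and name are made up, and the printed
-- form assumes 'Team' has a derived 'Show' instance:
--
--   >>> from_xml (NewsTeamXml "123" "Example Team")
--   Team {team_id = "123", abbreviation = Nothing, name = Just "Example Team"}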

-- * News_Team

-- | Mapping between News records and Team records in the database. We
-- don't name the fields because we don't use the names explicitly;
-- that means we have to give them nice database names via
-- groundhog.
--
data News_Team = News_Team (DefaultKey News) (DefaultKey Team)


-- * News_Location

-- | Mapping between 'News' records and 'Location' records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_Location = News_Location
                       (DefaultKey News)
                       (DefaultKey Location)




-- | Some newsxml documents contain two \<SMS\> elements in a row,
-- violating the DTD. The second one has always been empty, but it's
-- irrelevant: we can't parse these, and would like to detect them
-- in order to report the fact that the busted document is
-- unsupported.
--
-- This function detects whether two \<SMS\> elements appear in a
-- row, as siblings.
--
has_only_single_sms :: XmlTree -> Bool
has_only_single_sms xmltree =
  case elements of
    [] -> True
    _  -> False
  where
    parse :: XmlTree -> [XmlTree]
    parse = runLA $ hasName "/"
                    /> hasName "message"
                    >>> addNav
                    >>> descendantAxis
                    >>> filterAxis (hasName "SMS")
                    >>> followingSiblingAxis
                    >>> remNav
                    >>> hasName "SMS"

    elements = parse xmltree

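-- An illustrative sketch (not part of the original module): on a
-- well-formed document whose \<message\> contains a single
--
--   <SMS>Some short text.</SMS>
--
-- 'has_only_single_sms' returns True, whereas a busted document with
-- two sibling \<SMS\> elements in a row,
--
--   <SMS>Some short text.</SMS>
--   <SMS></SMS>
--
-- makes it return False. 'test_sms_detected_correctly' below checks
-- both cases against sample documents.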

--
-- * Database code
--

-- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is
-- slightly non-generic because of our 'News_Team' and
-- 'News_Location' join tables.
--
instance DbImport Message where
  dbmigrate _ =
    run_dbmigrate $ do
      migrate (undefined :: Location)
      migrate (undefined :: News)
      migrate (undefined :: Team)
      migrate (undefined :: News_Team)
      migrate (undefined :: News_Location)

  dbimport message = do
    -- Insert the message and acquire its primary key (unique ID)
    news_id <- insert_xml message

    -- Now insert the teams. We use insert_xml_or_select because we
    -- know that most teams will already exist, and we want to get
    -- back the id for the existing team when there's a collision.
    team_ids <- mapM insert_xml_or_select (xml_teams message)

    -- Now that the teams have been inserted, create news__team
    -- records mapping between the two.
    let news_teams = map (News_Team news_id) team_ids
    mapM_ insert_ news_teams

    -- Do all of that over again for the Locations.
    loc_ids <- mapM insert_or_select (xml_locations message)
    let news_news_locations = map (News_Location news_id) loc_ids
    mapM_ insert_ news_news_locations

    return ImportSucceeded

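-- An illustrative sketch (not part of the original module) of how the
-- instance above is driven; 'test_on_delete_cascade' below does
-- essentially this against an in-memory SQLite database:
--
--   withSqliteConn ":memory:" $ runDbConn $ do
--     dbmigrate (undefined :: Message)
--     result <- dbimport message
--     ...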


-- These types have fields with e.g. db_ and xml_ prefixes, so we
-- use our own codegen to peel those off before naming the columns.
mkPersist tsn_codegen_config [groundhog|
- entity: News
  constructors:
    - name: News
      uniques:
        - name: unique_news
          type: constraint
          # Prevent multiple imports of the same message.
          fields: [db_xml_file_id]
      fields:
        - name: db_mid
          embeddedType:
            - {name: msg_id, dbName: msg_id}
            - {name: event_id, dbName: event_id}

- embedded: MsgId
  fields:
    - name: _db_msg_id
      dbName: msg_id
    - name: _db_event_id
      dbName: event_id

- entity: News_Team
  dbName: news__teams
  constructors:
    - name: News_Team
      fields:
        - name: news_Team0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_Team1 # Default created by mkNormalFieldName
          dbName: teams_id
          reference:
            onDelete: cascade

- entity: News_Location
  dbName: news__locations
  constructors:
    - name: News_Location
      fields:
        - name: news_Location0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_Location1 # Default created by mkNormalFieldName
          dbName: locations_id
          reference:
            onDelete: cascade
|]

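-- A rough, illustrative picture (not part of the original module) of
-- the join tables the configuration above should produce; the exact
-- column types and parent table names come from groundhog and the
-- other modules:
--
--   news__teams     (news_id  REFERENCES news  ON DELETE CASCADE,
--                    teams_id REFERENCES teams ON DELETE CASCADE)
--   news__locations (news_id      REFERENCES news      ON DELETE CASCADE,
--                    locations_id REFERENCES locations ON DELETE CASCADE)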

--
-- XML Picklers
--

-- | Convert a 'NewsTeamXml' to/from XML.
--
pickle_news_team :: PU NewsTeamXml
pickle_news_team =
  xpElem "team" $
  xpWrap (from_tuple, H.convert) $
  xpPair (xpAttr "id" xpText)
         xpText -- team name
  where
    from_tuple = uncurry NewsTeamXml

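-- An illustrative example (not part of the original module) of the
-- fragment this pickler handles; the id and name are made up:
--
--   <team id="123">Example Team</team>
--
-- should unpickle to
--
--   NewsTeamXml { xml_team_id = "123", xml_team_name = "Example Team" }
--
-- and pickle back to the same XML.
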
-- | Convert a 'MsgId' to/from XML.
--
pickle_msg_id :: PU MsgId
pickle_msg_id =
  xpElem "msg_id" $
  xpWrap (from_tuple, H.convert) $
  xpPair xpInt (xpAttr "EventId" xp_attr_option)
  where
    from_tuple = uncurryN MsgId

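-- An illustrative example (not part of the original module); the
-- numbers are made up:
--
--   <msg_id EventId="12345">21231</msg_id>
--
-- should unpickle to MsgId 21231 (Just 12345). An absent or empty
-- EventId should come back as Nothing; handling that case is the job
-- of 'xp_attr_option'.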

-- | Convert a 'Message' to/from XML.
--
pickle_message :: PU Message
pickle_message =
  xpElem "message" $
  xpWrap (from_tuple, H.convert) $
  xp13Tuple (xpElem "XML_File_ID" xpInt)
            (xpElem "heading" xpText)
            pickle_msg_id
            (xpElem "category" xpText)
            (xpElem "sport" xpText)
            (xpElem "url" $ xpOption xpText)
            (xpList pickle_news_team)
            (xpList pickle_location)
            (xpElem "SMS" $ xpOption xpText)
            (xpOption (xpElem "Editor" xpText))
            (xpOption (xpElem "text" xpText))
            pickle_continue
            (xpElem "time_stamp" xp_time_stamp)
  where
    from_tuple = uncurryN Message

-- | We combine all of the \<P\> elements within \<continue\> into one
-- 'String' while unpickling and do the reverse while pickling.
--
pickle_continue :: PU (Maybe String)
pickle_continue =
  xpOption $
  xpWrap (to_string, from_string) $
  xpElem "continue" $
  xpList (xpElem "P" xpText)
  where
    from_string :: String -> [String]
    from_string = split "\n"

    to_string :: [String] -> String
    to_string = join "\n"

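-- An illustrative example (not part of the original module); the
-- paragraph text is made up:
--
--   <continue>
--     <P>First paragraph.</P>
--     <P>Second paragraph.</P>
--   </continue>
--
-- should unpickle to Just "First paragraph.\nSecond paragraph.", and
-- pickling splits that string back on newlines into one <P> per line.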

--
-- Tasty Tests
--

-- | A list of all tests for this module.
--
news_tests :: TestTree
news_tests =
  testGroup
    "News tests"
    [ test_news_fields_have_correct_names,
      test_on_delete_cascade,
      test_pickle_of_unpickle_is_identity,
      test_unpickle_succeeds,
      test_sms_detected_correctly ]


-- | Make sure our codegen is producing the correct database names.
--
test_news_fields_have_correct_names :: TestTree
test_news_fields_have_correct_names =
  testCase "news fields get correct database names" $
  mapM_ check (zip actual expected)
  where
    -- This is cool, it uses the (derived) Data instance of
    -- News.News to get the field names of its constructor.
    field_names :: [String]
    field_names =
      constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: News)

    expected :: [String]
    expected =
      map (\x -> tsn_db_field_namer "herp" "derp" 8675309 x 90210) field_names

    actual :: [String]
    actual = ["xml_file_id",
              "mid",
              "sport",
              "url",
              "sms",
              "editor",
              "text",
              "continue"]

    check (x,y) = (x @?= y)


-- | If we unpickle something and then pickle it, we should wind up
-- with the same thing we started with. WARNING: success of this
-- test does not mean that unpickling succeeded.
--
test_pickle_of_unpickle_is_identity :: TestTree
test_pickle_of_unpickle_is_identity = testGroup "pickle-unpickle tests"
  [ check "pickle composed with unpickle is the identity"
          "test/xml/newsxml.xml",

    check "pickle composed with unpickle is the identity (with Editor)"
          "test/xml/newsxml-with-editor.xml",

    check "pickle composed with unpickle is the identity (empty SMS)"
          "test/xml/newsxml-empty-sms.xml" ]
  where
    check desc path = testCase desc $ do
      (expected, actual) <- pickle_unpickle pickle_message path
      actual @?= expected


-- | Make sure we can actually unpickle these things.
--
test_unpickle_succeeds :: TestTree
test_unpickle_succeeds = testGroup "unpickle tests"
  [ check "unpickling succeeds"
          "test/xml/newsxml.xml",

    check "unpickling succeeds (with Editor)"
          "test/xml/newsxml-with-editor.xml",

    check "unpickling succeeds (empty SMS)"
          "test/xml/newsxml-empty-sms.xml" ]
  where
    check desc path = testCase desc $ do
      actual <- unpickleable path pickle_message
      let expected = True
      actual @?= expected


-- | Make sure everything gets deleted when we delete the top-level
-- record.
--
test_on_delete_cascade :: TestTree
test_on_delete_cascade = testGroup "cascading delete tests"
  [ check "deleting news deletes its children"
          "test/xml/newsxml.xml"
          4 -- 2 teams and 2 locations should remain after the cascade.
    ,
    check "deleting news deletes its children (empty SMS)"
          "test/xml/newsxml-empty-sms.xml"
          4 -- 2 teams and 2 locations should remain.
  ]
  where
    check desc path expected = testCase desc $ do
      news <- unsafe_unpickle path pickle_message
      let a = undefined :: Location
      let b = undefined :: News
      let c = undefined :: Team
      let d = undefined :: News_Team
      let e = undefined :: News_Location
      actual <- withSqliteConn ":memory:" $ runDbConn $ do
                  runMigrationSilent $ do
                    migrate a
                    migrate b
                    migrate c
                    migrate d
                    migrate e
                  _ <- dbimport news
                  deleteAll b
                  count_a <- countAll a
                  count_b <- countAll b
                  count_c <- countAll c
                  count_d <- countAll d
                  count_e <- countAll e
                  return $ count_a + count_b + count_c + count_d + count_e
      actual @?= expected


-- | We want to make sure the single-SMS documents and the multi-SMS
-- documents are identified correctly.
--
test_sms_detected_correctly :: TestTree
test_sms_detected_correctly =
  testGroup "newsxml SMS count determined correctly"
    [ check "test/xml/newsxml.xml"
            "single SMS detected correctly"
            True,
      check "test/xml/newsxml-multiple-sms.xml"
            "multiple SMS detected correctly"
            False ]
  where
    check path desc expected = testCase desc $ do
      xmltree <- unsafe_read_invalid_document path
      let actual = has_only_single_sms xmltree
      actual @?= expected