{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

-- | Parse TSN XML for the DTD \"newsxml.dtd\". Each document contains
-- a root element \<message\> that contains an entire news item.
--
module TSN.XML.News (
  dtd,
  pickle_message,
  -- * Tests
  news_tests,
  -- * WARNING: these are private but exported to silence warnings
  News_NewsLocationConstructor(..),
  News_NewsTeamConstructor(..),
  NewsConstructor(..),
  NewsLocationConstructor(..),
  NewsTeamConstructor(..) )
where

-- System imports.
import Data.Data ( Data, constrFields, dataTypeConstrs, dataTypeOf )
import Data.Time.Clock ( UTCTime )
import Data.List.Utils ( join, split )
import Data.Tuple.Curry ( uncurryN )
import Data.Typeable ( Typeable )
import Database.Groundhog (
  countAll,
  deleteAll,
  insert_,
  migrate,
  runMigration,
  silentMigrationLogger )
import Database.Groundhog.Core ( DefaultKey )
import Database.Groundhog.Generic ( runDbConn )
import Database.Groundhog.Sqlite ( withSqliteConn )
import Database.Groundhog.TH (
  defaultCodegenConfig,
  groundhog,
  mkPersist )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( (@?=), testCase )
import Text.XML.HXT.Core (
  PU,
  xp13Tuple,
  xpAttr,
  xpElem,
  xpInt,
  xpList,
  xpOption,
  xpPair,
  xpText,
  xpTriple,
  xpWrap )

-- Local imports.
import TSN.Codegen (
  tsn_codegen_config,
  tsn_db_field_namer ) -- Used in a test
import TSN.Database ( insert_or_select )
import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
import TSN.Picklers ( xp_time_stamp )
import TSN.XmlImport ( XmlImport(..) )
import Xml (
  FromXml(..),
  ToDb(..),
  pickle_unpickle,
  unpickleable,
  unsafe_unpickle )


-- | The DTD to which this module corresponds. Used to invoke dbimport.
--
dtd :: String
dtd = "newsxml.dtd"


--
-- DB/XML Data types
--

-- * News/Message

-- | The msg_id child of \<message\> carries an "EventId" attribute;
-- we embed both values into the 'News' type. We (pointlessly) use
-- the "db_" prefix here so that the two names don't collide on "id"
-- when Groundhog is creating its fields using our field namer.
--
data MsgId =
  MsgId {
    db_msg_id :: Int,
    db_event_id :: Maybe Int }
  deriving (Data, Eq, Show, Typeable)


-- | The XML representation of a news item (\<message\>).
--
data Message =
  Message {
    xml_xml_file_id :: Int,
    xml_heading :: String,
    xml_mid :: MsgId,
    xml_category :: String,
    xml_sport :: String,
    xml_url :: Maybe String,
    xml_teams :: [NewsTeam],
    xml_locations :: [NewsLocation],
    xml_sms :: String,
    xml_editor :: Maybe String,
    xml_text :: Maybe String,     -- Text and continue seem to show up in pairs,
    xml_continue :: Maybe String, -- either both present or both missing.
    xml_time_stamp :: UTCTime }
  deriving (Eq, Show)


-- | The database representation of a news item. We drop several
-- uninteresting fields from 'Message', and omit the list fields which
-- will be represented as join tables.
--
data News =
  News {
    db_xml_file_id :: Int,
    db_mid :: MsgId,
    db_sport :: String,
    db_url :: Maybe String,
    db_sms :: String,
    db_editor :: Maybe String,
    db_text :: Maybe String,
    db_continue :: Maybe String,
    db_time_stamp :: UTCTime }
  deriving (Data, Eq, Show, Typeable)



instance ToDb Message where
  -- | The database representation of 'Message' is 'News'.
  type Db Message = News

-- | Convert the XML representation 'Message' to the database
-- representation 'News'.
--
instance FromXml Message where
  -- | We use a record wildcard so GHC doesn't complain that we never
  -- used the field names.
  --
  from_xml Message{..} = News { db_xml_file_id = xml_xml_file_id,
                                db_mid = xml_mid,
                                db_sport = xml_sport,
                                db_url = xml_url,
                                db_sms = xml_sms,
                                db_editor = xml_editor,
                                db_text = xml_text,
                                db_continue = xml_continue,
                                db_time_stamp = xml_time_stamp }

-- | This lets us insert the XML representation 'Message' directly.
--
instance XmlImport Message
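-- For example (a sketch: insert_xml is the XmlImport method that
-- 'dbimport' uses below, and is presumably equivalent to converting
-- and inserting by hand):
--
--   news_id <- insert_xml message        -- what we can write now
--   news_id <- insert (from_xml message) -- roughly what it means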


-- * NewsTeam

-- | The database type for teams as they show up in the news.
--
data NewsTeam =
  NewsTeam { team_name :: String }
  deriving (Eq, Show)



-- * News_NewsTeam

-- | Mapping between News records and NewsTeam records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsTeam = News_NewsTeam
                     (DefaultKey News)
                     (DefaultKey NewsTeam)


-- * NewsLocation

-- | The database type for locations as they show up in the news.
--
data NewsLocation =
  NewsLocation {
    city :: Maybe String,
    state :: Maybe String,
    country :: String }
  deriving (Eq, Show)


-- * News_NewsLocation

-- | Mapping between News records and NewsLocation records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsLocation = News_NewsLocation
                         (DefaultKey News)
                         (DefaultKey NewsLocation)


--
-- Database code
--

-- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is
-- slightly non-generic because of our 'News_NewsTeam' and
-- 'News_NewsLocation' join tables.
--
instance DbImport Message where
  dbmigrate _ =
    run_dbmigrate $ do
      migrate (undefined :: News)
      migrate (undefined :: NewsTeam)
      migrate (undefined :: News_NewsTeam)
      migrate (undefined :: NewsLocation)
      migrate (undefined :: News_NewsLocation)

  dbimport message = do
    -- Insert the message and acquire its primary key (unique ID).
    news_id <- insert_xml message

    -- Insert the teams into their own table. We use insert_or_select
    -- because we know that most teams will already exist, and we
    -- want to get back the id for the existing team when
    -- there's a collision.
    nt_ids <- mapM insert_or_select (xml_teams message)

    -- Now that the teams have been inserted, create
    -- news__news_teams records mapping between the two.
    let news_news_teams = map (News_NewsTeam news_id) nt_ids
    mapM_ insert_ news_news_teams

    -- Do all of that over again for the NewsLocations.
    loc_ids <- mapM insert_or_select (xml_locations message)
    let news_news_locations = map (News_NewsLocation news_id) loc_ids
    mapM_ insert_ news_news_locations

    return ImportSucceeded

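-- An illustrative use of this instance (a sketch only; it mirrors
-- the cascading-delete test at the bottom of this module):
--
--   message <- unsafe_unpickle "test/xml/newsxml.xml" pickle_message
--   result <- withSqliteConn ":memory:" $ runDbConn $ do
--               dbmigrate message
--               dbimport message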

-- These types don't have special XML representations or field name
-- collisions, so we use the defaultCodegenConfig and give their
-- fields nice simple names.
mkPersist defaultCodegenConfig [groundhog|
- entity: NewsTeam
  dbName: news_teams
  constructors:
    - name: NewsTeam
      uniques:
        - name: unique_news_team
          type: constraint
          fields: [team_name]

- entity: NewsLocation
  dbName: news_locations
  constructors:
    - name: NewsLocation
      uniques:
        - name: unique_news_location
          type: constraint
          fields: [city, state, country]

|]

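-- Note that the unique constraints above are what let the
-- insert_or_select calls in 'dbimport' find an existing row: e.g.
-- importing two items that both mention the (made-up) team "Reds"
-- should produce a single news_teams row, assuming insert_or_select
-- falls back to a lookup when the constraint would be violated.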

-- These types have fields with e.g. db_ and xml_ prefixes, so we
-- use our own codegen to peel those off before naming the columns.
mkPersist tsn_codegen_config [groundhog|
- entity: News
  constructors:
    - name: News
      uniques:
        - name: unique_news
          type: constraint
          # Prevent multiple imports of the same message.
          fields: [db_xml_file_id]
      fields:
        - name: db_mid
          embeddedType:
            - {name: msg_id, dbName: msg_id}
            - {name: event_id, dbName: event_id}

- embedded: MsgId
  fields:
    - name: db_msg_id
      dbName: msg_id
    - name: db_event_id
      dbName: event_id

- entity: News_NewsTeam
  dbName: news__news_teams
  constructors:
    - name: News_NewsTeam
      fields:
        - name: news_NewsTeam0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_NewsTeam1 # Default created by mkNormalFieldName
          dbName: news_teams_id
          reference:
            onDelete: cascade

- entity: News_NewsLocation
  dbName: news__news_locations
  constructors:
    - name: News_NewsLocation
      fields:
        - name: news_NewsLocation0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_NewsLocation1 # Default created by mkNormalFieldName
          dbName: news_locations_id
          reference:
            onDelete: cascade
|]

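-- For reference, the News_NewsTeam declaration above should come out
-- as roughly the following SQLite table (a sketch; the exact DDL,
-- including the name of the key column, is whatever Groundhog
-- decides to emit):
--
--   CREATE TABLE news__news_teams (
--     id INTEGER PRIMARY KEY,
--     news_id INTEGER REFERENCES news(id) ON DELETE CASCADE,
--     news_teams_id INTEGER REFERENCES news_teams(id) ON DELETE CASCADE )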

--
-- XML Picklers
--

-- | Convert a 'NewsTeam' to/from XML.
--
pickle_news_team :: PU NewsTeam
pickle_news_team =
  xpElem "team" $
    xpWrap (from_string, to_string) xpText
  where
    to_string :: NewsTeam -> String
    to_string = team_name

    from_string :: String -> NewsTeam
    from_string = NewsTeam

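-- As an illustration (with a made-up team name), the fragment
-- <team>Louisville Cardinals</team> pickles to/from
-- NewsTeam { team_name = "Louisville Cardinals" }.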

-- | Convert a 'MsgId' to/from XML.
--
pickle_msg_id :: PU MsgId
pickle_msg_id =
  xpElem "msg_id" $
    xpWrap (from_tuple, to_tuple) $
      xpPair xpInt (xpAttr "EventId" (xpOption xpInt))
  where
    from_tuple = uncurryN MsgId
    to_tuple m = (db_msg_id m, db_event_id m)

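-- As an illustration (with made-up numbers),
-- <msg_id EventId="71340">21231</msg_id> corresponds to
-- MsgId { db_msg_id = 21231, db_event_id = Just 71340 }; the inner
-- xpOption is what allows db_event_id to come back as Nothing when
-- no EventId value is available.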

-- | Convert a 'NewsLocation' to/from XML.
--
pickle_location :: PU NewsLocation
pickle_location =
  xpElem "location" $
    xpWrap (from_tuple, to_tuple) $
      xpTriple (xpOption (xpElem "city" xpText))
               (xpOption (xpElem "state" xpText))
               (xpElem "country" xpText)
  where
    from_tuple = uncurryN NewsLocation
    to_tuple l = (city l, state l, country l)

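-- As an illustration (with made-up data),
-- <location><city>Boston</city><state>MA</state><country>USA</country></location>
-- corresponds to NewsLocation (Just "Boston") (Just "MA") "USA"; the
-- city and state children may each be omitted.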

-- | Convert a 'Message' to/from XML.
--
pickle_message :: PU Message
pickle_message =
  xpElem "message" $
    xpWrap (from_tuple, to_tuple) $
      xp13Tuple (xpElem "XML_File_ID" xpInt)
                (xpElem "heading" xpText)
                pickle_msg_id
                (xpElem "category" xpText)
                (xpElem "sport" xpText)
                (xpElem "url" $ xpOption xpText)
                (xpList pickle_news_team)
                (xpList pickle_location)
                (xpElem "SMS" xpText)
                (xpOption (xpElem "Editor" xpText))
                (xpOption (xpElem "text" xpText))
                pickle_continue
                (xpElem "time_stamp" xp_time_stamp)
  where
    from_tuple = uncurryN Message
    to_tuple m = (xml_xml_file_id m, -- Verbose,
                  xml_heading m,     -- but
                  xml_mid m,         -- eliminates
                  xml_category m,    -- GHC
                  xml_sport m,       -- warnings
                  xml_url m,         -- .
                  xml_teams m,       -- .
                  xml_locations m,   -- .
                  xml_sms m,
                  xml_editor m,
                  xml_text m,
                  xml_continue m,
                  xml_time_stamp m)

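-- Documents therefore have the following shape (a sketch with the
-- values elided; see test/xml/newsxml.xml for a real sample). The
-- url, Editor, text, and continue elements are optional, and team
-- and location may each appear any number of times:
--
--   <message>
--     <XML_File_ID>...</XML_File_ID>
--     <heading>...</heading>
--     <msg_id EventId="...">...</msg_id>
--     <category>...</category>
--     <sport>...</sport>
--     <url>...</url>
--     <team>...</team>
--     <location>...</location>
--     <SMS>...</SMS>
--     <text>...</text>
--     <continue><P>...</P></continue>
--     <time_stamp>...</time_stamp>
--   </message>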
-- | We combine all of the \<P\> elements inside a \<continue\> into
-- one 'String' while unpickling, and do the reverse while pickling.
--
pickle_continue :: PU (Maybe String)
pickle_continue =
  xpOption $
    xpWrap (to_string, from_string) $
      xpElem "continue" $
        xpList (xpElem "P" xpText)
  where
    from_string :: String -> [String]
    from_string = split "\n"

    to_string :: [String] -> String
    to_string = join "\n"

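-- As an illustration (with made-up text),
-- <continue><P>First line.</P><P>Second line.</P></continue>
-- unpickles to Just "First line.\nSecond line.", and pickling splits
-- that string back into one <P> per line.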

--
-- Tasty Tests
--

-- | A list of all tests for this module.
--
news_tests :: TestTree
news_tests =
  testGroup
    "News tests"
    [ test_news_fields_have_correct_names,
      test_on_delete_cascade,
      test_pickle_of_unpickle_is_identity,
      test_unpickle_succeeds ]


-- | Make sure our codegen is producing the correct database names.
--
test_news_fields_have_correct_names :: TestTree
test_news_fields_have_correct_names =
  testCase "news fields get correct database names" $
    mapM_ check (zip actual expected)
  where
    -- This is cool: it uses the (derived) Data instance of
    -- News.News to get the field names of its constructor.
    field_names :: [String]
    field_names =
      constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: News)

    expected :: [String]
    expected =
      map (\x -> tsn_db_field_namer "herp" "derp" 8675309 x 90210) field_names

    actual :: [String]
    actual = ["xml_file_id",
              "mid",
              "sport",
              "url",
              "sms",
              "editor",
              "text",
              "continue"]

    check (x,y) = (x @?= y)


-- | If we unpickle something and then pickle it, we should wind up
-- with the same thing we started with. WARNING: success of this
-- test does not mean that unpickling succeeded.
--
test_pickle_of_unpickle_is_identity :: TestTree
test_pickle_of_unpickle_is_identity = testGroup "pickle-unpickle tests"
  [ check "pickle composed with unpickle is the identity"
          "test/xml/newsxml.xml",

    check "pickle composed with unpickle is the identity (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      (expected, actual) <- pickle_unpickle pickle_message path
      actual @?= expected


-- | Make sure we can actually unpickle these things.
--
test_unpickle_succeeds :: TestTree
test_unpickle_succeeds = testGroup "unpickle tests"
  [ check "unpickling succeeds"
          "test/xml/newsxml.xml",

    check "unpickling succeeds (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      actual <- unpickleable path pickle_message
      let expected = True
      actual @?= expected


-- | Make sure everything gets deleted when we delete the top-level
-- record.
--
test_on_delete_cascade :: TestTree
test_on_delete_cascade = testGroup "cascading delete tests"
  [ check "deleting news deletes its children"
          "test/xml/newsxml.xml"
          4 -- 2 news_teams and 2 news_locations that should remain.
  ]
  where
    check desc path expected = testCase desc $ do
      news <- unsafe_unpickle path pickle_message
      let a = undefined :: News
      let b = undefined :: NewsTeam
      let c = undefined :: News_NewsTeam
      let d = undefined :: NewsLocation
      let e = undefined :: News_NewsLocation
      actual <- withSqliteConn ":memory:" $ runDbConn $ do
                  runMigration silentMigrationLogger $ do
                    migrate a
                    migrate b
                    migrate c
                    migrate d
                    migrate e
                  _ <- dbimport news
                  deleteAll a
                  count_a <- countAll a
                  count_b <- countAll b
                  count_c <- countAll c
                  count_d <- countAll d
                  count_e <- countAll e
                  return $ count_a + count_b + count_c + count_d + count_e
      actual @?= expected