{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

-- | Parse TSN XML for the DTD \"newsxml.dtd\". Each document contains
-- a root element \<message\> that contains an entire news item.
--
module TSN.XML.News (
  pickle_message,
  -- * Tests
  news_tests,
  -- * WARNING: these are private but exported to silence warnings
  News_NewsLocationConstructor(..),
  News_NewsTeamConstructor(..),
  NewsConstructor(..),
  NewsLocationConstructor(..),
  NewsTeamConstructor(..) )
where

-- System imports.
import Data.Data ( Data, constrFields, dataTypeConstrs, dataTypeOf )
import Data.Time.Clock ( UTCTime )
import Data.List.Utils ( join, split )
import Data.Tuple.Curry ( uncurryN )
import Data.Typeable ( Typeable )
import Database.Groundhog (
  countAll,
  executeRaw,
  insert_,
  migrate,
  runMigration,
  silentMigrationLogger )
import Database.Groundhog.Core ( DefaultKey )
import Database.Groundhog.Generic ( runDbConn )
import Database.Groundhog.Sqlite ( withSqliteConn )
import Database.Groundhog.TH (
  defaultCodegenConfig,
  groundhog,
  mkPersist )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( (@?=), testCase )
import Text.XML.HXT.Core (
  PU,
  xp13Tuple,
  xpAttr,
  xpElem,
  xpInt,
  xpList,
  xpOption,
  xpPair,
  xpText,
  xpTriple,
  xpWrap )

-- Local imports.
import TSN.Codegen (
  tsn_codegen_config,
  tsn_db_field_namer ) -- Used in a test
import TSN.Database ( insert_or_select )
import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
import TSN.Picklers ( xp_time_stamp )
import TSN.XmlImport ( XmlImport(..) )
import Xml (
  FromXml(..),
  ToDb(..),
  pickle_unpickle,
  unpickleable,
  unsafe_unpickle )


--
-- DB/XML Data types
--

-- * News/Message

-- | The msg_id child of \<message\> contains an event_id attribute;
-- we embed it into the 'News' type. We (pointlessly) use the \"db_\"
-- prefix here so that the two names don't collide on \"id\" when
-- Groundhog is creating its fields using our field namer.
--
data MsgId =
  MsgId {
    db_msg_id :: Int,
    db_event_id :: Maybe Int }
  deriving (Data, Eq, Show, Typeable)

-- | The XML representation of a news item (\<message\>).
--
data Message =
  Message {
    xml_xml_file_id :: Int,
    xml_heading :: String,
    xml_mid :: MsgId,
    xml_category :: String,
    xml_sport :: String,
    xml_url :: Maybe String,
    xml_teams :: [NewsTeam],
    xml_locations :: [NewsLocation],
    xml_sms :: String,
    xml_editor :: Maybe String,
    xml_text :: Maybe String,     -- Text and continue seem to show up in pairs,
    xml_continue :: Maybe String, -- either both present or both missing.
    xml_time_stamp :: UTCTime }
  deriving (Eq, Show)


-- | The database representation of a news item. We drop several
-- uninteresting fields from 'Message', and omit the list fields
-- ('xml_teams' and 'xml_locations'), which are represented as join
-- tables instead.
--
data News =
  News {
    db_xml_file_id :: Int,
    db_mid :: MsgId,
    db_sport :: String,
    db_url :: Maybe String,
    db_sms :: String,
    db_editor :: Maybe String,
    db_text :: Maybe String,
    db_continue :: Maybe String,
    db_time_stamp :: UTCTime }
  deriving (Data, Eq, Show, Typeable)

instance ToDb Message where
  -- | The database representation of 'Message' is 'News'.
  type Db Message = News

-- | Convert the XML representation 'Message' to the database
-- representation 'News'.
--
instance FromXml Message where
  -- | We use a record wildcard so GHC doesn't complain that we never
  -- used the field names.
  --
  from_xml Message{..} = News { db_xml_file_id = xml_xml_file_id,
                                db_mid = xml_mid,
                                db_sport = xml_sport,
                                db_url = xml_url,
                                db_sms = xml_sms,
                                db_editor = xml_editor,
                                db_text = xml_text,
                                db_continue = xml_continue,
                                db_time_stamp = xml_time_stamp }

-- | This lets us insert the XML representation 'Message' directly.
--
instance XmlImport Message

-- * NewsTeam

-- | The database type for teams as they show up in the news.
--
data NewsTeam =
  NewsTeam { team_name :: String }
  deriving (Eq, Show)



-- * News_NewsTeam

-- | Mapping between 'News' records and 'NewsTeam' records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsTeam = News_NewsTeam
                     (DefaultKey News)
                     (DefaultKey NewsTeam)
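
-- A sketch of how one of these is built (the variable names here are
-- hypothetical): 'dbimport' below inserts the news item and each of
-- its teams, then pairs up the returned keys, one row per team:
--
-- > News_NewsTeam news_key team_key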


-- * NewsLocation

-- | The database type for locations as they show up in the news.
--
data NewsLocation =
  NewsLocation {
    city :: Maybe String,
    state :: Maybe String,
    country :: String }
  deriving (Eq, Show)


-- * News_NewsLocation

-- | Mapping between 'News' records and 'NewsLocation' records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsLocation = News_NewsLocation
                         (DefaultKey News)
                         (DefaultKey NewsLocation)



--
-- Database code
--

-- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is
-- slightly non-generic because of our 'News_NewsTeam' and
-- 'News_NewsLocation' join tables.
--
instance DbImport Message where
  dbmigrate _ =
    run_dbmigrate $ do
      migrate (undefined :: News)
      migrate (undefined :: NewsTeam)
      migrate (undefined :: News_NewsTeam)
      migrate (undefined :: NewsLocation)
      migrate (undefined :: News_NewsLocation)

  dbimport message = do
    -- Insert the message and acquire its primary key (unique ID).
    news_id <- insert_xml message

    -- Then insert each team into its own table. We use
    -- insert_or_select because we know that most teams will already
    -- exist, and we want to get back the id for the existing team
    -- when there's a collision.
    nt_ids <- mapM insert_or_select (xml_teams message)

    -- Now that the teams have been inserted, create
    -- news__news_teams records mapping between the two.
    let news_news_teams = map (News_NewsTeam news_id) nt_ids
    mapM_ insert_ news_news_teams

    -- Do all of that over again for the NewsLocations.
    loc_ids <- mapM insert_or_select (xml_locations message)
    let news_news_locations = map (News_NewsLocation news_id) loc_ids
    mapM_ insert_ news_news_locations

    return ImportSucceeded


-- These types don't have special XML representations or field name
-- collisions, so we use the defaultCodegenConfig and give their
-- fields nice simple names.
mkPersist defaultCodegenConfig [groundhog|
- entity: NewsTeam
  dbName: news_teams
  constructors:
    - name: NewsTeam
      uniques:
        - name: unique_news_team
          type: constraint
          fields: [team_name]

- entity: NewsLocation
  dbName: news_locations
  constructors:
    - name: NewsLocation
      uniques:
        - name: unique_news_location
          type: constraint
          fields: [city, state, country]

|]


-- These types have fields with e.g. db_ and xml_ prefixes, so we
-- use our own codegen to peel those off before naming the columns.
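-- For example, the News field db_xml_file_id ends up in a column
-- named xml_file_id; test_news_fields_have_correct_names below
-- checks this for every News field.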
mkPersist tsn_codegen_config [groundhog|
- entity: News
  dbName: news
  constructors:
    - name: News
      uniques:
        - name: unique_news
          type: constraint
          # Prevent multiple imports of the same message.
          fields: [db_xml_file_id]
      fields:
        - name: db_mid
          embeddedType:
            - {name: msg_id, dbName: msg_id}
            - {name: event_id, dbName: event_id}

- embedded: MsgId
  fields:
    - name: db_msg_id
      dbName: msg_id
    - name: db_event_id
      dbName: event_id

- entity: News_NewsTeam
  dbName: news__news_teams
  constructors:
    - name: News_NewsTeam
      fields:
        - name: news_NewsTeam0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_NewsTeam1 # Default created by mkNormalFieldName
          dbName: news_teams_id
          reference:
            onDelete: cascade

- entity: News_NewsLocation
  dbName: news__news_locations
  constructors:
    - name: News_NewsLocation
      fields:
        - name: news_NewsLocation0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_NewsLocation1 # Default created by mkNormalFieldName
          dbName: news_locations_id
          reference:
            onDelete: cascade
|]


--
-- XML Picklers
--

-- | Convert a 'NewsTeam' to/from XML.
--
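-- For example (the team name is made up), the element
--
-- > <team>Boston Bruins</team>
--
-- corresponds to @NewsTeam "Boston Bruins"@.
--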
pickle_news_team :: PU NewsTeam
pickle_news_team =
  xpElem "team" $
    xpWrap (from_string, to_string) xpText
  where
    to_string :: NewsTeam -> String
    to_string = team_name

    from_string :: String -> NewsTeam
    from_string = NewsTeam


-- | Convert a 'MsgId' to/from XML.
--
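-- For example (the numbers are made up), the element
--
-- > <msg_id EventId="9876">12345</msg_id>
--
-- corresponds to @MsgId 12345 (Just 9876)@; the EventId attribute
-- value is optional.
--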
pickle_msg_id :: PU MsgId
pickle_msg_id =
  xpElem "msg_id" $
    xpWrap (from_tuple, to_tuple) $
      xpPair xpInt (xpAttr "EventId" (xpOption xpInt))
  where
    from_tuple = uncurryN MsgId
    to_tuple m = (db_msg_id m, db_event_id m)


-- | Convert a 'NewsLocation' to/from XML.
--
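-- For example (the place names are made up), the element
--
-- > <location>
-- >   <city>Boston</city>
-- >   <state>MA</state>
-- >   <country>USA</country>
-- > </location>
--
-- corresponds to @NewsLocation (Just "Boston") (Just "MA") "USA"@;
-- the city and state elements are optional.
--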
pickle_location :: PU NewsLocation
pickle_location =
  xpElem "location" $
    xpWrap (from_tuple, to_tuple) $
      xpTriple (xpOption (xpElem "city" xpText))
               (xpOption (xpElem "state" xpText))
               (xpElem "country" xpText)
  where
    from_tuple =
      uncurryN NewsLocation
    to_tuple l = (city l, state l, country l)


-- | Convert a 'Message' to/from XML.
--
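-- An abbreviated sketch of the document shape this expects; the
-- element order follows the 13-tuple below, all values are made up,
-- and the optional \<Editor\> element is omitted:
--
-- > <message>
-- >   <XML_File_ID>21231176</XML_File_ID>
-- >   <heading>...</heading>
-- >   <msg_id EventId="9876">12345</msg_id>
-- >   <category>...</category>
-- >   <sport>...</sport>
-- >   <url>...</url>
-- >   <team>...</team>
-- >   <location>...</location>
-- >   <SMS>...</SMS>
-- >   <text>...</text>
-- >   <continue><P>...</P></continue>
-- >   <time_stamp>...</time_stamp>
-- > </message>
--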
pickle_message :: PU Message
pickle_message =
  xpElem "message" $
    xpWrap (from_tuple, to_tuple) $
      xp13Tuple (xpElem "XML_File_ID" xpInt)
                (xpElem "heading" xpText)
                pickle_msg_id
                (xpElem "category" xpText)
                (xpElem "sport" xpText)
                (xpElem "url" $ xpOption xpText)
                (xpList pickle_news_team)
                (xpList pickle_location)
                (xpElem "SMS" xpText)
                (xpOption (xpElem "Editor" xpText))
                (xpOption (xpElem "text" xpText))
                pickle_continue
                (xpElem "time_stamp" xp_time_stamp)
  where
    from_tuple = uncurryN Message
    to_tuple m = (xml_xml_file_id m, -- Verbose,
                  xml_heading m,     -- but
                  xml_mid m,         -- eliminates
                  xml_category m,    -- GHC
                  xml_sport m,       -- warnings
                  xml_url m,         -- .
                  xml_teams m,       -- .
                  xml_locations m,   -- .
                  xml_sms m,
                  xml_editor m,
                  xml_text m,
                  xml_continue m,
                  xml_time_stamp m)

-- | We combine all of the \<continue\> elements into one 'String'
-- while unpickling and do the reverse while pickling.
--
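-- For example (the paragraph text is made up), the element
--
-- > <continue><P>one</P><P>two</P></continue>
--
-- corresponds to @Just "one\ntwo"@: the paragraphs are joined with
-- newlines.
--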
pickle_continue :: PU (Maybe String)
pickle_continue =
  xpOption $
    xpWrap (to_string, from_string) $
      xpElem "continue" $
        xpList (xpElem "P" xpText)
  where
    from_string :: String -> [String]
    from_string = split "\n"

    to_string :: [String] -> String
    to_string = join "\n"



--
-- Tasty Tests
--

-- | A list of all tests for this module.
--
news_tests :: TestTree
news_tests =
  testGroup
    "News tests"
    [ test_news_fields_have_correct_names,
      test_on_delete_cascade,
      test_pickle_of_unpickle_is_identity,
      test_unpickle_succeeds ]


-- | Make sure our codegen is producing the correct database names.
--
test_news_fields_have_correct_names :: TestTree
test_news_fields_have_correct_names =
  testCase "news fields get correct database names" $
    mapM_ check (zip actual expected)
  where
    -- This is cool: it uses the (derived) Data instance of
    -- News.News to get the field names of its constructor.
    field_names :: [String]
    field_names =
      constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: News)

    expected :: [String]
    expected =
      map (\x -> tsn_db_field_namer "herp" "derp" 8675309 x 90210) field_names

    actual :: [String]
    actual = ["xml_file_id",
              "mid",
              "sport",
              "url",
              "sms",
              "editor",
              "text",
              "continue"]

    check (x,y) = (x @?= y)


-- | If we unpickle something and then pickle it, we should wind up
-- with the same thing we started with. WARNING: success of this
-- test does not mean that unpickling succeeded.
--
test_pickle_of_unpickle_is_identity :: TestTree
test_pickle_of_unpickle_is_identity = testGroup "pickle-unpickle tests"
  [ check "pickle composed with unpickle is the identity"
          "test/xml/newsxml.xml",

    check "pickle composed with unpickle is the identity (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      (expected, actual) <- pickle_unpickle pickle_message path
      actual @?= expected


-- | Make sure we can actually unpickle these things.
--
test_unpickle_succeeds :: TestTree
test_unpickle_succeeds = testGroup "unpickle tests"
  [ check "unpickling succeeds"
          "test/xml/newsxml.xml",

    check "unpickling succeeds (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      actual <- unpickleable path pickle_message
      let expected = True
      actual @?= expected


-- | Make sure everything gets deleted when we delete the top-level
-- record.
--
test_on_delete_cascade :: TestTree
test_on_delete_cascade = testGroup "cascading delete tests"
  [ check "deleting news deletes its children"
          "test/xml/newsxml.xml" ]
  where
    check desc path = testCase desc $ do
      news <- unsafe_unpickle path pickle_message
      let a = undefined :: News
      let b = undefined :: NewsTeam
      let c = undefined :: News_NewsTeam
      let d = undefined :: NewsLocation
      let e = undefined :: News_NewsLocation
      actual <- withSqliteConn ":memory:" $ runDbConn $ do
                  runMigration silentMigrationLogger $ do
                    migrate a
                    migrate b
                    migrate c
                    migrate d
                    migrate e
                  _ <- dbimport news
                  -- No idea how 'delete' works, so do this instead.
                  executeRaw False "DELETE FROM news;" []
                  count_a <- countAll a
                  count_b <- countAll b
                  count_c <- countAll c
                  count_d <- countAll d
                  count_e <- countAll e
                  return $ count_a + count_b + count_c + count_d + count_e
      -- The news record and its join-table records should cascade
      -- away; only the 2 news_teams and 2 news_locations should remain.
      let expected = 4
      actual @?= expected