{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

-- | Parse TSN XML for the DTD "newsxml.dtd". Each document contains a
-- root element \<message\> that contains an entire news item.
--
module TSN.XML.News (
  pickle_message,
  -- * Tests
  news_tests,
  -- * WARNING: these are private but exported to silence warnings
  News_NewsLocationConstructor(..),
  News_NewsTeamConstructor(..),
  NewsConstructor(..),
  NewsLocationConstructor(..),
  NewsTeamConstructor(..) )
where

-- System imports.
import Data.Data ( Data, constrFields, dataTypeConstrs, dataTypeOf )
import Data.Time.Clock ( UTCTime )
import Data.List.Utils ( join, split )
import Data.Tuple.Curry ( uncurryN )
import Data.Typeable ( Typeable )
import Database.Groundhog (
  insert_,
  migrate )
import Database.Groundhog.Core ( DefaultKey )
import Database.Groundhog.TH (
  defaultCodegenConfig,
  groundhog,
  mkPersist )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( (@?=), testCase )
import Text.XML.HXT.Core (
  PU,
  xp13Tuple,
  xpAttr,
  xpElem,
  xpInt,
  xpList,
  xpOption,
  xpPair,
  xpText,
  xpTriple,
  xpWrap )

-- Local imports.
import TSN.Codegen (
  tsn_codegen_config,
  tsn_db_field_namer ) -- Used in a test
import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
import TSN.Picklers ( xp_time_stamp )
import TSN.XmlImport ( XmlImport(..) )
import Xml ( FromXml(..), ToDb(..), pickle_unpickle, unpickleable )


--
-- DB/XML Data types
--

-- * News/Message

-- | The \<msg_id\> child of \<message\> also carries an @EventId@
-- attribute; we embed both values into the 'Message' and 'News' types
-- via this 'MsgId' record. We (pointlessly) use the "db_" prefix here
-- so that the two names don't collide on "id" when Groundhog is
-- creating its fields using our field namer.
data MsgId =
  MsgId {
    db_msg_id :: Int,
    db_event_id :: Maybe Int }
  deriving (Data, Eq, Show, Typeable)

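-- As an illustration (the values here are made up), the fragment
--
--   <msg_id EventId="12345">2038</msg_id>
--
-- corresponds to,
--
--   MsgId { db_msg_id = 2038, db_event_id = Just 12345 }
--
-- as pickled/unpickled by 'pickle_msg_id' below.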

-- | The XML representation of a news item (message).
--
data Message =
  Message {
    xml_xml_file_id :: Int,
    xml_heading :: String,
    xml_mid :: MsgId,
    xml_category :: String,
    xml_sport :: String,
    xml_url :: Maybe String,
    xml_teams :: [NewsTeam],
    xml_locations :: [NewsLocation],
    xml_sms :: String,
    xml_editor :: Maybe String,
    xml_text :: Maybe String,     -- Text and continue seem to show up in pairs,
    xml_continue :: Maybe String, -- either both present or both missing.
    xml_time_stamp :: UTCTime }
  deriving (Eq, Show)


-- | The database representation of a news item. We drop several
-- uninteresting fields from 'Message', and omit the list fields which
-- will be represented as join tables.
--
data News =
  News {
    db_xml_file_id :: Int,
    db_mid :: MsgId,
    db_sport :: String,
    db_url :: Maybe String,
    db_sms :: String,
    db_editor :: Maybe String,
    db_text :: Maybe String,
    db_continue :: Maybe String,
    db_time_stamp :: UTCTime }
  deriving (Data, Eq, Show, Typeable)



instance ToDb Message where
  type Db Message = News

-- | Convert the XML representation 'Message' to the database
-- representation 'News'.
--
instance FromXml Message where
  -- | We use a record wildcard so GHC doesn't complain that we never
  --   used the field names.
  --
  from_xml Message{..} = News { db_xml_file_id = xml_xml_file_id,
                                db_mid = xml_mid,
                                db_sport = xml_sport,
                                db_url = xml_url,
                                db_sms = xml_sms,
                                db_editor = xml_editor,
                                db_text = xml_text,
                                db_continue = xml_continue,
                                db_time_stamp = xml_time_stamp }

-- | This lets us call 'insert_xml' on a 'Message'.
--
instance XmlImport Message


-- * NewsTeam

-- | The XML and database representation of a team as it shows up in
-- the news.
--
data NewsTeam =
  NewsTeam { team_name :: String }
  deriving (Eq, Show)


instance ToDb NewsTeam where
  -- | The database representation of a 'NewsTeam' is itself.
  type Db NewsTeam = NewsTeam

-- | This is needed to define the XmlImport instance for NewsTeam.
--
instance FromXml NewsTeam where
  -- | How do we get a 'NewsTeam' from itself?
  from_xml = id

-- | Allow us to call 'insert_xml' on the XML representation of
-- NewsTeams.
--
instance XmlImport NewsTeam



-- * News_NewsTeam

-- | Mapping between News records and NewsTeam records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsTeam = News_NewsTeam
                       (DefaultKey News)
                       (DefaultKey NewsTeam)


-- * NewsLocation

-- | The XML and database representation of a location as it shows up
-- in the news.
--
data NewsLocation =
  NewsLocation {
    city :: Maybe String,
    state :: Maybe String,
    country :: String }
  deriving (Eq, Show)

instance ToDb NewsLocation where
  -- | The database representation of a 'NewsLocation' is itself.
  type Db NewsLocation = NewsLocation

-- | This is needed to define the XmlImport instance for NewsLocation.
--
instance FromXml NewsLocation where
  -- | How do we get a 'NewsLocation' from itself?
  from_xml = id

-- | Allow us to call 'insert_xml' on the XML representation of
-- NewsLocations.
--
instance XmlImport NewsLocation


-- * News_NewsLocation

-- | Mapping between News records and NewsLocation records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsLocation = News_NewsLocation
                           (DefaultKey News)
                           (DefaultKey NewsLocation)



--
-- Database code
--

-- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is
-- slightly non-generic because of our 'News_NewsTeam' and
-- 'News_NewsLocation' join tables.
--
instance DbImport Message where
  dbmigrate _ =
    run_dbmigrate $ do
      migrate (undefined :: NewsTeam)
      migrate (undefined :: NewsLocation)
      migrate (undefined :: News)
      migrate (undefined :: News_NewsTeam)
      migrate (undefined :: News_NewsLocation)

  dbimport message = do
    -- Insert the message and acquire its primary key (unique ID).
    news_id <- insert_xml message

    -- Now insert each team into its own table. We use
    -- insert_xml_or_select because most teams will already exist, and
    -- we want to get back the id of the existing team when there's a
    -- collision.
    nt_ids <- mapM insert_xml_or_select (xml_teams message)

    -- Now that the teams have been inserted, create
    -- news__news_teams records mapping between the two.
    let news_news_teams = map (News_NewsTeam news_id) nt_ids
    mapM_ insert_ news_news_teams

    -- Do all of that over again for the NewsLocations.
    loc_ids <- mapM insert_xml_or_select (xml_locations message)
    let news_news_locations = map (News_NewsLocation news_id) loc_ids
    mapM_ insert_ news_news_locations

    return ImportSucceeded

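-- A rough usage sketch (not part of this module): assuming a
-- PostgreSQL backend via groundhog-postgresql, whose withPostgresqlConn
-- and runDbConn are not imported here, an already-parsed 'Message'
-- could be imported with something like,
--
--   withPostgresqlConn connection_string $ runDbConn $ do
--     dbmigrate message
--     dbimport message
--
-- where connection_string and message are placeholders supplied by the
-- caller; the real driver for these imports lives outside this module.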

-- These types don't have special XML representations or field name
-- collisions, so we use the defaultCodegenConfig and give their
-- fields nice, simple names.
mkPersist defaultCodegenConfig [groundhog|
- entity: NewsTeam
  dbName: news_teams
  constructors:
    - name: NewsTeam
      uniques:
        - name: unique_news_team
          type: constraint
          fields: [team_name]

- entity: NewsLocation
  dbName: news_locations
  constructors:
    - name: NewsLocation
      uniques:
        - name: unique_news_location
          type: constraint
          fields: [city, state, country]

|]

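-- One consequence of the unique constraints above: inserting the same
-- team (or location) twice should hand back the key of the existing
-- row rather than creating a duplicate. A sketch, assuming a groundhog
-- connection is in scope (the team name is made up):
--
--   k1 <- insert_xml_or_select (NewsTeam "Reds")
--   k2 <- insert_xml_or_select (NewsTeam "Reds")
--   -- k1 and k2 should refer to the same news_teams row.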

-- These types have fields with e.g. db_ and xml_ prefixes, so we
-- use our own codegen to peel those off before naming the columns.
mkPersist tsn_codegen_config [groundhog|
- entity: News
  dbName: news
  constructors:
    - name: News
      uniques:
        - name: unique_news
          type: constraint
          # Prevent multiple imports of the same message.
          fields: [db_xml_file_id]
      fields:
        - name: db_mid
          embeddedType:
            - {name: msg_id, dbName: msg_id}
            - {name: event_id, dbName: event_id}

- embedded: MsgId
  fields:
    - name: db_msg_id
      dbName: msg_id
    - name: db_event_id
      dbName: event_id

- entity: News_NewsTeam
  dbName: news__news_teams
  constructors:
    - name: News_NewsTeam
      fields:
        - name: news_NewsTeam0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_NewsTeam1 # Default created by mkNormalFieldName
          dbName: news_teams_id
          reference:
            onDelete: cascade

- entity: News_NewsLocation
  dbName: news__news_locations
  constructors:
    - name: News_NewsLocation
      fields:
        - name: news_NewsLocation0 # Default created by mkNormalFieldName
          dbName: news_id
          reference:
            onDelete: cascade
        - name: news_NewsLocation1 # Default created by mkNormalFieldName
          dbName: news_locations_id
          reference:
            onDelete: cascade
|]

--
-- XML Picklers
--

-- | Convert a 'NewsTeam' to/from XML.
--
pickle_news_team :: PU NewsTeam
pickle_news_team =
  xpElem "team" $
    xpWrap (from_string, to_string) xpText
  where
    to_string :: NewsTeam -> String
    to_string = team_name

    from_string :: String -> NewsTeam
    from_string = NewsTeam

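-- As an illustration (the team name is made up),
--
--   <team>Boston Red Sox</team>
--
-- round-trips with,
--
--   NewsTeam { team_name = "Boston Red Sox" }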

-- | Convert a 'MsgId' to/from XML.
--
pickle_msg_id :: PU MsgId
pickle_msg_id =
  xpElem "msg_id" $
    xpWrap (from_tuple, to_tuple) $
    xpPair xpInt (xpAttr "EventId" (xpOption xpInt))
  where
    from_tuple = uncurryN MsgId
    to_tuple m = (db_msg_id m, db_event_id m)

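-- Since the EventId attribute is optional, a bare
--
--   <msg_id>2038</msg_id>
--
-- (the number is made up) unpickles to,
--
--   MsgId { db_msg_id = 2038, db_event_id = Nothing }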

-- | Convert a 'NewsLocation' to/from XML.
--
pickle_location :: PU NewsLocation
pickle_location =
  xpElem "location" $
    xpWrap (from_tuple, to_tuple) $
    xpTriple (xpOption (xpElem "city" xpText))
             (xpOption (xpElem "state" xpText))
             (xpElem "country" xpText)
  where
    from_tuple =
      uncurryN NewsLocation
    to_tuple l = (city l, state l, country l)

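-- As an illustration (the place names are made up),
--
--   <location>
--     <city>Boston</city>
--     <state>MA</state>
--     <country>USA</country>
--   </location>
--
-- round-trips with,
--
--   NewsLocation { city = Just "Boston",
--                  state = Just "MA",
--                  country = "USA" }
--
-- The <city> and <state> elements may be omitted, in which case the
-- corresponding fields are 'Nothing'.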

-- | Convert a 'Message' to/from XML.
--
pickle_message :: PU Message
pickle_message =
  xpElem "message" $
    xpWrap (from_tuple, to_tuple) $
    xp13Tuple (xpElem "XML_File_ID" xpInt)
              (xpElem "heading" xpText)
              pickle_msg_id
              (xpElem "category" xpText)
              (xpElem "sport" xpText)
              (xpElem "url" $ xpOption xpText)
              (xpList pickle_news_team)
              (xpList pickle_location)
              (xpElem "SMS" xpText)
              (xpOption (xpElem "Editor" xpText))
              (xpOption (xpElem "text" xpText))
              pickle_continue
              (xpElem "time_stamp" xp_time_stamp)
  where
    from_tuple = uncurryN Message
    to_tuple m = (xml_xml_file_id m, -- Verbose,
                  xml_heading m,     -- but
                  xml_mid m,         -- eliminates
                  xml_category m,    -- GHC
                  xml_sport m,       -- warnings
                  xml_url m,         -- .
                  xml_teams m,       -- .
                  xml_locations m,   -- .
                  xml_sms m,
                  xml_editor m,
                  xml_text m,
                  xml_continue m,
                  xml_time_stamp m)

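-- A rough sketch of driving this pickler directly with HXT, outside of
-- the test suite (the parse options here are illustrative, not
-- necessarily the ones used by the real importer):
--
--   import Text.XML.HXT.Core ( no, runX, withRemoveWS, withValidate,
--                              xunpickleDocument, yes )
--
--   main :: IO ()
--   main = do
--     results <- runX $ xunpickleDocument pickle_message
--                         [ withValidate no, withRemoveWS yes ]
--                         "test/xml/newsxml.xml"
--     print results
--
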
-- | We combine all of the \<P\> paragraphs within \<continue\> into
-- one newline-separated 'String' while unpickling, and split them
-- back apart while pickling.
--
pickle_continue :: PU (Maybe String)
pickle_continue =
  xpOption $
    xpWrap (to_string, from_string) $
      xpElem "continue" $
        xpList (xpElem "P" xpText)
  where
    from_string :: String -> [String]
    from_string = split "\n"

    to_string :: [String] -> String
    to_string = join "\n"

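-- As an illustration (the text is made up),
--
--   <continue>
--     <P>First paragraph.</P>
--     <P>Second paragraph.</P>
--   </continue>
--
-- unpickles to Just "First paragraph.\nSecond paragraph.", and a
-- missing <continue> element unpickles to Nothing.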

--
-- Tasty Tests
--

-- | A list of all tests for this module.
--
news_tests :: TestTree
news_tests =
  testGroup
    "News tests"
    [ test_news_fields_have_correct_names,
      test_pickle_of_unpickle_is_identity,
      test_unpickle_succeeds ]


-- | Make sure our codegen is producing the correct database names.
--
test_news_fields_have_correct_names :: TestTree
test_news_fields_have_correct_names =
  testCase "news fields get correct database names" $
    mapM_ check (zip actual expected)
  where
    -- This is cool: it uses the (derived) Data instance of
    -- News.News to get its field names.
    field_names :: [String]
    field_names =
      constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: News)

    expected :: [String]
    expected =
      map (\x -> tsn_db_field_namer "herp" "derp" 8675309 x 90210) field_names

    actual :: [String]
    actual = ["xml_file_id", "mid", "sport", "url", "sms", "editor",
              "text", "continue", "time_stamp"]

    check (x,y) = (x @?= y)


-- | If we unpickle something and then pickle it, we should wind up
-- with the same thing we started with. WARNING: success of this
-- test does not mean that unpickling succeeded.
--
test_pickle_of_unpickle_is_identity :: TestTree
test_pickle_of_unpickle_is_identity = testGroup "pickle-unpickle tests"
  [ check "pickle composed with unpickle is the identity"
          "test/xml/newsxml.xml",

    check "pickle composed with unpickle is the identity (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      (expected, actual) <- pickle_unpickle pickle_message path
      actual @?= expected


-- | Make sure we can actually unpickle these things.
--
test_unpickle_succeeds :: TestTree
test_unpickle_succeeds = testGroup "unpickle tests"
  [ check "unpickling succeeds"
          "test/xml/newsxml.xml",

    check "unpickling succeeds (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      actual <- unpickleable path pickle_message
      let expected = True
      actual @?= expected