{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}

-- | Parse TSN XML for the DTD "newsxml.dtd". Each document contains a
-- root element \<message\> that contains an entire news item.
--
module TSN.XML.News (
  pickle_message,
  -- * Tests
  news_tests,
  -- * WARNING: these are private but exported to silence warnings
  News_NewsLocationConstructor(..),
  News_NewsTeamConstructor(..),
  NewsConstructor(..),
  NewsLocationConstructor(..),
  NewsTeamConstructor(..) )
where

-- System imports.
import Data.Data ( Data, constrFields, dataTypeConstrs, dataTypeOf )
import Data.List.Utils ( join, split )
import Data.Tuple.Curry ( uncurryN )
import Data.Typeable ( Typeable )
import Database.Groundhog (
  insert_,
  migrate )
import Database.Groundhog.Core ( DefaultKey )
import Database.Groundhog.TH (
  defaultCodegenConfig,
  groundhog,
  mkPersist )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( (@?=), testCase )
import Text.XML.HXT.Core (
  PU,
  xp13Tuple,
  xpAttr,
  xpElem,
  xpInt,
  xpList,
  xpOption,
  xpPair,
  xpText,
  xpTriple,
  xpWrap )

-- Local imports.
import TSN.Codegen (
  tsn_codegen_config,
  tsn_db_field_namer ) -- Used in a test
import TSN.DbImport ( DbImport(..), ImportResult(..), run_dbmigrate )
import TSN.XmlImport ( XmlImport(..) )
import Xml ( FromXml(..), pickle_unpickle, unpickleable )



-- | The database type for teams as they show up in the news.
--
data NewsTeam =
  NewsTeam { team_name :: String }
  deriving (Eq, Show)

-- | This is needed to define the XmlImport instance for NewsTeam; it
-- basically says that the DB representation is the same as the XML
-- representation.
--
instance FromXml NewsTeam where
  type Db NewsTeam = NewsTeam
  from_xml = id

-- | Allow us to call 'insert_xml' on the XML representation of
-- NewsTeams.
--
instance XmlImport NewsTeam
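
-- A minimal sketch of what this buys us (the team name is
-- hypothetical): since @Db NewsTeam = NewsTeam@ and @from_xml = id@,
-- a call like @insert_xml (NewsTeam "Barcelona")@ stores the team
-- row exactly as it was parsed from the XML.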


-- | Mapping between News records and NewsTeam records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsTeam = News_NewsTeam
  (DefaultKey News)
  (DefaultKey NewsTeam)


-- | The database type for locations as they show up in the news.
--
data NewsLocation =
  NewsLocation {
    city :: Maybe String,
    state :: Maybe String,
    country :: String }
  deriving (Eq, Show)

-- | This is needed to define the XmlImport instance for NewsLocation; it
-- basically says that the DB representation is the same as the XML
-- representation.
--
instance FromXml NewsLocation where
  type Db NewsLocation = NewsLocation
  from_xml = id

-- | Allow us to call 'insert_xml' on the XML representation of
-- NewsLocations.
--
instance XmlImport NewsLocation


-- | Mapping between News records and NewsLocation records in the
-- database. We don't name the fields because we don't use the names
-- explicitly; that means we have to give them nice database names
-- via groundhog.
--
data News_NewsLocation = News_NewsLocation
  (DefaultKey News)
  (DefaultKey NewsLocation)


-- | The msg_id child of \<message\> contains an event_id attribute; we
-- embed it into the 'News' type. We (pointlessly) use the "db_"
-- prefix here so that the two names don't collide on "id" when
-- Groundhog is creating its fields using our field namer.
data MsgId =
  MsgId {
    db_msg_id :: Int,
    db_event_id :: Maybe Int }
  deriving (Data, Eq, Show, Typeable)
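
-- For example, once our field namer strips the @db_@ prefix, these
-- two fields become the @msg_id@ and @event_id@ columns; see the
-- second groundhog block below.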


-- | The XML representation of a news item (message).
--
data Message =
  Message {
    xml_xml_file_id :: Int,
    xml_heading :: String,
    xml_mid :: MsgId,
    xml_category :: String,
    xml_sport :: String,
    xml_url :: Maybe String,
    xml_teams :: [NewsTeam],
    xml_locations :: [NewsLocation],
    xml_sms :: String,
    xml_editor :: Maybe String,
    xml_text :: Maybe String, -- Text and continue seem to show up in pairs,
    xml_continue :: Maybe String, -- either both present or both missing.
    xml_time_stamp :: String }
  deriving (Eq, Show)


-- | The database representation of a news item. We drop several
-- uninteresting fields from 'Message', and omit the list fields which
-- will be represented as join tables.
--
data News =
  News {
    db_mid :: MsgId,
    db_sport :: String,
    db_url :: Maybe String,
    db_sms :: String,
    db_editor :: Maybe String,
    db_text :: Maybe String,
    db_continue :: Maybe String }
  deriving (Data, Eq, Show, Typeable)


-- | Convert the XML representation 'Message' to the database
-- representation 'News'.
--
instance FromXml Message where
  type Db Message = News

  -- | We use a record wildcard so GHC doesn't complain that we never
  -- used the field names.
  --
  from_xml Message{..} = News { db_mid = xml_mid,
                                db_sport = xml_sport,
                                db_url = xml_url,
                                db_sms = xml_sms,
                                db_editor = xml_editor,
                                db_text = xml_text,
                                db_continue = xml_continue }

-- | This lets us call 'insert_xml' on a 'Message'.
--
instance XmlImport Message

-- | Define 'dbmigrate' and 'dbimport' for 'Message's. The import is
-- slightly non-generic because of our 'News_NewsTeam' and
-- 'News_NewsLocation' join tables.
--
instance DbImport Message where
  dbmigrate _ =
    run_dbmigrate $ do
      migrate (undefined :: NewsTeam)
      migrate (undefined :: NewsLocation)
      migrate (undefined :: News)
      migrate (undefined :: News_NewsTeam)
      migrate (undefined :: News_NewsLocation)

  dbimport message = do
    -- Insert the message and acquire its primary key (unique ID).
    news_id <- insert_xml message

    -- Insert each team into its own table. We use insert_xml_or_select
    -- because we know that most teams will already exist, and we
    -- want to get back the id for the existing team when
    -- there's a collision.
    nt_ids <- mapM insert_xml_or_select (xml_teams message)

    -- Now that the teams have been inserted, create
    -- news__news_team records mapping between the two.
    let news_news_teams = map (News_NewsTeam news_id) nt_ids
    mapM_ insert_ news_news_teams

    -- Do all of that over again for the NewsLocations.
    loc_ids <- mapM insert_xml_or_select (xml_locations message)
    let news_news_locations = map (News_NewsLocation news_id) loc_ids
    mapM_ insert_ news_news_locations

    return ImportSucceeded
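
-- To make the sequence above concrete (the counts are hypothetical):
-- a message naming two teams and one location yields one @news@ row,
-- at most two new @news_teams@ rows (existing teams are merely
-- looked up), one @news_locations@ row, two @news__news_teams@ rows,
-- and one @news__news_locations@ row.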


-- These types don't have special XML representations or field name
-- collisions so we use the defaultCodegenConfig and give their
-- fields nice simple names.
mkPersist defaultCodegenConfig [groundhog|
- entity: NewsTeam
  dbName: news_teams
  constructors:
    - name: NewsTeam
      uniques:
        - name: unique_news_team
          type: constraint
          fields: [team_name]

- entity: NewsLocation
  dbName: news_locations
  constructors:
    - name: NewsLocation
      uniques:
        - name: unique_news_location
          type: constraint
          fields: [city, state, country]

|]
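
-- Roughly speaking (the exact DDL is up to the groundhog backend),
-- the block above yields a @news_teams@ table with an automatic
-- primary key and a @team_name@ column guarded by the
-- @unique_news_team@ constraint, and a @news_locations@ table whose
-- @city@, @state@, and @country@ columns fall under
-- @unique_news_location@.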


-- These types have fields with e.g. db_ and xml_ prefixes, so we
-- use our own codegen to peel those off before naming the columns.
mkPersist tsn_codegen_config [groundhog|
- entity: News
  dbName: news
  constructors:
    - name: News
      fields:
        - name: db_mid
          embeddedType:
            - {name: msg_id, dbName: msg_id}
            - {name: event_id, dbName: event_id}
- embedded: MsgId
  fields:
    - name: db_msg_id
      dbName: msg_id
    - name: db_event_id
      dbName: event_id


- entity: News_NewsTeam
  dbName: news__news_teams
  constructors:
    - name: News_NewsTeam
      fields:
        - name: news_NewsTeam0 # Default created by mkNormalFieldName
          dbName: news_id
        - name: news_NewsTeam1 # Default created by mkNormalFieldName
          dbName: news_teams_id

- entity: News_NewsLocation
  dbName: news__news_locations
  constructors:
    - name: News_NewsLocation
      fields:
        - name: news_NewsLocation0 # Default created by mkNormalFieldName
          dbName: news_id
        - name: news_NewsLocation1 # Default created by mkNormalFieldName
          dbName: news_locations_id
|]


-- | Convert a 'NewsTeam' to/from XML.
--
pickle_news_team :: PU NewsTeam
pickle_news_team =
  xpElem "team" $
  xpWrap (from_string, to_string) xpText
  where
    to_string :: NewsTeam -> String
    to_string = team_name

    from_string :: String -> NewsTeam
    from_string = NewsTeam
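
-- A sketch of the round trip (the team name is hypothetical):
--
-- > <team>Barcelona</team>
--
-- unpickles to @NewsTeam "Barcelona"@, and pickling that value
-- produces the same element again.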


-- | Convert a 'MsgId' to/from XML.
--
pickle_msg_id :: PU MsgId
pickle_msg_id =
  xpElem "msg_id" $
  xpWrap (from_tuple, to_tuple) $
  xpPair xpInt (xpAttr "EventId" (xpOption xpInt))
  where
    from_tuple = uncurryN MsgId
    to_tuple m = (db_msg_id m, db_event_id m)
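
-- For example (both numbers are hypothetical),
--
-- > <msg_id EventId="12345">67890</msg_id>
--
-- unpickles to @MsgId 67890 (Just 12345)@; an EventId that does not
-- parse as an integer (an empty attribute value, say) comes back as
-- Nothing.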


-- | Convert a 'NewsLocation' to/from XML.
--
pickle_location :: PU NewsLocation
pickle_location =
  xpElem "location" $
  xpWrap (from_tuple, to_tuple) $
  xpTriple (xpOption (xpElem "city" xpText))
           (xpOption (xpElem "state" xpText))
           (xpElem "country" xpText)
  where
    from_tuple =
      uncurryN NewsLocation
    to_tuple l = (city l, state l, country l)
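
-- A sketch (the place names are hypothetical):
--
-- > <location>
-- >   <city>Pittsburgh</city>
-- >   <state>PA</state>
-- >   <country>USA</country>
-- > </location>
--
-- unpickles to @NewsLocation (Just "Pittsburgh") (Just "PA") "USA"@;
-- the city and state elements may each be omitted.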


-- | Convert a 'Message' to/from XML.
--
pickle_message :: PU Message
pickle_message =
  xpElem "message" $
  xpWrap (from_tuple, to_tuple) $
  xp13Tuple (xpElem "XML_File_ID" xpInt)
            (xpElem "heading" xpText)
            pickle_msg_id
            (xpElem "category" xpText)
            (xpElem "sport" xpText)
            (xpElem "url" $ xpOption xpText)
            (xpList pickle_news_team)
            (xpList pickle_location)
            (xpElem "SMS" xpText)
            (xpOption (xpElem "Editor" xpText))
            (xpOption (xpElem "text" xpText))
            pickle_continue
            (xpElem "time_stamp" xpText)
  where
    from_tuple = uncurryN Message
    to_tuple m = (xml_xml_file_id m, -- Verbose,
                  xml_heading m,     -- but
                  xml_mid m,         -- eliminates
                  xml_category m,    -- GHC
                  xml_sport m,       -- warnings
                  xml_url m,         -- .
                  xml_teams m,       -- .
                  xml_locations m,   -- .
                  xml_sms m,
                  xml_editor m,
                  xml_text m,
                  xml_continue m,
                  xml_time_stamp m)
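
-- The document shape handled above looks roughly like this (values
-- elided; <url>, <Editor>, <text> and <continue> are optional, while
-- <team> and <location> may repeat or be absent):
--
-- > <message>
-- >   <XML_File_ID>...</XML_File_ID>
-- >   <heading>...</heading>
-- >   <msg_id EventId="...">...</msg_id>
-- >   <category>...</category>
-- >   <sport>...</sport>
-- >   <url>...</url>
-- >   <team>...</team>
-- >   <location>...</location>
-- >   <SMS>...</SMS>
-- >   <Editor>...</Editor>
-- >   <text>...</text>
-- >   <continue>...</continue>
-- >   <time_stamp>...</time_stamp>
-- > </message>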

-- | We combine all of the \<continue\> elements into one 'String'
-- while unpickling and do the reverse while pickling.
--
pickle_continue :: PU (Maybe String)
pickle_continue =
  xpOption $
  xpWrap (to_string, from_string) $
  xpElem "continue" $
  xpList (xpElem "P" xpText)
  where
    from_string :: String -> [String]
    from_string = split "\n"

    to_string :: [String] -> String
    to_string = join "\n"
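
-- For example (the paragraph text is hypothetical),
--
-- > <continue><P>Grave robbers.</P><P>Film at eleven.</P></continue>
--
-- unpickles to @Just "Grave robbers.\nFilm at eleven."@, and pickling
-- splits the string back into one <P> element per line.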


--
-- Tasty Tests
--

-- | A list of all tests for this module.
--
news_tests :: TestTree
news_tests =
  testGroup
    "News tests"
    [ test_news_fields_have_correct_names,
      test_pickle_of_unpickle_is_identity,
      test_unpickle_succeeds ]


-- | Make sure our codegen is producing the correct database names.
--
test_news_fields_have_correct_names :: TestTree
test_news_fields_have_correct_names =
  testCase "news fields get correct database names" $
    mapM_ check (zip actual expected)
  where
    -- This is cool, it uses the (derived) Data instance of
    -- News.News to get the field names of its constructor.
    field_names :: [String]
    field_names =
      constrFields . head $ dataTypeConstrs $ dataTypeOf (undefined :: News)

    expected :: [String]
    expected =
      map (\x -> tsn_db_field_namer "herp" "derp" 8675309 x 90210) field_names

    actual :: [String]
    actual = ["mid", "sport", "url", "sms", "editor", "text", "continue"]

    check (x,y) = (x @?= y)
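
-- As a concrete instance of the above: the test feeds dummy values
-- for every argument except the field name itself, so passing means
-- that, e.g., @db_sport@ maps to the plain column name @sport@
-- regardless of those other arguments.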


-- | If we unpickle something and then pickle it, we should wind up
-- with the same thing we started with. WARNING: success of this
-- test does not mean that unpickling succeeded.
--
test_pickle_of_unpickle_is_identity :: TestTree
test_pickle_of_unpickle_is_identity = testGroup "pickle-unpickle tests"
  [ check "pickle composed with unpickle is the identity"
          "test/xml/newsxml.xml",

    check "pickle composed with unpickle is the identity (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      (expected, actual) <- pickle_unpickle pickle_message path
      actual @?= expected


-- | Make sure we can actually unpickle these things.
--
test_unpickle_succeeds :: TestTree
test_unpickle_succeeds = testGroup "unpickle tests"
  [ check "unpickling succeeds"
          "test/xml/newsxml.xml",

    check "unpickling succeeds (with Editor)"
          "test/xml/newsxml-with-editor.xml" ]
  where
    check desc path = testCase desc $ do
      actual <- unpickleable path pickle_message
      let expected = True
      actual @?= expected