X-Git-Url: http://gitweb.michael.orlitzky.com/?p=dead%2Flwn-epub.git;a=blobdiff_plain;f=src%2FLWN%2FPage.hs;h=3d53284c4b66e9c67ff222d19982ce4e2ced45d2;hp=f0ada7cd13cfff0707904272f93fa2b4b354f9e3;hb=6a7cfdf0880ee5c5367e794babb30fa7eac22f39;hpb=fc0052e451aa03675ebd9a128dfa46573b9357d7

diff --git a/src/LWN/Page.hs b/src/LWN/Page.hs
index f0ada7c..3d53284 100644
--- a/src/LWN/Page.hs
+++ b/src/LWN/Page.hs
@@ -3,6 +3,7 @@
 module LWN.Page
 where
 
+import Control.Concurrent.ParallelIO (parallel)
 import qualified Data.Map as Map (lookup)
 import Data.Time (getCurrentTime)
 import qualified Data.ByteString.Lazy as B (ByteString, hPut)
@@ -19,7 +20,9 @@ import Text.Pandoc (
   defaultWriterOptions,
   readHtml,
   writeEPUB,
-  writerEPUBMetadata)
+  writerEPUBMetadata,
+  writerUserDataDir)
+import Text.Pandoc.Shared ( readDataFile )
 import Text.XML.HXT.Core (
   ArrowXml,
   IOSArrow,
@@ -33,6 +36,7 @@ import Text.XML.HXT.Core (
   getChildren,
   getText,
   hasName,
+  none,
   processAttrl,
   processTopDown,
   this,
@@ -41,7 +45,7 @@ import Text.XML.HXT.Core (
   when)
 import Text.HandsomeSoup (css, parseHtml)
 
-import Configuration (Cfg)
+import Configuration (Cfg, full_stories)
 import LWN.Article
 import LWN.HTTP (
   ImageMap,
@@ -57,6 +61,7 @@ import LWN.XHTML (
   is_image,
   preprocess,
   remove_byline,
+  remove_full_story_paragraphs,
   remove_title,
   to_xhtml,
   to_xml,
@@ -110,6 +115,20 @@ instance XHTML Page where
 
 
 
+-- | Stolen from writeEPUB.
+default_stylesheet :: IO String
+default_stylesheet =
+  -- This comes with Pandoc, I guess.
+  readDataFile (writerUserDataDir defaultWriterOptions) "epub.css"
+
+
+construct_stylesheet :: IO String
+construct_stylesheet = do
+  defaults <- default_stylesheet
+  -- Allow word-wrapping in <pre> elements.
+  let my_additions = "\n" ++ "pre { white-space: pre-wrap; }" ++ "\n"
+  return $ defaults ++ my_additions
+
 page_from_url :: Cfg -> URL -> IO (Maybe Page)
 page_from_url cfg url = do
   contents <- get_article_contents cfg url  
@@ -126,14 +145,14 @@ insert_full_stories story_map =
     lookup_func :: (ArrowXml a) => URL -> a XmlTree XmlTree
     lookup_func href =
       case Map.lookup href story_map of
-        -- Leave it alone if we don't have the full story.
-        Nothing -> this
+        -- Drop the paragraph if we don't have the contents.
+        Nothing -> none
         Just v -> to_xml v
 
     article_xml :: (ArrowXml a) => a XmlTree XmlTree
     article_xml =
       lookup_func
-      $<
+      $< -- From HXT's Control.Arrow.ArrowList
       (this /> full_story_link >>> getAttrValue "href")
                    
 replace_remote_img_srcs :: (ArrowXml a) => ImageMap -> a XmlTree XmlTree
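
The change from this to none above alters what happens when a full story could
not be downloaded: this would leave the "Full Story" paragraph in place, while
none deletes it from the output tree. As a rough illustration of the HXT idiom
used here ($< feeds the string produced by the arrow on its right into the
function on its left, and none drops nodes), here is a standalone sketch; the
element names and input are invented and none of this code comes from the
repository:

    import Text.XML.HXT.Core

    -- Replace each <a> element with a text node containing its href,
    -- and drop <a> elements that have no href at all.
    links_to_text :: (ArrowXml a) => a XmlTree XmlTree
    links_to_text =
      processTopDown (convert `when` hasName "a")
      where
        -- ($<) runs the arrow on the right (extract the href), then
        -- hands the resulting string to the function on the left,
        -- which chooses the arrow to apply next.
        convert :: (ArrowXml b) => b XmlTree XmlTree
        convert = pick $< getAttrValue "href"

        pick :: (ArrowXml b) => String -> b XmlTree XmlTree
        pick href
          | null href = none      -- no href: drop the element entirely
          | otherwise = txt href  -- otherwise replace it with plain text

    main :: IO ()
    main = do
      results <- runX $
        readString [withParseHTML yes, withWarnings no]
                   "<p><a href='http://lwn.net/'>LWN</a> and <a>nothing</a></p>"
        >>> links_to_text
        >>> writeDocumentToString []
      mapM_ putStrLn results
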
@@ -172,7 +191,7 @@ type StoryMap = Map.Map URL Article
 -- These come *before* preprocessing.
 download_full_story_urls :: Cfg -> [URL] -> IO StoryMap
 download_full_story_urls cfg story_urls = do
-  pages <- mapM (page_from_url cfg) story_urls
+  pages <- parallel $ map (page_from_url cfg) story_urls
   let pairs = zip story_urls pages
   return $ foldl my_insert empty_map pairs
   where
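
The parallel combinator comes from the parallel-io package's global thread
pool and returns its results in the same order as the input actions, which is
what lets the zip with story_urls above line up. A minimal sketch of that
pattern, with a placeholder fetch action standing in for the real page
download:

    import Control.Concurrent.ParallelIO (parallel, stopGlobalPool)

    -- Run one download per URL concurrently (like mapM, but parallel)
    -- and collect the results in the same order as the input list.
    fetch_all :: [String] -> IO [String]
    fetch_all urls = parallel (map fetch urls)
      where
        -- Placeholder standing in for a real HTTP request.
        fetch :: String -> IO String
        fetch url = return ("contents of " ++ url)

    main :: IO ()
    main = do
      results <- fetch_all ["http://lwn.net/1/", "http://lwn.net/2/"]
      mapM_ putStrLn results
      -- The global pool should be shut down once no further parallel
      -- calls will be made; build with -threaded for real concurrency.
      stopGlobalPool
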
@@ -191,8 +210,12 @@ download_full_stories cfg xml = do
 
 parse :: Cfg -> IOSArrow XmlTree XmlTree -> IO (Maybe Page)
 parse cfg xml = do
-  story_map <- download_full_stories cfg xml
-  let fs_xml = xml >>> insert_full_stories story_map
+  fs_xml <- if (full_stories cfg) then do
+             story_map <- download_full_stories cfg xml
+             return $ xml >>> insert_full_stories story_map
+           else do
+             -- Get rid of them if we don't want them.
+             return $ xml >>> remove_full_story_paragraphs
 
   let clean_xml = fs_xml >>> preprocess
   image_map <- download_images clean_xml
@@ -260,8 +283,6 @@ ap_parse_articles xml = do
   parsed_byline   <- parse_byline xml
   parsed_body     <- ap_parse_body xml
 
-  putStrLn $ fromJust parsed_headline
-
   if (isNothing parsed_headline) || (isNothing parsed_body)
   then return []
   else do
@@ -281,9 +302,9 @@ fp_parse :: IOSArrow XmlTree XmlTree -> IO (Maybe Page)
 fp_parse xml = do
     hl <- parse_headline xml
     parsed_articles <- fp_parse_articles xml
-    case parsed_articles of
-      []          -> return Nothing
-      x -> return $ Just $ FullPage (fromJust hl) x
+    return $ case parsed_articles of
+      [] -> Nothing
+      x  -> Just $ FullPage (fromJust hl) x
 
 
 
@@ -352,7 +373,6 @@ fp_parse_articles xml = do
                           []    -> []
   -- Put the separator back, it was lost during the split.
   let split_articles = map (article_separator ++) split_articles'
-  --_ <- mapM print_article split_articles
   real_articles <- mapM parse_html_article split_articles
   let just_articles = catMaybes real_articles
   return just_articles
@@ -397,12 +417,16 @@ epublish obj handle = do
   hClose handle
 
 xhtml_to_epub :: String -> String -> IO B.ByteString
-xhtml_to_epub epmd =
-   write_epub . read_html
-   where
-     my_writer_options = defaultWriterOptions { writerEPUBMetadata = epmd }
-     write_epub = writeEPUB Nothing [] my_writer_options
-     read_html  = readHtml defaultParserState
+xhtml_to_epub epmd xhtml = do
+  stylesheet <- construct_stylesheet
+  writeEPUB
+    (Just stylesheet)
+    []
+    my_writer_options
+    (read_html xhtml)
+  where
+    my_writer_options = defaultWriterOptions { writerEPUBMetadata = epmd }
+    read_html  = readHtml defaultParserState
 
 
 --
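
For reference, the rewritten xhtml_to_epub follows the pandoc 1.x API that
this code targets: writeEPUB takes an optional stylesheet string, a list of
font files, writer options, and a parsed document, and the Just stylesheet
argument is what overrides the epub.css bundled with Pandoc. A standalone
sketch under that same assumed API; the output path and HTML input are
invented:

    import qualified Data.ByteString.Lazy as B (writeFile)
    import Text.Pandoc (
      defaultParserState,
      defaultWriterOptions,
      readHtml,
      writeEPUB)

    -- Convert an HTML string to an EPUB file, using a custom stylesheet.
    html_to_epub_file :: FilePath -> String -> String -> IO ()
    html_to_epub_file path css html = do
      -- Parsing is pure in this version of the API; passing Nothing
      -- instead of (Just css) would fall back to Pandoc's default
      -- stylesheet.
      let doc = readHtml defaultParserState html
      epub <- writeEPUB (Just css) [] defaultWriterOptions doc
      B.writeFile path epub

    main :: IO ()
    main =
      html_to_epub_file
        "out.epub"
        "pre { white-space: pre-wrap; }"
        "<h1>Example</h1><pre>a long preformatted line that should wrap</pre>"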