From aad40cd8e1e8c84c5fc294674a7159bb40838440 Mon Sep 17 00:00:00 2001
From: Michael Orlitzky
Date: Sun, 8 Jul 2012 19:17:29 -0400
Subject: [PATCH] Move the pure-xml functions into the LWN.XHTML module.

---
 src/LWN/HTTP.hs  |  48 ++++++++++++++
 src/LWN/Page.hs  | 169 ++++++-----------------------------------------
 src/LWN/XHTML.hs | 117 ++++++++++++++++++++++++++++++--
 3 files changed, 180 insertions(+), 154 deletions(-)

diff --git a/src/LWN/HTTP.hs b/src/LWN/HTTP.hs
index 4cfcb04..7da6ed1 100644
--- a/src/LWN/HTTP.hs
+++ b/src/LWN/HTTP.hs
@@ -5,6 +5,7 @@ where
 
 import qualified Data.ByteString as B (hPut)
 import qualified Data.Map as Map (Map, empty, insert)
+import Data.Maybe (fromJust)
 import Network.Curl (
   CurlCode(..),
   CurlOption(..),
@@ -18,8 +19,11 @@ import Network.Curl (
 import Network.Curl.Download (openURI)
 import System.Directory (doesFileExist, getTemporaryDirectory)
 import System.IO (hClose, hPutStrLn, stderr)
+import qualified System.IO.UTF8 as Utf8 (readFile)
 import System.IO.Temp (openBinaryTempFile, openTempFile)
 
+import qualified Configuration as C (Cfg, password, use_account, username)
+import LWN.Article (real_article_path)
 import LWN.URI (URL, filename)
 
 login_url :: URL
@@ -182,3 +186,47 @@ download_image_urls image_urls = do
     my_insert :: ImageMap -> (URL, Maybe FilePath) -> ImageMap
     my_insert dict (_, Nothing) = dict
     my_insert dict (k, Just v) = Map.insert k v dict
+
+
+-- | Try to fetch the raw contents of the given article, either from a
+-- local file or over HTTP; if neither works, we return 'Nothing'.
+get_article_contents :: C.Cfg -> URL -> IO (Maybe String)
+get_article_contents cfg article_name = do
+  my_article <- real_article_path article_name
+  is_file <- doesFileExist my_article
+  case is_file of
+    True -> do
+      contents <- Utf8.readFile my_article
+      return $ Just $ contents
+    False -> do
+      -- Not a local file, so download the URL.
+      if C.use_account cfg then do
+        -- use_account would be false if these fromJusts would fail.
+        cj <- make_cookie_jar
+        li_result <- log_in cj
+                      (fromJust $ C.username cfg)
+                      (fromJust $ C.password cfg)
+
+        case li_result of
+          Left err -> do
+            let msg = "Failed to log in. " ++ err
+            hPutStrLn stderr msg
+          Right response_body -> do
+            hPutStrLn stderr response_body
+
+        html <- get_page (Just cj) my_article
+
+        case html of
+          Left err -> do
+            let msg = "Failed to retrieve page. " ++ err
+            hPutStrLn stderr msg
+            return Nothing
+          Right h -> return $ Just h
+      else do
+        html <- get_page Nothing my_article
+        case html of
+          Left err -> do
+            let msg = "Failed to retrieve page. " ++ err
+            hPutStrLn stderr msg
+            return Nothing
+          Right h -> return $ Just h
diff --git a/src/LWN/Page.hs b/src/LWN/Page.hs
index 3027eae..97171c6 100644
--- a/src/LWN/Page.hs
+++ b/src/LWN/Page.hs
@@ -9,9 +9,7 @@ import qualified Data.ByteString.Lazy as B (ByteString, hPut)
 import Data.String.Utils (split, strip)
 import Data.Maybe (catMaybes, fromJust, isNothing)
 import Prelude hiding (readFile)
-import System.Directory (doesFileExist)
-import System.IO (Handle, hClose, hFlush, hPutStrLn, stderr)
-import System.IO.UTF8 (readFile)
+import System.IO (Handle, hClose, hFlush)
 import Test.HUnit (Assertion, assertEqual)
 import Test.Framework (Test, testGroup)
 import Test.Framework.Providers.HUnit (testCase)
@@ -24,92 +22,40 @@ import Text.Pandoc (
 import Text.XML.HXT.Core (
   ArrowXml,
   IOSArrow,
-  IOStateArrow,
   XmlTree,
   (>>>),
   (/>),
   (//>),
   changeAttrValue,
-  getAttrValue,
   getChildren,
   getText,
-  hasAttrValue,
   hasName,
-  isElem,
-  mkName,
-  none,
   processAttrl,
   processTopDown,
   runX,
-  setElemName,
   xshow,
   when)
 import Text.HandsomeSoup (css, parseHtml)
 
-import Configuration (Cfg, password, use_account, username)
+import Configuration (Cfg)
 import LWN.Article
 import LWN.HTTP (
   ImageMap,
   download_image_urls,
-  get_page,
-  log_in,
-  make_cookie_jar)
-import LWN.URI (URL, try_make_absolute_url)
-import LWN.XHTML (XHTML, parse_lwn, to_xhtml)
-import Misc (contains)
-
-
--- | Try to parse the given article using HXT. We try a few different
--- methods; if none of them work, we return 'Nothing'.
-get_xml_from_article :: Cfg -> URL -> IO (Maybe (IOStateArrow s b XmlTree))
-get_xml_from_article cfg article_name = do
-  my_article <- real_article_path article_name
-  is_file <- doesFileExist my_article
-  case is_file of
-    True -> do
-      contents <- readFile my_article
-      return $ Just $ parse_lwn contents
-    False -> do
-      -- Download the URL and try to parse it.
-      if use_account cfg then do
-        -- use_account would be false if these fromJusts would fail.
-        cj <- make_cookie_jar
-        li_result <- log_in cj
-                      (fromJust $ username cfg)
-                      (fromJust $ password cfg)
-
-        case li_result of
-          Left err -> do
-            let msg = "Failed to log in. " ++ err
-            hPutStrLn stderr msg
-          Right response_body -> do
-            hPutStrLn stderr response_body
-
-        html <- get_page (Just cj) my_article
-
-        case html of
-          Left err -> do
-            let msg = "Failed to retrieve page. " ++ err
-            hPutStrLn stderr msg
-            return Nothing
-          Right h -> return $ Just $ parse_lwn h
-      else do
-        html <- get_page Nothing my_article
-        case html of
-          Left err -> do
-            let msg = "Failed to retrieve page. " ++ err
-            hPutStrLn stderr msg
-            return Nothing
-          Right h -> return $ Just $ parse_lwn h
+  get_article_contents)
+import LWN.URI (URL)
+import LWN.XHTML (
+  XHTML,
+  image_srcs,
+  is_image,
+  preprocess,
+  remove_byline,
+  remove_title,
+  to_xhtml,
+  xml_from_contents)
 
--- Should be called *after* preprocessing.
-download_images :: IOSArrow XmlTree XmlTree -> IO ImageMap
-download_images xml = do
-  image_urls <- runX $ xml >>> image_srcs
-  download_image_urls image_urls
 
 data Page =
   -- | An LWN page with one article on it.
@@ -158,43 +104,19 @@ instance XHTML Page where
 
 page_from_url :: Cfg -> URL -> IO (Maybe Page)
 page_from_url cfg url = do
-  maybe_html <- get_xml_from_article cfg url
-  case maybe_html of
+  contents <- get_article_contents cfg url
+  case (xml_from_contents contents) of
     Just html -> parse html
     Nothing -> return Nothing
 
 
-is_link :: (ArrowXml a) => a XmlTree XmlTree
-is_link =
-  isElem >>> hasName "a"
-
-
-remove_comment_links :: (ArrowXml a) => a XmlTree XmlTree
-remove_comment_links =
-  processTopDown $ kill_comments `when` is_link
-  where
-    is_comment_link =
-      hasAttrValue "href" (contains "#Comments")
-
-    kill_comments =
-      none `when` is_comment_link
-
-replace_links_with_spans :: (ArrowXml a) => a XmlTree XmlTree
-replace_links_with_spans =
-  processTopDown $ (make_span >>> remove_attrs) `when` is_link
-  where
-    make_span = setElemName $ mkName "span"
-    remove_attrs = processAttrl none
+-- Should be called *after* preprocessing.
+download_images :: IOSArrow XmlTree XmlTree -> IO ImageMap
+download_images xml = do
+  image_urls <- runX $ xml >>> image_srcs
+  download_image_urls image_urls
 
 
--- | Preprocessing common to both page types.
-preprocess :: (ArrowXml a) => a XmlTree XmlTree
-preprocess =
-  make_image_srcs_absolute
-  >>>
-  remove_comment_links
-  >>>
-  replace_links_with_spans
 
 
 replace_remote_img_srcs :: (ArrowXml a) => ImageMap -> a XmlTree XmlTree
@@ -324,33 +246,6 @@ fp_parse_article_title xml = do
 
 
-is_title :: (ArrowXml a) => a XmlTree XmlTree
-is_title =
-  (hasName "h2")
-  >>>
-  (hasAttrValue "class" (== "SummaryHL"))
-
-
-is_byline :: (ArrowXml a) => a XmlTree XmlTree
-is_byline =
-  (hasName "div")
-  >>>
-  (hasAttrValue "class" (== "FeatureByLine"))
-
-
-is_image :: (ArrowXml a) => a XmlTree XmlTree
-is_image = isElem >>> hasName "img"
-
-remove_title :: (ArrowXml a) => a XmlTree XmlTree
-remove_title =
-  processTopDown ((none) `when` is_title)
-
-
-remove_byline :: (ArrowXml a) => a XmlTree XmlTree
-remove_byline =
-  processTopDown ((none) `when` is_byline)
-
-
 fp_parse_article_body :: IOSArrow XmlTree XmlTree -> IO (Maybe String)
 fp_parse_article_body xml = do
@@ -457,30 +352,6 @@ xhtml_to_epub epmd =
     read_html = readHtml defaultParserState
 
-
---
--- Misc
---
-
-image_srcs :: (ArrowXml a) => a XmlTree URL
-image_srcs =
-  css "img"
-  >>>
-  getAttrValue "src"
-
-make_image_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
-make_image_srcs_absolute =
-  processTopDown (make_srcs_absolute `when` is_image)
-  where
-    change_src :: (ArrowXml a) => a XmlTree XmlTree
-    change_src =
-      changeAttrValue try_make_absolute_url
-
-    make_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
-    make_srcs_absolute =
-      processAttrl $ change_src `when` hasName "src"
-
-
 --
 -- Tests
 --
diff --git a/src/LWN/XHTML.hs b/src/LWN/XHTML.hs
index f6632db..8dfe3b2 100644
--- a/src/LWN/XHTML.hs
+++ b/src/LWN/XHTML.hs
@@ -1,25 +1,45 @@
 module LWN.XHTML (
   XHTML,
   XML,
-  my_read_opts,
+  image_srcs,
+  is_image,
   parse_lwn,
+  preprocess,
+  remove_byline,
+  remove_title,
   to_xhtml,
-  to_xml
-  )
+  to_xml,
+  xml_from_contents)
 where
 
+import Text.HandsomeSoup (css)
 import Text.XML.HXT.Core (
+  (>>>),
   ArrowXml,
   IOStateArrow,
   SysConfigList,
   XmlTree,
+  changeAttrValue,
+  getAttrValue,
+  hasAttrValue,
+  hasName,
+  isElem,
+  mkName,
   no,
+  none,
+  processAttrl,
+  processTopDown,
   readString,
+  setElemName,
+  when,
   withParseHTML,
   withValidate,
   withWarnings,
-  yes
-  )
+  yes)
+
+import LWN.URI (URL, try_make_absolute_url)
+import Misc (contains)
+
 
 class XHTML a where
   to_xhtml :: a -> String
@@ -36,3 +56,90 @@ my_read_opts = [ withValidate no,
 -- | My version of HandsomeSoup's parseHTML.
 parse_lwn :: String -> IOStateArrow s b XmlTree
 parse_lwn = readString my_read_opts
+
+
+-- | Takes the result of get_article_contents and calls parse_lwn on
+-- the contained value.
+xml_from_contents :: (Maybe String) -> Maybe (IOStateArrow s b XmlTree)
+xml_from_contents =
+  fmap parse_lwn
+
+
+
+-- | Preprocessing common to both page types.
+preprocess :: (ArrowXml a) => a XmlTree XmlTree
+preprocess =
+  make_image_srcs_absolute
+  >>>
+  remove_comment_links
+  >>>
+  replace_links_with_spans
+
+
+is_link :: (ArrowXml a) => a XmlTree XmlTree
+is_link =
+  isElem >>> hasName "a"
+
+
+remove_comment_links :: (ArrowXml a) => a XmlTree XmlTree
+remove_comment_links =
+  processTopDown $ kill_comments `when` is_link
+  where
+    is_comment_link =
+      hasAttrValue "href" (contains "#Comments")
+
+    kill_comments =
+      none `when` is_comment_link
+
+replace_links_with_spans :: (ArrowXml a) => a XmlTree XmlTree
+replace_links_with_spans =
+  processTopDown $ (make_span >>> remove_attrs) `when` is_link
+  where
+    make_span = setElemName $ mkName "span"
+    remove_attrs = processAttrl none
+
+
+is_title :: (ArrowXml a) => a XmlTree XmlTree
+is_title =
+  (hasName "h2")
+  >>>
+  (hasAttrValue "class" (== "SummaryHL"))
+
+
+is_byline :: (ArrowXml a) => a XmlTree XmlTree
+is_byline =
+  (hasName "div")
+  >>>
+  (hasAttrValue "class" (== "FeatureByLine"))
+
+
+is_image :: (ArrowXml a) => a XmlTree XmlTree
+is_image = isElem >>> hasName "img"
+
+remove_title :: (ArrowXml a) => a XmlTree XmlTree
+remove_title =
+  processTopDown ((none) `when` is_title)
+
+
+remove_byline :: (ArrowXml a) => a XmlTree XmlTree
+remove_byline =
+  processTopDown ((none) `when` is_byline)
+
+
+image_srcs :: (ArrowXml a) => a XmlTree URL
+image_srcs =
+  css "img"
+  >>>
+  getAttrValue "src"
+
+make_image_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
+make_image_srcs_absolute =
+  processTopDown (make_srcs_absolute `when` is_image)
+  where
+    change_src :: (ArrowXml a) => a XmlTree XmlTree
+    change_src =
+      changeAttrValue try_make_absolute_url
+
+    make_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
+    make_srcs_absolute =
+      processAttrl $ change_src `when` hasName "src"
-- 
2.44.2
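
For reference, here is a minimal usage sketch of how the arrows that now
live in LWN.XHTML compose with the IO side left in LWN.HTTP. It is an
assumed example rather than part of the commit: the helper name
list_images is hypothetical, but every imported function appears in the
diff above.

  import Text.XML.HXT.Core ((>>>), runX)
  import LWN.URI (URL)
  import LWN.XHTML (image_srcs, parse_lwn, preprocess)

  -- Parse a raw HTML string, run the preprocessing pass (absolute image
  -- srcs, comment links dropped, links turned into spans), and collect
  -- the image URLs that remain.
  list_images :: String -> IO [URL]
  list_images html = runX $ parse_lwn html >>> preprocess >>> image_srcs

The same composition shows up in Page.hs, where download_images runs
xml >>> image_srcs over an already-preprocessed tree before fetching the
images themselves.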