import qualified Data.ByteString as B (hPut)
import qualified Data.Map as Map (Map, empty, insert)
+import Data.Maybe (fromJust)
import Network.Curl (
CurlCode(..),
CurlOption(..),
import Network.Curl.Download (openURI)
import System.Directory (doesFileExist, getTemporaryDirectory)
import System.IO (hClose, hPutStrLn, stderr)
+import qualified System.IO.UTF8 as Utf8 (readFile)
import System.IO.Temp (openBinaryTempFile, openTempFile)
+import qualified Configuration as C (Cfg, password, use_account, username)
+import LWN.Article (real_article_path)
import LWN.URI (URL, filename)
login_url :: URL
my_insert :: ImageMap -> (URL, Maybe FilePath) -> ImageMap
my_insert dict (_, Nothing) = dict
my_insert dict (k, Just v) = Map.insert k v dict
+
+
+-- | Retrieve the raw contents of the given article, either from a
+-- local file or by downloading it (logging in first when the
+-- configuration says to use an account). Returns 'Nothing' on failure.
+get_article_contents :: C.Cfg -> URL -> IO (Maybe String)
+get_article_contents cfg article_name = do
+ my_article <- real_article_path article_name
+ is_file <- doesFileExist my_article
+ case is_file of
+ True -> do
+ contents <- Utf8.readFile my_article
+ return $ Just $ contents
+ False -> do
+      -- Download the URL and return its contents.
+ if C.use_account cfg then do
+      -- These fromJust calls are safe: use_account is False whenever
+      -- the username or password is unset.
+ cj <- make_cookie_jar
+ li_result <- log_in cj
+ (fromJust $ C.username cfg)
+ (fromJust $ C.password cfg)
+
+ case li_result of
+ Left err -> do
+ let msg = "Failed to log in. " ++ err
+ hPutStrLn stderr msg
+ Right response_body -> do
+ hPutStrLn stderr response_body
+
+ html <- get_page (Just cj) my_article
+
+ case html of
+ Left err -> do
+ let msg = "Failed to retrieve page. " ++ err
+ hPutStrLn stderr msg
+ return Nothing
+ Right h -> return $ Just h
+ else do
+ html <- get_page Nothing my_article
+ case html of
+ Left err -> do
+ let msg = "Failed to retrieve page. " ++ err
+ hPutStrLn stderr msg
+ return Nothing
+ Right h -> return $ Just h
import Data.String.Utils (split, strip)
import Data.Maybe (catMaybes, fromJust, isNothing)
import Prelude hiding (readFile)
-import System.Directory (doesFileExist)
-import System.IO (Handle, hClose, hFlush, hPutStrLn, stderr)
-import System.IO.UTF8 (readFile)
+import System.IO (Handle, hClose, hFlush)
import Test.HUnit (Assertion, assertEqual)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Text.XML.HXT.Core (
ArrowXml,
IOSArrow,
- IOStateArrow,
XmlTree,
(>>>),
(/>),
(//>),
changeAttrValue,
- getAttrValue,
getChildren,
getText,
- hasAttrValue,
hasName,
- isElem,
- mkName,
- none,
processAttrl,
processTopDown,
runX,
- setElemName,
xshow,
when)
import Text.HandsomeSoup (css, parseHtml)
-import Configuration (Cfg, password, use_account, username)
+import Configuration (Cfg)
import LWN.Article
import LWN.HTTP (
ImageMap,
download_image_urls,
- get_page,
- log_in,
- make_cookie_jar)
-import LWN.URI (URL, try_make_absolute_url)
-import LWN.XHTML (XHTML, parse_lwn, to_xhtml)
-import Misc (contains)
-
-
--- | Try to parse the given article using HXT. We try a few different
--- methods; if none of them work, we return 'Nothing'.
-get_xml_from_article :: Cfg -> URL -> IO (Maybe (IOStateArrow s b XmlTree))
-get_xml_from_article cfg article_name = do
- my_article <- real_article_path article_name
- is_file <- doesFileExist my_article
- case is_file of
- True -> do
- contents <- readFile my_article
- return $ Just $ parse_lwn contents
- False -> do
- -- Download the URL and try to parse it.
- if use_account cfg then do
- -- use_account would be false if these fromJusts would fail.
- cj <- make_cookie_jar
- li_result <- log_in cj
- (fromJust $ username cfg)
- (fromJust $ password cfg)
-
- case li_result of
- Left err -> do
- let msg = "Failed to log in. " ++ err
- hPutStrLn stderr msg
- Right response_body -> do
- hPutStrLn stderr response_body
-
- html <- get_page (Just cj) my_article
-
- case html of
- Left err -> do
- let msg = "Failed to retrieve page. " ++ err
- hPutStrLn stderr msg
- return Nothing
- Right h -> return $ Just $ parse_lwn h
- else do
- html <- get_page Nothing my_article
- case html of
- Left err -> do
- let msg = "Failed to retrieve page. " ++ err
- hPutStrLn stderr msg
- return Nothing
- Right h -> return $ Just $ parse_lwn h
+ get_article_contents)
+import LWN.URI (URL)
+import LWN.XHTML (
+ XHTML,
+ image_srcs,
+ is_image,
+ preprocess,
+ remove_byline,
+ remove_title,
+ to_xhtml,
+ xml_from_contents)
--- Should be called *after* preprocessing.
-download_images :: IOSArrow XmlTree XmlTree -> IO ImageMap
-download_images xml = do
- image_urls <- runX $ xml >>> image_srcs
- download_image_urls image_urls
-
data Page =
-- | An LWN page with one article on it.
page_from_url :: Cfg -> URL -> IO (Maybe Page)
page_from_url cfg url = do
- maybe_html <- get_xml_from_article cfg url
- case maybe_html of
+ contents <- get_article_contents cfg url
+ case (xml_from_contents contents) of
Just html -> parse html
Nothing -> return Nothing
-is_link :: (ArrowXml a) => a XmlTree XmlTree
-is_link =
- isElem >>> hasName "a"
-
-
-remove_comment_links :: (ArrowXml a) => a XmlTree XmlTree
-remove_comment_links =
- processTopDown $ kill_comments `when` is_link
- where
- is_comment_link =
- hasAttrValue "href" (contains "#Comments")
-
- kill_comments =
- none `when` is_comment_link
-
-replace_links_with_spans :: (ArrowXml a) => a XmlTree XmlTree
-replace_links_with_spans =
- processTopDown $ (make_span >>> remove_attrs) `when` is_link
- where
- make_span = setElemName $ mkName "span"
- remove_attrs = processAttrl none
+-- Should be called *after* preprocessing.
+download_images :: IOSArrow XmlTree XmlTree -> IO ImageMap
+download_images xml = do
+ image_urls <- runX $ xml >>> image_srcs
+ download_image_urls image_urls
--- | Preprocessing common to both page types.
-preprocess :: (ArrowXml a) => a XmlTree XmlTree
-preprocess =
- make_image_srcs_absolute
- >>>
- remove_comment_links
- >>>
- replace_links_with_spans
replace_remote_img_srcs :: (ArrowXml a) => ImageMap -> a XmlTree XmlTree
-is_title :: (ArrowXml a) => a XmlTree XmlTree
-is_title =
- (hasName "h2")
- >>>
- (hasAttrValue "class" (== "SummaryHL"))
-
-
-is_byline :: (ArrowXml a) => a XmlTree XmlTree
-is_byline =
- (hasName "div")
- >>>
- (hasAttrValue "class" (== "FeatureByLine"))
-
-
-is_image :: (ArrowXml a) => a XmlTree XmlTree
-is_image = isElem >>> hasName "img"
-
-remove_title :: (ArrowXml a) => a XmlTree XmlTree
-remove_title =
- processTopDown ((none) `when` is_title)
-
-
-remove_byline :: (ArrowXml a) => a XmlTree XmlTree
-remove_byline =
- processTopDown ((none) `when` is_byline)
-
-
fp_parse_article_body :: IOSArrow XmlTree XmlTree -> IO (Maybe String)
fp_parse_article_body xml = do
read_html = readHtml defaultParserState
-
---
--- Misc
---
-
-image_srcs :: (ArrowXml a) => a XmlTree URL
-image_srcs =
- css "img"
- >>>
- getAttrValue "src"
-
-make_image_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
-make_image_srcs_absolute =
- processTopDown (make_srcs_absolute `when` is_image)
- where
- change_src :: (ArrowXml a) => a XmlTree XmlTree
- change_src =
- changeAttrValue try_make_absolute_url
-
- make_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
- make_srcs_absolute =
- processAttrl $ change_src `when` hasName "src"
-
-
--
-- Tests
--
module LWN.XHTML (
XHTML,
XML,
- my_read_opts,
+ image_srcs,
+ is_image,
parse_lwn,
+ preprocess,
+ remove_byline,
+ remove_title,
to_xhtml,
- to_xml
- )
+ to_xml,
+ xml_from_contents)
where
+import Text.HandsomeSoup (css)
import Text.XML.HXT.Core (
+ (>>>),
ArrowXml,
IOStateArrow,
SysConfigList,
XmlTree,
+ changeAttrValue,
+ getAttrValue,
+ hasAttrValue,
+ hasName,
+ isElem,
+ mkName,
no,
+ none,
+ processAttrl,
+ processTopDown,
readString,
+ setElemName,
+ when,
withParseHTML,
withValidate,
withWarnings,
- yes
- )
+ yes)
+
+import LWN.URI (URL, try_make_absolute_url)
+import Misc (contains)
+
class XHTML a where
to_xhtml :: a -> String
-- | My version of HandsomeSoup's parseHTML.
parse_lwn :: String -> IOStateArrow s b XmlTree
parse_lwn = readString my_read_opts
+
+
+-- | Takes the result of get_article_contents and calls parse_lwn on
+-- the contained value.
+xml_from_contents :: (Maybe String) -> Maybe (IOStateArrow s b XmlTree)
+xml_from_contents =
+ fmap parse_lwn
+
+
+
+-- | Preprocessing common to both page types.
+preprocess :: (ArrowXml a) => a XmlTree XmlTree
+preprocess =
+ make_image_srcs_absolute
+ >>>
+ remove_comment_links
+ >>>
+ replace_links_with_spans
+
+
+is_link :: (ArrowXml a) => a XmlTree XmlTree
+is_link =
+ isElem >>> hasName "a"
+
+
+remove_comment_links :: (ArrowXml a) => a XmlTree XmlTree
+remove_comment_links =
+ processTopDown $ kill_comments `when` is_link
+ where
+ is_comment_link =
+ hasAttrValue "href" (contains "#Comments")
+
+ kill_comments =
+ none `when` is_comment_link
+
+replace_links_with_spans :: (ArrowXml a) => a XmlTree XmlTree
+replace_links_with_spans =
+ processTopDown $ (make_span >>> remove_attrs) `when` is_link
+ where
+ make_span = setElemName $ mkName "span"
+ remove_attrs = processAttrl none
+
+
+is_title :: (ArrowXml a) => a XmlTree XmlTree
+is_title =
+ (hasName "h2")
+ >>>
+ (hasAttrValue "class" (== "SummaryHL"))
+
+
+is_byline :: (ArrowXml a) => a XmlTree XmlTree
+is_byline =
+ (hasName "div")
+ >>>
+ (hasAttrValue "class" (== "FeatureByLine"))
+
+
+is_image :: (ArrowXml a) => a XmlTree XmlTree
+is_image = isElem >>> hasName "img"
+
+remove_title :: (ArrowXml a) => a XmlTree XmlTree
+remove_title =
+ processTopDown ((none) `when` is_title)
+
+
+remove_byline :: (ArrowXml a) => a XmlTree XmlTree
+remove_byline =
+ processTopDown ((none) `when` is_byline)
+
+
+image_srcs :: (ArrowXml a) => a XmlTree URL
+image_srcs =
+ css "img"
+ >>>
+ getAttrValue "src"
+
+make_image_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
+make_image_srcs_absolute =
+ processTopDown (make_srcs_absolute `when` is_image)
+ where
+ change_src :: (ArrowXml a) => a XmlTree XmlTree
+ change_src =
+ changeAttrValue try_make_absolute_url
+
+ make_srcs_absolute :: (ArrowXml a) => a XmlTree XmlTree
+ make_srcs_absolute =
+ processAttrl $ change_src `when` hasName "src"