+{-# LANGUAGE DoAndIfThenElse #-}
module Main
where
-import Data.Maybe (fromJust)
-import Text.HandsomeSoup (parseHtml)
-import System.Time (ClockTime( TOD ), getClockTime)
+import Control.Concurrent (threadDelay)
+import Control.Monad (when)
+import Data.Maybe (fromJust, isJust)
+import Prelude hiding (readFile)
+import System.Directory (doesFileExist)
+import System.IO (
+ Handle,
+ IOMode (WriteMode),
+ hPutStrLn,
+ openBinaryFile,
+ stderr,
+ stdout
+ )
+import System.IO.UTF8 (readFile)
+import Test.HUnit (Assertion, assertEqual)
+import Test.Framework (Test, testGroup)
+import Test.Framework.Providers.HUnit (testCase)
+import Text.Regex.Posix ((=~))
+import Text.XML.HXT.Core (
+ IOStateArrow,
+ SysConfigList,
+ XmlTree,
+ no,
+ readString,
+ withParseHTML,
+ withValidate,
+ withWarnings,
+ yes
+ )
+import CommandLine (show_help)
+import Configuration (Cfg(..), get_cfg, use_account)
+import LWN.HTTP (get_page, log_in, make_cookie_jar)
+import LWN.Page (epublish, parse)
+import LWN.URI (add_trailing_slash, is_lwn_url, make_absolute_url, make_https)
+import Misc (contains)
-import Epublishable
-import LWN.ArticlePage
+
+my_read_opts :: SysConfigList
+my_read_opts = [ withValidate no,
+ withParseHTML yes,
+ withWarnings no ]
+
+-- | My version of HandsomeSoup's parseHtml.
+my_read :: String -> IOStateArrow s b XmlTree
+my_read = readString my_read_opts
+
+
+-- | A wrapper around threadDelay which takes seconds instead of
+-- microseconds as its argument.
+thread_sleep :: Int -> IO ()
+thread_sleep seconds = do
+ let microseconds = seconds * (10 ^ (6 :: Int))
+ threadDelay microseconds
+
+
+-- | Try to parse the given article using HXT. We try a few different
+-- methods; if none of them work, we return 'Nothing'.
+get_xml_from_article :: Cfg -> IO (Maybe (IOStateArrow s b XmlTree))
+get_xml_from_article cfg = do
+ my_article <- real_article_path (article cfg)
+ is_file <- doesFileExist my_article
+ case is_file of
+ True -> do
+ contents <- readFile my_article
+ return $ Just $ my_read contents
+ False -> do
+ -- Download the URL and try to parse it.
+ if use_account cfg then do
+          -- use_account would be False if these fromJust calls could fail.
+ cj <- make_cookie_jar
+ li_result <- log_in cj
+ (fromJust $ username cfg)
+ (fromJust $ password cfg)
+
+ -- Without this, the cookie file is empty during
+ -- get_page. Whaaat?
+ thread_sleep 1
+
+ when (isJust li_result) $ do
+ let msg = "Failed to log in. " ++ (fromJust li_result)
+ hPutStrLn stderr msg
+
+ html <- get_page (Just cj) my_article
+
+ case html of
+ Left err -> do
+ let msg = "Failed to retrieve page. " ++ err
+ hPutStrLn stderr msg
+ return Nothing
+ Right h -> return $ Just $ my_read h
+ else do
+ html <- get_page Nothing my_article
+ case html of
+ Left err -> do
+ let msg = "Failed to retrieve page. " ++ err
+ hPutStrLn stderr msg
+ return Nothing
+ Right h -> return $ Just $ my_read h
+
+-- | If we're given an empty path, return a handle to
+-- 'stdout'. Otherwise, open the given file and return a write
+-- handle to that.
+get_output_handle :: FilePath -> IO Handle
+get_output_handle path =
+ if (null path) then
+ return stdout
+ else
+ openBinaryFile path WriteMode
+
+
+
+-- | Convert the given article to either a URL or a filesystem
+-- path. If the given article exists on the filesystem, we assume
+-- it's a file. Otherwise, we check to see if it's a URL. Failing
+-- that, we try to construct a URL from what we're given and do our
+-- best.
+real_article_path :: String -> IO String
+real_article_path s = do
+ is_file <- doesFileExist s
+ return $ if is_file then s else add_trailing_slash check_cases
+ where
+ abs_current =
+ case make_absolute_url "current" of
+ Nothing -> s
+ Just ac -> ac
+ abs_article =
+ case make_absolute_url ("Articles/" ++ s) of
+ Nothing -> s
+ Just as -> as
+
+ check_cases :: String
+ check_cases
+ | is_lwn_url s = make_https s
+ | s `contains` "current" = abs_current
+ | s =~ "^[0-9]+$" = abs_article
+ | otherwise = s -- Give up
main :: IO ()
main = do
- article_html <- readFile "test/fixtures/501317-article.html"
- ioap <- parse $ parseHtml article_html
- let article_page :: ArticlePage = fromJust $ ioap
- (TOD t _) <- getClockTime
- epublish article_page "out.epub" t
- putStrLn "Done."
+ cfg <- get_cfg
+ output_handle <- get_output_handle (output cfg)
+
+ when (use_account cfg) $ do
+ putStrLn "Using account."
+
+ maybe_html <- get_xml_from_article cfg
+
+ case maybe_html of
+ Just html -> do
+ result <- parse html
+ case result of
+ Just stuff -> epublish stuff output_handle
+ Nothing -> do
+ _ <- show_help
+ return ()
+
+ Nothing -> do
+ _ <- show_help
+ return ()
+
+
+test_current_article_path :: Assertion
+test_current_article_path = do
+ let expected = "https://lwn.net/current/"
+ actual <- real_article_path "current"
+ assertEqual "Current article path constructed" expected actual
+
+test_numbered_article_path :: Assertion
+test_numbered_article_path = do
+ let expected = "https://lwn.net/Articles/69/"
+ actual <- real_article_path "69" -- I'm twelve
+ assertEqual "Numbered article path constructed" expected actual
+
+
+test_full_article_path :: Assertion
+test_full_article_path = do
+ let expected = "https://lwn.net/Articles/502979/"
+ actual <- real_article_path "https://lwn.net/Articles/502979/"
+ assertEqual "Full article path left alone" expected actual
+
+test_non_https_article_path :: Assertion
+test_non_https_article_path = do
+ let expected = "https://lwn.net/Articles/502979/"
+ actual <- real_article_path "http://lwn.net/Articles/502979/"
+ assertEqual "Non-https URL made https" expected actual
+
+main_tests :: Test
+main_tests =
+ testGroup "Main Tests" [
+ testCase "Current article path constructed" test_current_article_path,
+ testCase "Numbered article path constructed" test_numbered_article_path,
+ testCase "Full article path left alone" test_full_article_path,
+ testCase "Non-https URL made https" test_non_https_article_path ]