+{-# LANGUAGE DoAndIfThenElse #-}
module Main
where
-data Article = Article { headline :: String,
- byline :: String,
- body_html :: String }
- deriving (Eq, Show)
+import Data.List (isPrefixOf)
+import Data.Maybe (fromJust)
+import Prelude hiding (readFile)
+import System.Directory (doesFileExist)
+import System.IO (
+ Handle,
+ IOMode (WriteMode),
+ hPutStrLn,
+ openBinaryFile,
+ stderr,
+ stdout
+ )
+import System.IO.UTF8 (readFile)
+import Test.HUnit (Assertion, assertEqual)
+import Test.Framework (Test, testGroup)
+import Test.Framework.Providers.HUnit (testCase)
+import Text.Regex.Posix ((=~))
+import Text.XML.HXT.Core (
+ IOStateArrow,
+ SysConfigList,
+ XmlTree,
+ no,
+ readString,
+ withParseHTML,
+ withValidate,
+ withWarnings,
+ yes
+ )
+import CommandLine (show_help)
+import Configuration (Cfg(..), get_cfg, use_account)
+import LWN.HTTP (get_page, log_in, make_cookie_jar)
+import LWN.Page (epublish, parse)
+import LWN.URI (
+ add_trailing_slash,
+ is_lwn_url,
+ try_make_absolute_url,
+ make_https)
-parse_article :: String -> String
-parse_article _ = ""
+
+
+-- | HXT parser configuration: skip DTD validation, parse the input
+-- as (tag-soup tolerant) HTML, and suppress parser warnings.
+my_read_opts :: SysConfigList
+my_read_opts = [ withValidate no,
+                 withParseHTML yes,
+                 withWarnings no ]
+
+-- | My version of HandsomeSoup's parseHTML: read an HTML 'String'
+-- into an HXT arrow, using 'my_read_opts' for the parser settings.
+my_read :: String -> IOStateArrow s b XmlTree
+my_read = readString my_read_opts
+
+
+-- | Try to parse the given article using HXT. The article is either
+-- read from disk (when it names an existing file) or downloaded,
+-- logging in first when the configuration supplies credentials.
+-- If the download fails we report the error on stderr and return
+-- 'Nothing'.
+get_xml_from_article :: Cfg -> IO (Maybe (IOStateArrow s b XmlTree))
+get_xml_from_article cfg = do
+  my_article <- real_article_path (article cfg)
+  is_file <- doesFileExist my_article
+  if is_file then do
+    contents <- readFile my_article
+    return $ Just $ my_read contents
+  else do
+    -- Not a local file: fetch the URL, with or without a session.
+    maybe_cj <-
+      if use_account cfg then do
+        -- use_account would be false if these fromJusts would fail.
+        cj <- make_cookie_jar
+        li_result <- log_in cj
+                     (fromJust $ username cfg)
+                     (fromJust $ password cfg)
+
+        -- A failed login is reported but not fatal; we still attempt
+        -- the request and let the server decide what we may see.
+        case li_result of
+          Left err -> hPutStrLn stderr ("Failed to log in. " ++ err)
+          Right response_body -> hPutStrLn stderr response_body
+
+        return (Just cj)
+      else
+        return Nothing
+
+    html <- get_page maybe_cj my_article
+
+    case html of
+      Left err -> do
+        hPutStrLn stderr ("Failed to retrieve page. " ++ err)
+        return Nothing
+      Right h -> return $ Just $ my_read h
+
+-- | Decide where output should go: an empty path means a handle to
+-- 'stdout'; any other path is opened in binary write mode and that
+-- handle is returned.
+get_output_handle :: FilePath -> IO Handle
+get_output_handle ""   = return stdout
+get_output_handle path = openBinaryFile path WriteMode
+
+
+
+-- | Turn the user-supplied article designator into something we can
+-- fetch or read.  An existing file wins; otherwise an LWN URL is
+-- forced to https, \"current...\" and bare article numbers are made
+-- absolute, and anything else is passed through untouched.
+real_article_path :: String -> IO String
+real_article_path path = do
+  on_disk <- doesFileExist path
+  return $ if on_disk then path else add_trailing_slash guessed
+  where
+    -- Best-effort URL reconstruction for the non-file case.
+    guessed :: String
+    guessed
+      | is_lwn_url path             = make_https path
+      | "current" `isPrefixOf` path = try_make_absolute_url ("/" ++ path)
+      | path =~ "^[0-9]+$"          = try_make_absolute_url ("Articles/" ++ path)
+      | otherwise                   = path -- give up
main :: IO ()
main = do
-  putStrLn "Hello, world."
+  -- Read the runtime configuration (presumably CLI flags plus any
+  -- config file -- see Configuration.get_cfg; TODO confirm).
+  cfg <- get_cfg
+  -- An empty output path yields stdout (see 'get_output_handle').
+  output_handle <- get_output_handle (output cfg)
+  maybe_html <- get_xml_from_article cfg
+
+  case maybe_html of
+    Just html -> do
+      result <- parse html
+      case result of
+        -- Parsed successfully: render the page via 'epublish'.
+        Just stuff -> epublish stuff output_handle
+        Nothing -> do
+          -- Page retrieved but not parseable: show usage and stop.
+          _ <- show_help
+          return ()
+
+    Nothing -> do
+      -- Could not obtain the article at all: show usage and stop.
+      _ <- show_help
+      return ()
+
+
+-- | The bare word "current" resolves to the current-issue URL.
+test_current_article_path :: Assertion
+test_current_article_path =
+  real_article_path "current" >>=
+    assertEqual "Current article path constructed" "https://lwn.net/current/"
+
+-- | "current/bigpage" resolves under the site root.
+test_current_bigpage_article_path :: Assertion
+test_current_bigpage_article_path =
+  real_article_path "current/bigpage" >>=
+    assertEqual
+      "Current bigpage article path constructed"
+      "https://lwn.net/current/bigpage"
+
+-- | A bare article number becomes a full /Articles/ URL.
+test_numbered_article_path :: Assertion
+test_numbered_article_path =
+  real_article_path "69" >>=
+    assertEqual
+      "Numbered article path constructed"
+      "https://lwn.net/Articles/69/"
+
+
+-- | A complete https article URL passes through unchanged.
+test_full_article_path :: Assertion
+test_full_article_path =
+  real_article_path "https://lwn.net/Articles/502979/" >>=
+    assertEqual
+      "Full article path left alone"
+      "https://lwn.net/Articles/502979/"
+
+-- | Plain-http LWN URLs are upgraded to https.
+test_non_https_article_path :: Assertion
+test_non_https_article_path =
+  real_article_path "http://lwn.net/Articles/502979/" >>=
+    assertEqual
+      "Non-https URL made https"
+      "https://lwn.net/Articles/502979/"
+
+-- | All of Main's unit tests, grouped for the test runner.
+main_tests :: Test
+main_tests = testGroup "Main Tests" tests
+  where
+    tests :: [Test]
+    tests =
+      [ testCase "Current article path constructed" test_current_article_path
+      , testCase
+          "Current bigpage article path constructed"
+          test_current_bigpage_article_path
+      , testCase "Numbered article path constructed" test_numbered_article_path
+      , testCase "Full article path left alone" test_full_article_path
+      , testCase "Non-https URL made https" test_non_https_article_path
+      ]