-{-# LANGUAGE ScopedTypeVariables, RecordWildCards #-}
+{-# LANGUAGE DoAndIfThenElse #-}
module Main
where
-import Prelude hiding (readFile)
+import Control.Concurrent.ParallelIO (stopGlobalPool)
import System.Directory (doesFileExist)
import System.IO (
Handle,
IOMode (WriteMode),
+ hPutStrLn,
openBinaryFile,
+ stderr,
stdout
)
-import System.IO.UTF8 (readFile)
-import Test.HUnit (Assertion, assertEqual)
-import Test.Framework (Test, testGroup)
-import Test.Framework.Providers.HUnit (testCase)
-import Text.Regex.Posix ((=~))
-import Text.XML.HXT.Core
import CommandLine (show_help)
import Configuration (Cfg(..), get_cfg)
-import LWN.Page
-import LWN.URI (is_lwn_url, make_absolute_url, make_https)
-import Misc (contains)
+import LWN.Article (real_article_path)
+import LWN.HTTP (get_login_cookie)
+import LWN.Page (epublish, page_from_url)
-my_read_opts :: SysConfigList
-my_read_opts = [ withValidate no,
- withParseHTML yes,
- withWarnings no ]
-
--- | My version of HandsomeSoup's parseHTML.
-my_read :: String -> IOStateArrow s b XmlTree
-my_read = readString my_read_opts
-
--- | Try to parse the given article using HXT. We try a few different
--- methods; if none of them work, we return 'Nothing'.
-get_xml_from_article :: String -> IO (Maybe (IOStateArrow s b XmlTree))
-get_xml_from_article s = do
- article <- real_article_path s
- is_file <- doesFileExist article
- case is_file of
- True -> do
- contents <- readFile article
- return $ Just $ my_read contents
- False -> do
- -- Download the URL and try to parse it.
- return Nothing
-
-- | If we're given an empty path, return a handle to
-- 'stdout'. Otherwise, open the given file and return a read/write
-- handle to that.
openBinaryFile path WriteMode
-
--- | Convert the given article to either a URL or a filesystem
--- path. If the given article exists on the filesystem, we assume
--- it's a file. Otherwise, we check to see if it's a URL. Failing
--- that, we try to construct a URL from what we're given and do our
--- best.
-real_article_path :: String -> IO String
-real_article_path s = do
- is_file <- doesFileExist s
- return $ if is_file then s else check_cases
- where
- abs_current =
- case make_absolute_url "current" of
- Nothing -> s
- Just ac -> ac
- abs_article =
- case make_absolute_url ("Articles/" ++ s) of
- Nothing -> s
- Just as -> as
-
- check_cases :: String
- check_cases
- | is_lwn_url s = make_https s
- | s `contains` "current" = abs_current
- | s =~ "^[0-9]+$" = abs_article
- | otherwise = s -- Give up
+argument_is_file :: Cfg -> IO Bool
+argument_is_file cfg = do
+ path <- real_article_path (article cfg)
+ doesFileExist path
main :: IO ()
main = do
- Cfg{..} <- get_cfg
- output_handle <- get_output_handle output
- maybe_html <- get_xml_from_article article
-
- case maybe_html of
-
- Just html -> do
- result <- parse html
- case result of
- Just stuff -> epublish stuff output_handle
- Nothing -> do
- _ <- show_help
- return ()
-
+ cfg' <- get_cfg
+ aif <- argument_is_file cfg'
+ cfg <- case aif of
+ False -> get_login_cookie cfg'
+ True -> return cfg'
+
+ page <- page_from_url cfg (article cfg)
+ case page of
+ Just p -> do
+ output_handle <- get_output_handle (output cfg)
+ epublish p output_handle
Nothing -> do
+ hPutStrLn stderr "ERROR: could not parse an LWN page from the given URL."
_ <- show_help
return ()
-
-
-test_current_article_path :: Assertion
-test_current_article_path = do
- let expected = "https://lwn.net/current"
- actual <- real_article_path "current"
- assertEqual "Current article path constructed" expected actual
-
-test_numbered_article_path :: Assertion
-test_numbered_article_path = do
- let expected = "https://lwn.net/Articles/69"
- actual <- real_article_path "69" -- I'm twelve
- assertEqual "Numbered article path constructed" expected actual
-
-
-test_full_article_path :: Assertion
-test_full_article_path = do
- let expected = "https://lwn.net/Articles/502979/"
- actual <- real_article_path "https://lwn.net/Articles/502979/"
- assertEqual "Full article path left alone" expected actual
-
-test_non_https_article_path :: Assertion
-test_non_https_article_path = do
- let expected = "https://lwn.net/Articles/502979/"
- actual <- real_article_path "http://lwn.net/Articles/502979/"
- assertEqual "Non-https URL made https" expected actual
-
-main_tests :: Test
-main_tests =
- testGroup "Main Tests" [
- testCase "Current article path constructed" test_current_article_path,
- testCase "Numbered article path constructed" test_numbered_article_path,
- testCase "Full article path left alone" test_full_article_path,
- testCase "Non-https URL made https" test_non_https_article_path ]
-
\ No newline at end of file
+ -- Shut down parallel-io's global thread pool before exiting, as its docs require.
+ stopGlobalPool