X-Git-Url: http://gitweb.michael.orlitzky.com/?p=dead%2Flwn-epub.git;a=blobdiff_plain;f=src%2FMain.hs;h=1f6d7c5f436a583d5143b05c1cf55aaf4126995e;hp=4cc4216d0e02e3c81ccaff617acb97ff26033bc5;hb=291c39f59e958a012dd6e4ddec9b0276a4045b45;hpb=35e0551101fa2267f28ff434a6006a3612ad54c9

diff --git a/src/Main.hs b/src/Main.hs
index 4cc4216..1f6d7c5 100644
--- a/src/Main.hs
+++ b/src/Main.hs
@@ -1,10 +1,8 @@
-{-# LANGUAGE ScopedTypeVariables, RecordWildCards, DoAndIfThenElse #-}
+{-# LANGUAGE DoAndIfThenElse #-}
 
 module Main where
 
-import Control.Monad (when)
-import Data.Maybe (fromJust)
-import Prelude hiding (readFile)
+import Control.Concurrent.ParallelIO (stopGlobalPool)
 import System.Directory (doesFileExist)
 import System.IO (
   Handle,
@@ -14,63 +12,14 @@ import System.IO (
   stderr,
   stdout )
-import System.IO.UTF8 (readFile)
-import Test.HUnit (Assertion, assertEqual)
-import Test.Framework (Test, testGroup)
-import Test.Framework.Providers.HUnit (testCase)
-import Text.Regex.Posix ((=~))
-import Text.XML.HXT.Core hiding (when)
 
 import CommandLine (show_help)
-import Configuration (Cfg(..), get_cfg, use_account)
-import LWN.HTTP (get_page, log_in, make_cookie_jar)
-import LWN.Page (epublish, parse)
-import LWN.URI (is_lwn_url, make_absolute_url, make_https)
-import Misc (contains)
+import Configuration (Cfg(..), get_cfg)
+import LWN.Article (real_article_path)
+import LWN.HTTP (get_login_cookie)
+import LWN.Page (epublish, page_from_url)
 
-my_read_opts :: SysConfigList
-my_read_opts = [ withValidate no,
-                 withParseHTML yes,
-                 withWarnings no ]
-
--- | My version of HandsomeSoup's parseHTML.
-my_read :: String -> IOStateArrow s b XmlTree
-my_read = readString my_read_opts
-
--- | Try to parse the given article using HXT. We try a few different
---   methods; if none of them work, we return 'Nothing'.
-get_xml_from_article :: Cfg -> IO (Maybe (IOStateArrow s b XmlTree))
-get_xml_from_article cfg = do
-  my_article <- real_article_path (article cfg)
-  is_file <- doesFileExist my_article
-  case is_file of
-    True -> do
-      contents <- readFile my_article
-      return $ Just $ my_read contents
-    False -> do
-      -- Download the URL and try to parse it.
-      if use_account cfg then do
-        -- use_account would be false if these fromJusts would fail.
-        cj <- make_cookie_jar
-        li_result <- log_in cj
-                     (fromJust $ username cfg)
-                     (fromJust $ password cfg)
-        when (not li_result) $ do
-          hPutStrLn stderr "Failed to log in."
-
-        html <- get_page (Just cj) my_article
-        return $
-          case html of
-            Nothing -> Nothing
-            Just h -> Just $ my_read h
-      else do
-        html <- get_page Nothing my_article
-        return $
-          case html of
-            Nothing -> Nothing
-            Just h -> Just $ my_read h
-
 -- | If we're given an empty path, return a handle to
 -- 'stdout'. Otherwise, open the given file and return a read/write
 -- handle to that.
 get_output_handle :: FilePath -> IO Handle
 get_output_handle path =
@@ -82,83 +31,28 @@ get_output_handle path =
     openBinaryFile path WriteMode
 
 
--- | Convert the given article to either a URL or a filesystem
--- path. If the given article exists on the filesystem, we assume
--- it's a file. Otherwise, we check to see if it's a URL. Failing
--- that, we try to construct a URL from what we're given and do our
--- best.
-real_article_path :: String -> IO String
-real_article_path s = do
-  is_file <- doesFileExist s
-  return $ if is_file then s else check_cases
-  where
-    abs_current =
-      case make_absolute_url "current" of
-        Nothing -> s
-        Just ac -> ac
-    abs_article =
-      case make_absolute_url ("Articles/" ++ s) of
-        Nothing -> s
-        Just as -> as
-
-    check_cases :: String
-    check_cases
-      | is_lwn_url s = make_https s
-      | s `contains` "current" = abs_current
-      | s =~ "^[0-9]+$" = abs_article
-      | otherwise = s -- Give up
+argument_is_file :: Cfg -> IO Bool
+argument_is_file cfg = do
+  path <- real_article_path (article cfg)
+  doesFileExist path
 
 main :: IO ()
 main = do
-  cfg <- get_cfg
-  output_handle <- get_output_handle (output cfg)
-
-  maybe_html <- get_xml_from_article cfg
-
-  case maybe_html of
-    Just html -> do
-      result <- parse html
-      case result of
-        Just stuff -> epublish stuff output_handle
-        Nothing -> do
-          _ <- show_help
-          return ()
-
+  cfg' <- get_cfg
+  aif <- argument_is_file cfg'
+  cfg <- case aif of
+    False -> get_login_cookie cfg'
+    True -> return cfg'
+
+  page <- page_from_url cfg (article cfg)
+  case page of
+    Just p -> do
+      output_handle <- get_output_handle (output cfg)
+      epublish p output_handle
     Nothing -> do
+      hPutStrLn stderr "ERROR: could not parse an LWN page from the given URL."
       _ <- show_help
       return ()
-
-test_current_article_path :: Assertion
-test_current_article_path = do
-  let expected = "https://lwn.net/current"
-  actual <- real_article_path "current"
-  assertEqual "Current article path constructed" expected actual
-
-test_numbered_article_path :: Assertion
-test_numbered_article_path = do
-  let expected = "https://lwn.net/Articles/69"
-  actual <- real_article_path "69" -- I'm twelve
-  assertEqual "Numbered article path constructed" expected actual
-
-
-test_full_article_path :: Assertion
-test_full_article_path = do
-  let expected = "https://lwn.net/Articles/502979/"
-  actual <- real_article_path "https://lwn.net/Articles/502979/"
-  assertEqual "Full article path left alone" expected actual
-
-test_non_https_article_path :: Assertion
-test_non_https_article_path = do
-  let expected = "https://lwn.net/Articles/502979/"
-  actual <- real_article_path "http://lwn.net/Articles/502979/"
-  assertEqual "Non-https URL made https" expected actual
-
-main_tests :: Test
-main_tests =
-  testGroup "Main Tests" [
-    testCase "Current article path constructed" test_current_article_path,
-    testCase "Numbered article path constructed" test_numbered_article_path,
-    testCase "Full article path left alone" test_full_article_path,
-    testCase "Non-https URL made https" test_non_https_article_path ]
+
+  -- Necessary, for some reason.
+  stopGlobalPool
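
A note on the last two added lines: stopGlobalPool comes from the parallel-io
package, whose combinators (parallel, parallel_, and friends) run their work
items on a shared global thread pool, and whose documentation asks that
stopGlobalPool be called once at the end of main so the pool is shut down
cleanly before the program exits. That is all the "Necessary, for some
reason." comment is accounting for; this Main.hs never uses the pool directly,
so the parallel work presumably happens inside page_from_url/epublish. A
minimal sketch of the pattern, separate from this patch (the putStrLn actions
are placeholder work items, not functions from this codebase):

    import Control.Concurrent.ParallelIO (parallel_, stopGlobalPool)

    main :: IO ()
    main = do
      -- Run independent IO actions concurrently on the global pool.
      parallel_ [ putStrLn "fetch page one"
                , putStrLn "fetch page two" ]
      -- Shut the pool down before exiting, as parallel-io's docs require.
      stopGlobalPool

Since parallel_ returns only after all of its actions complete, putting
stopGlobalPool last guarantees no work is still queued when the program exits.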