X-Git-Url: http://gitweb.michael.orlitzky.com/?p=dead%2Flwn-epub.git;a=blobdiff_plain;f=src%2FMain.hs;h=1f6d7c5f436a583d5143b05c1cf55aaf4126995e;hp=981a70531d717c1c7bef8f3fa874d26e943140eb;hb=291c39f59e958a012dd6e4ddec9b0276a4045b45;hpb=4220827f62d772d7edcbdcc1c2f13d6c2eb5f534

diff --git a/src/Main.hs b/src/Main.hs
index 981a705..1f6d7c5 100644
--- a/src/Main.hs
+++ b/src/Main.hs
@@ -2,8 +2,7 @@
 module Main
 where
 
-import Data.Maybe (fromJust)
-import Prelude hiding (readFile)
+import Control.Concurrent.ParallelIO (stopGlobalPool)
 import System.Directory (doesFileExist)
 import System.IO (
   Handle,
@@ -11,63 +10,14 @@ import System.IO (
   hPutStrLn,
   openBinaryFile,
   stderr,
-  stdout)
-import System.IO.UTF8 (readFile)
-import Text.XML.HXT.Core (
-  IOStateArrow,
-  XmlTree)
+  stdout
+  )
 
 import CommandLine (show_help)
-import Configuration (Cfg(..), get_cfg, use_account)
+import Configuration (Cfg(..), get_cfg)
 import LWN.Article (real_article_path)
-import LWN.HTTP (get_page, log_in, make_cookie_jar)
-import LWN.Page (epublish, parse)
-import LWN.XHTML (parse_lwn)
-
-
-
--- | Try to parse the given article using HXT. We try a few different
--- methods; if none of them work, we return 'Nothing'.
-get_xml_from_article :: Cfg -> IO (Maybe (IOStateArrow s b XmlTree))
-get_xml_from_article cfg = do
-  my_article <- real_article_path (article cfg)
-  is_file <- doesFileExist my_article
-  case is_file of
-    True -> do
-      contents <- readFile my_article
-      return $ Just $ parse_lwn contents
-    False -> do
-      -- Download the URL and try to parse it.
-      if use_account cfg then do
-        -- use_account would be false if these fromJusts would fail.
-        cj <- make_cookie_jar
-        li_result <- log_in cj
-                      (fromJust $ username cfg)
-                      (fromJust $ password cfg)
-
-        case li_result of
-          Left err -> do
-            let msg = "Failed to log in. " ++ err
-            hPutStrLn stderr msg
-          Right response_body -> do
-            hPutStrLn stderr response_body
-
-        html <- get_page (Just cj) my_article
-
-        case html of
-          Left err -> do
-            let msg = "Failed to retrieve page. " ++ err
-            hPutStrLn stderr msg
-            return Nothing
-          Right h -> return $ Just $ parse_lwn h
-      else do
-        html <- get_page Nothing my_article
-        case html of
-          Left err -> do
-            let msg = "Failed to retrieve page. " ++ err
-            hPutStrLn stderr msg
-            return Nothing
-          Right h -> return $ Just $ parse_lwn h
+import LWN.HTTP (get_login_cookie)
+import LWN.Page (epublish, page_from_url)
 
 
 -- | If we're given an empty path, return a handle to
@@ -81,21 +31,28 @@ get_output_handle path =
       openBinaryFile path WriteMode
 
 
+argument_is_file :: Cfg -> IO Bool
+argument_is_file cfg = do
+  path <- real_article_path (article cfg)
+  doesFileExist path
+
 main :: IO ()
 main = do
-  cfg <- get_cfg
-  output_handle <- get_output_handle (output cfg)
-  maybe_html <- get_xml_from_article cfg
-
-  case maybe_html of
-    Just html -> do
-      result <- parse html
-      case result of
-        Just stuff -> epublish stuff output_handle
-        Nothing -> do
-          _ <- show_help
-          return ()
-
+  cfg' <- get_cfg
+  aif <- argument_is_file cfg'
+  cfg <- case aif of
+          False -> get_login_cookie cfg'
+          True -> return cfg'
+
+  page <- page_from_url cfg (article cfg)
+  case page of
+    Just p -> do
+      output_handle <- get_output_handle (output cfg)
+      epublish p output_handle
     Nothing -> do
+      hPutStrLn stderr "ERROR: could not parse an LWN page from the given URL."
       _ <- show_help
       return ()
+
+  -- Necessary, for some reason.
+  stopGlobalPool