X-Git-Url: http://gitweb.michael.orlitzky.com/?p=dead%2Flwn-epub.git;a=blobdiff_plain;f=src%2FMain.hs;h=5ce40269bd950a382201501f2101ed7ba7505ccf;hp=655f1bef40c0680bda2c98e2d4ae9bc6f06c2cd8;hb=f3321e2ce7d7645ad562dc8f6620bfd561edc75d;hpb=6789361c1ee570df06878535aff73d99b87528a5

diff --git a/src/Main.hs b/src/Main.hs
index 655f1be..5ce4026 100644
--- a/src/Main.hs
+++ b/src/Main.hs
@@ -1,44 +1,74 @@
-{-# LANGUAGE ScopedTypeVariables, RecordWildCards #-}
+{-# LANGUAGE DoAndIfThenElse #-}
 
 module Main where
 
+import Data.Maybe (fromJust)
 import Prelude hiding (readFile)
-import System.Directory(doesFileExist)
+import System.Directory (doesFileExist)
 import System.IO (
   Handle,
   IOMode (WriteMode),
+  hPutStrLn,
   openBinaryFile,
-  stdout
-  )
+  stderr,
+  stdout)
 import System.IO.UTF8 (readFile)
-import Text.XML.HXT.Core
+import Text.XML.HXT.Core (
+  IOStateArrow,
+  XmlTree)
 
-import CommandLine (Args(..), apply_args, show_help)
-import LWN.Page
+import CommandLine (show_help)
+import Configuration (Cfg(..), get_cfg, use_account)
+import LWN.Article (real_article_path)
+import LWN.HTTP (get_page, log_in, make_cookie_jar)
+import LWN.Page (epublish, parse)
+import LWN.XHTML (parse_lwn)
 
-my_read_opts :: SysConfigList
-my_read_opts = [ withValidate no,
-                 withParseHTML yes,
-                 withWarnings no ]
-
--- | My version of HandsomeSoup's parseHTML.
-my_read :: String -> IOStateArrow s b XmlTree
-my_read = readString my_read_opts
 
 -- | Try to parse the given article using HXT. We try a few different
 --   methods; if none of them work, we return 'Nothing'.
-get_xml_from_article :: String -> IO (Maybe (IOStateArrow s b XmlTree))
-get_xml_from_article s = do
-  article <- real_article_path s
-  is_file <- doesFileExist article
+get_xml_from_article :: Cfg -> IO (Maybe (IOStateArrow s b XmlTree))
+get_xml_from_article cfg = do
+  my_article <- real_article_path (article cfg)
+  is_file <- doesFileExist my_article
   case is_file of
     True -> do
-      contents <- readFile article
-      return $ Just $ my_read contents
+      contents <- readFile my_article
+      return $ Just $ parse_lwn contents
     False -> do
       -- Download the URL and try to parse it.
-      return Nothing
+      if use_account cfg then do
+        -- use_account would be false if these fromJusts would fail.
+        cj <- make_cookie_jar
+        li_result <- log_in cj
+                     (fromJust $ username cfg)
+                     (fromJust $ password cfg)
+
+        case li_result of
+          Left err -> do
+            let msg = "Failed to log in. " ++ err
+            hPutStrLn stderr msg
+          Right response_body -> do
+            hPutStrLn stderr response_body
+
+        html <- get_page (Just cj) my_article
+
+        case html of
+          Left err -> do
+            let msg = "Failed to retrieve page. " ++ err
+            hPutStrLn stderr msg
+            return Nothing
+          Right h -> return $ Just $ parse_lwn h
+      else do
+        html <- get_page Nothing my_article
+        case html of
+          Left err -> do
+            let msg = "Failed to retrieve page. " ++ err
+            hPutStrLn stderr msg
+            return Nothing
+          Right h -> return $ Just $ parse_lwn h
+
 
 -- | If we're given an empty path, return a handle to
 --   'stdout'. Otherwise, open the given file and return a read/write
@@ -51,30 +81,21 @@ get_output_handle path =
     openBinaryFile path WriteMode
 
 
--- | Convert the given article to either a URL or a filesystem
---   path. If the given article exists on the filesystem, we assume
---   it's a file. Otherwise, we check to see if it's a URL. Failing
---   that, we try to construct a URL from what we're given and do our
---   best.
-real_article_path :: String -> IO String
-real_article_path = return . id
-
 
 main :: IO ()
 main = do
-  Args{..} <- apply_args
-  output_handle <- get_output_handle output
-  maybe_html <- get_xml_from_article article
+  cfg <- get_cfg
+  output_handle <- get_output_handle (output cfg)
+  maybe_html <- get_xml_from_article cfg
   case maybe_html of
-
     Just html -> do
       result <- parse html
       case result of
         Just stuff -> epublish stuff output_handle
-        Nothing -> return ()
+        Nothing -> do
+          _ <- show_help
+          return ()
     Nothing -> do
       _ <- show_help
       return ()
-
-
   putStrLn "Done."