X-Git-Url: http://gitweb.michael.orlitzky.com/?a=blobdiff_plain;f=src%2FLWN%2FHTTP.hs;h=ef4fc5aeebf14cbefd25be3cf7084052eeac4025;hb=d7eb43dabd083ff2e12e9cfbf9bc2b6fcaa55e0b;hp=a8a1980f273cfdc352911bb7bcd3b55b58137cdf;hpb=f3321e2ce7d7645ad562dc8f6620bfd561edc75d;p=dead%2Flwn-epub.git

diff --git a/src/LWN/HTTP.hs b/src/LWN/HTTP.hs
index a8a1980..ef4fc5a 100644
--- a/src/LWN/HTTP.hs
+++ b/src/LWN/HTTP.hs
@@ -3,8 +3,10 @@
 module LWN.HTTP
 where
 
+import Control.Concurrent.ParallelIO (parallel)
 import qualified Data.ByteString as B (hPut)
 import qualified Data.Map as Map (Map, empty, insert)
+import Data.Maybe (fromJust, isNothing)
 import Network.Curl (
   CurlCode(..),
   CurlOption(..),
@@ -18,8 +20,11 @@ import Network.Curl (
 import Network.Curl.Download (openURI)
 import System.Directory (doesFileExist, getTemporaryDirectory)
 import System.IO (hClose, hPutStrLn, stderr)
+import qualified System.IO.UTF8 as Utf8 (readFile)
 import System.IO.Temp (openBinaryTempFile, openTempFile)
 
+import qualified Configuration as C (Cfg(..))
+import LWN.Article (real_article_path)
 import LWN.URI (URL, filename)
 
 login_url :: URL
@@ -39,7 +44,7 @@ default_curl_opts :: [CurlOption]
 default_curl_opts =
   [ -- The Global cache is not thread-friendly.
     CurlDNSUseGlobalCache False,
-
+
     -- And we don't want to use a DNS cache anyway.
     CurlDNSCacheTimeout 0,
 
@@ -130,7 +135,7 @@ log_in cookie_jar username password =
         CurlCookieJar cookie_jar,
         CurlPost True,
         CurlPostFields post_data ]
-
+
     curl_opts :: [CurlOption]
     curl_opts = default_curl_opts ++ post_opts
 
@@ -173,7 +178,7 @@ type ImageMap = Map.Map URL FilePath
 
 download_image_urls :: [URL] -> IO ImageMap
 download_image_urls image_urls = do
-  files <- mapM save_image image_urls
+  files <- parallel $ map save_image image_urls
   let pairs = zip image_urls files
   return $ foldl my_insert empty_map pairs
   where
@@ -182,3 +187,48 @@ download_image_urls image_urls = do
     my_insert :: ImageMap -> (URL, Maybe FilePath) -> ImageMap
     my_insert dict (_, Nothing) = dict
     my_insert dict (k, Just v) = Map.insert k v dict
+
+
+
+
+
+get_login_cookie :: C.Cfg -> IO C.Cfg
+get_login_cookie cfg
+  | isNothing (C.username cfg) = return cfg
+  | isNothing (C.password cfg) = return cfg
+  | otherwise = do
+      let uname = fromJust $ C.username cfg
+      let pword = fromJust $ C.password cfg
+      cj <- make_cookie_jar
+      li_result <- log_in cj uname pword
+
+      case li_result of
+        Left err -> do
+          let msg = "Failed to log in. " ++ err
+          hPutStrLn stderr msg
+        Right response_body -> do
+          hPutStrLn stderr response_body
+
+      return $ cfg { C.cookie_jar = Just cj }
+
+
+-- | Try to retrieve the contents of the given article, either from a
+-- local file or by downloading it. If neither works, we return 'Nothing'.
+get_article_contents :: C.Cfg -> URL -> IO (Maybe String)
+get_article_contents cfg article_name = do
+  my_article <- real_article_path article_name
+  is_file <- doesFileExist my_article
+  case is_file of
+    True -> do
+      contents <- Utf8.readFile my_article
+      return $ Just contents
+    False -> do
+      -- Not a local file; download the URL instead.
+      html <- get_page (C.cookie_jar cfg) my_article
+
+      case html of
+        Left err -> do
+          let msg = "Failed to retrieve page. " ++ err
+          hPutStrLn stderr msg
+          return Nothing
+        Right h -> return $ Just h
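
The switch from mapM to parallel runs the image downloads on parallel-io's
global thread pool, which the package documentation says must be shut down
with stopGlobalPool at the very end of the program. A minimal sketch of a
hypothetical caller (main and the URLs below are illustrative, not part of
this commit):

-- Hypothetical driver for the parallel image downloads above.
-- stopGlobalPool must run exactly once, after all parallel calls.
import Control.Concurrent.ParallelIO (stopGlobalPool)
import qualified Data.Map as Map (toList)
import LWN.HTTP (download_image_urls)

main :: IO ()
main = do
  image_map <- download_image_urls [ "http://example.com/a.png",
                                     "http://example.com/b.png" ]
  -- Print each (url, local path) pair that was downloaded.
  mapM_ print (Map.toList image_map)
  stopGlobalPool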
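
The two new functions compose into a two-step flow: get_login_cookie fills
in the cookie jar when a username and password are configured (and returns
the Cfg unchanged otherwise), after which get_article_contents reads a local
file or falls back to an HTTP download using that jar. A sketch of a
hypothetical caller (show_article is not part of this commit):

-- Hypothetical caller: log in first, then fetch one article.
import System.IO (hPutStrLn, stderr)
import qualified Configuration as C (Cfg(..))
import LWN.HTTP (get_login_cookie, get_article_contents)
import LWN.URI (URL)

show_article :: C.Cfg -> URL -> IO ()
show_article cfg article = do
  cfg' <- get_login_cookie cfg
  contents <- get_article_contents cfg' article
  case contents of
    Nothing   -> hPutStrLn stderr "Could not fetch the article."
    Just body -> putStrLn body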