From: Michael Orlitzky
Date: Mon, 20 Aug 2012 19:55:02 +0000 (-0400)
Subject: Replace the curl routines with http-conduit ones.
X-Git-Tag: v0.0.1~4
X-Git-Url: http://gitweb.michael.orlitzky.com/?p=dead%2Flwn-epub.git;a=commitdiff_plain;h=cf0659082f2d0e6afa6b43a6ad55432a00b155e9

Replace the curl routines with http-conduit ones.
---

diff --git a/src/LWN/HTTP.hs b/src/LWN/HTTP.hs
index ef4fc5a..6216a0f 100644
--- a/src/LWN/HTTP.hs
+++ b/src/LWN/HTTP.hs
@@ -4,140 +4,102 @@ module LWN.HTTP where
 import Control.Concurrent.ParallelIO (parallel)
-import qualified Data.ByteString as B (hPut)
+import qualified Data.ByteString.Char8 as C (ByteString, pack)
+import qualified Data.ByteString.Lazy as L (ByteString, hPut)
+import Data.ByteString.Lazy.UTF8 (toString)
 import qualified Data.Map as Map (Map, empty, insert)
 import Data.Maybe (fromJust, isNothing)
-import Network.Curl (
-  CurlCode(..),
-  CurlOption(..),
-  CurlResponse,
-  do_curl_,
-  initialize,
-  respBody,
-  respCurlCode,
-  withCurlDo
+import Data.Time (getCurrentTime)
+import Network.HTTP.Conduit (
+  CookieJar,
+  Response(..),
+  Request(..),
+  httpLbs,
+  insertCookiesIntoRequest,
+  parseUrl,
+  updateCookieJar,
+  urlEncodedBody,
+  withManager
   )
-import Network.Curl.Download (openURI)
+import Network.HTTP.Types (status200, status302)
+import Network.HTTP.Types.Method (methodPost)
 import System.Directory (doesFileExist, getTemporaryDirectory)
-import System.IO (hClose, hPutStrLn, stderr)
+import System.IO (hPutStrLn, stderr)
 import qualified System.IO.UTF8 as Utf8 (readFile)
-import System.IO.Temp (openBinaryTempFile, openTempFile)
 
-import qualified Configuration as C (Cfg(..))
+import System.IO.Temp (openBinaryTempFile)
+
+-- Also grab the empty cookie jar from Configuration since we'll
+-- use it in a few places.
+import qualified Configuration as C (Cfg(..), cj_empty)
 import LWN.Article (real_article_path)
 import LWN.URI (URL, filename)
 
-login_url :: URL
-login_url = "https://lwn.net/login"
-
-username_field :: String
-username_field = "Username"
-
-password_field :: String
-password_field = "Password"
-
-submit_field :: String
-submit_field = "submit"
-
-
-default_curl_opts :: [CurlOption]
-default_curl_opts =
-  [ -- The Global cache is not thread-friendly.
-    CurlDNSUseGlobalCache False,
-
-    -- And we don't want to use a DNS cache anyway.
-    CurlDNSCacheTimeout 0,
+-- | The type of response we get back from http-conduit's httpLbs.
+type LBSResponse = Response L.ByteString
 
-    -- Follow redirects.
-    CurlFollowLocation True,
-
-    -- Give it a little time...
-    CurlTimeout 45 ]
+login_url :: URL
+login_url = "https://lwn.net/login"
 
-make_cookie_jar :: IO FilePath
-make_cookie_jar = do
-  temp_dir <- getTemporaryDirectory
-  let file_name_template = "lwn-epub-cookies.txt"
-  (out_path, out_handle) <- openTempFile temp_dir file_name_template
-  hClose out_handle -- We just want to create it for now.
-  return out_path
+username_field :: C.ByteString
+username_field = C.pack "Username"
 
-get_page :: Maybe FilePath -> URL -> IO (Either String String)
-get_page cookie_file url =
-  withCurlDo $ do
-    -- Create a curl instance.
-    curl <- initialize
+password_field :: C.ByteString
+password_field = C.pack "Password"
 
-    -- Perform the request, and get back a CurlResponse object.
-    -- The cast is needed to specify how we would like our headers
-    -- and body returned (Strings).
-    resp <- do_curl_ curl url curl_opts :: IO CurlResponse
+submit_field :: C.ByteString
+submit_field = C.pack "submit"
 
-    -- Pull out the response code as a CurlCode.
-    let code = respCurlCode resp
-
-    return $
-      case code of
-        CurlOK -> Right (respBody resp)
-        error_code -> Left ("HTTP Error: " ++ (show error_code))
-        -- If an error occurred, we want to dump as much information as
-        -- possible. If this becomes a problem, we can use respGetInfo to
-        -- query the response object for more information
-  where
-    get_opts =
-      case cookie_file of
-        Nothing -> []
-        Just cookies -> [ CurlCookieFile cookies ]
+-- | Get the requested URL as a L.ByteString, or return the response
+-- as an error. Use the given cookie jar for the request.
+get_page :: CookieJar -> URL -> IO (Either LBSResponse L.ByteString)
+get_page cj url = do
+  init_req <- parseUrl url
+  let req' = init_req { checkStatus = \_ _ -> Nothing }
+  now <- getCurrentTime
+  let (req, _) = insertCookiesIntoRequest req' cj now
+  resp <- withManager $ httpLbs req
 
-    curl_opts = default_curl_opts ++ get_opts
+  return $ if (responseStatus resp) == status200 then
+             Right (responseBody resp)
+           else
+             Left resp
 
 
 -- | Log in using curl. Store the resulting session cookies in the
 -- supplied file.
-log_in :: FilePath -> String -> String -> IO (Either String String)
-log_in cookie_jar username password =
-  withCurlDo $ do
-    -- Create a curl instance.
-    curl <- initialize
-
-    -- Perform the request, and get back a CurlResponse object.
-    -- The cast is needed to specify how we would like our headers
-    -- and body returned (Strings).
-    resp <- do_curl_ curl login_url curl_opts :: IO CurlResponse
-
-    -- Pull out the response code as a CurlCode.
-    let code = respCurlCode resp
-
-    return $
-      case code of
-        CurlOK -> Right (respBody resp)
-        error_code -> Left $ "HTTP Error: " ++ (show error_code)
-        -- If an error occurred, we want to dump as much information as
-        -- possible. If this becomes a problem, we can use respGetInfo to
-        -- query the response object for more information
+log_in :: String -> String -> IO (Either LBSResponse CookieJar)
+log_in username password = do
+  init_req <- parseUrl login_url
+  let req' = init_req { method = methodPost,
+                        checkStatus = \_ _ -> Nothing,
+                        redirectCount = 0 }
+  let req = urlEncodedBody post_data req'
+
+  resp <- withManager $ httpLbs req
+
+  -- The login page redirects. If we follow it, we lose our cookies.
+  if (responseStatus resp) == status302 then do
+    now <- getCurrentTime
+    let (cj,_) = updateCookieJar resp req now C.cj_empty
+    return $ Right cj
+  else do
+    return $ Left resp
+
   where
-    post_submit :: String
-    post_submit = submit_field ++ "=Log+In"
+    post_submit :: (C.ByteString, C.ByteString)
+    post_submit = (submit_field, C.pack "Log+In")
 
-    post_username :: String
-    post_username = username_field ++ "=" ++ username
+    post_username :: (C.ByteString, C.ByteString)
+    post_username = (username_field, C.pack username)
 
-    post_password :: String
-    post_password = password_field ++ "=" ++ password
+    post_password :: (C.ByteString, C.ByteString)
+    post_password = (password_field, C.pack password)
 
-    post_data :: [String]
+    post_data :: [(C.ByteString, C.ByteString)]
     post_data = [post_username, post_password, post_submit]
 
-    post_opts :: [CurlOption]
-    post_opts =
-      [ CurlCookieSession True,
-        CurlCookieJar cookie_jar,
-        CurlPost True,
-        CurlPostFields post_data ]
-
-    curl_opts :: [CurlOption]
-    curl_opts = default_curl_opts ++ post_opts
 
 
 -- | Save the image at 'url'.  Saves to a temporary file, and
@@ -160,13 +122,16 @@ save_image url = do
     Just file -> do
       temp_dir <- getTemporaryDirectory
       (out_path, out_handle) <- openBinaryTempFile temp_dir file
-      result <- openURI url
+      -- We don't need to be logged in to get the images, so use an
+      -- empty cookie jar.
+      result <- get_page C.cj_empty url
       case result of
         Left err -> do
-          hPutStrLn stderr ("HTTP Error: " ++ err)
+          hPutStrLn stderr $ "Failed to retrieve image. " ++
+                             "Server response:\n" ++ (show err)
           return Nothing
         Right bs -> do
-          B.hPut out_handle bs
+          L.hPut out_handle bs
           return $ Just out_path
 
 
@@ -199,17 +164,14 @@ get_login_cookie cfg
   | otherwise = do
       let uname = fromJust $ C.username cfg
      let pword = fromJust $ C.password cfg
-      cj <- make_cookie_jar
-      li_result <- log_in cj uname pword
+      li_result <- log_in uname pword
       case li_result of
         Left err -> do
-          let msg = "Failed to log in. " ++ err
+          let msg = "Failed to log in. Server response:\n" ++ (show err)
           hPutStrLn stderr msg
-        Right response_body -> do
-          hPutStrLn stderr response_body
-
-      return $ cfg { C.cookie_jar = Just cj }
+          return cfg
+        Right cj -> return $ cfg { C.cookie_jar = cj }
 
 
 -- | Try to parse the given article using HXT.  We try a few different
@@ -223,12 +185,14 @@ get_article_contents cfg article_name = do
       contents <- Utf8.readFile my_article
       return $ Just $ contents
     False -> do
-      -- Download the URL and try to parse it.
+      -- Download the URL.
       html <- get_page (C.cookie_jar cfg) my_article
       case html of
         Left err -> do
-          let msg = "Failed to retrieve page. " ++ err
+          let msg = "Failed to retrieve article. " ++
+                    "Server response:\n" ++ (show err)
           hPutStrLn stderr msg
           return Nothing
-        Right h -> return $ Just h
+        Right lbs_article ->
+          return $ Just (toString lbs_article)
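
Usage note (not part of the commit): the two new routines are meant to be chained, with log_in producing the CookieJar that get_page then attaches to every authenticated request. The sketch below is a minimal, hypothetical driver written against the same http-conduit 1.x era API the diff targets; it assumes LWN.HTTP exports get_page and log_in as shown above, and the username, password, and article URL are placeholders, not values from the repository.

    module Main where

    import Data.ByteString.Lazy.UTF8 (toString)
    import System.IO (hPutStrLn, stderr)

    import LWN.HTTP (get_page, log_in)

    main :: IO ()
    main = do
      -- Log in once; on success we get back the session cookie jar.
      li_result <- log_in "myuser" "mypassword"
      case li_result of
        Left resp -> hPutStrLn stderr $ "Login failed:\n" ++ (show resp)
        Right cj -> do
          -- Thread the cookie jar through the authenticated request.
          page <- get_page cj "https://lwn.net/current/bigpage"
          case page of
            Left resp -> hPutStrLn stderr $ "Fetch failed:\n" ++ (show resp)
            Right body -> putStrLn (toString body)

Both functions report failure by returning the raw Response (the LBSResponse alias), so a caller that wants more than the error text can inspect the status line and headers directly.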