X-Git-Url: http://gitweb.michael.orlitzky.com/?p=dead%2Flwn-epub.git;a=blobdiff_plain;f=src%2FLWN%2FHTTP.hs;h=a8a1980f273cfdc352911bb7bcd3b55b58137cdf;hp=3793f10ae7470966d5b1526bcdf435af05f6fb5a;hb=f3321e2ce7d7645ad562dc8f6620bfd561edc75d;hpb=35e0551101fa2267f28ff434a6006a3612ad54c9

diff --git a/src/LWN/HTTP.hs b/src/LWN/HTTP.hs
index 3793f10..a8a1980 100644
--- a/src/LWN/HTTP.hs
+++ b/src/LWN/HTTP.hs
@@ -4,26 +4,25 @@ module LWN.HTTP
 where
 
 import qualified Data.ByteString as B (hPut)
-
+import qualified Data.Map as Map (Map, empty, insert)
 import Network.Curl (
   CurlCode(..),
   CurlOption(..),
   CurlResponse,
-  URLString,
   do_curl_,
   initialize,
   respBody,
   respCurlCode,
-  withCurlDo
+  withCurlDo
   )
 import Network.Curl.Download (openURI)
 import System.Directory (doesFileExist, getTemporaryDirectory)
 import System.IO (hClose, hPutStrLn, stderr)
 import System.IO.Temp (openBinaryTempFile, openTempFile)
 
-import LWN.URI (filename)
+import LWN.URI (URL, filename)
 
-login_url :: URLString
+login_url :: URL
 login_url = "https://lwn.net/login"
 
 username_field :: String
@@ -44,11 +43,11 @@ default_curl_opts =
     -- And we don't want to use a DNS cache anyway.
     CurlDNSCacheTimeout 0,
 
-    -- Give it a little time...
-    CurlTimeout 45,
+    -- Follow redirects.
+    CurlFollowLocation True,
 
-    -- And let us know when things go wrong.
-    CurlVerbose True ]
+    -- Give it a little time...
+    CurlTimeout 45 ]
 
 
 make_cookie_jar :: IO FilePath
@@ -59,8 +58,8 @@ make_cookie_jar = do
   hClose out_handle -- We just want to create it for now.
   return out_path
 
-get_page :: Maybe FilePath -> URLString -> IO (Maybe String)
-get_page cookie_jar url =
+get_page :: Maybe FilePath -> URL -> IO (Either String String)
+get_page cookie_file url =
   withCurlDo $ do
     -- Create a curl instance.
     curl <- initialize
@@ -73,24 +72,25 @@ get_page cookie_jar url =
     -- Pull out the response code as a CurlCode.
     let code = respCurlCode resp
 
-    case code of
-      CurlOK -> return $ Just (respBody resp)
-      error_code -> do
-        hPutStrLn stderr ("HTTP Error: " ++ (show error_code))
+    return $
+      case code of
+        CurlOK -> Right (respBody resp)
+        error_code -> Left ("HTTP Error: " ++ (show error_code))
         -- If an error occurred, we want to dump as much information as
         -- possible. If this becomes a problem, we can use respGetInfo to
         -- query the response object for more information
-        return Nothing
   where
     get_opts =
-      case cookie_jar of
+      case cookie_file of
        Nothing -> []
-       Just cookies -> [ CurlCookieJar cookies ]
+       Just cookies -> [ CurlCookieFile cookies ]
 
     curl_opts = default_curl_opts ++ get_opts
 
 
-log_in :: FilePath -> String -> String -> IO Bool
+-- | Log in using curl. Store the resulting session cookies in the
+-- supplied file.
+log_in :: FilePath -> String -> String -> IO (Either String String)
 log_in cookie_jar username password =
   withCurlDo $ do
     -- Create a curl instance.
@@ -104,14 +104,13 @@ log_in cookie_jar username password =
     -- Pull out the response code as a CurlCode.
     let code = respCurlCode resp
 
-    case code of
-      CurlOK -> return True
-      error_code -> do
-        hPutStrLn stderr ("HTTP Error: " ++ (show error_code))
+    return $
+      case code of
+        CurlOK -> Right (respBody resp)
+        error_code -> Left $ "HTTP Error: " ++ (show error_code)
         -- If an error occurred, we want to dump as much information as
         -- possible. If this becomes a problem, we can use respGetInfo to
         -- query the response object for more information
-        return False
   where
     post_submit :: String
     post_submit = submit_field ++ "=Log+In"
@@ -143,7 +142,7 @@
 -- We need to be able to parse the filename out of the URL
 -- so that when we stick our image in the document, the reader
 -- knows that type (jpg, png, etc.) it is.
-save_image :: URLString -> IO (Maybe FilePath)
+save_image :: URL -> IO (Maybe FilePath)
 save_image url = do
   it_exists <- doesFileExist url
   if it_exists then do
@@ -164,3 +163,22 @@ save_image url = do
     Right bs -> do
       B.hPut out_handle bs
       return $ Just out_path
+
+
+
+
+-- | Map absolute image URLs to local system file paths where the
+-- image referenced by the URL is stored.
+type ImageMap = Map.Map URL FilePath
+
+download_image_urls :: [URL] -> IO ImageMap
+download_image_urls image_urls = do
+  files <- mapM save_image image_urls
+  let pairs = zip image_urls files
+  return $ foldl my_insert empty_map pairs
+  where
+    empty_map = Map.empty :: ImageMap
+
+    my_insert :: ImageMap -> (URL, Maybe FilePath) -> ImageMap
+    my_insert dict (_, Nothing) = dict
+    my_insert dict (k, Just v) = Map.insert k v dict
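

A minimal usage sketch of the Either-based API introduced by this patch (not
part of the diff; the importing module, the page URL, and the credentials
below are illustrative assumptions, not values taken from the project):

    -- Hypothetical caller. Assumes LWN.HTTP exports the functions shown in
    -- the patch above; the username, password, and article URL are
    -- placeholders.
    import LWN.HTTP (get_page, log_in, make_cookie_jar)

    main :: IO ()
    main = do
      cookie_jar <- make_cookie_jar
      login_result <- log_in cookie_jar "myuser" "mypassword"
      case login_result of
        Left err -> putStrLn ("Login failed: " ++ err)
        Right _  -> do
          page <- get_page (Just cookie_jar) "https://lwn.net/current/bigpage"
          case page of
            Left err   -> putStrLn ("Fetch failed: " ++ err)
            Right html -> putStrLn ("Fetched " ++ show (length html) ++ " characters.")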