{-# LANGUAGE DoAndIfThenElse #-}
module LWN.HTTP
where

import Control.Concurrent.ParallelIO (parallel)
import qualified Data.ByteString.Char8 as C (ByteString, pack)
import qualified Data.ByteString.Lazy as L (ByteString, hPut)
import Data.ByteString.Lazy.UTF8 (toString)
import qualified Data.Map as Map (Map, empty, insert)
import Data.Maybe (fromJust, isNothing)
import Data.Time (getCurrentTime)
import Network.HTTP.Conduit (
  CookieJar,
  Response(..),
  Request(..),
  httpLbs,
  insertCookiesIntoRequest,
  parseUrl,
  updateCookieJar,
  urlEncodedBody,
  withManager)
import Network.HTTP.Types (status200, status302)
import Network.HTTP.Types.Method (methodPost)
import System.Directory (doesFileExist, getTemporaryDirectory)
import System.IO (hPutStrLn, stderr)
import qualified System.IO.UTF8 as Utf8 (readFile)
import System.IO.Temp (openBinaryTempFile)

-- Also grab the empty cookie jar from Configuration since we'll
-- use it in a few places.
import qualified Configuration as C (Cfg(..), cj_empty)

import LWN.Article (real_article_path)
import LWN.URI (URL, filename)


-- | The type of response we get back from http-conduit's httpLbs.
type LBSResponse = Response L.ByteString


login_url :: URL
login_url = "https://lwn.net/login"

username_field :: C.ByteString
username_field = C.pack "Username"

password_field :: C.ByteString
password_field = C.pack "Password"

submit_field :: C.ByteString
submit_field = C.pack "submit"


-- | Get the requested URL as an L.ByteString, or return the response
--   as an error. Use the given cookie jar for the request.
get_page :: CookieJar -> URL -> IO (Either LBSResponse L.ByteString)
get_page cj url = do
  init_req <- parseUrl url
  let req' = init_req { checkStatus = \_ _ -> Nothing }
  now <- getCurrentTime
  let (req, _) = insertCookiesIntoRequest req' cj now
  resp <- withManager $ httpLbs req

  return $ if (responseStatus resp) == status200
           then Right (responseBody resp)
           else Left resp


-- | Log in via http-conduit, and return the resulting session cookie
--   jar, or the server's response if the login failed.
log_in :: String -> String -> IO (Either LBSResponse CookieJar)
log_in username password = do
  init_req <- parseUrl login_url
  let req' = init_req { method = methodPost,
                        checkStatus = \_ _ -> Nothing,
                        redirectCount = 0 }
  let req = urlEncodedBody post_data req'

  resp <- withManager $ httpLbs req

  -- The login page redirects. If we follow it, we lose our cookies.
  if (responseStatus resp) == status302
  then do
    now <- getCurrentTime
    let (cj, _) = updateCookieJar resp req now C.cj_empty
    return $ Right cj
  else do
    return $ Left resp

  where
    post_submit :: (C.ByteString, C.ByteString)
    post_submit = (submit_field, C.pack "Log+In")

    post_username :: (C.ByteString, C.ByteString)
    post_username = (username_field, C.pack username)

    post_password :: (C.ByteString, C.ByteString)
    post_password = (password_field, C.pack password)

    post_data :: [(C.ByteString, C.ByteString)]
    post_data = [post_username, post_password, post_submit]
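
-- A minimal usage sketch showing how 'log_in' and 'get_page' are meant
-- to compose: log in once, then reuse the resulting session cookie jar
-- for the actual request. The function name is only illustrative.
fetch_with_login :: String -> String -> URL -> IO (Maybe L.ByteString)
fetch_with_login username password url = do
  li_result <- log_in username password
  case li_result of
    Left err -> do
      hPutStrLn stderr $ "Failed to log in. Server response:\n" ++ (show err)
      return Nothing
    Right cj -> do
      page <- get_page cj url
      case page of
        Left err -> do
          hPutStrLn stderr $ "Failed to retrieve page. Server response:\n" ++
                             (show err)
          return Nothing
        Right bs -> return $ Just bs
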
-- | Save the image at 'url'. Saves to a temporary file, and
--   returns the path to that file if successful. Otherwise,
--   returns 'Nothing'.
--
--   We need to be able to parse the filename out of the URL
--   so that when we stick our image in the document, the reader
--   knows what type (jpg, png, etc.) it is.
save_image :: URL -> IO (Maybe FilePath)
save_image url = do
  it_exists <- doesFileExist url
  if it_exists
  then do
    -- It's local, just use it.
    return $ Just url
  else do
    let fn = filename url
    case fn of
      Nothing -> return Nothing
      Just file -> do
        temp_dir <- getTemporaryDirectory
        (out_path, out_handle) <- openBinaryTempFile temp_dir file
        -- We don't need to be logged in to get the images, so use an
        -- empty cookie jar.
        result <- get_page C.cj_empty url
        case result of
          Left err -> do
            hPutStrLn stderr $ "Failed to retrieve image. " ++
                               "Server response:\n" ++ (show err)
            return Nothing
          Right bs -> do
            L.hPut out_handle bs
            return $ Just out_path


-- | Map absolute image URLs to local system file paths where the
--   image referenced by the URL is stored.
type ImageMap = Map.Map URL FilePath

-- | Download each of the given image URLs (in parallel) with
--   'save_image', and return a map from each URL to the local file
--   where it was saved. URLs whose download failed are left out of
--   the map.
download_image_urls :: [URL] -> IO ImageMap
download_image_urls image_urls = do
  files <- parallel $ map save_image image_urls
  let pairs = zip image_urls files
  return $ foldl my_insert empty_map pairs
  where
    empty_map = Map.empty :: ImageMap

    my_insert :: ImageMap -> (URL, Maybe FilePath) -> ImageMap
    my_insert dict (_, Nothing) = dict
    my_insert dict (k, Just v)  = Map.insert k v dict


-- | If both a username and a password are present in the given
--   configuration, log in and store the resulting session cookies in
--   the configuration's cookie jar. Otherwise (or if the login
--   fails), return the configuration unchanged.
get_login_cookie :: C.Cfg -> IO C.Cfg
get_login_cookie cfg
  | isNothing (C.username cfg) = return cfg
  | isNothing (C.password cfg) = return cfg
  | otherwise = do
      let uname = fromJust $ C.username cfg
      let pword = fromJust $ C.password cfg
      li_result <- log_in uname pword
      case li_result of
        Left err -> do
          let msg = "Failed to log in. Server response:\n" ++ (show err)
          hPutStrLn stderr msg
          return cfg
        Right cj -> return $ cfg { C.cookie_jar = cj }


-- | Get the contents of the given article, either from a local file
--   (if the article path exists on disk) or by downloading it with
--   the configured cookie jar. If the download fails, we return
--   'Nothing'.
get_article_contents :: C.Cfg -> URL -> IO (Maybe String)
get_article_contents cfg article_name = do
  my_article <- real_article_path article_name
  is_file <- doesFileExist my_article
  case is_file of
    True -> do
      contents <- Utf8.readFile my_article
      return $ Just $ contents
    False -> do
      -- Download the URL.
      html <- get_page (C.cookie_jar cfg) my_article
      case html of
        Left err -> do
          let msg = "Failed to retrieve article. " ++
                    "Server response:\n" ++ (show err)
          hPutStrLn stderr msg
          return Nothing
        Right lbs_article -> return $ Just (toString lbs_article)
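

-- A second usage sketch tying the pieces above together for one
-- article: attach session cookies to the configuration, fetch the
-- article, and mirror its images to local temporary files. The
-- function name is only illustrative, and the list of image URLs is
-- assumed to have been extracted from the article's HTML elsewhere.
fetch_article_and_images :: C.Cfg -> URL -> [URL] -> IO (Maybe (String, ImageMap))
fetch_article_and_images cfg article_name image_urls = do
  -- Log in (if credentials are configured) and keep the cookie jar.
  cfg' <- get_login_cookie cfg
  contents <- get_article_contents cfg' article_name
  case contents of
    Nothing -> return Nothing
    Just html -> do
      -- Download the images and map their URLs to local paths.
      image_map <- download_image_urls image_urls
      return $ Just (html, image_map)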