)
import Network.Curl.Download (openURI)
import System.Directory (doesFileExist, getTemporaryDirectory)
-import System.IO (hClose, hPutStrLn, stderr, stdout)
+import System.IO (hClose, hPutStrLn, stderr)
import System.IO.Temp (openBinaryTempFile, openTempFile)
import LWN.URI (filename)
hClose out_handle -- We just want to create it for now.
return out_path
-get_page :: Maybe FilePath -> URLString -> IO (Maybe String)
+get_page :: Maybe FilePath -> URLString -> IO (Either String String)
get_page cookie_file url =
withCurlDo $ do
- hPutStrLn stdout ("Getting page: " ++ url ++ "...")
-
-- Create a curl instance.
curl <- initialize
-- Perform the request, and get back a CurlResponse object.
-- The cast is needed to specify how we would like our headers
-- and body returned (Strings).
- putStrLn "Curl options:"
- print curl_opts
-
resp <- do_curl_ curl url curl_opts :: IO CurlResponse
-- Pull out the response code as a CurlCode.
let code = respCurlCode resp
- case code of
- CurlOK -> return $ Just (respBody resp)
- error_code -> do
- hPutStrLn stderr ("HTTP Error: " ++ (show error_code))
+ return $
+ case code of
+ CurlOK -> Right (respBody resp)
+ error_code -> Left ("HTTP Error: " ++ (show error_code))
-- If an error occurred, we want to dump as much information as
-- possible. If this becomes a problem, we can use respGetInfo to
-- query the response object for more information
- return Nothing
where
get_opts =
case cookie_file of
curl_opts = default_curl_opts ++ get_opts
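-- A hedged illustration (not part of this patch) of how a caller might
-- consume the new 'Either'-based result of get_page; the name
-- example_fetch is hypothetical.
example_fetch :: URLString -> IO ()
example_fetch url = do
  result <- get_page Nothing url
  case result of
    Left err   -> hPutStrLn stderr ("get_page failed: " ++ err)
    Right body -> putStrLn ("Retrieved " ++ show (length body) ++ " characters.")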
-log_in :: FilePath -> String -> String -> IO Bool
+-- | Log in using curl. Store the resulting session cookies in the
+-- supplied file. Warning: 'Nothing' indicates success; a 'Just'
+-- value contains an error message describing the failure.
+log_in :: FilePath -> String -> String -> IO (Maybe String)
log_in cookie_jar username password =
withCurlDo $ do
- hPutStrLn stdout ("Logging " ++ username ++ " in...")
-
-- Create a curl instance.
curl <- initialize
-- Pull out the response code as a CurlCode.
let code = respCurlCode resp
- case code of
- CurlOK -> return True
- error_code -> do
- hPutStrLn stderr ("HTTP Error: " ++ (show error_code))
+ return $
+ case code of
+ CurlOK -> Nothing
+ error_code -> Just $ "HTTP Error: " ++ (show error_code)
-- If an error occurred, we want to dump as much information as
-- possible. If this becomes a problem, we can use respGetInfo to
-- query the response object for more information
- return False
where
post_submit :: String
post_submit = submit_field ++ "=Log+In"
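-- A hedged sketch (not part of this patch) of how the login POST
-- options might be assembled from the arguments above. The form field
-- names are hypothetical, and the real code presumably also includes
-- post_submit in the form data; only CurlCookieJar and CurlPostFields
-- are known Network.Curl options.
example_login_opts :: FilePath -> String -> String -> [CurlOption]
example_login_opts cookie_jar username password =
  [ CurlCookieJar cookie_jar,                    -- write the session cookies here
    CurlPostFields [ "username=" ++ username,    -- hypothetical field name
                     "password=" ++ password ] ] -- hypothetical field name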
module Main
where
+import Control.Concurrent (threadDelay)
import Control.Monad (when)
-import Data.Maybe (fromJust)
+import Data.Maybe (fromJust, isJust)
import Prelude hiding (readFile)
import System.Directory (doesFileExist)
import System.IO (
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Text.Regex.Posix ((=~))
-import Text.XML.HXT.Core hiding (when)
-
+import Text.XML.HXT.Core (
+ IOStateArrow,
+ SysConfigList,
+ XmlTree,
+ no,
+ readString,
+ withParseHTML,
+ withValidate,
+ withWarnings,
+ yes
+ )
import CommandLine (show_help)
import Configuration (Cfg(..), get_cfg, use_account)
import LWN.HTTP (get_page, log_in, make_cookie_jar)
my_read :: String -> IOStateArrow s b XmlTree
my_read = readString my_read_opts
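-- my_read_opts is referenced above but not shown in this hunk; based
-- on the HXT symbols imported at the top of the module, a plausible
-- definition (treat the exact option list as an assumption) would be:
my_read_opts :: SysConfigList
my_read_opts = [ withValidate no,    -- article HTML is rarely valid XML
                 withParseHTML yes,  -- use HXT's lenient HTML parser
                 withWarnings no ]   -- suppress parser warnings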
+
+-- | A wrapper around 'threadDelay' which takes seconds instead of
+-- microseconds as its argument.
+thread_sleep :: Int -> IO ()
+thread_sleep seconds = do
+ let microseconds = seconds * (10 ^ (6 :: Int))
+ threadDelay microseconds
+
+
-- | Try to parse the given article using HXT. We try a few different
-- methods; if none of them work, we return 'Nothing'.
get_xml_from_article :: Cfg -> IO (Maybe (IOStateArrow s b XmlTree))
li_result <- log_in cj
(fromJust $ username cfg)
(fromJust $ password cfg)
- when (not li_result) $ do
- hPutStrLn stderr "Failed to log in."
+
+ -- Without this, the cookie file is empty during
+ -- get_page. Whaaat?
+ thread_sleep 1
+
+ when (isJust li_result) $ do
+ let msg = "Failed to log in. " ++ (fromJust li_result)
+ hPutStrLn stderr msg
html <- get_page (Just cj) my_article
- print $ fromJust $ html
- return $
- case html of
- Nothing -> Nothing
- Just h -> Just $ my_read h
+
+ case html of
+ Left err -> do
+ let msg = "Failed to retrieve page. " ++ err
+ hPutStrLn stderr msg
+ return Nothing
+ Right h -> return $ Just $ my_read h
else do
html <- get_page Nothing my_article
- putStrLn "Not logged in."
- print $ fromJust $ html
- return $
- case html of
- Nothing -> Nothing
- Just h -> Just $ my_read h
+ case html of
+ Left err -> do
+ let msg = "Failed to retrieve page. " ++ err
+ hPutStrLn stderr msg
+ return Nothing
+ Right h -> return $ Just $ my_read h
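-- A hedged usage sketch (not from this patch) of running the arrow
-- returned by get_xml_from_article. runX, (>>>) and getChildren come
-- from Text.XML.HXT.Core and would need to be added to the import
-- list above; the name show_node_count is hypothetical.
show_node_count :: Cfg -> IO ()
show_node_count cfg = do
  maybe_xml <- get_xml_from_article cfg
  case maybe_xml of
    Nothing  -> hPutStrLn stderr "Could not parse the article."
    Just xml -> do
      nodes <- runX (xml >>> getChildren)  -- run the parse, take the root's children
      putStrLn ("Parsed " ++ show (length nodes) ++ " top-level nodes.")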
-- | If we're given an empty path, return a handle to
-- 'stdout'. Otherwise, open the given file and return a read/write