+-- | Parse an HTML 'String' with HXT, much like HandsomeSoup's
+-- parseHTML, but using our own reader options ('my_read_opts').
+my_read :: String -> IOStateArrow s b XmlTree
+my_read html = readString my_read_opts html
+
+
+-- | Sleep for the given number of seconds. A thin wrapper around
+-- 'threadDelay', which wants its argument in microseconds rather
+-- than seconds.
+thread_sleep :: Int -> IO ()
+thread_sleep seconds = threadDelay (seconds * (10 ^ (6 :: Int)))
+
+
+-- | Try to parse the given article using HXT. We try a few different
+-- methods; if none of them work, we return 'Nothing'.
+--
+-- If the configured article path names a local file, we parse that
+-- file directly. Otherwise we treat it as a URL: when an account is
+-- configured we first log in (building a cookie jar), then download
+-- the page and parse it. Retrieval failures are reported on stderr
+-- and yield 'Nothing'.
+get_xml_from_article :: Cfg -> IO (Maybe (IOStateArrow s b XmlTree))
+get_xml_from_article cfg = do
+  my_article <- real_article_path (article cfg)
+  is_file <- doesFileExist my_article
+  if is_file
+    then do
+      contents <- readFile my_article
+      return $ Just $ my_read contents
+    else do
+      -- Not a local file, so download the URL and try to parse it.
+      -- We only need a cookie jar (and a login step) when an account
+      -- is in use; otherwise we fetch the page anonymously.
+      mcj <- if use_account cfg
+        then do
+          -- use_account would be false if these fromJusts would fail.
+          cj <- make_cookie_jar
+          li_result <- log_in cj
+                         (fromJust $ username cfg)
+                         (fromJust $ password cfg)
+
+          -- Without this, the cookie file is empty during
+          -- get_page. Whaaat?
+          thread_sleep 1
+
+          -- A 'Just' result from log_in carries an error message.
+          when (isJust li_result) $ do
+            let msg = "Failed to log in. " ++ (fromJust li_result)
+            hPutStrLn stderr msg
+
+          return (Just cj)
+        else return Nothing
+
+      html <- get_page mcj my_article
+
+      -- Handle the download result once, for both the logged-in and
+      -- anonymous cases (this block used to be duplicated per branch).
+      case html of
+        Left err -> do
+          let msg = "Failed to retrieve page. " ++ err
+          hPutStrLn stderr msg
+          return Nothing
+        Right h -> return $ Just $ my_read h
+