X-Git-Url: http://gitweb.michael.orlitzky.com/?a=blobdiff_plain;f=bin%2Fwhatever-dl;h=a40af28fab676f14b3ac36084c3bbdb7ca7cb731;hb=HEAD;hp=cde9e4dbbaaac2e3eb25790a2767d1c4d59d3c2c;hpb=9390083b114048a6782454a37f799733707ee5dc;p=dead%2Fwhatever-dl.git
diff --git a/bin/whatever-dl b/bin/whatever-dl
index cde9e4d..a40af28 100755
--- a/bin/whatever-dl
+++ b/bin/whatever-dl
@@ -1,4 +1,4 @@
-#!/usr/bin/ruby -w
+#!/usr/bin/ruby -wKU
 #
 # whatever-dl, a script to download online (web-based) videos.
 #
@@ -19,99 +19,120 @@
 #   http://www.fsf.org/licensing/licenses/gpl.html
 #
 
-# We require the UriUtilities class to handle
-# the download of the video URL.
-require 'src/uri_utilities'
+# This should load everything we need for us.
+require 'whatever-dl'
 
+# And getoptlong to check for our one option, --continue.
+require 'getoptlong'
+
+log = Logger.new(STDOUT)
+log.level = Logger::WARN
+
+def usage()
+  puts <<EOF
+Usage: whatever-dl [options] <url>
+
+Options:
+  -c, --continue	Continue downloading a previously-attempted file.
+
+EOF
 
-# All of the website classes are located in one
-# directory, so we can 'require' them automatically.
-Dir.glob('src/websites/*.rb').each do |r|
-  require r
 end
 
-EXIT_SUCCESS = 0
-EXIT_NO_URL = 1
-EXIT_INVALID_URL = 2
-EXIT_COULDNT_GET_VIDEO_URL = 3
-EXIT_OUTPUT_FILE_ALREADY_EXISTS = 4
-EXIT_ERROR_READING_FROM_VIDEO_URL = 5
-EXIT_CONNECTION_REFUSED = 6
-EXIT_HTTP_ERROR = 7
-EXIT_ACCESS_DENIED = 8
-
-# Only actually do something if this script was called
-# directly (i.e. not from the tests).
-if (__FILE__ == $0) then
-  if (ARGV.length < 1) then
-    # If the user didn't give us a URL, yell
-    # at him or her.
-    puts 'Usage: whatever-dl <url>'
-    Kernel.exit(EXIT_NO_URL)
-  end
+# Default options.
+options = { :continue => false }
 
-  # Check the URL against each website's class.
-  # The class will know whether or not the URL
-  # "belongs" to its website.
+# Parse the command-line options into the options hash.
+opts = GetoptLong.new(["--continue", "-c", GetoptLong::NO_ARGUMENT], + ["--help", "-h", GetoptLong::NO_ARGUMENT]) - site = nil - - Website.subclasses.each do |w| - if w.owns_url?(ARGV[0]) - site = w.new() - break - end +opts.each do |opt, arg| + case opt + when '--help' + usage() + Kernel.exit(ExitCodes::EXIT_SUCCESS) + when '--continue' + options[:continue] = true end +end - if site.nil? - puts 'Invalid URL.' - exit(EXIT_INVALID_URL) - end - - video_url = site.get_video_url(ARGV[0]) +cfg = Configuration.new() - if video_url.nil? - puts 'Error retrieving video URL.' - exit(EXIT_COULDNT_GET_VIDEO_URL) - end +# Warn about nonsensical options. +if options[:continue] and not cfg.download_method == 'wget' + log.warn("The --continue flag does nothing unless download_method is wget.") +end - video_uri = URI.parse(video_url) - uu = UriUtilities.new() - +# Note that GetoptLong steals its arguments from ARGV, so we don't need +# to take optional arguments into account when figuring out whether or not +# we were passed a URL. +if (ARGV.length < 1) then + # If the user didn't give us a URL, yell + # at him or her. + usage() + Kernel.exit(ExitCodes::EXIT_NO_URL) +end - # Here, we start out with a default file name and - # extension. If UriUtilities can parse a sane filename - # out of the URL, we'll use that. Otherwise, we fall - # back to the default. - outfile_name = 'default.ext' - - if not uu.get_filename(video_uri).nil? - outfile_name = uu.get_filename(video_uri) - else - puts "We couldn't determine the video's filename. Falling back to the default, #{outfile_name}." - end +# Factory method. +site = Website.create(ARGV[0]) + +if site.nil? + log.error('Invalid URL.') + exit(ExitCodes::EXIT_INVALID_URL) +end - if File.exists?(outfile_name) - puts "Error: output file already exists. Please remove #{outfile_name}, and try again." - Kernel.exit(EXIT_OUTPUT_FILE_ALREADY_EXISTS) - end +video_url = site.get_video_url() +if video_url.nil? 
+  msg = 'Error retrieving video URL: '
+  msg += "Site not supported, and the generic parser couldn't find any videos."
+  log.error(msg)
+  exit(ExitCodes::EXIT_COULDNT_GET_VIDEO_URL)
+end
 
-  # Attempt to download the file, and rescue and report
-  # any (predictable) exceptions.
-  begin
-    uu.download_with_progress_bar(video_uri, outfile_name)
-  rescue Errno::ECONNREFUSED => e
-    puts 'The connection to the server (to download the video file) was refused. Check your connection, and try again later.'
-    Kernel.exit(EXIT_CONNECTION_REFUSED)
-  rescue Errno:EACCES => e
-    puts "Access denied. Check that you have write permission to the output file/directory. Details: #{e.message}."
-  rescue OpenURI::HTTPError => e
-    puts "An HTTP error occurred while downloading the video file: #{e.message}."
-    Kernel.exit(EXIT_HTTP_ERROR)
-  end
+# The Downloader class is a factory; it should decide
+# which subclass we get.
+puts "download_method: #{cfg.download_method}"
+downloader = Downloader.create(cfg.download_method)
+
+# Attempt to download the file, and rescue and report
+# any (predictable) exceptions. The wget downloader will
+# naturally not report any of these, since it will die in
+# its own process.
+begin
+  downloader.download(video_url,
+                      site.get_video_filename(),
+                      site.headers(),
+                      continue=options[:continue])
+
+rescue Errno::ECONNREFUSED => e
+  msg = 'The connection to the server (to download the video file) '
+  msg += 'was refused. Check your connection, and try again later.'
+  log.error(msg)
+  Kernel.exit(ExitCodes::EXIT_CONNECTION_REFUSED)
+
+rescue Errno::EACCES => e
+  msg = 'Access denied. Check that you have write permission '
+  msg += "to the output file/directory. Details: #{e.message}."
+  log.error(msg)
+  Kernel.exit(ExitCodes::EXIT_ACCESS_DENIED)
+
+rescue OpenURI::HTTPError => e
+  msg = 'An HTTP error occurred while downloading '
+  msg += "the video file: #{e.message}."
+ log.error(msg) + Kernel.exit(ExitCodes::EXIT_HTTP_ERROR) + +rescue IOError => e + log.error("Input/Output Error: #{e.message}") + Kernel.exit(ExitCodes::EXIT_IO_ERROR) - Kernel.exit(EXIT_SUCCESS) end + +# Write an empty line at the end for aesthetic reasons. +puts '' + +Kernel.exit(ExitCodes::EXIT_SUCCESS)