RemoteFetcher handles the details of fetching gems and gem information from a remote source.
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 54
def self.fetcher
  @fetcher ||= self.new Gem.configuration[:http_proxy]
end
Cached RemoteFetcher instance.
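Most callers go through this shared instance rather than building their own; a minimal sketch:

require "rubygems/remote_fetcher"

fetcher = Gem::RemoteFetcher.fetcher
fetcher.equal?(Gem::RemoteFetcher.fetcher)  # => true, the instance is memoized

# The shared fetcher honours Gem.configuration[:http_proxy] and pools HTTP
# connections; long-running callers can release them explicitly:
fetcher.close_all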
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 74
def initialize(proxy=nil, dns=nil, headers={})
  require_relative "core_ext/tcpsocket_init" if Gem.configuration.ipv4_fallback_enabled
  require "net/http"
  require "stringio"
  require "uri"

  Socket.do_not_reverse_lookup = true

  @proxy = proxy
  @pools = {}
  @pool_lock = Thread::Mutex.new
  @cert_files = Gem::Request.get_cert_files

  @headers = headers
end
Initialize a remote fetcher using the source URI and possible proxy information.

proxy
- [String]: explicit specification of proxy; overrides any environment variable setting
- nil: respect environment variables (HTTP_PROXY, HTTP_PROXY_USER, HTTP_PROXY_PASS)
- :no_proxy: ignore environment variables and don't use a proxy

headers: A set of additional HTTP headers to be sent to the server when fetching the gem.
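As a sketch of the options above (the proxy host and header name are made up for illustration):

# Explicit proxy string; overrides HTTP_PROXY and friends.
via_proxy = Gem::RemoteFetcher.new "http://proxy.example.com:8080"

# :no_proxy ignores the environment and connects directly.
direct = Gem::RemoteFetcher.new :no_proxy

# Extra headers are added to each HTTP request this fetcher makes.
tagged = Gem::RemoteFetcher.new nil, nil, { "X-Example-Header" => "demo" }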
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 282
def cache_update_path(uri, path = nil, update = true)
  mtime = path && File.stat(path).mtime rescue nil

  data = fetch_path(uri, mtime)

  if data.nil? # indicates the server returned 304 Not Modified
    return Gem.read_binary(path)
  end

  if update && path
    Gem.write_binary(path, data)
  end

  data
end
Downloads uri to path if necessary. If no path is given, it just passes the data.
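For example (the URL and cache path are illustrative): the first call fetches and writes the cache file; later calls send the file's mtime as If-Modified-Since, and a 304 reply makes the method return the cached copy instead of re-downloading.

require "tmpdir"

fetcher = Gem::RemoteFetcher.fetcher
cache   = File.join Dir.tmpdir, "versions"   # local cache file (illustrative)

data = fetcher.cache_update_path "https://rubygems.org/versions", cache
puts "#{data.bytesize} bytes (cached copy at #{cache})"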
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 318
def close_all
  @pools.each_value {|pool| pool.close_all }
end
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 112
def download(spec, source_uri, install_dir = Gem.dir)
  install_cache_dir = File.join install_dir, "cache"

  cache_dir =
    if Dir.pwd == install_dir # see fetch_command
      install_dir
    elsif File.writable?(install_cache_dir) || (File.writable?(install_dir) && (!File.exist?(install_cache_dir)))
      install_cache_dir
    else
      File.join Gem.user_dir, "cache"
    end

  gem_file_name = File.basename spec.cache_file
  local_gem_path = File.join cache_dir, gem_file_name

  require "fileutils"
  FileUtils.mkdir_p cache_dir rescue nil unless File.exist? cache_dir

  source_uri = Gem::Uri.new(source_uri)

  scheme = source_uri.scheme

  # URI.parse gets confused by MS Windows paths with forward slashes.
  scheme = nil if scheme =~ /^[a-z]$/i

  # REFACTOR: split this up and dispatch on scheme (eg download_http)
  # REFACTOR: be sure to clean up fake fetcher when you do this... cleaner
  case scheme
  when "http", "https", "s3" then
    unless File.exist? local_gem_path
      begin
        verbose "Downloading gem #{gem_file_name}"

        remote_gem_path = source_uri + "gems/#{gem_file_name}"

        self.cache_update_path remote_gem_path, local_gem_path
      rescue FetchError
        raise if spec.original_platform == spec.platform

        alternate_name = "#{spec.original_name}.gem"

        verbose "Failed, downloading gem #{alternate_name}"

        remote_gem_path = source_uri + "gems/#{alternate_name}"

        self.cache_update_path remote_gem_path, local_gem_path
      end
    end
  when "file" then
    begin
      path = source_uri.path
      path = File.dirname(path) if File.extname(path) == ".gem"

      remote_gem_path = Gem::Util.correct_for_windows_path(File.join(path, "gems", gem_file_name))

      FileUtils.cp(remote_gem_path, local_gem_path)
    rescue Errno::EACCES
      local_gem_path = source_uri.to_s
    end

    verbose "Using local gem #{local_gem_path}"
  when nil then # TODO test for local overriding cache
    source_path = if Gem.win_platform? && source_uri.scheme &&
                     !source_uri.path.include?(":")
      "#{source_uri.scheme}:#{source_uri.path}"
    else
      source_uri.path
    end

    source_path = Gem::UriFormatter.new(source_path).unescape

    begin
      FileUtils.cp source_path, local_gem_path unless
        File.identical?(source_path, local_gem_path)
    rescue Errno::EACCES
      local_gem_path = source_uri.to_s
    end

    verbose "Using local gem #{local_gem_path}"
  else
    raise ArgumentError, "unsupported URI scheme #{source_uri.scheme}"
  end

  local_gem_path
end
Moves the gem spec from source_uri to the cache dir unless it is already there. If the source_uri is local the gem cache dir copy is always replaced.
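A sketch of fetching one gem file for an already-resolved spec (the gem name and source URL are illustrative; any installed spec works):

spec = Gem::Specification.find_by_name "rake"   # assumes rake is installed locally
path = Gem::RemoteFetcher.fetcher.download spec, "https://rubygems.org"

puts path   # e.g. <gem dir>/cache/rake-<version>.gem, downloaded only if absent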
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 97
def download_to_cache(dependency)
  found, _ = Gem::SpecFetcher.fetcher.spec_for_dependency dependency

  return if found.empty?

  spec, source = found.max_by {|(s,_)| s.version }

  download spec, source.uri
end
Given a name and requirement, downloads this gem into cache and returns the filename. Returns nil if the gem cannot be located.
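For example (the dependency is illustrative; the call hits the configured gem sources):

dep  = Gem::Dependency.new "minitest", ">= 5.0"
path = Gem::RemoteFetcher.fetcher.download_to_cache dep

if path
  puts "cached at #{path}"
else
  puts "no gem found for #{dep}"
end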
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 200
def fetch_file(uri, *_)
  Gem.read_binary Gem::Util.correct_for_windows_path uri.path
end
File Fetcher. Dispatched by fetch_path. Use it instead.
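Going through fetch_path with a file URI ends up here; a one-line sketch (the path is illustrative):

# Reads the .gem straight from disk; no network access is involved.
data = Gem::RemoteFetcher.fetcher.fetch_path "file:///var/cache/gems/example-1.0.gem"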
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 207
def fetch_http(uri, last_modified = nil, head = false, depth = 0)
  fetch_type = head ? Net::HTTP::Head : Net::HTTP::Get
  response = request uri, fetch_type, last_modified do |req|
    headers.each {|k,v| req.add_field(k,v) }
  end

  case response
  when Net::HTTPOK, Net::HTTPNotModified then
    response.uri = uri
    head ? response : response.body
  when Net::HTTPMovedPermanently, Net::HTTPFound, Net::HTTPSeeOther,
       Net::HTTPTemporaryRedirect then
    raise FetchError.new("too many redirects", uri) if depth > 10

    unless location = response["Location"]
      raise FetchError.new("redirecting but no redirect location was given", uri)
    end

    location = Gem::Uri.new location

    if https?(uri) && !https?(location)
      raise FetchError.new("redirecting to non-https resource: #{location}", uri)
    end

    fetch_http(location, last_modified, head, depth + 1)
  else
    raise FetchError.new("bad response #{response.message} #{response.code}", uri)
  end
end
HTTP Fetcher. Dispatched by fetch_path. Use it instead.
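The mtime and head parameters map onto conditional and HEAD requests; a sketch via fetch_path (the URL is illustrative):

fetcher = Gem::RemoteFetcher.fetcher
uri     = "https://rubygems.org/latest_specs.4.8.gz"

# head = true returns the Net::HTTP response object instead of the body.
response = fetcher.fetch_path uri, nil, true
puts response["content-type"]

# Passing an mtime sends If-Modified-Since; nil back means 304 Not Modified.
body = fetcher.fetch_path uri, Time.now
puts body ? "#{body.bytesize} bytes" : "cached copy is still current"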
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 241
def fetch_path(uri, mtime = nil, head = false)
  uri = Gem::Uri.new uri

  unless uri.scheme
    raise ArgumentError, "uri scheme is invalid: #{uri.scheme.inspect}"
  end

  data = send "fetch_#{uri.scheme}", uri, mtime, head

  if data && !head && uri.to_s.end_with?(".gz")
    begin
      data = Gem::Util.gunzip data
    rescue Zlib::GzipFile::Error
      raise FetchError.new("server did not return a valid file", uri)
    end
  end

  data
rescue Timeout::Error, IOError, SocketError, SystemCallError,
       *(OpenSSL::SSL::SSLError if Gem::HAVE_OPENSSL) => e
  raise FetchError.new("#{e.class}: #{e}", uri)
end
Downloads uri and returns it as a String.
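For example (the URL is illustrative): gzip payloads (URIs ending in .gz) come back already inflated, and network-level failures are re-raised as Gem::RemoteFetcher::FetchError.

begin
  raw  = Gem::RemoteFetcher.fetcher.fetch_path "https://rubygems.org/specs.4.8.gz"
  list = Marshal.load raw                  # the classic index is a marshalled array
  puts "#{list.size} entries in the spec index"
rescue Gem::RemoteFetcher::FetchError => e
  warn "fetch failed for #{e.uri}: #{e.message}"
end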
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 264
def fetch_s3(uri, mtime = nil, head = false)
  begin
    public_uri = s3_uri_signer(uri).sign
  rescue Gem::S3URISigner::ConfigurationError, Gem::S3URISigner::InstanceProfileError => e
    raise FetchError.new(e.message, "s3://#{uri.host}")
  end

  fetch_https public_uri, mtime, head
end
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 314
def https?(uri)
  uri.scheme.downcase == "https"
end
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 328
def pools_for(proxy)
  @pool_lock.synchronize do
    @pools[proxy] ||= Gem::Request::ConnectionPools.new proxy, @cert_files
  end
end
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 324
def proxy_for(proxy, uri)
  Gem::Request.proxy_uri(proxy || Gem::Request.get_proxy_from_env(uri.scheme))
end
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 303
def request(uri, request_class, last_modified = nil)
  proxy = proxy_for @proxy, uri
  pool  = pools_for(proxy).pool_for uri

  request = Gem::Request.new uri, request_class, last_modified, pool

  request.fetch do |req|
    yield req if block_given?
  end
end
# File tmp/rubies/ruby-3.2.0/lib/rubygems/remote_fetcher.rb, line 274
def s3_uri_signer(uri)
  Gem::S3URISigner.new(uri)
end
We have our own signing code here to avoid a dependency on the aws-sdk gem.