# frozen_string_literal: true

module Gitlab
  module Gpg
    extend self

    CleanupError = Class.new(StandardError)
    BG_CLEANUP_RUNTIME_S = 10
    FG_CLEANUP_RUNTIME_S = 1

    MUTEX = Mutex.new
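
    # `CurrentKeyChain` operates on whichever keychain the GPGME engine
    # currently points at; the helpers below wrap its calls in
    # `using_tmp_keychain` so the real keychain is never touched.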
    module CurrentKeyChain
      extend self

      def add(key)
        GPGME::Key.import(key)
      end

      def fingerprints_from_key(key)
        import = GPGME::Key.import(key)

        return [] if import.imported == 0

        import.imports.map(&:fingerprint)
      end
    end
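
    # Hedged usage sketch (`armored_key` stands for any ASCII-armored public
    # key string; the fingerprint shown is made up):
    #
    #   Gitlab::Gpg.fingerprints_from_key(armored_key)
    #   # => ["4F4840A503964251CF7D7F5DC728AF10972E97C0"]
    #
    # Returns [] when GPGME cannot import the key.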
    def fingerprints_from_key(key)
      using_tmp_keychain do
        CurrentKeyChain.fingerprints_from_key(key)
      end
    end
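
    # Hedged sketch; the 16-character key ID is the tail of a made-up
    # fingerprint:
    #
    #   Gitlab::Gpg.primary_keyids_from_key(armored_key)
    #   # => ["C728AF10972E97C0"]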
    def primary_keyids_from_key(key)
      using_tmp_keychain do
        fingerprints = CurrentKeyChain.fingerprints_from_key(key)

        GPGME::Key.find(:public, fingerprints).map { |raw_key| raw_key.primary_subkey.keyid }
      end
    end
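
    # Groups subkeys by the primary key's key ID. Hedged sketch of the
    # return shape (all IDs and fingerprints are made up):
    #
    #   Gitlab::Gpg.subkeys_from_key(armored_key)
    #   # => { "C728AF10972E97C0" => [{ keyid: "13AF40F1751F1D99",
    #   #      fingerprint: "5F7EA3981A5845B141ABD52213AF40F1751F1D99" }] }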
    def subkeys_from_key(key)
      using_tmp_keychain do
        fingerprints = CurrentKeyChain.fingerprints_from_key(key)
        raw_keys = GPGME::Key.find(:public, fingerprints)

        raw_keys.each_with_object({}) do |raw_key, grouped_subkeys|
          primary_subkey_id = raw_key.primary_subkey.keyid

          # GPGME lists the primary key as the first subkey; drop it so only
          # the actual subkeys remain.
          grouped_subkeys[primary_subkey_id] = raw_key.subkeys[1..-1].map do |s|
            { keyid: s.keyid, fingerprint: s.fingerprint }
          end
        end
      end
    end
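
    # Hedged sketch (the UID is illustrative):
    #
    #   Gitlab::Gpg.user_infos_from_key(armored_key)
    #   # => [{ name: "Ada Lovelace", email: "ada@example.com" }]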
    def user_infos_from_key(key)
      using_tmp_keychain do
        fingerprints = CurrentKeyChain.fingerprints_from_key(key)

        GPGME::Key.find(:public, fingerprints).flat_map do |raw_key|
          raw_key.uids.each_with_object([]) do |uid, arr|
            name = uid.name.force_encoding('UTF-8')
            email = uid.email.force_encoding('UTF-8')

            # Skip UIDs that are not valid UTF-8; downcase the email so it
            # can be matched case-insensitively.
            arr << { name: name, email: email.downcase } if name.valid_encoding? && email.valid_encoding?
          end
        end
      end
    end

    # Allows thread-safe switching of temporary keychain files:
    #
    # 1. The current thread may nest temporary keychains.
    # 2. Other threads must wait for the lock to be released.
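    #
    # A hedged usage sketch (`armored_key`, `signature`, and `signed_text`
    # are hypothetical):
    #
    #   Gitlab::Gpg.using_tmp_keychain do
    #     Gitlab::Gpg::CurrentKeyChain.add(armored_key)
    #     GPGME::Crypto.new.verify(signature, signed_text: signed_text)
    #   end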
    def using_tmp_keychain(&block)
      if MUTEX.locked? && MUTEX.owned?
        # This thread already holds the lock and is nesting keychains;
        # Ruby's Mutex is not reentrant, so synchronizing again would raise.
        optimistic_using_tmp_keychain(&block)
      else
        ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
          MUTEX.synchronize do
            optimistic_using_tmp_keychain(&block)
          end
        end
      end
    end

    # 1. Returns the custom home directory if one has been set by calling
    #    `GPGME::Engine.home_dir=`.
    # 2. Returns the default home directory otherwise.
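    #
    # For example (hedged; the temporary path is made up):
    #
    #   GPGME::Engine.home_dir = '/tmp/keychain-abc123'
    #   Gitlab::Gpg.current_home_dir # => "/tmp/keychain-abc123"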
    def current_home_dir
      GPGME::Engine.info.first.home_dir || GPGME::Engine.dirinfo('homedir')
    end

    private
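
    # Points GPGME at a fresh temporary home directory for the duration of
    # the block, then restores the previous directory and removes the
    # temporary one. "Optimistic" because it takes no lock itself; callers
    # go through `using_tmp_keychain` for thread safety.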
    def optimistic_using_tmp_keychain
      previous_dir = current_home_dir
      tmp_dir = Dir.mktmpdir
      GPGME::Engine.home_dir = tmp_dir
      tmp_keychains_created.increment

      yield
    ensure
      GPGME::Engine.home_dir = previous_dir

      begin
        cleanup_tmp_dir(tmp_dir)
      rescue CleanupError => e
        folder_contents = Dir.children(tmp_dir)
        # This means we left a `gpg-agent` process hanging. Logging the
        # problem in Sentry will make this more visible.
        Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e,
          issue_url: 'https://gitlab.com/gitlab-org/gitlab/issues/20918',
          tmp_dir: tmp_dir, contents: folder_contents)
      end

      tmp_keychains_removed.increment unless File.exist?(tmp_dir)
    end

    def cleanup_tmp_dir(tmp_dir)
      # Retry when removing the tmp directory fails, as we may run into a
      # race condition: the `gpg-agent` process may clean up some files
      # while `FileUtils.remove_entry` is iterating the directory and
      # removing all its contained files and directories recursively, which
      # could raise an error.
      # Failing to remove the tmp directory could leave the `gpg-agent`
      # process running forever.
      #
      # With exponential backoff, 15 tries can never complete within the
      # maximum elapsed time, so the effective limit is the runtime, not
      # the number of tries.
      Retriable.retriable(max_elapsed_time: cleanup_time, base_interval: 0.1, tries: 15) do
        FileUtils.remove_entry(tmp_dir) if File.exist?(tmp_dir)
      end
    rescue => e
      raise CleanupError, e
    end
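
    # Background (Sidekiq) jobs can afford a longer cleanup window than
    # foreground requests, hence the two constants.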
    def cleanup_time
      Gitlab::Runtime.sidekiq? ? BG_CLEANUP_RUNTIME_S : FG_CLEANUP_RUNTIME_S
    end

    def tmp_keychains_created
      @tmp_keychains_created ||= Gitlab::Metrics.counter(:gpg_tmp_keychains_created_total,
        'The number of temporary GPG keychains created')
    end

    def tmp_keychains_removed
      @tmp_keychains_removed ||= Gitlab::Metrics.counter(:gpg_tmp_keychains_removed_total,
        'The number of temporary GPG keychains removed')
    end
  end
end