# frozen_string_literal: true

module Gitlab
  module Database
    # The max value of INTEGER type is the same between MySQL and PostgreSQL:
    # https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
    # http://dev.mysql.com/doc/refman/5.7/en/integer-types.html
    MAX_INT_VALUE = 2147483647

    # The max timestamp value supported by both MySQL's TIMESTAMP and
    # PostgreSQL's timestamptz types:
    # https://www.postgresql.org/docs/9.1/static/datatype-datetime.html
    # https://dev.mysql.com/doc/refman/5.7/en/datetime.html
    MAX_TIMESTAMP_VALUE = Time.at((1 << 31) - 1).freeze

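    # Returns the ActiveRecord configuration Hash for the current Rails
    # environment. Example (illustrative, assuming a PostgreSQL install):
    #
    #   Gitlab::Database.config['adapter'] # => "postgresql"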
    def self.config
      ActiveRecord::Base.configurations[Rails.env]
    end

    def self.username
      config['username'] || ENV['USER']
    end

    def self.database_name
      config['database']
    end

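    # The adapter name comes straight from the configuration above, e.g.
    # "postgresql" or "mysql2"; the mysql?/postgresql? predicates below compare
    # against it case-insensitively.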
    def self.adapter_name
      config['adapter']
    end

    def self.mysql?
      adapter_name.casecmp('mysql2').zero?
    end

    def self.postgresql?
      adapter_name.casecmp('postgresql').zero?
    end

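    # Application-level read-only flag; always false here. db_read_only? below
    # asks the database itself instead.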
    def self.read_only?
      false
    end

    def self.read_write?
      !self.read_only?
    end

    # Check whether the underlying database is in read-only mode
    def self.db_read_only?
      if postgresql?
        pg_is_in_recovery =
          ActiveRecord::Base.connection.execute('SELECT pg_is_in_recovery()')
            .first.fetch('pg_is_in_recovery')

        Gitlab::Utils.to_boolean(pg_is_in_recovery)
      else
        false
      end
    end

    def self.db_read_write?
      !self.db_read_only?
    end

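    # Example (illustrative): database_version returns a string such as
    # "PostgreSQL 9.6.11 on x86_64-pc-linux-gnu ..." on PostgreSQL or
    # "5.7.24-log" on MySQL; the regexp below strips the optional
    # "PostgreSQL " prefix and everything after the first space, e.g. "9.6.11".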
    def self.version
      @version ||= database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
    end

    def self.postgresql_9_or_less?
      postgresql? && version.to_f < 10
    end

    def self.join_lateral_supported?
      postgresql? && version.to_f >= 9.3
    end

    def self.replication_slots_supported?
      postgresql? && version.to_f >= 9.4
    end

    def self.pg_stat_wal_receiver_supported?
      postgresql? && version.to_f >= 9.6
    end

    # map some of the function names that changed between PostgreSQL 9 and 10
    # https://wiki.postgresql.org/wiki/New_in_postgres_10
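    #
    # For example (illustrative), pg_current_wal_insert_lsn returns the string
    # 'pg_current_xlog_insert_location' on PostgreSQL 9.6 or older and
    # 'pg_current_wal_insert_lsn' on PostgreSQL 10 and later, so it can be
    # interpolated into queries such as:
    #
    #   "SELECT #{Gitlab::Database.pg_current_wal_insert_lsn}()::text"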
    def self.pg_wal_lsn_diff
      Gitlab::Database.postgresql_9_or_less? ? 'pg_xlog_location_diff' : 'pg_wal_lsn_diff'
    end

    def self.pg_current_wal_insert_lsn
      Gitlab::Database.postgresql_9_or_less? ? 'pg_current_xlog_insert_location' : 'pg_current_wal_insert_lsn'
    end

    def self.pg_last_wal_receive_lsn
      Gitlab::Database.postgresql_9_or_less? ? 'pg_last_xlog_receive_location' : 'pg_last_wal_receive_lsn'
    end

    def self.pg_last_wal_replay_lsn
      Gitlab::Database.postgresql_9_or_less? ? 'pg_last_xlog_replay_location' : 'pg_last_wal_replay_lsn'
    end

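    # Example (illustrative): nulls_last_order('updated_at', 'DESC') yields
    # "updated_at DESC NULLS LAST" on PostgreSQL and "updated_at DESC" on
    # MySQL, where NULLs already sort last in descending order; only the ASC
    # case needs the "IS NULL" prefix there.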
    def self.nulls_last_order(field, direction = 'ASC')
      order = "#{field} #{direction}"

      if postgresql?
        order = "#{order} NULLS LAST"
      else
        # `field IS NULL` will be `0` for non-NULL columns and `1` for NULL
        # columns. In the (default) ascending order, `0` comes first.
        order = "#{field} IS NULL, #{order}" if direction == 'ASC'
      end

      order
    end

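    # Example (illustrative): nulls_first_order('updated_at', 'DESC') yields
    # "updated_at DESC NULLS FIRST" on PostgreSQL and
    # "updated_at IS NULL, updated_at DESC" on MySQL.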
    def self.nulls_first_order(field, direction = 'ASC')
      order = "#{field} #{direction}"

      if postgresql?
        order = "#{order} NULLS FIRST"
      else
        # `field IS NULL` will be `0` for non-NULL columns and `1` for NULL
        # columns. In the (default) ascending order, `0` comes first.
        order = "#{field} IS NULL, #{order}" if direction == 'DESC'
      end

      order
    end

    def self.random
      postgresql? ? "RANDOM()" : "RAND()"
    end

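    # SQL literals for boolean values, suitable for interpolation into raw
    # queries. Example (illustrative, some_boolean_column is hypothetical):
    #
    #   "WHERE some_boolean_column = #{Gitlab::Database.true_value}"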
    def self.true_value
      if postgresql?
        "'t'"
      else
        1
      end
    end

    def self.false_value
      if postgresql?
        "'f'"
      else
        0
      end
    end

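    # Yields a dedicated connection pool of the given size and disconnects it
    # afterwards. Illustrative usage:
    #
    #   Gitlab::Database.with_connection_pool(4) do |pool|
    #     pool.with_connection { |connection| connection.execute('SELECT 1') }
    #   end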
    def self.with_connection_pool(pool_size)
      pool = create_connection_pool(pool_size)

      begin
        yield(pool)
      ensure
        pool.disconnect!
      end
    end

    # Bulk inserts a number of rows into a table, optionally returning their
    # IDs.
    #
    # table - The name of the table to insert the rows into.
    # rows - An Array of Hash instances, each mapping the columns to their
    #        values.
    # return_ids - When set to true the return value will be an Array of IDs of
    #              the inserted rows; this only works on PostgreSQL.
    # disable_quote - A key or an Array of keys to exclude from quoting (you
    #                 become responsible for protection from SQL injection for
    #                 these keys!)
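    #
    # Illustrative usage ('my_table' and its columns are hypothetical):
    #
    #   Gitlab::Database.bulk_insert(
    #     'my_table',
    #     [{ title: 'foo' }, { title: 'bar' }],
    #     return_ids: true
    #   )
    #   # => IDs of the inserted rows on PostgreSQL, [] on MySQL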
    def self.bulk_insert(table, rows, return_ids: false, disable_quote: [])
      return if rows.empty?

      keys = rows.first.keys
      columns = keys.map { |key| connection.quote_column_name(key) }
      return_ids = false if mysql?

      disable_quote = Array(disable_quote).to_set
      tuples = rows.map do |row|
        keys.map do |k|
          disable_quote.include?(k) ? row[k] : connection.quote(row[k])
        end
      end

      sql = <<-EOF
        INSERT INTO #{table} (#{columns.join(', ')})
        VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
      EOF

      if return_ids
        sql = "#{sql}RETURNING id"
      end

      result = connection.execute(sql)

      if return_ids
        result.values.map { |tuple| tuple[0].to_i }
      else
        []
      end
    end

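    # Clamps a timestamp to MAX_TIMESTAMP_VALUE so it fits in either database.
    # Example (illustrative):
    #
    #   Gitlab::Database.sanitize_timestamp(Time.utc(3000))
    #   # => MAX_TIMESTAMP_VALUE (2038-01-19 03:14:07 UTC)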
    def self.sanitize_timestamp(timestamp)
      MAX_TIMESTAMP_VALUE > timestamp ? timestamp : MAX_TIMESTAMP_VALUE.dup
    end

    # pool_size - The size of the DB pool.
    # host - An optional host name to use instead of the default one.
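    #
    # Builds a standalone ActiveRecord connection pool based on the current
    # environment's configuration. Illustrative usage:
    #
    #   pool = Gitlab::Database.create_connection_pool(2)
    #   pool.with_connection { |connection| connection.execute('SELECT 1') }
    #   pool.disconnect!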
    def self.create_connection_pool(pool_size, host = nil)
      # See activerecord-4.2.7.1/lib/active_record/connection_adapters/connection_specification.rb
      env = Rails.env
      original_config = ActiveRecord::Base.configurations

      env_config = original_config[env].merge('pool' => pool_size)
      env_config['host'] = host if host

      config = original_config.merge(env => env_config)

      spec =
        ActiveRecord::
          ConnectionAdapters::
          ConnectionSpecification::Resolver.new(config).spec(env.to_sym)

      ActiveRecord::ConnectionAdapters::ConnectionPool.new(spec)
    end

    def self.connection
      ActiveRecord::Base.connection
    end

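    # Schema lookups that go through the ActiveRecord schema cache rather than
    # querying the database every time. Example (illustrative):
    #
    #   Gitlab::Database.cached_table_exists?('projects')        # => true
    #   Gitlab::Database.cached_column_exists?('projects', :id)  # => true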
    def self.cached_column_exists?(table_name, column_name)
      connection.schema_cache.columns_hash(table_name).has_key?(column_name.to_s)
    end

    def self.cached_table_exists?(table_name)
      connection.schema_cache.data_source_exists?(table_name)
    end

    private_class_method :connection

    def self.database_version
      row = connection.execute("SELECT VERSION()").first

      if postgresql?
        row['version']
      else
        row.first
      end
    end

    private_class_method :database_version

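    # Registers each db/post_migrate directory as an additional migration path,
    # unless post-deployment migrations are skipped via the environment.
    # Illustrative usage (run during application boot):
    #
    #   Gitlab::Database.add_post_migrate_path_to_rails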
    def self.add_post_migrate_path_to_rails(force: false)
      return if ENV['SKIP_POST_DEPLOYMENT_MIGRATIONS'] && !force

      Rails.application.config.paths['db'].each do |db_path|
        path = Rails.root.join(db_path, 'post_migrate').to_s

        unless Rails.application.config.paths['db/migrate'].include? path
          Rails.application.config.paths['db/migrate'] << path

          # Rails memoizes migrations at certain points where it won't read the above
          # path just yet. As such we must also update the following list of paths.
          ActiveRecord::Migrator.migrations_paths << path
        end
      end
    end
  end
end