# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Database::MigrationHelpers do
  let(:model) do
    ActiveRecord::Migration.new.extend(described_class)
  end

  before do
    allow(model).to receive(:puts)
  end

  describe '#remove_timestamps' do
    it 'can remove the default timestamps' do
      Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name|
        expect(model).to receive(:remove_column).with(:foo, column_name)
      end

      model.remove_timestamps(:foo)
    end

    it 'can remove custom timestamps' do
      expect(model).to receive(:remove_column).with(:foo, :bar)

      model.remove_timestamps(:foo, columns: [:bar])
    end
  end

  describe '#add_timestamps_with_timezone' do
    let(:in_transaction) { false }

    before do
      allow(model).to receive(:transaction_open?).and_return(in_transaction)
      allow(model).to receive(:disable_statement_timeout)
    end

    it 'adds "created_at" and "updated_at" fields with the "datetime_with_timezone" data type' do
      Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name|
        expect(model).to receive(:add_column).with(:foo, column_name, :datetime_with_timezone, { null: false })
      end

      model.add_timestamps_with_timezone(:foo)
    end

    it 'can disable the NOT NULL constraint' do
      Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name|
        expect(model).to receive(:add_column).with(:foo, column_name, :datetime_with_timezone, { null: true })
      end

      model.add_timestamps_with_timezone(:foo, null: true)
    end

    it 'can add just one column' do
      expect(model).to receive(:add_column).with(:foo, :created_at, :datetime_with_timezone, anything)
      expect(model).not_to receive(:add_column).with(:foo, :updated_at, :datetime_with_timezone, anything)

      model.add_timestamps_with_timezone(:foo, columns: [:created_at])
    end

    it 'can add choice of acceptable columns' do
      expect(model).to receive(:add_column).with(:foo, :created_at, :datetime_with_timezone, anything)
      expect(model).to receive(:add_column).with(:foo, :deleted_at, :datetime_with_timezone, anything)
      expect(model).not_to receive(:add_column).with(:foo, :updated_at, :datetime_with_timezone, anything)

      model.add_timestamps_with_timezone(:foo, columns: [:created_at, :deleted_at])
    end

    it 'cannot add unacceptable column names' do
      expect do
        model.add_timestamps_with_timezone(:foo, columns: [:bar])
      end.to raise_error %r/Illegal timestamp column name/
    end

    context 'in a transaction' do
      let(:in_transaction) { true }

      before do
        allow(model).to receive(:add_column).with(any_args).and_call_original
        allow(model).to receive(:add_column)
          .with(:foo, anything, :datetime_with_timezone, anything)
          .and_return(nil)
      end

      it 'cannot add a default value' do
        expect do
          model.add_timestamps_with_timezone(:foo, default: :i_cause_an_error)
        end.to raise_error %r/add_timestamps_with_timezone/
      end

      it 'can add columns without defaults' do
        expect do
          model.add_timestamps_with_timezone(:foo)
        end.not_to raise_error
      end
    end
  end

  describe '#add_concurrent_index' do
    context 'outside a transaction' do
      before do
        allow(model).to receive(:transaction_open?).and_return(false)
        allow(model).to receive(:disable_statement_timeout).and_call_original
      end

      it 'creates the index concurrently' do
        expect(model).to receive(:add_index)
          .with(:users, :foo, algorithm: :concurrently)

        model.add_concurrent_index(:users, :foo)
      end

      it 'creates unique index concurrently' do
        expect(model).to receive(:add_index)
          .with(:users, :foo, { algorithm: :concurrently, unique: true })

        model.add_concurrent_index(:users, :foo, unique: true)
      end

      it 'does nothing if the index exists already' do
        expect(model).to receive(:index_exists?)
          .with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(true)
        expect(model).not_to receive(:add_index)

        model.add_concurrent_index(:users, :foo, unique: true)
      end
    end

    context 'inside a transaction' do
      it 'raises RuntimeError' do
        expect(model).to receive(:transaction_open?).and_return(true)

        expect { model.add_concurrent_index(:users, :foo) }
          .to raise_error(RuntimeError)
      end
    end
  end

  describe '#remove_concurrent_index' do
    context 'outside a transaction' do
      before do
        allow(model).to receive(:transaction_open?).and_return(false)
        allow(model).to receive(:index_exists?).and_return(true)
        allow(model).to receive(:disable_statement_timeout).and_call_original
      end

      describe 'by column name' do
        it 'removes the index concurrently' do
          expect(model).to receive(:remove_index)
            .with(:users, { algorithm: :concurrently, column: :foo })

          model.remove_concurrent_index(:users, :foo)
        end

        it 'does nothing if the index does not exist' do
          expect(model).to receive(:index_exists?)
            .with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(false)
          expect(model).not_to receive(:remove_index)

          model.remove_concurrent_index(:users, :foo, unique: true)
        end

        describe 'by index name' do
          before do
            allow(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(true)
          end

          it 'removes the index concurrently by index name' do
            expect(model).to receive(:remove_index)
              .with(:users, { algorithm: :concurrently, name: "index_x_by_y" })

            model.remove_concurrent_index_by_name(:users, "index_x_by_y")
          end

          it 'does nothing if the index does not exist' do
            expect(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(false)
            expect(model).not_to receive(:remove_index)

            model.remove_concurrent_index_by_name(:users, "index_x_by_y")
          end
        end
      end
    end

    context 'inside a transaction' do
      it 'raises RuntimeError' do
        expect(model).to receive(:transaction_open?).and_return(true)

        expect { model.remove_concurrent_index(:users, :foo) }
          .to raise_error(RuntimeError)
      end
    end
  end

  describe '#add_concurrent_foreign_key' do
    before do
      allow(model).to receive(:foreign_key_exists?).and_return(false)
    end

    context 'inside a transaction' do
      it 'raises an error' do
        expect(model).to receive(:transaction_open?).and_return(true)

        expect do
          model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
        end.to raise_error(RuntimeError)
      end
    end

    context 'outside a transaction' do
      before do
        allow(model).to receive(:transaction_open?).and_return(false)
      end

      context 'ON DELETE statements' do
        context 'on_delete: :nullify' do
          it 'appends ON DELETE SET NULL statement' do
            expect(model).to receive(:disable_statement_timeout).and_call_original
            expect(model).to receive(:execute).with(/statement_timeout/)
            expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
            expect(model).to receive(:execute).with(/RESET ALL/)

            expect(model).to receive(:execute).with(/ON DELETE SET NULL/)

            model.add_concurrent_foreign_key(:projects, :users,
                                             column: :user_id,
                                             on_delete: :nullify)
          end
        end

        context 'on_delete: :cascade' do
          it 'appends ON DELETE CASCADE statement' do
            expect(model).to receive(:disable_statement_timeout).and_call_original
            expect(model).to receive(:execute).with(/statement_timeout/)
            expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
            expect(model).to receive(:execute).with(/RESET ALL/)

            expect(model).to receive(:execute).with(/ON DELETE CASCADE/)

            model.add_concurrent_foreign_key(:projects, :users,
                                             column: :user_id,
                                             on_delete: :cascade)
          end
        end

        context 'on_delete: nil' do
          it 'appends no ON DELETE statement' do
            expect(model).to receive(:disable_statement_timeout).and_call_original
            expect(model).to receive(:execute).with(/statement_timeout/)
            expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
            expect(model).to receive(:execute).with(/RESET ALL/)

            expect(model).not_to receive(:execute).with(/ON DELETE/)

            model.add_concurrent_foreign_key(:projects, :users,
                                             column: :user_id,
                                             on_delete: nil)
          end
        end
      end

      context 'when no custom key name is supplied' do
        it 'creates a concurrent foreign key and validates it' do
          expect(model).to receive(:disable_statement_timeout).and_call_original
          expect(model).to receive(:execute).with(/statement_timeout/)
          expect(model).to receive(:execute).ordered.with(/NOT VALID/)
          expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
          expect(model).to receive(:execute).with(/RESET ALL/)

          model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
        end

        it 'does not create a foreign key if it exists already' do
          name = model.concurrent_foreign_key_name(:projects, :user_id)
          expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
                                                              column: :user_id,
                                                              on_delete: :cascade,
                                                              name: name).and_return(true)

          expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
          expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/)

          model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
        end
      end

      context 'when a custom key name is supplied' do
        context 'for creating a new foreign key for a column that does not presently exist' do
          it 'creates a new foreign key' do
            expect(model).to receive(:disable_statement_timeout).and_call_original
            expect(model).to receive(:execute).with(/statement_timeout/)
            expect(model).to receive(:execute).ordered.with(/NOT VALID/)
            expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/)
            expect(model).to receive(:execute).with(/RESET ALL/)

            model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo)
          end
        end

        context 'for creating a duplicate foreign key for a column that presently exists' do
          context 'when the supplied key name is the same as the existing foreign key name' do
            it 'does not create a new foreign key' do
              expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
                                                                  name: :foo,
                                                                  on_delete: :cascade,
                                                                  column: :user_id).and_return(true)

              expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
              expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/)

              model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo)
            end
          end

          context 'when the supplied key name is different from the existing foreign key name' do
            it 'creates a new foreign key' do
              expect(model).to receive(:disable_statement_timeout).and_call_original
              expect(model).to receive(:execute).with(/statement_timeout/)
              expect(model).to receive(:execute).ordered.with(/NOT VALID/)
              expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+bar/)
              expect(model).to receive(:execute).with(/RESET ALL/)

              model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :bar)
            end
          end
        end
      end

      describe 'validate option' do
        let(:args) { [:projects, :users] }
        let(:options) { { column: :user_id, on_delete: nil } }

        context 'when validate is supplied with a falsey value' do
          it_behaves_like 'skips validation', validate: false
          it_behaves_like 'skips validation', validate: nil
        end

        context 'when validate is supplied with a truthy value' do
          it_behaves_like 'performs validation', validate: true
          it_behaves_like 'performs validation', validate: :whatever
        end

        context 'when validate is not supplied' do
          it_behaves_like 'performs validation', {}
        end
      end
    end
  end

  describe '#validate_foreign_key' do
    context 'when name is provided' do
      it 'does not infer the foreign key constraint name' do
        expect(model).to receive(:foreign_key_exists?).with(:projects, name: :foo).and_return(true)

        aggregate_failures do
          expect(model).not_to receive(:concurrent_foreign_key_name)
          expect(model).to receive(:disable_statement_timeout).and_call_original
          expect(model).to receive(:execute).with(/statement_timeout/)
          expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/)
          expect(model).to receive(:execute).ordered.with(/RESET ALL/)
        end

        model.validate_foreign_key(:projects, :user_id, name: :foo)
      end
    end

    context 'when name is not provided' do
      it 'infers the foreign key constraint name' do
        expect(model).to receive(:foreign_key_exists?).with(:projects, name: anything).and_return(true)

        aggregate_failures do
          expect(model).to receive(:concurrent_foreign_key_name)
          expect(model).to receive(:disable_statement_timeout).and_call_original
          expect(model).to receive(:execute).with(/statement_timeout/)
          expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/)
          expect(model).to receive(:execute).ordered.with(/RESET ALL/)
        end

        model.validate_foreign_key(:projects, :user_id)
      end

      context 'when the inferred foreign key constraint does not exist' do
        it 'raises an error' do
          expect(model).to receive(:foreign_key_exists?).and_return(false)

          expect { model.validate_foreign_key(:projects, :user_id) }.to raise_error(/cannot find/)
        end
      end
    end
  end

  describe '#concurrent_foreign_key_name' do
    it 'returns the name for a foreign key' do
      name = model.concurrent_foreign_key_name(:this_is_a_very_long_table_name,
                                               :with_a_very_long_column_name)

      expect(name).to be_an_instance_of(String)
      expect(name.length).to eq(13)
    end
  end

  describe '#foreign_key_exists?' do
    before do
      key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(:projects, :users, { column: :non_standard_id, name: :fk_projects_users_non_standard_id, on_delete: :cascade })
      allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
    end

    shared_examples_for 'foreign key checks' do
      it 'finds existing foreign keys by column' do
        expect(model.foreign_key_exists?(:projects, target_table, column: :non_standard_id)).to be_truthy
      end

      it 'finds existing foreign keys by name' do
        expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id)).to be_truthy
      end

      it 'finds existing foreign_keys by name and column' do
        expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id)).to be_truthy
      end

      it 'finds existing foreign_keys by name, column and on_delete' do
        expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id, on_delete: :cascade)).to be_truthy
      end

      it 'finds existing foreign keys by target table only' do
        expect(model.foreign_key_exists?(:projects, target_table)).to be_truthy
      end

      it 'compares by column name if given' do
        expect(model.foreign_key_exists?(:projects, target_table, column: :user_id)).to be_falsey
      end

      it 'compares by foreign key name if given' do
        expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name)).to be_falsey
      end

      it 'compares by foreign key name and column if given' do
        expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name, column: :non_standard_id)).to be_falsey
      end

      it 'compares by foreign key name, column and on_delete if given' do
        expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id, on_delete: :nullify)).to be_falsey
      end
    end

    context 'without specifying a target table' do
      let(:target_table) { nil }

      it_behaves_like 'foreign key checks'
    end

    context 'specifying a target table' do
      let(:target_table) { :users }

      it_behaves_like 'foreign key checks'
    end

    it 'compares by target table if no column given' do
      expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey
    end
  end

  describe '#disable_statement_timeout' do
    it 'disables statement timeouts to current transaction only' do
      expect(model).to receive(:execute).with('SET LOCAL statement_timeout TO 0')

      model.disable_statement_timeout
    end

    # this specs runs without an enclosing transaction (:delete truncation method for db_cleaner)
    context 'with real environment', :delete do
      before do
        model.execute("SET statement_timeout TO '20000'")
      end

      after do
        model.execute('RESET ALL')
      end

      it 'defines statement to 0 only for current transaction' do
        expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')

        model.connection.transaction do
          model.disable_statement_timeout
          expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
        end

        expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
      end

      context 'when passing a blocks' do
        it 'disables statement timeouts on session level and executes the block' do
          expect(model).to receive(:execute).with('SET statement_timeout TO 0')
          expect(model).to receive(:execute).with('RESET ALL').at_least(:once)

          expect { |block| model.disable_statement_timeout(&block) }.to yield_control
        end

        # this specs runs without an enclosing transaction (:delete truncation method for db_cleaner)
        context 'with real environment', :delete do
          before do
            model.execute("SET statement_timeout TO '20000'")
          end

          after do
            model.execute('RESET ALL')
          end

          it 'defines statement to 0 for any code run inside the block' do
            expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')

            model.disable_statement_timeout do
              model.connection.transaction do
                expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
              end

              expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
            end
          end
        end
      end
    end
  end

  describe '#true_value' do
    it 'returns the appropriate value' do
      expect(model.true_value).to eq("'t'")
    end
  end

  describe '#false_value' do
    it 'returns the appropriate value' do
      expect(model.false_value).to eq("'f'")
    end
  end

  describe '#update_column_in_batches' do
    context 'when running outside of a transaction' do
      before do
        expect(model).to receive(:transaction_open?).and_return(false)

        create_list(:project, 5)
      end

      it 'updates all the rows in a table' do
        model.update_column_in_batches(:projects, :description_html, 'foo')

        expect(Project.where(description_html: 'foo').count).to eq(5)
      end

      it 'updates boolean values correctly' do
        model.update_column_in_batches(:projects, :archived, true)

        expect(Project.where(archived: true).count).to eq(5)
      end

      context 'when a block is supplied' do
        it 'yields an Arel table and query object to the supplied block' do
          first_id = Project.first.id

          model.update_column_in_batches(:projects, :archived, true) do |t, query|
            query.where(t[:id].eq(first_id))
          end

          expect(Project.where(archived: true).count).to eq(1)
        end
      end

      context 'when the value is Arel.sql (Arel::Nodes::SqlLiteral)' do
        it 'updates the value as a SQL expression' do
          model.update_column_in_batches(:projects, :star_count, Arel.sql('1+1'))

          expect(Project.sum(:star_count)).to eq(2 * Project.count)
        end
      end
    end

    context 'when running inside the transaction' do
      it 'raises RuntimeError' do
        expect(model).to receive(:transaction_open?).and_return(true)

        expect do
          model.update_column_in_batches(:projects, :star_count, Arel.sql('1+1'))
        end.to raise_error(RuntimeError)
      end
    end
  end

  describe '#add_column_with_default' do
    context 'outside of a transaction' do
      context 'when a column limit is not set' do
        before do
          expect(model).to receive(:transaction_open?)
            .and_return(false)
            .at_least(:once)

          expect(model).to receive(:transaction).and_yield

          expect(model).to receive(:add_column)
            .with(:projects, :foo, :integer, default: nil)

          expect(model).to receive(:change_column_default)
            .with(:projects, :foo, 10)
        end

        it 'adds the column while allowing NULL values' do
          expect(model).to receive(:update_column_in_batches)
            .with(:projects, :foo, 10)

          expect(model).not_to receive(:change_column_null)

          model.add_column_with_default(:projects, :foo, :integer,
                                        default: 10,
                                        allow_null: true)
        end

        it 'adds the column while not allowing NULL values' do
          expect(model).to receive(:update_column_in_batches)
            .with(:projects, :foo, 10)

          expect(model).to receive(:change_column_null)
            .with(:projects, :foo, false)

          model.add_column_with_default(:projects, :foo, :integer, default: 10)
        end

        it 'removes the added column whenever updating the rows fails' do
          expect(model).to receive(:update_column_in_batches)
            .with(:projects, :foo, 10)
            .and_raise(RuntimeError)

          expect(model).to receive(:remove_column)
            .with(:projects, :foo)

          expect do
            model.add_column_with_default(:projects, :foo, :integer, default: 10)
          end.to raise_error(RuntimeError)
        end

        it 'removes the added column whenever changing a column NULL constraint fails' do
          expect(model).to receive(:change_column_null)
            .with(:projects, :foo, false)
            .and_raise(RuntimeError)

          expect(model).to receive(:remove_column)
            .with(:projects, :foo)

          expect do
            model.add_column_with_default(:projects, :foo, :integer, default: 10)
          end.to raise_error(RuntimeError)
        end
      end

      context 'when a column limit is set' do
        it 'adds the column with a limit' do
          allow(model).to receive(:transaction_open?).and_return(false)
          allow(model).to receive(:transaction).and_yield
          allow(model).to receive(:update_column_in_batches).with(:projects, :foo, 10)
          allow(model).to receive(:change_column_null).with(:projects, :foo, false)
          allow(model).to receive(:change_column_default).with(:projects, :foo, 10)

          expect(model).to receive(:add_column)
            .with(:projects, :foo, :integer, default: nil, limit: 8)

          model.add_column_with_default(:projects, :foo, :integer, default: 10, limit: 8)
        end
      end

      it 'adds a column with an array default value for a jsonb type' do
        create(:project)
        allow(model).to receive(:transaction_open?).and_return(false)
        allow(model).to receive(:transaction).and_yield
        expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '[{"foo":"json"}]').and_call_original

        model.add_column_with_default(:projects, :foo, :jsonb, default: [{ foo: "json" }])
      end

      it 'adds a column with an object default value for a jsonb type' do
        create(:project)
        allow(model).to receive(:transaction_open?).and_return(false)
        allow(model).to receive(:transaction).and_yield
        expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '{"foo":"json"}').and_call_original

        model.add_column_with_default(:projects, :foo, :jsonb, default: { foo: "json" })
      end
    end

    context 'inside a transaction' do
      it 'raises RuntimeError' do
        expect(model).to receive(:transaction_open?).and_return(true)

        expect do
          model.add_column_with_default(:projects, :foo, :integer, default: 10)
        end.to raise_error(RuntimeError)
      end
    end
  end

  describe '#rename_column_concurrently' do
    context 'in a transaction' do
      it 'raises RuntimeError' do
        allow(model).to receive(:transaction_open?).and_return(true)

        expect { model.rename_column_concurrently(:users, :old, :new) }
          .to raise_error(RuntimeError)
      end
    end

    context 'outside a transaction' do
      let(:old_column) do
        double(:column,
               type: :integer,
               limit: 8,
               default: 0,
               null: false,
               precision: 5,
               scale: 1)
      end

      let(:trigger_name) { model.rename_trigger_name(:users, :old, :new) }

      before do
        allow(model).to receive(:transaction_open?).and_return(false)
        allow(model).to receive(:column_for).and_return(old_column)
      end

      it 'renames a column concurrently' do
        expect(model).to receive(:check_trigger_permissions!).with(:users)

        expect(model).to receive(:install_rename_triggers_for_postgresql)
          .with(trigger_name, '"users"', '"old"', '"new"')

        expect(model).to receive(:add_column)
          .with(:users, :new, :integer,
                limit: old_column.limit,
                precision: old_column.precision,
                scale: old_column.scale)

        expect(model).to receive(:change_column_default)
          .with(:users, :new, old_column.default)

        expect(model).to receive(:update_column_in_batches)

        expect(model).to receive(:change_column_null).with(:users, :new, false)

        expect(model).to receive(:copy_indexes).with(:users, :old, :new)
        expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new)

        model.rename_column_concurrently(:users, :old, :new)
      end

      context 'when default is false' do
        let(:old_column) do
          double(:column,
                 type: :boolean,
                 limit: nil,
                 default: false,
                 null: false,
                 precision: nil,
                 scale: nil)
        end

        it 'copies the default to the new column' do
          expect(model).to receive(:change_column_default)
            .with(:users, :new, old_column.default)

          model.rename_column_concurrently(:users, :old, :new)
        end
      end
    end
  end

  describe '#undo_rename_column_concurrently' do
    it 'reverses the operations of rename_column_concurrently' do
      expect(model).to receive(:check_trigger_permissions!).with(:users)

      expect(model).to receive(:remove_rename_triggers_for_postgresql)
        .with(:users, /trigger_.{12}/)

      expect(model).to receive(:remove_column).with(:users, :new)

      model.undo_rename_column_concurrently(:users, :old, :new)
    end
  end

  describe '#cleanup_concurrent_column_rename' do
    it 'cleans up the renaming procedure' do
      expect(model).to receive(:check_trigger_permissions!).with(:users)

      expect(model).to receive(:remove_rename_triggers_for_postgresql)
        .with(:users, /trigger_.{12}/)

      expect(model).to receive(:remove_column).with(:users, :old)

      model.cleanup_concurrent_column_rename(:users, :old, :new)
    end
  end

  describe '#undo_cleanup_concurrent_column_rename' do
    context 'in a transaction' do
      it 'raises RuntimeError' do
        allow(model).to receive(:transaction_open?).and_return(true)

        expect { model.undo_cleanup_concurrent_column_rename(:users, :old, :new) }
          .to raise_error(RuntimeError)
      end
    end

    context 'outside a transaction' do
      let(:new_column) do
        double(:column,
               type: :integer,
               limit: 8,
               default: 0,
               null: false,
               precision: 5,
               scale: 1)
      end

      let(:trigger_name) { model.rename_trigger_name(:users, :old, :new) }

      before do
        allow(model).to receive(:transaction_open?).and_return(false)
        allow(model).to receive(:column_for).and_return(new_column)
      end

      it 'reverses the operations of cleanup_concurrent_column_rename' do
        expect(model).to receive(:check_trigger_permissions!).with(:users)

        expect(model).to receive(:install_rename_triggers_for_postgresql)
          .with(trigger_name, '"users"', '"old"', '"new"')

        expect(model).to receive(:add_column)
          .with(:users, :old, :integer,
                limit: new_column.limit,
                precision: new_column.precision,
                scale: new_column.scale)

        expect(model).to receive(:change_column_default)
          .with(:users, :old, new_column.default)

        expect(model).to receive(:update_column_in_batches)

        expect(model).to receive(:change_column_null).with(:users, :old, false)

        expect(model).to receive(:copy_indexes).with(:users, :new, :old)
        expect(model).to receive(:copy_foreign_keys).with(:users, :new, :old)

        model.undo_cleanup_concurrent_column_rename(:users, :old, :new)
      end

      context 'when default is false' do
        let(:new_column) do
          double(:column,
                 type: :boolean,
                 limit: nil,
                 default: false,
                 null: false,
                 precision: nil,
                 scale: nil)
        end

        it 'copies the default to the old column' do
          expect(model).to receive(:change_column_default)
            .with(:users, :old, new_column.default)

          model.undo_cleanup_concurrent_column_rename(:users, :old, :new)
        end
      end
    end
  end

  describe '#change_column_type_concurrently' do
    it 'changes the column type' do
      expect(model).to receive(:rename_column_concurrently)
        .with('users', 'username', 'username_for_type_change', type: :text)

      model.change_column_type_concurrently('users', 'username', :text)
    end
  end

  describe '#cleanup_concurrent_column_type_change' do
    it 'cleans up the type changing procedure' do
      expect(model).to receive(:cleanup_concurrent_column_rename)
        .with('users', 'username', 'username_for_type_change')

      expect(model).to receive(:rename_column)
        .with('users', 'username_for_type_change', 'username')

      model.cleanup_concurrent_column_type_change('users', 'username')
    end
  end

  describe '#install_rename_triggers_for_postgresql' do
    it 'installs the triggers for PostgreSQL' do
      expect(model).to receive(:execute)
        .with(/CREATE OR REPLACE FUNCTION foo()/m)

      expect(model).to receive(:execute)
        .with(/DROP TRIGGER IF EXISTS foo/m)

      expect(model).to receive(:execute)
        .with(/CREATE TRIGGER foo/m)

      model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
    end

    it 'does not fail if trigger already exists' do
      model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
      model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
    end
  end

  describe '#remove_rename_triggers_for_postgresql' do
    it 'removes the function and trigger' do
      expect(model).to receive(:execute).with('DROP TRIGGER IF EXISTS foo ON bar')
      expect(model).to receive(:execute).with('DROP FUNCTION IF EXISTS foo()')

      model.remove_rename_triggers_for_postgresql('bar', 'foo')
    end
  end

  describe '#rename_trigger_name' do
    it 'returns a String' do
      expect(model.rename_trigger_name(:users, :foo, :bar))
        .to match(/trigger_.{12}/)
    end
  end

  describe '#indexes_for' do
    it 'returns the indexes for a column' do
      idx1 = double(:idx, columns: %w(project_id))
      idx2 = double(:idx, columns: %w(user_id))

      allow(model).to receive(:indexes).with('table').and_return([idx1, idx2])

      expect(model.indexes_for('table', :user_id)).to eq([idx2])
    end
  end

  describe '#foreign_keys_for' do
    it 'returns the foreign keys for a column' do
      fk1 = double(:fk, column: 'project_id')
      fk2 = double(:fk, column: 'user_id')

      allow(model).to receive(:foreign_keys).with('table').and_return([fk1, fk2])

      expect(model.foreign_keys_for('table', :user_id)).to eq([fk2])
    end
  end

  describe '#copy_indexes' do
    context 'using a regular index using a single column' do
      it 'copies the index' do
        index = double(:index,
                       columns: %w(project_id),
                       name: 'index_on_issues_project_id',
                       using: nil,
                       where: nil,
                       opclasses: {},
                       unique: false,
                       lengths: [],
                       orders: [])

        allow(model).to receive(:indexes_for).with(:issues, 'project_id')
          .and_return([index])

        expect(model).to receive(:add_concurrent_index)
          .with(:issues,
                %w(gl_project_id),
                unique: false,
                name: 'index_on_issues_gl_project_id',
                length: [],
                order: [])

        model.copy_indexes(:issues, :project_id, :gl_project_id)
      end
    end

    context 'using a regular index with multiple columns' do
      it 'copies the index' do
        index = double(:index,
                       columns: %w(project_id foobar),
                       name: 'index_on_issues_project_id_foobar',
                       using: nil,
                       where: nil,
                       opclasses: {},
                       unique: false,
                       lengths: [],
                       orders: [])

        allow(model).to receive(:indexes_for).with(:issues, 'project_id')
          .and_return([index])

        expect(model).to receive(:add_concurrent_index)
          .with(:issues,
                %w(gl_project_id foobar),
                unique: false,
                name: 'index_on_issues_gl_project_id_foobar',
                length: [],
                order: [])

        model.copy_indexes(:issues, :project_id, :gl_project_id)
      end
    end

    context 'using an index with a WHERE clause' do
      it 'copies the index' do
        index = double(:index,
                       columns: %w(project_id),
                       name: 'index_on_issues_project_id',
                       using: nil,
                       where: 'foo',
                       opclasses: {},
                       unique: false,
                       lengths: [],
                       orders: [])

        allow(model).to receive(:indexes_for).with(:issues, 'project_id')
          .and_return([index])

        expect(model).to receive(:add_concurrent_index)
          .with(:issues,
                %w(gl_project_id),
                unique: false,
                name: 'index_on_issues_gl_project_id',
                length: [],
                order: [],
                where: 'foo')

        model.copy_indexes(:issues, :project_id, :gl_project_id)
      end
    end

    context 'using an index with a USING clause' do
      it 'copies the index' do
        index = double(:index,
                       columns: %w(project_id),
                       name: 'index_on_issues_project_id',
                       where: nil,
                       using: 'foo',
                       opclasses: {},
                       unique: false,
                       lengths: [],
                       orders: [])

        allow(model).to receive(:indexes_for).with(:issues, 'project_id')
          .and_return([index])

        expect(model).to receive(:add_concurrent_index)
          .with(:issues,
                %w(gl_project_id),
                unique: false,
                name: 'index_on_issues_gl_project_id',
                length: [],
                order: [],
                using: 'foo')

        model.copy_indexes(:issues, :project_id, :gl_project_id)
      end
    end

    context 'using an index with custom operator classes' do
      it 'copies the index' do
        index = double(:index,
                       columns: %w(project_id),
                       name: 'index_on_issues_project_id',
                       using: nil,
                       where: nil,
                       opclasses: { 'project_id' => 'bar' },
                       unique: false,
                       lengths: [],
                       orders: [])

        allow(model).to receive(:indexes_for).with(:issues, 'project_id')
          .and_return([index])

        expect(model).to receive(:add_concurrent_index)
          .with(:issues,
                %w(gl_project_id),
                unique: false,
                name: 'index_on_issues_gl_project_id',
                length: [],
                order: [],
                opclasses: { 'gl_project_id' => 'bar' })

        model.copy_indexes(:issues, :project_id, :gl_project_id)
      end
    end

    describe 'using an index of which the name does not contain the source column' do
      it 'raises RuntimeError' do
        index = double(:index,
                       columns: %w(project_id),
                       name: 'index_foobar_index',
                       using: nil,
                       where: nil,
                       opclasses: {},
                       unique: false,
                       lengths: [],
                       orders: [])

        allow(model).to receive(:indexes_for).with(:issues, 'project_id')
          .and_return([index])

        expect { model.copy_indexes(:issues, :project_id, :gl_project_id) }
          .to raise_error(RuntimeError)
      end
    end
  end

  describe '#copy_foreign_keys' do
    it 'copies foreign keys from one column to another' do
      fk = double(:fk,
                  from_table: 'issues',
                  to_table: 'projects',
                  on_delete: :cascade)

      allow(model).to receive(:foreign_keys_for).with(:issues, :project_id)
        .and_return([fk])

      expect(model).to receive(:add_concurrent_foreign_key)
        .with('issues', 'projects', column: :gl_project_id, on_delete: :cascade)

      model.copy_foreign_keys(:issues, :project_id, :gl_project_id)
    end
  end

  describe '#column_for' do
    it 'returns a column object for an existing column' do
      column = model.column_for(:users, :id)

      expect(column.name).to eq('id')
    end

    it 'returns nil when a column does not exist' do
      expect(model.column_for(:users, :kittens)).to be_nil
    end
  end

  describe '#replace_sql' do
    it 'builds the sql with correct functions' do
      expect(model.replace_sql(Arel::Table.new(:users)[:first_name], "Alice", "Eve").to_s)
        .to include('regexp_replace')
    end

    describe 'results' do
      let!(:user) { create(:user, name: 'Kathy Alice Aliceson') }

      it 'replaces the correct part of the string' do
        allow(model).to receive(:transaction_open?).and_return(false)
        query = model.replace_sql(Arel::Table.new(:users)[:name], 'Alice', 'Eve')

        model.update_column_in_batches(:users, :name, query)

        expect(user.reload.name).to eq('Kathy Eve Aliceson')
      end
    end
  end

  describe 'sidekiq migration helpers', :redis do
    let(:worker) do
      Class.new do
        include Sidekiq::Worker
        sidekiq_options queue: 'test'
      end
    end

    describe '#sidekiq_queue_length' do
      context 'when queue is empty' do
        it 'returns zero' do
          Sidekiq::Testing.disable! do
            expect(model.sidekiq_queue_length('test')).to eq 0
          end
        end
      end

      context 'when queue contains jobs' do
        it 'returns correct size of the queue' do
          Sidekiq::Testing.disable! do
            worker.perform_async('Something', [1])
            worker.perform_async('Something', [2])

            expect(model.sidekiq_queue_length('test')).to eq 2
          end
        end
      end
    end

    describe '#migrate_sidekiq_queue' do
      it 'migrates jobs from one sidekiq queue to another' do
        Sidekiq::Testing.disable! do
          worker.perform_async('Something', [1])
          worker.perform_async('Something', [2])

          expect(model.sidekiq_queue_length('test')).to eq 2
          expect(model.sidekiq_queue_length('new_test')).to eq 0

          model.sidekiq_queue_migrate('test', to: 'new_test')

          expect(model.sidekiq_queue_length('test')).to eq 0
          expect(model.sidekiq_queue_length('new_test')).to eq 2
        end
      end
    end
  end

  describe '#check_trigger_permissions!' do
    it 'does nothing when the user has the correct permissions' do
      expect { model.check_trigger_permissions!('users') }
        .not_to raise_error
    end

    it 'raises RuntimeError when the user does not have the correct permissions' do
      allow(Gitlab::Database::Grant).to receive(:create_and_execute_trigger?)
        .with('kittens')
        .and_return(false)

      expect { model.check_trigger_permissions!('kittens') }
        .to raise_error(RuntimeError, /Your database user is not allowed/)
    end
  end

  describe '#bulk_queue_background_migration_jobs_by_range' do
    context 'when the model has an ID column' do
      let!(:id1) { create(:user).id }
      let!(:id2) { create(:user).id }
      let!(:id3) { create(:user).id }

      before do
        User.class_eval do
          include EachBatch
        end
      end

      context 'with enough rows to bulk queue jobs more than once' do
        before do
          stub_const('Gitlab::Database::MigrationHelpers::BACKGROUND_MIGRATION_JOB_BUFFER_SIZE', 1)
        end

        it 'queues jobs correctly' do
          Sidekiq::Testing.fake! do
            model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)

            expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
            expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
          end
        end

        it 'queues jobs in groups of buffer size 1' do
          expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]]])
          expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id3, id3]]])

          model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
        end
      end

      context 'with not enough rows to bulk queue jobs more than once' do
        it 'queues jobs correctly' do
          Sidekiq::Testing.fake! do
            model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)

            expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
            expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
          end
        end

        it 'queues jobs in bulk all at once (big buffer size)' do
          expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]],
                                                                                  ['FooJob', [id3, id3]]])

          model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
        end
      end

      context 'without specifying batch_size' do
        it 'queues jobs correctly' do
          Sidekiq::Testing.fake! do
            model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob')

            expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3]])
          end
        end
      end
    end

    context "when the model doesn't have an ID column" do
      it 'raises error (for now)' do
        expect do
          model.bulk_queue_background_migration_jobs_by_range(ProjectAuthorization, 'FooJob')
        end.to raise_error(StandardError, /does not have an ID/)
      end
    end
  end

  describe '#queue_background_migration_jobs_by_range_at_intervals' do
    context 'when the model has an ID column' do
      let!(:id1) { create(:user).id }
      let!(:id2) { create(:user).id }
      let!(:id3) { create(:user).id }

      around do |example|
        Timecop.freeze { example.run }
      end

      before do
        User.class_eval do
          include EachBatch
        end
      end

      context 'with batch_size option' do
        it 'queues jobs correctly' do
          Sidekiq::Testing.fake! do
            model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2)

            expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
            expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
            expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
            expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(20.minutes.from_now.to_f)
          end
        end
      end

      context 'without batch_size option' do
        it 'queues jobs correctly' do
          Sidekiq::Testing.fake! do
            model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes)

            expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3]])
            expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
          end
        end
      end
    end

    context "when the model doesn't have an ID column" do
      it 'raises error (for now)' do
        expect do
          model.queue_background_migration_jobs_by_range_at_intervals(ProjectAuthorization, 'FooJob', 10.seconds)
        end.to raise_error(StandardError, /does not have an ID/)
      end
    end
  end
|
|
|
|
|
|
|
|
  describe '#change_column_type_using_background_migration' do
    let!(:issue) { create(:issue, :closed, closed_at: Time.zone.now) }

    let(:issue_model) do
      Class.new(ActiveRecord::Base) do
        self.table_name = 'issues'
        include EachBatch
      end
    end

    it 'changes the type of a column using a background migration' do
      expect(model)
        .to receive(:add_column)
        .with('issues', 'closed_at_for_type_change', :datetime_with_timezone)

      expect(model)
        .to receive(:install_rename_triggers)
        .with('issues', :closed_at, 'closed_at_for_type_change')

      expect(BackgroundMigrationWorker)
        .to receive(:perform_in)
        .ordered
        .with(
          10.minutes,
          'CopyColumn',
          ['issues', :closed_at, 'closed_at_for_type_change', issue.id, issue.id]
        )

      expect(BackgroundMigrationWorker)
        .to receive(:perform_in)
        .ordered
        .with(
          1.hour + 10.minutes,
          'CleanupConcurrentTypeChange',
          ['issues', :closed_at, 'closed_at_for_type_change']
        )

      expect(Gitlab::BackgroundMigration)
        .to receive(:steal)
        .ordered
        .with('CopyColumn')

      expect(Gitlab::BackgroundMigration)
        .to receive(:steal)
        .ordered
        .with('CleanupConcurrentTypeChange')

      model.change_column_type_using_background_migration(
        issue_model.all,
        :closed_at,
        :datetime_with_timezone
      )
    end
  end
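
  # The next block covers #rename_column_using_background_migration, the rename
  # counterpart of the type-change helper above: it copies data into the new column in
  # the background and schedules 'CleanupConcurrentRename' afterwards. A minimal,
  # hedged sketch (table and column names are illustrative):
  #
  #   rename_column_using_background_migration('issues', :closed_at, :closed_at_timestamp)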
  describe '#rename_column_using_background_migration' do
    let!(:issue) { create(:issue, :closed, closed_at: Time.zone.now) }

    it 'renames a column using a background migration' do
      expect(model)
        .to receive(:add_column)
        .with(
          'issues',
          :closed_at_timestamp,
          :datetime_with_timezone,
          limit: anything,
          precision: anything,
          scale: anything
        )

      expect(model)
        .to receive(:install_rename_triggers)
        .with('issues', :closed_at, :closed_at_timestamp)

      expect(BackgroundMigrationWorker)
        .to receive(:perform_in)
        .ordered
        .with(
          10.minutes,
          'CopyColumn',
          ['issues', :closed_at, :closed_at_timestamp, issue.id, issue.id]
        )

      expect(BackgroundMigrationWorker)
        .to receive(:perform_in)
        .ordered
        .with(
          1.hour + 10.minutes,
          'CleanupConcurrentRename',
          ['issues', :closed_at, :closed_at_timestamp]
        )

      expect(Gitlab::BackgroundMigration)
        .to receive(:steal)
        .ordered
        .with('CopyColumn')

      expect(Gitlab::BackgroundMigration)
        .to receive(:steal)
        .ordered
        .with('CleanupConcurrentRename')

      model.rename_column_using_background_migration(
        'issues',
        :closed_at,
        :closed_at_timestamp
      )
    end
  end
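
  # The next block covers #perform_background_migration_inline?, which migrations can
  # use to decide whether queued background jobs should be executed synchronously
  # (test and development) or left to Sidekiq (production). A hedged sketch:
  #
  #   if perform_background_migration_inline?
  #     # run the queued jobs immediately instead of waiting for Sidekiq
  #   end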
  describe '#perform_background_migration_inline?' do
    it 'returns true in a test environment' do
      stub_rails_env('test')

      expect(model.perform_background_migration_inline?).to eq(true)
    end

    it 'returns true in a development environment' do
      stub_rails_env('development')

      expect(model.perform_background_migration_inline?).to eq(true)
    end

    it 'returns false in a production environment' do
      stub_rails_env('production')

      expect(model.perform_background_migration_inline?).to eq(false)
    end
  end
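
  # The next block covers #index_exists_by_name?, which checks for an index by its
  # name rather than by its column list, so it also finds functional indexes. A hedged
  # sketch (the index name is illustrative):
  #
  #   unless index_exists_by_name?(:projects, 'index_projects_on_lower_path')
  #     # create the index
  #   end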
  describe '#index_exists_by_name?' do
    it 'returns true if an index exists' do
      ActiveRecord::Base.connection.execute(
        'CREATE INDEX test_index_for_index_exists ON projects (path);'
      )

      expect(model.index_exists_by_name?(:projects, 'test_index_for_index_exists'))
        .to be_truthy
    end

    it 'returns false if the index does not exist' do
      expect(model.index_exists_by_name?(:projects, 'this_does_not_exist'))
        .to be_falsy
    end

    context 'when an index with a function exists' do
      before do
        ActiveRecord::Base.connection.execute(
          'CREATE INDEX test_index ON projects (LOWER(path));'
        )
      end

      after do
        # Drop the test index so it does not leak into other examples.
        ActiveRecord::Base.connection.execute('DROP INDEX IF EXISTS test_index;')
      end

      it 'returns true if an index exists' do
        expect(model.index_exists_by_name?(:projects, 'test_index'))
          .to be_truthy
      end
    end
  end
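
  # The next block covers #create_or_update_plan_limit, which upserts a single value
  # into plan_limits for the named plan (insert on first run, update when the row
  # already exists). A hedged sketch mirroring the expectation below (the limit value
  # is illustrative):
  #
  #   create_or_update_plan_limit('project_hooks', 'free', 100)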
  describe '#create_or_update_plan_limit' do
    it 'creates or updates plan limits' do
      expect(model).to receive(:execute).with <<~SQL
        INSERT INTO plan_limits (plan_id, "project_hooks")
        VALUES
        ((SELECT id FROM plans WHERE name = 'free' LIMIT 1), '10')
        ON CONFLICT (plan_id) DO UPDATE SET "project_hooks" = EXCLUDED."project_hooks";
      SQL

      model.create_or_update_plan_limit('project_hooks', 'free', 10)
    end
  end
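
  # The next block covers #with_lock_retries, which wraps the given block so
  # migrations can retry statements that need heavyweight locks; the example below
  # only checks the structured log output. A hedged sketch (the DDL inside the block
  # is illustrative):
  #
  #   with_lock_retries do
  #     add_column :projects, :example_flag, :boolean, default: false
  #   end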
  describe '#with_lock_retries' do
    let(:buffer) { StringIO.new }
    let(:in_memory_logger) { Gitlab::JsonLogger.new(buffer) }
    let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }

    it 'sets the migration class name in the logs' do
      model.with_lock_retries(env: env, logger: in_memory_logger) { }

      buffer.rewind
      expect(buffer.read).to include("\"class\":\"#{model.class}\"")
    end
  end
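
  # The next block covers #backfill_iids, which fills in per-project internal ids
  # (iid) for existing rows of a table whose model uses AtomicInternalId. A hedged
  # sketch of a migration calling it (the table is the one exercised below):
  #
  #   def up
  #     backfill_iids('issues')
  #   end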
  describe '#backfill_iids' do
    include MigrationsHelpers

    class self::Issue < ActiveRecord::Base
      include AtomicInternalId

      self.table_name = 'issues'
      self.inheritance_column = :_type_disabled

      belongs_to :project, class_name: "::Project"

      has_internal_id :iid,
        scope: :project,
        init: ->(s) { s&.project&.issues&.maximum(:iid) },
        backfill: true,
        presence: false
    end

    let(:namespaces) { table(:namespaces) }
    let(:projects) { table(:projects) }
    let(:issues) { table(:issues) }

    def setup
      namespace = namespaces.create!(name: 'foo', path: 'foo')
      project = projects.create!(namespace_id: namespace.id)

      project
    end

    it 'generates iids properly for models created after the migration' do
      project = setup

      model.backfill_iids('issues')

      issue = self.class::Issue.create!(project_id: project.id)

      expect(issue.iid).to eq(1)
    end

    it 'generates iids properly for models created after the migration when iids are backfilled' do
      project = setup
      issue_a = issues.create!(project_id: project.id)

      model.backfill_iids('issues')

      issue_b = self.class::Issue.create!(project_id: project.id)

      expect(issue_a.reload.iid).to eq(1)
      expect(issue_b.iid).to eq(2)
    end

    it 'generates iids properly for models created after the migration across multiple projects' do
      project_a = setup
      project_b = setup
      issues.create!(project_id: project_a.id)
      issues.create!(project_id: project_b.id)
      issues.create!(project_id: project_b.id)

      model.backfill_iids('issues')

      issue_a = self.class::Issue.create!(project_id: project_a.id)
      issue_b = self.class::Issue.create!(project_id: project_b.id)

      expect(issue_a.iid).to eq(2)
      expect(issue_b.iid).to eq(3)
    end

    context 'when the new code creates a row post deploy but before the migration runs' do
      it 'does not change the row iid' do
        project = setup
        issue = self.class::Issue.create!(project_id: project.id)

        model.backfill_iids('issues')

        expect(issue.reload.iid).to eq(1)
      end

      it 'backfills iids for rows already in the database' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_c.reload.iid).to eq(3)
      end

      it 'backfills iids across multiple projects' do
        project_a = setup
        project_b = setup
        issue_a = issues.create!(project_id: project_a.id)
        issue_b = issues.create!(project_id: project_b.id)
        issue_c = self.class::Issue.create!(project_id: project_a.id)
        issue_d = self.class::Issue.create!(project_id: project_b.id)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(1)
        expect(issue_c.reload.iid).to eq(2)
        expect(issue_d.reload.iid).to eq(2)
      end

      it 'generates iids properly for models created after the migration' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)

        model.backfill_iids('issues')

        issue_d = self.class::Issue.create!(project_id: project.id)
        issue_e = self.class::Issue.create!(project_id: project.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_c.reload.iid).to eq(3)
        expect(issue_d.iid).to eq(4)
        expect(issue_e.iid).to eq(5)
      end

      it 'backfills iids and properly generates iids for new models across multiple projects' do
        project_a = setup
        project_b = setup
        issue_a = issues.create!(project_id: project_a.id)
        issue_b = issues.create!(project_id: project_b.id)
        issue_c = self.class::Issue.create!(project_id: project_a.id)
        issue_d = self.class::Issue.create!(project_id: project_b.id)

        model.backfill_iids('issues')

        issue_e = self.class::Issue.create!(project_id: project_a.id)
        issue_f = self.class::Issue.create!(project_id: project_b.id)
        issue_g = self.class::Issue.create!(project_id: project_a.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(1)
        expect(issue_c.reload.iid).to eq(2)
        expect(issue_d.reload.iid).to eq(2)
        expect(issue_e.iid).to eq(3)
        expect(issue_f.iid).to eq(3)
        expect(issue_g.iid).to eq(4)
      end
    end

    context 'when the new code creates a model and then old code creates a model post deploy but before the migration runs' do
      it 'backfills iids' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = self.class::Issue.create!(project_id: project.id)
        issue_c = issues.create!(project_id: project.id)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_c.reload.iid).to eq(3)
      end

      it 'generates an iid for a new model after the migration' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_d = issues.create!(project_id: project.id)

        model.backfill_iids('issues')

        issue_e = self.class::Issue.create!(project_id: project.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_c.reload.iid).to eq(3)
        expect(issue_d.reload.iid).to eq(4)
        expect(issue_e.iid).to eq(5)
      end
    end

    context 'when the new code and old code alternate creating models post deploy but before the migration runs' do
      it 'backfills iids' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = self.class::Issue.create!(project_id: project.id)
        issue_c = issues.create!(project_id: project.id)
        issue_d = self.class::Issue.create!(project_id: project.id)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_c.reload.iid).to eq(3)
        expect(issue_d.reload.iid).to eq(4)
      end

      it 'generates an iid for a new model after the migration' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_d = issues.create!(project_id: project.id)
        issue_e = self.class::Issue.create!(project_id: project.id)

        model.backfill_iids('issues')

        issue_f = self.class::Issue.create!(project_id: project.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_c.reload.iid).to eq(3)
        expect(issue_d.reload.iid).to eq(4)
        expect(issue_e.reload.iid).to eq(5)
        expect(issue_f.iid).to eq(6)
      end
    end

    context 'when the new code creates and deletes a model post deploy but before the migration runs' do
      it 'backfills iids for rows already in the database' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_c.delete

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
      end

      it 'successfully creates a new model after the migration' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_c.delete

        model.backfill_iids('issues')

        issue_d = self.class::Issue.create!(project_id: project.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_d.iid).to eq(3)
      end
    end

    context 'when the new code creates and deletes a model and old code creates a model post deploy but before the migration runs' do
      it 'backfills iids' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_c.delete
        issue_d = issues.create!(project_id: project.id)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_d.reload.iid).to eq(3)
      end

      it 'successfully creates a new model after the migration' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_c.delete
        issue_d = issues.create!(project_id: project.id)

        model.backfill_iids('issues')

        issue_e = self.class::Issue.create!(project_id: project.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_d.reload.iid).to eq(3)
        expect(issue_e.iid).to eq(4)
      end
    end

    context 'when the new code creates and deletes a model and then creates another model post deploy but before the migration runs' do
      it 'backfills iids' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_c.delete
        issue_d = self.class::Issue.create!(project_id: project.id)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_d.reload.iid).to eq(3)
      end

      it 'successfully generates an iid for a new model after the migration' do
        project = setup
        issue_a = issues.create!(project_id: project.id)
        issue_b = issues.create!(project_id: project.id)
        issue_c = self.class::Issue.create!(project_id: project.id)
        issue_c.delete
        issue_d = self.class::Issue.create!(project_id: project.id)

        model.backfill_iids('issues')

        issue_e = self.class::Issue.create!(project_id: project.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
        expect(issue_d.reload.iid).to eq(3)
        expect(issue_e.iid).to eq(4)
      end
    end

    context 'when the first model is created for a project after the migration' do
      it 'generates an iid' do
        project_a = setup
        project_b = setup
        issue_a = issues.create!(project_id: project_a.id)

        model.backfill_iids('issues')

        issue_b = self.class::Issue.create!(project_id: project_b.id)

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(1)
      end
    end

    context 'when a row already has an iid set in the database' do
      it 'backfills iids' do
        project = setup
        issue_a = issues.create!(project_id: project.id, iid: 1)
        issue_b = issues.create!(project_id: project.id, iid: 2)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(2)
      end

      it 'backfills for multiple projects' do
        project_a = setup
        project_b = setup
        issue_a = issues.create!(project_id: project_a.id, iid: 1)
        issue_b = issues.create!(project_id: project_b.id, iid: 1)
        issue_c = issues.create!(project_id: project_a.id, iid: 2)

        model.backfill_iids('issues')

        expect(issue_a.reload.iid).to eq(1)
        expect(issue_b.reload.iid).to eq(1)
        expect(issue_c.reload.iid).to eq(2)
      end
    end
  end
end