# frozen_string_literal: true

require 'spec_helper'

describe Repository do
  include RepoHelpers
  include GitHelpers

  TestBlob = Struct.new(:path)

  let(:project) { create(:project, :repository) }
  let(:repository) { project.repository }
  let(:broken_repository) { create(:project, :broken_storage).repository }
  let(:user) { create(:user) }
  let(:git_user) { Gitlab::Git::User.from_gitlab(user) }

  let(:message) { 'Test message' }

  let(:merge_commit) do
    merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project)

    merge_commit_id = repository.merge(user,
      merge_request.diff_head_sha,
      merge_request,
      message)

    repository.commit(merge_commit_id)
  end

  let(:author_email) { 'user@example.org' }
  let(:author_name) { 'John Doe' }
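
  # Asserts that the yielded block fails with one of the known storage-layer
  # errors (Gitlab::Git::CommandError or GRPC::Unavailable).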
  def expect_to_raise_storage_error
    expect { yield }.to raise_error do |exception|
      storage_exceptions = [Gitlab::Git::CommandError, GRPC::Unavailable]
      known_exception = storage_exceptions.select { |e| exception.is_a?(e) }

      expect(known_exception).not_to be_empty
    end
  end

  describe '#branch_names_contains' do
    let_it_be(:project) { create(:project, :repository) }
    let(:repository) { project.repository }

    subject { repository.branch_names_contains(sample_commit.id) }

    it { is_expected.to include('master') }
    it { is_expected.not_to include('feature') }
    it { is_expected.not_to include('fix') }

    describe 'when storage is broken', :broken_storage do
      it 'raises a storage error' do
        expect_to_raise_storage_error do
          broken_repository.branch_names_contains(sample_commit.id)
        end
      end
    end
  end

  describe '#tag_names_contains' do
    subject { repository.tag_names_contains(sample_commit.id) }

    it { is_expected.to include('v1.1.0') }
    it { is_expected.not_to include('v1.0.0') }
  end

  describe 'tags_sorted_by' do
    let(:tags_to_compare) { %w[v1.0.0 v1.1.0] }

    context 'name_desc' do
      subject { repository.tags_sorted_by('name_desc').map(&:name) & tags_to_compare }

      it { is_expected.to eq(['v1.1.0', 'v1.0.0']) }
    end

    context 'name_asc' do
      subject { repository.tags_sorted_by('name_asc').map(&:name) & tags_to_compare }

      it { is_expected.to eq(['v1.0.0', 'v1.1.0']) }
    end

    context 'updated' do
      let(:tag_a) { repository.find_tag('v1.0.0') }
      let(:tag_b) { repository.find_tag('v1.1.0') }

      context 'desc' do
        subject { repository.tags_sorted_by('updated_desc').map(&:name) }

        before do
          double_first = double(committed_date: Time.now)
          double_last = double(committed_date: Time.now - 1.second)

          allow(tag_a).to receive(:dereferenced_target).and_return(double_first)
          allow(tag_b).to receive(:dereferenced_target).and_return(double_last)
          allow(repository).to receive(:tags).and_return([tag_a, tag_b])
        end

        it { is_expected.to eq(['v1.0.0', 'v1.1.0']) }
      end

      context 'asc' do
        subject { repository.tags_sorted_by('updated_asc').map(&:name) }

        before do
          double_first = double(committed_date: Time.now - 1.second)
          double_last = double(committed_date: Time.now)

          allow(tag_a).to receive(:dereferenced_target).and_return(double_last)
          allow(tag_b).to receive(:dereferenced_target).and_return(double_first)
          allow(repository).to receive(:tags).and_return([tag_a, tag_b])
        end

        it { is_expected.to eq(['v1.1.0', 'v1.0.0']) }
      end

      context 'annotated tag pointing to a blob' do
        let(:annotated_tag_name) { 'annotated-tag' }

        subject { repository.tags_sorted_by('updated_asc').map(&:name) & (tags_to_compare + [annotated_tag_name]) }

        before do
          options = { message: 'test tag message\n',
                      tagger: { name: 'John Smith', email: 'john@gmail.com' } }

          rugged_repo(repository).tags.create(annotated_tag_name, 'a48e4fc218069f68ef2e769dd8dfea3991362175', options)

          double_first = double(committed_date: Time.now - 1.second)
          double_last = double(committed_date: Time.now)

          allow(tag_a).to receive(:dereferenced_target).and_return(double_last)
          allow(tag_b).to receive(:dereferenced_target).and_return(double_first)
        end

        it { is_expected.to eq(['v1.1.0', 'v1.0.0', annotated_tag_name]) }

        after do
          rugged_repo(repository).tags.delete(annotated_tag_name)
        end
      end
    end
  end

  describe '#ref_name_for_sha' do
    it 'returns the ref' do
      allow(repository.raw_repository).to receive(:ref_name_for_sha)
        .and_return('refs/environments/production/77')

      expect(repository.ref_name_for_sha('bla', '0' * 40)).to eq 'refs/environments/production/77'
    end
  end

  describe '#ref_exists?' do
    context 'when ref exists' do
      it 'returns true' do
        expect(repository.ref_exists?('refs/heads/master')).to be true
      end
    end

    context 'when ref does not exist' do
      it 'returns false' do
        expect(repository.ref_exists?('refs/heads/non-existent')).to be false
      end
    end

    context 'when ref format is incorrect' do
      it 'returns false' do
        expect(repository.ref_exists?('refs/heads/invalid:master')).to be false
      end
    end
  end

  describe '#list_last_commits_for_tree' do
    let(:path_to_commit) do
      {
        "encoding" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
        "files" => "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
        ".gitignore" => "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
        ".gitmodules" => "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
        "CHANGELOG" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
        "CONTRIBUTING.md" => "6d394385cf567f80a8fd85055db1ab4c5295806f",
        "Gemfile.zip" => "ae73cb07c9eeaf35924a10f713b364d32b2dd34f",
        "LICENSE" => "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863",
        "MAINTENANCE.md" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
        "PROCESS.md" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
        "README.md" => "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863",
        "VERSION" => "913c66a37b4a45b9769037c55c2d238bd0942d2e",
        "gitlab-shell" => "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
        "six" => "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
      }
    end

    subject { repository.list_last_commits_for_tree(sample_commit.id, '.').id }

    it 'returns the last commits for every entry in the current path' do
      result = repository.list_last_commits_for_tree(sample_commit.id, '.')

      result.each do |key, value|
        result[key] = value.id
      end

      expect(result).to include(path_to_commit)
    end

    it 'returns the last commits for every entry in the current path starting from the offset' do
      result = repository.list_last_commits_for_tree(sample_commit.id, '.', offset: path_to_commit.size - 1)

      expect(result.size).to eq(1)
    end

    it 'returns a limited number of last commits for every entry in the current path starting from the offset' do
      result = repository.list_last_commits_for_tree(sample_commit.id, '.', limit: 1)

      expect(result.size).to eq(1)
    end

    it 'returns an empty hash when offset is out of bounds' do
      result = repository.list_last_commits_for_tree(sample_commit.id, '.', offset: path_to_commit.size)

      expect(result.size).to eq(0)
    end

    context 'with a commit with invalid UTF-8 path' do
      def create_commit_with_invalid_utf8_path
        rugged = rugged_repo(repository)
        blob_id = Rugged::Blob.from_buffer(rugged, "some contents")
        tree_builder = Rugged::Tree::Builder.new(rugged)
        tree_builder.insert({ oid: blob_id, name: "hello\x80world", filemode: 0100644 })
        tree_id = tree_builder.write
        user = { email: "jcai@gitlab.com", time: Time.now, name: "John Cai" }

        Rugged::Commit.create(rugged, message: 'some commit message', parents: [rugged.head.target.oid], tree: tree_id, committer: user, author: user)
      end

      it 'does not raise an error' do
        commit = create_commit_with_invalid_utf8_path

        expect { repository.list_last_commits_for_tree(commit, '.', offset: 0) }.not_to raise_error
      end
    end
  end

  describe '#last_commit_for_path' do
    subject { repository.last_commit_for_path(sample_commit.id, '.gitignore').id }

    it { is_expected.to eq('c1acaa58bbcbc3eafe538cb8274ba387047b69f8') }

    describe 'when storage is broken', :broken_storage do
      it 'raises a storage error' do
        expect_to_raise_storage_error do
          broken_repository.last_commit_id_for_path(sample_commit.id, '.gitignore')
        end
      end
    end
  end

  describe '#last_commit_id_for_path' do
    subject { repository.last_commit_id_for_path(sample_commit.id, '.gitignore') }

    it "returns last commit id for a given path" do
      is_expected.to eq('c1acaa58bbcbc3eafe538cb8274ba387047b69f8')
    end

    it "caches last commit id for a given path" do
      cache = repository.send(:cache)
      key = "last_commit_id_for_path:#{sample_commit.id}:#{Digest::SHA1.hexdigest('.gitignore')}"

      expect(cache).to receive(:fetch).with(key).and_return('c1acaa5')
      is_expected.to eq('c1acaa5')
    end

    describe 'when storage is broken', :broken_storage do
      it 'raises a storage error' do
        expect_to_raise_storage_error do
          broken_repository.last_commit_for_path(sample_commit.id, '.gitignore').id
        end
      end
    end
  end

  describe '#commits' do
    context 'when neither the all flag nor a ref are specified' do
      it 'returns every commit from default branch' do
        expect(repository.commits(nil, limit: 60).size).to eq(37)
      end
    end

    context 'when ref is passed' do
      it 'returns every commit from the specified ref' do
        expect(repository.commits('master', limit: 60).size).to eq(37)
      end

      context 'when all' do
        it 'returns every commit from the repository' do
          expect(repository.commits('master', limit: 60, all: true).size).to eq(60)
        end
      end

      context 'with path' do
        it 'sets follow when it is a single path' do
          expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice

          repository.commits('master', limit: 1, path: 'README.md')
          repository.commits('master', limit: 1, path: ['README.md'])
        end

        it 'does not set follow when it is multiple paths' do
          expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original

          repository.commits('master', limit: 1, path: ['README.md', 'CHANGELOG'])
        end
      end

      context 'without path' do
        it 'does not set follow' do
          expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original

          repository.commits('master', limit: 1)
        end
      end
    end

    context "when 'author' is set" do
      it "returns commits from that author" do
        commit = repository.commits(nil, limit: 1).first
        known_author = "#{commit.author_name} <#{commit.author_email}>"

        expect(repository.commits(nil, author: known_author, limit: 1)).not_to be_empty
      end

      it "doesn't return commits from an unknown author" do
        unknown_author = "The Man With No Name <zapp@brannigan.com>"

        expect(repository.commits(nil, author: unknown_author, limit: 1)).to be_empty
      end
    end

    context "when 'all' flag is set" do
      it 'returns every commit from the repository' do
        expect(repository.commits(nil, all: true, limit: 60).size).to eq(60)
      end
    end

    context "when 'order' flag is set" do
      it 'passes order option to perform the query' do
        expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(order: 'topo')).and_call_original

        repository.commits('master', limit: 1, order: 'topo')
      end
    end
  end

  describe '#new_commits' do
    let_it_be(:project) { create(:project, :repository) }
    let(:repository) { project.repository }

    subject { repository.new_commits(rev) }

    context 'when there are no new commits' do
      let(:rev) { repository.commit.id }

      it 'returns an empty array' do
        expect(subject).to eq([])
      end
    end

    context 'when new commits are found' do
      let(:branch) { 'orphaned-branch' }
      let!(:rev) { repository.commit(branch).id }

      it 'returns the commits' do
        repository.delete_branch(branch)

        expect(subject).not_to be_empty
        expect(subject).to all( be_a(::Commit) )
        expect(subject.size).to eq(1)
      end
    end
  end

  describe '#commits_by' do
    let_it_be(:project) { create(:project, :repository) }
    let(:oids) { TestEnv::BRANCH_SHA.values }

    subject { project.repository.commits_by(oids: oids) }

    it 'finds each commit' do
      expect(subject).not_to include(nil)
      expect(subject.size).to eq(oids.size)
    end

    it 'returns only Commit instances' do
      expect(subject).to all( be_a(Commit) )
    end

    context 'when some commits are not found' do
      let(:oids) do
        ['deadbeef'] + TestEnv::BRANCH_SHA.each_value.first(10)
      end

      it 'returns only found commits' do
        expect(subject).not_to include(nil)
        expect(subject.size).to eq(10)
      end
    end

    context 'when no oids are passed' do
      let(:oids) { [] }

      it 'does not call #batch_by_oid' do
        expect(Gitlab::Git::Commit).not_to receive(:batch_by_oid)

        subject
      end
    end
  end

  describe '#find_commits_by_message' do
    it 'returns commits with messages containing a given string' do
      commit_ids = repository.find_commits_by_message('submodule').map(&:id)

      expect(commit_ids).to include(
        '5937ac0a7beb003549fc5fd26fc247adbce4a52e',
        '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9',
        'cfe32cf61b73a0d5e9f13e774abde7ff789b1660'
      )
      expect(commit_ids).not_to include('913c66a37b4a45b9769037c55c2d238bd0942d2e')
    end

    it 'is case insensitive' do
      commit_ids = repository.find_commits_by_message('SUBMODULE').map(&:id)

      expect(commit_ids).to include('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
    end

    describe 'when storage is broken', :broken_storage do
      it 'raises a storage error' do
        expect_to_raise_storage_error { broken_repository.find_commits_by_message('s') }
      end
    end
  end

  describe '#blob_at' do
    context 'blank sha' do
      subject { repository.blob_at(Gitlab::Git::BLANK_SHA, '.gitignore') }

      it { is_expected.to be_nil }
    end

    context 'regular blob' do
      subject { repository.blob_at(repository.head_commit.sha, '.gitignore') }

      it { is_expected.to be_an_instance_of(::Blob) }
    end

    context 'readme blob on HEAD' do
      subject { repository.blob_at(repository.head_commit.sha, 'README.md') }

      it { is_expected.to be_an_instance_of(::ReadmeBlob) }
    end

    context 'readme blob not on HEAD' do
      subject { repository.blob_at(repository.find_branch('feature').target, 'README.md') }

      it { is_expected.to be_an_instance_of(::Blob) }
    end
  end

  describe '#merged_to_root_ref?' do
    context 'merged branch without ff' do
      subject { repository.merged_to_root_ref?('branch-merged') }

      it { is_expected.to be_truthy }
    end

    # If the HEAD was ff then it will be false
    context 'merged with ff' do
      subject { repository.merged_to_root_ref?('improve/awesome') }

      it { is_expected.to be_truthy }
    end

    context 'not merged branch' do
      subject { repository.merged_to_root_ref?('not-merged-branch') }

      it { is_expected.to be_falsey }
    end

    context 'default branch' do
      subject { repository.merged_to_root_ref?('master') }

      it { is_expected.to be_falsey }
    end

    context 'non merged branch' do
      subject { repository.merged_to_root_ref?('fix') }

      it { is_expected.to be_falsey }
    end

    context 'non existent branch' do
      subject { repository.merged_to_root_ref?('non_existent_branch') }

      it { is_expected.to be_nil }
    end
  end

  describe "#root_ref_sha" do
    let(:commit) { double("commit", sha: "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3") }

    subject { repository.root_ref_sha }

    before do
      allow(repository).to receive(:commit).with(repository.root_ref) { commit }
    end

    it { is_expected.to eq(commit.sha) }
  end
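
  # These examples exercise the Redis hash cache behind #merged_branch_names:
  # each branch name maps to a boolean flag under the :merged_branch_names key,
  # and the raw repository is only consulted for branches missing from the cache.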
describe "#merged_branch_names", :clean_gitlab_redis_cache do
|
|
|
|
subject { repository.merged_branch_names(branch_names) }
|
|
|
|
|
|
|
|
let(:branch_names) { %w(test beep boop definitely_merged) }
|
|
|
|
let(:already_merged) { Set.new(["definitely_merged"]) }
|
|
|
|
|
|
|
|
let(:write_hash) do
|
|
|
|
{
|
|
|
|
"test" => Gitlab::Redis::Boolean.new(false).to_s,
|
|
|
|
"beep" => Gitlab::Redis::Boolean.new(false).to_s,
|
|
|
|
"boop" => Gitlab::Redis::Boolean.new(false).to_s,
|
|
|
|
"definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:read_hash) do
|
|
|
|
{
|
|
|
|
"test" => Gitlab::Redis::Boolean.new(false).to_s,
|
|
|
|
"beep" => Gitlab::Redis::Boolean.new(false).to_s,
|
|
|
|
"boop" => Gitlab::Redis::Boolean.new(false).to_s,
|
|
|
|
"definitely_merged" => Gitlab::Redis::Boolean.new(true).to_s
|
|
|
|
}
|
|
|
|
end
|
|
|
|
|
|
|
|
let(:cache) { repository.send(:redis_hash_cache) }
|
|
|
|
let(:cache_key) { cache.cache_key(:merged_branch_names) }
|
|
|
|
|
|
|
|
before do
|
|
|
|
allow(repository.raw_repository).to receive(:merged_branch_names).with(branch_names).and_return(already_merged)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to eq(already_merged) }
|
|
|
|
it { is_expected.to be_a(Set) }
|
|
|
|
|
|
|
|
describe "cache expiry" do
|
|
|
|
before do
|
|
|
|
allow(cache).to receive(:delete).with(anything)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "is expired when the branches caches are expired" do
|
|
|
|
expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
|
|
|
|
|
|
|
|
repository.send(:expire_branches_cache)
|
|
|
|
end
|
|
|
|
|
|
|
|
it "is expired when the repository caches are expired" do
|
|
|
|
expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
|
|
|
|
|
|
|
|
repository.send(:expire_all_method_caches)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "cache is empty" do
|
|
|
|
before do
|
|
|
|
cache.delete(:merged_branch_names)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to eq(already_merged) }
|
|
|
|
|
|
|
|
describe "cache values" do
|
|
|
|
it "writes the values to redis" do
|
|
|
|
expect(cache).to receive(:write).with(:merged_branch_names, write_hash)
|
|
|
|
|
|
|
|
subject
|
|
|
|
end
|
|
|
|
|
|
|
|
it "matches the supplied hash" do
|
|
|
|
subject
|
|
|
|
|
|
|
|
expect(cache.read_members(:merged_branch_names, branch_names)).to eq(read_hash)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "cache is not empty" do
|
|
|
|
before do
|
|
|
|
cache.write(:merged_branch_names, write_hash)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to eq(already_merged) }
|
|
|
|
|
|
|
|
it "doesn't fetch from the disk" do
|
|
|
|
expect(repository.raw_repository).not_to receive(:merged_branch_names)
|
|
|
|
|
|
|
|
subject
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "cache is partially complete" do
|
|
|
|
before do
|
|
|
|
allow(repository.raw_repository).to receive(:merged_branch_names).with(["boop"]).and_return([])
|
|
|
|
hash = write_hash.except("boop")
|
|
|
|
cache.write(:merged_branch_names, hash)
|
|
|
|
end
|
|
|
|
|
|
|
|
it { is_expected.to eq(already_merged) }
|
|
|
|
|
|
|
|
it "does fetch from the disk" do
|
|
|
|
expect(repository.raw_repository).to receive(:merged_branch_names).with(["boop"])
|
|
|
|
|
|
|
|
subject
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context "requested branches array is empty" do
|
|
|
|
let(:branch_names) { [] }
|
|
|
|
|
|
|
|
it { is_expected.to eq(already_merged) }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|

  describe '#can_be_merged?' do
    context 'mergeable branches' do
      subject { repository.can_be_merged?('0b4bc9a49b562e85de7cc9e834518ea6828729b9', 'master') }

      it { is_expected.to be_truthy }
    end

    context 'non-mergeable branches without conflict sides missing' do
      subject { repository.can_be_merged?('bb5206fee213d983da88c47f9cf4cc6caf9c66dc', 'feature') }

      it { is_expected.to be_falsey }
    end

    context 'non-mergeable branches with conflict sides missing' do
      subject { repository.can_be_merged?('conflict-missing-side', 'conflict-start') }

      it { is_expected.to be_falsey }
    end

    context 'submodule changes that confuse rugged' do
      subject { repository.can_be_merged?('update-gitlab-shell-v-6-0-1', 'update-gitlab-shell-v-6-0-3') }

      it { is_expected.to be_falsey }
    end
  end

  describe '#commit' do
    context 'when ref exists' do
      it 'returns commit object' do
        expect(repository.commit('master'))
          .to be_an_instance_of Commit
      end
    end

    context 'when ref does not exist' do
      it 'returns nil' do
        expect(repository.commit('non-existent-ref')).to be_nil
      end
    end

    context 'when ref is not specified' do
      it 'is using a root ref' do
        expect(repository).to receive(:find_commit).with('master')

        repository.commit
      end
    end

    context 'when ref is not valid' do
      context 'when preceding tree element exists' do
        it 'returns nil' do
          expect(repository.commit('master:ref')).to be_nil
        end
      end

      context 'when preceding tree element does not exist' do
        it 'returns nil' do
          expect(repository.commit('non-existent:ref')).to be_nil
        end
      end
    end
  end
describe "#create_dir" do
|
2016-09-29 09:46:39 +05:30
|
|
|
it "commits a change that creates a new directory" do
|
|
|
|
expect do
|
2017-08-17 22:00:37 +05:30
|
|
|
repository.create_dir(user, 'newdir',
|
|
|
|
message: 'Create newdir', branch_name: 'master')
|
2018-03-17 18:26:18 +05:30
|
|
|
end.to change { repository.count_commits(ref: 'master') }.by(1)
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
newdir = repository.tree('master', 'newdir')
|
|
|
|
expect(newdir.path).to eq('newdir')
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context "when committing to another project" do
|
2017-09-10 17:25:29 +05:30
|
|
|
let(:forked_project) { create(:project, :repository) }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
it "creates a fork and commit to the forked project" do
|
|
|
|
expect do
|
|
|
|
repository.create_dir(user, 'newdir',
|
|
|
|
message: 'Create newdir', branch_name: 'patch',
|
|
|
|
start_branch_name: 'master', start_project: forked_project)
|
2018-03-17 18:26:18 +05:30
|
|
|
end.to change { repository.count_commits(ref: 'master') }.by(0)
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
expect(repository.branch_exists?('patch')).to be_truthy
|
|
|
|
expect(forked_project.repository.branch_exists?('patch')).to be_falsy
|
|
|
|
|
|
|
|
newdir = repository.tree('patch', 'newdir')
|
|
|
|
expect(newdir.path).to eq('newdir')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-09-29 09:46:39 +05:30
|
|
|
context "when an author is specified" do
|
|
|
|
it "uses the given email/name to set the commit's author" do
|
|
|
|
expect do
|
2017-08-17 22:00:37 +05:30
|
|
|
repository.create_dir(user, 'newdir',
|
|
|
|
message: 'Add newdir',
|
|
|
|
branch_name: 'master',
|
|
|
|
author_email: author_email, author_name: author_name)
|
2018-03-17 18:26:18 +05:30
|
|
|
end.to change { repository.count_commits(ref: 'master') }.by(1)
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
last_commit = repository.commit
|
|
|
|
|
|
|
|
expect(last_commit.author_email).to eq(author_email)
|
|
|
|
expect(last_commit.author_name).to eq(author_name)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|

  describe "#create_file" do
    it 'commits new file successfully' do
      expect do
        repository.create_file(user, 'NEWCHANGELOG', 'Changelog!',
          message: 'Create changelog',
          branch_name: 'master')
      end.to change { repository.count_commits(ref: 'master') }.by(1)

      blob = repository.blob_at('master', 'NEWCHANGELOG')

      expect(blob.data).to eq('Changelog!')
    end

    it 'creates new file and dir when file_path has a forward slash' do
      expect do
        repository.create_file(user, 'new_dir/new_file.txt', 'File!',
          message: 'Create new_file with new_dir',
          branch_name: 'master')
      end.to change { repository.count_commits(ref: 'master') }.by(1)

      expect(repository.tree('master', 'new_dir').path).to eq('new_dir')
      expect(repository.blob_at('master', 'new_dir/new_file.txt').data).to eq('File!')
    end

    it 'respects the autocrlf setting' do
      repository.create_file(user, 'hello.txt', "Hello,\r\nWorld",
        message: 'Add hello world',
        branch_name: 'master')

      blob = repository.blob_at('master', 'hello.txt')

      expect(blob.data).to eq("Hello,\nWorld")
    end

    context "when an author is specified" do
      it "uses the given email/name to set the commit's author" do
        expect do
          repository.create_file(user, 'NEWREADME', 'README!',
            message: 'Add README',
            branch_name: 'master',
            author_email: author_email,
            author_name: author_name)
        end.to change { repository.count_commits(ref: 'master') }.by(1)

        last_commit = repository.commit

        expect(last_commit.author_email).to eq(author_email)
        expect(last_commit.author_name).to eq(author_name)
      end
    end
  end

  describe "#update_file" do
    it 'updates file successfully' do
      expect do
        repository.update_file(user, 'CHANGELOG', 'Changelog!',
          message: 'Update changelog',
          branch_name: 'master')
      end.to change { repository.count_commits(ref: 'master') }.by(1)

      blob = repository.blob_at('master', 'CHANGELOG')

      expect(blob.data).to eq('Changelog!')
    end

    it 'updates filename successfully' do
      expect do
        repository.update_file(user, 'NEWLICENSE', 'Copyright!',
          branch_name: 'master',
          previous_path: 'LICENSE',
          message: 'Changes filename')
      end.to change { repository.count_commits(ref: 'master') }.by(1)

      files = repository.ls_files('master')

      expect(files).not_to include('LICENSE')
      expect(files).to include('NEWLICENSE')
    end

    context "when an author is specified" do
      it "uses the given email/name to set the commit's author" do
        expect do
          repository.update_file(user, 'README', 'Updated README!',
            branch_name: 'master',
            previous_path: 'README',
            message: 'Update README',
            author_email: author_email,
            author_name: author_name)
        end.to change { repository.count_commits(ref: 'master') }.by(1)

        last_commit = repository.commit

        expect(last_commit.author_email).to eq(author_email)
        expect(last_commit.author_name).to eq(author_name)
      end
    end
  end

  describe "#delete_file" do
    it 'removes file successfully' do
      expect do
        repository.delete_file(user, 'README',
          message: 'Remove README', branch_name: 'master')
      end.to change { repository.count_commits(ref: 'master') }.by(1)

      expect(repository.blob_at('master', 'README')).to be_nil
    end

    context "when an author is specified" do
      it "uses the given email/name to set the commit's author" do
        expect do
          repository.delete_file(user, 'README',
            message: 'Remove README', branch_name: 'master',
            author_email: author_email, author_name: author_name)
        end.to change { repository.count_commits(ref: 'master') }.by(1)

        last_commit = repository.commit

        expect(last_commit.author_email).to eq(author_email)
        expect(last_commit.author_name).to eq(author_name)
      end
    end
  end

  describe "search_files_by_content" do
    let(:results) { repository.search_files_by_content('feature', 'master') }

    subject { results }

    it { is_expected.to be_an Array }

    it 'regex-escapes the query string' do
      results = repository.search_files_by_content("test\\", 'master')

      expect(results.first).not_to start_with('fatal:')
    end

    it 'properly handles an unmatched parenthesis' do
      results = repository.search_files_by_content("test(", 'master')

      expect(results.first).not_to start_with('fatal:')
    end

    it 'properly handles when query is not present' do
      results = repository.search_files_by_content('', 'master')

      expect(results).to match_array([])
    end

    it 'properly handles query when repo is empty' do
      repository = create(:project, :empty_repo).repository
      results = repository.search_files_by_content('test', 'master')

      expect(results).to match_array([])
    end

    describe 'when storage is broken', :broken_storage do
      it 'raises a storage error' do
        expect_to_raise_storage_error do
          broken_repository.search_files_by_content('feature', 'master')
        end
      end
    end

    describe 'result' do
      subject { results.first }

      it { is_expected.to be_an String }
      it { expect(subject.lines[2]).to eq("master:CHANGELOG\x00190\x00 - Feature: Replace teams with group membership\n") }
    end
  end

  describe "search_files_by_name" do
    let(:results) { repository.search_files_by_name('files', 'master') }

    it 'returns result' do
      expect(results.first).to eq('files/html/500.html')
    end

    it 'ignores leading slashes' do
      results = repository.search_files_by_name('/files', 'master')

      expect(results.first).to eq('files/html/500.html')
    end

    it 'properly handles when query is only slashes' do
      results = repository.search_files_by_name('//', 'master')

      expect(results).to match_array([])
    end

    it 'properly handles when query is not present' do
      results = repository.search_files_by_name('', 'master')

      expect(results).to match_array([])
    end

    it 'properly handles query when repo is empty' do
      repository = create(:project, :empty_repo).repository

      results = repository.search_files_by_name('test', 'master')

      expect(results).to match_array([])
    end

    describe 'when storage is broken', :broken_storage do
      it 'raises a storage error' do
        expect_to_raise_storage_error { broken_repository.search_files_by_name('files', 'master') }
      end
    end
  end

  describe '#async_remove_remote' do
    before do
      masterrev = repository.find_branch('master').dereferenced_target
      create_remote_branch('joe', 'remote_branch', masterrev)
    end

    context 'when worker is scheduled successfully' do
      before do
        masterrev = repository.find_branch('master').dereferenced_target
        create_remote_branch('remote_name', 'remote_branch', masterrev)

        allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return('1234')
      end

      it 'returns job_id' do
        expect(repository.async_remove_remote('joe')).to eq('1234')
      end
    end

    context 'when worker does not schedule successfully' do
      before do
        allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return(nil)
      end

      it 'returns nil' do
        expect(Rails.logger).to receive(:info).with("Remove remote job failed to create for #{project.id} with remote name joe.")

        expect(repository.async_remove_remote('joe')).to be_nil
      end
    end
  end

  describe '#fetch_ref' do
    let(:broken_repository) { create(:project, :broken_storage).repository }

    describe 'when storage is broken', :broken_storage do
      it 'raises a storage error' do
        expect_to_raise_storage_error do
          broken_repository.fetch_ref(broken_repository, source_ref: '1', target_ref: '2')
        end
      end
    end
  end

  describe '#get_raw_changes' do
    context 'with non-UTF8 bytes in paths' do
      let(:old_rev) { 'd0888d297eadcd7a345427915c309413b1231e65' }
      let(:new_rev) { '19950f03c765f7ac8723a73a0599764095f52fc0' }
      let(:changes) { repository.raw_changes_between(old_rev, new_rev) }

      it 'returns the changes' do
        expect { changes }.not_to raise_error
        expect(changes.first.new_path.bytes).to eq("hello\x80world".bytes)
      end
    end
  end

  describe '#create_ref' do
    it 'redirects the call to write_ref' do
      ref, ref_path = '1', '2'

      expect(repository.raw_repository).to receive(:write_ref).with(ref_path, ref)

      repository.create_ref(ref, ref_path)
    end
  end

  describe "#changelog", :use_clean_rails_memory_store_caching do
    it 'accepts changelog' do
      expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')])

      expect(repository.changelog.path).to eq('changelog')
    end

    it 'accepts news instead of changelog' do
      expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('news')])

      expect(repository.changelog.path).to eq('news')
    end

    it 'accepts history instead of changelog' do
      expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('history')])

      expect(repository.changelog.path).to eq('history')
    end

    it 'accepts changes instead of changelog' do
      expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changes')])

      expect(repository.changelog.path).to eq('changes')
    end

    it 'is case-insensitive' do
      expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('CHANGELOG')])

      expect(repository.changelog.path).to eq('CHANGELOG')
    end
  end

  describe "#license_blob", :use_clean_rails_memory_store_caching do
    before do
      repository.delete_file(
        user, 'LICENSE', message: 'Remove LICENSE', branch_name: 'master')
    end

    it 'handles when HEAD points to non-existent ref' do
      repository.create_file(
        user, 'LICENSE', 'Copyright!',
        message: 'Add LICENSE', branch_name: 'master')

      allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)

      expect(repository.license_blob).to be_nil
    end

    it 'looks in the root_ref only' do
      repository.delete_file(user, 'LICENSE',
        message: 'Remove LICENSE', branch_name: 'markdown')
      repository.create_file(user, 'LICENSE',
        Licensee::License.new('mit').content,
        message: 'Add LICENSE', branch_name: 'markdown')

      expect(repository.license_blob).to be_nil
    end

    it 'detects license file with no recognizable open-source license content' do
      repository.create_file(user, 'LICENSE', 'Copyright!',
        message: 'Add LICENSE', branch_name: 'master')

      expect(repository.license_blob.path).to eq('LICENSE')
    end

    %w[LICENSE LICENCE LiCensE LICENSE.md LICENSE.foo COPYING COPYING.md].each do |filename|
      it "detects '#{filename}'" do
        repository.create_file(user, filename,
          Licensee::License.new('mit').content,
          message: "Add #{filename}", branch_name: 'master')

        expect(repository.license_blob.name).to eq(filename)
      end
    end
  end

  describe '#license_key', :use_clean_rails_memory_store_caching do
    before do
      repository.delete_file(user, 'LICENSE',
        message: 'Remove LICENSE', branch_name: 'master')
    end

    it 'returns nil when no license is detected' do
      expect(repository.license_key).to be_nil
    end

    it 'returns nil when the repository does not exist' do
      expect(repository).to receive(:exists?).and_return(false)

      expect(repository.license_key).to be_nil
    end

    it 'returns nil when the content is not recognizable' do
      repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
        message: 'Add LICENSE', branch_name: 'master')

      expect(repository.license_key).to be_nil
    end

    it 'returns nil when the commit SHA does not exist' do
      allow(repository.head_commit).to receive(:sha).and_return('1' * 40)

      expect(repository.license_key).to be_nil
    end

    it 'returns nil when master does not exist' do
      repository.rm_branch(user, 'master')

      expect(repository.license_key).to be_nil
    end

    it 'returns the license key' do
      repository.create_file(user, 'LICENSE',
        Licensee::License.new('mit').content,
        message: 'Add LICENSE', branch_name: 'master')

      expect(repository.license_key).to eq('mit')
    end
  end

  describe '#license' do
    before do
      repository.delete_file(user, 'LICENSE',
        message: 'Remove LICENSE', branch_name: 'master')
    end

    it 'returns nil when no license is detected' do
      expect(repository.license).to be_nil
    end

    it 'returns nil when the repository does not exist' do
      expect(repository).to receive(:exists?).and_return(false)

      expect(repository.license).to be_nil
    end

    it 'returns nil when the content is not recognizable' do
      repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
        message: 'Add LICENSE', branch_name: 'master')

      expect(repository.license).to be_nil
    end

    it 'returns the license' do
      license = Licensee::License.new('mit')
      repository.create_file(user, 'LICENSE',
        license.content,
        message: 'Add LICENSE', branch_name: 'master')

      expect(repository.license).to eq(license)
    end
  end

  describe "#gitlab_ci_yml", :use_clean_rails_memory_store_caching do
    it 'returns valid file' do
      files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')]
      expect(repository.tree).to receive(:blobs).and_return(files)

      expect(repository.gitlab_ci_yml.path).to eq('.gitlab-ci.yml')
    end

    it 'returns nil if not exists' do
      expect(repository.tree).to receive(:blobs).and_return([])
      expect(repository.gitlab_ci_yml).to be_nil
    end

    it 'returns nil for empty repository' do
      allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
      expect(repository.gitlab_ci_yml).to be_nil
    end
  end

  describe '#ambiguous_ref?' do
    let(:ref) { 'ref' }

    subject { repository.ambiguous_ref?(ref) }

    context 'when ref is ambiguous' do
      before do
        repository.add_tag(project.creator, ref, 'master')
        repository.add_branch(project.creator, ref, 'master')
      end

      it 'is true' do
        is_expected.to eq(true)
      end
    end

    context 'when ref is not ambiguous' do
      before do
        repository.add_tag(project.creator, ref, 'master')
      end

      it 'is false' do
        is_expected.to eq(false)
      end
    end
  end

  describe '#expand_ref' do
    let(:ref) { 'ref' }

    subject { repository.expand_ref(ref) }

    context 'when ref is not tag or branch name' do
      let(:ref) { 'refs/heads/master' }

      it 'returns nil' do
        is_expected.to be_nil
      end
    end

    context 'when ref is tag name' do
      before do
        repository.add_tag(project.creator, ref, 'master')
      end

      it 'returns the tag ref' do
        is_expected.to eq("refs/tags/#{ref}")
      end
    end

    context 'when ref is branch name' do
      before do
        repository.add_branch(project.creator, ref, 'master')
      end

      it 'returns the branch ref' do
        is_expected.to eq("refs/heads/#{ref}")
      end
    end
  end

  describe '#add_branch' do
    let(:branch_name) { 'new_feature' }
    let(:target) { 'master' }

    subject { repository.add_branch(user, branch_name, target) }

    it "calls Gitaly's OperationService" do
      expect_any_instance_of(Gitlab::GitalyClient::OperationService)
        .to receive(:user_create_branch).with(branch_name, user, target)
        .and_return(nil)

      subject
    end

    it 'creates the branch' do
      expect(subject.name).to eq(branch_name)
      expect(repository.find_branch(branch_name)).not_to be_nil
    end

    context 'with a non-existing target' do
      let(:target) { 'fake-target' }

      it "returns false and doesn't create the branch" do
        expect(subject).to be(false)
        expect(repository.find_branch(branch_name)).to be_nil
      end
    end
  end
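
  # Shared examples for predicate methods with "asymmetric" caching: the result
  # is always memoized in the request store, but only a `true` result is
  # persisted in the repository cache.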
2019-07-07 11:18:12 +05:30
|
|
|
shared_examples 'asymmetric cached method' do |method|
|
2018-12-05 23:21:45 +05:30
|
|
|
context 'asymmetric caching', :use_clean_rails_memory_store_caching, :request_store do
|
|
|
|
let(:cache) { repository.send(:cache) }
|
|
|
|
let(:request_store_cache) { repository.send(:request_store_cache) }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
context 'when it returns true' do
|
|
|
|
before do
|
2019-07-07 11:18:12 +05:30
|
|
|
expect(repository.raw_repository).to receive(method).once.and_return(true)
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
it 'caches the output in RequestStore' do
|
|
|
|
expect do
|
2019-07-07 11:18:12 +05:30
|
|
|
repository.send(method)
|
|
|
|
end.to change { request_store_cache.read(method) }.from(nil).to(true)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
it 'caches the output in RepositoryCache' do
|
|
|
|
expect do
|
2019-07-07 11:18:12 +05:30
|
|
|
repository.send(method)
|
|
|
|
end.to change { cache.read(method) }.from(nil).to(true)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
context 'when it returns false' do
|
|
|
|
before do
|
2019-07-07 11:18:12 +05:30
|
|
|
expect(repository.raw_repository).to receive(method).once.and_return(false)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
it 'caches the output in RequestStore' do
|
|
|
|
expect do
|
2019-07-07 11:18:12 +05:30
|
|
|
repository.send(method)
|
|
|
|
end.to change { request_store_cache.read(method) }.from(nil).to(false)
|
2018-12-05 23:21:45 +05:30
|
|
|
end
|
2016-04-02 18:10:28 +05:30
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
it 'does NOT cache the output in RepositoryCache' do
|
|
|
|
expect do
|
2019-07-07 11:18:12 +05:30
|
|
|
repository.send(method)
|
|
|
|
end.not_to change { cache.read(method) }.from(nil)
|
2018-12-05 23:21:45 +05:30
|
|
|
end
|
2017-09-10 17:25:29 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
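# The shared examples above pin down the "asymmetric" caching contract used by
# cheap existence checks such as #exists? and #has_visible_content?: a truthy
# result is written to both the request store and the Redis-backed
# RepositoryCache, while a falsy result only lives for the current request, so
# a repository that springs into existence a moment later is not hidden behind
# a stale cached `false`. They are included per method, for example:
#
#   it_behaves_like 'asymmetric cached method', :exists?
#
# (The model is assumed to wire the method up through its caching helpers,
# e.g. something like `cache_method_asymmetrically :exists?`; that declaration
# lives outside this spec.)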
|
2016-04-02 18:10:28 +05:30
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe '#exists?' do
|
|
|
|
it 'returns true when a repository exists' do
|
|
|
|
expect(repository.exists?).to be(true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false if no full path can be constructed' do
|
|
|
|
allow(repository).to receive(:full_path).and_return(nil)
|
|
|
|
|
|
|
|
expect(repository.exists?).to be(false)
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with broken storage', :broken_storage do
|
|
|
|
it 'raises a storage error' do
|
|
|
|
expect_to_raise_storage_error { broken_repository.exists? }
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
it_behaves_like 'asymmetric cached method', :exists?
|
|
|
|
end
|
|
|
|
|
2016-02-05 20:25:01 +05:30
|
|
|
describe '#has_visible_content?' do
|
2019-12-21 20:55:43 +05:30
|
|
|
it 'delegates to raw_repository when true' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(repository.raw_repository).to receive(:has_visible_content?)
|
2019-12-21 20:55:43 +05:30
|
|
|
.and_return(true)
|
2016-02-05 20:25:01 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
expect(repository.has_visible_content?).to eq(true)
|
2016-02-05 20:25:01 +05:30
|
|
|
end
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
it 'delegates to raw_repository when false' do
|
|
|
|
expect(repository.raw_repository).to receive(:has_visible_content?)
|
|
|
|
.and_return(false)
|
2016-04-02 18:10:28 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
expect(repository.has_visible_content?).to eq(false)
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
2019-12-21 20:55:43 +05:30
|
|
|
|
|
|
|
it_behaves_like 'asymmetric cached method', :has_visible_content?
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#branch_exists?' do
|
2019-12-21 20:55:43 +05:30
|
|
|
let(:branch) { repository.root_ref }
|
|
|
|
|
|
|
|
subject { repository.branch_exists?(branch) }
|
|
|
|
|
|
|
|
it 'delegates to branch_names when the cache is empty' do
|
|
|
|
repository.expire_branches_cache
|
|
|
|
|
|
|
|
expect(repository).to receive(:branch_names).and_call_original
|
|
|
|
is_expected.to eq(true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'uses redis set caching when the cache is filled' do
|
|
|
|
repository.branch_names # ensure the branch name cache is filled
|
|
|
|
|
|
|
|
expect(repository)
|
|
|
|
.to receive(:branch_names_include?)
|
|
|
|
.with(branch)
|
|
|
|
.and_call_original
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
is_expected.to eq(true)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#tag_exists?' do
|
2019-12-21 20:55:43 +05:30
|
|
|
let(:tag) { repository.tags.first.name }
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
subject { repository.tag_exists?(tag) }
|
|
|
|
|
|
|
|
it 'delegates to tag_names when the cache is empty' do
|
|
|
|
repository.expire_tags_cache
|
|
|
|
|
|
|
|
expect(repository).to receive(:tag_names).and_call_original
|
|
|
|
is_expected.to eq(true)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'uses redis set caching when the cache is filled' do
|
|
|
|
repository.tag_names # ensure the tag name cache is filled
|
|
|
|
|
|
|
|
expect(repository)
|
|
|
|
.to receive(:tag_names_include?)
|
|
|
|
.with(tag)
|
|
|
|
.and_call_original
|
|
|
|
|
|
|
|
is_expected.to eq(true)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
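# Both #branch_exists? and #tag_exists? above follow the same two-tier lookup:
# with a cold cache they fall back to the full branch/tag name list, while a
# warm cache answers membership via the set-backed helpers
# (branch_names_include? / tag_names_include?) instead of loading every name
# just to check one. A minimal usage sketch (values depend on the fixture
# repository):
#
#   repository.branch_names             # warms the name cache
#   repository.branch_exists?('master') # => true, answered via set membership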
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
describe '#branch_names', :clean_gitlab_redis_cache do
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:fake_branch_names) { ['foobar'] }
|
|
|
|
|
|
|
|
it 'gets cached across Repository instances' do
|
|
|
|
allow(repository.raw_repository).to receive(:branch_names).once.and_return(fake_branch_names)
|
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
expect(repository.branch_names).to match_array(fake_branch_names)
|
2018-03-17 18:26:18 +05:30
|
|
|
|
|
|
|
fresh_repository = Project.find(project.id).repository
|
|
|
|
expect(fresh_repository.object_id).not_to eq(repository.object_id)
|
|
|
|
|
|
|
|
expect(fresh_repository.raw_repository).not_to receive(:branch_names)
|
2019-12-21 20:55:43 +05:30
|
|
|
expect(fresh_repository.branch_names).to match_array(fake_branch_names)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-04-02 18:10:28 +05:30
|
|
|
describe '#empty?' do
|
|
|
|
let(:empty_repository) { create(:project_empty_repo).repository }
|
|
|
|
|
|
|
|
it 'returns true for an empty repository' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(empty_repository).to be_empty
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false for a non-empty repository' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(repository).not_to be_empty
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'caches the output' do
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(repository.raw_repository).to receive(:has_visible_content?).once
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
repository.empty?
|
|
|
|
repository.empty?
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
describe '#blobs_at' do
|
|
|
|
let(:empty_repository) { create(:project_empty_repo).repository }
|
|
|
|
|
|
|
|
it 'returns empty array for an empty repository' do
|
|
|
|
# rubocop:disable Style/WordArray
|
|
|
|
expect(empty_repository.blobs_at(['master', 'foobar'])).to eq([])
|
|
|
|
# rubocop:enable Style/WordArray
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns blob array for a non-empty repository' do
|
|
|
|
repository.create_file(User.last, 'foobar', 'CONTENT', message: 'message', branch_name: 'master')
|
|
|
|
|
|
|
|
# rubocop:disable Style/WordArray
|
|
|
|
blobs = repository.blobs_at([['master', 'foobar']])
|
|
|
|
# rubocop:enable Style/WordArray
|
|
|
|
|
|
|
|
expect(blobs.first.name).to eq('foobar')
|
|
|
|
expect(blobs.size).to eq(1)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-04-02 18:10:28 +05:30
|
|
|
describe '#root_ref' do
|
|
|
|
it 'returns a branch name' do
|
|
|
|
expect(repository.root_ref).to be_an_instance_of(String)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'caches the output' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository.raw_repository).to receive(:root_ref)
|
|
|
|
.once
|
|
|
|
.and_return('master')
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
repository.root_ref
|
|
|
|
repository.root_ref
|
|
|
|
end
|
2019-07-07 11:18:12 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
it 'returns nil if the repository does not exist' do
|
|
|
|
repository = create(:project).repository
|
|
|
|
|
|
|
|
expect(repository).not_to be_exists
|
|
|
|
expect(repository.root_ref).to be_nil
|
|
|
|
end
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
it_behaves_like 'asymmetric cached method', :root_ref
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#expire_root_ref_cache' do
|
|
|
|
it 'expires the root reference cache' do
|
|
|
|
repository.root_ref
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository.raw_repository).to receive(:root_ref)
|
|
|
|
.once
|
|
|
|
.and_return('foo')
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
repository.expire_root_ref_cache
|
|
|
|
|
|
|
|
expect(repository.root_ref).to eq('foo')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-06-02 11:05:42 +05:30
|
|
|
describe '#expire_branch_cache' do
|
2016-04-02 18:10:28 +05:30
|
|
|
# This method is private but we need it for testing purposes. Sadly there's
|
|
|
|
# no other proper way of testing caching operations.
|
|
|
|
let(:cache) { repository.send(:cache) }
|
|
|
|
|
|
|
|
it 'expires the cache for all branches' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(cache).to receive(:expire)
|
|
|
|
.at_least(repository.branches.length * 2)
|
|
|
|
.times
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
repository.expire_branch_cache
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'expires the cache for all branches when the root branch is given' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(cache).to receive(:expire)
|
|
|
|
.at_least(repository.branches.length * 2)
|
|
|
|
.times
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
repository.expire_branch_cache(repository.root_ref)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'expires the cache for a specific branch' do
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(cache).to receive(:expire).twice
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
repository.expire_branch_cache('foo')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#expire_emptiness_caches' do
|
|
|
|
let(:cache) { repository.send(:cache) }
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'expires the caches for an empty repository' do
|
|
|
|
allow(repository).to receive(:empty?).and_return(true)
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(cache).to receive(:expire).with(:has_visible_content?)
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
repository.expire_emptiness_caches
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not expire the cache for a non-empty repository' do
|
|
|
|
allow(repository).to receive(:empty?).and_return(false)
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
expect(cache).not_to receive(:expire).with(:has_visible_content?)
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
repository.expire_emptiness_caches
|
|
|
|
end
|
2018-05-09 12:01:36 +05:30
|
|
|
|
|
|
|
it 'expires the memoized repository cache' do
|
|
|
|
expect(repository.raw_repository).to receive(:expire_has_local_branches_cache).and_call_original
|
|
|
|
|
|
|
|
repository.expire_emptiness_caches
|
|
|
|
end
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe 'skip_merges option' do
|
2018-03-17 18:26:18 +05:30
|
|
|
subject { repository.commits(Gitlab::Git::BRANCH_REF_PREFIX + "'test'", limit: 100, skip_merges: true).map(&:id) }
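# NOTE: the nested quoting is intentional; the test fixture repository is
# assumed to contain a branch literally named 'test' (single quotes included),
# so the ref resolves to refs/heads/'test'.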
|
2016-04-02 18:10:28 +05:30
|
|
|
|
|
|
|
it { is_expected.not_to include('e56497bb5f03a90a51293fc6d516788730953899') }
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#merge' do
|
2017-08-17 22:00:37 +05:30
|
|
|
let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) }
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:message) { "Test \r\n\r\n message" }
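# The double-quoted literal embeds real CR/LF characters; the
# 'removes carriage returns from commit message' example below relies on the
# "\r" bytes actually being present in the message.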
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'merges the code and returns the commit id' do
|
|
|
|
expect(merge_commit).to be_present
|
|
|
|
expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
|
|
|
|
end
|
2019-01-03 12:48:30 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
|
|
|
|
merge_commit_id = merge(repository, user, merge_request, message)
|
2019-01-03 12:48:30 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'removes carriage returns from commit message' do
|
|
|
|
merge_commit_id = merge(repository, user, merge_request, message)
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
expect(repository.commit(merge_commit_id).message).to eq(message.delete("\r"))
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2016-09-13 17:45:13 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
def merge(repository, user, merge_request, message)
|
|
|
|
repository.merge(user, merge_request.diff_head_sha, merge_request, message)
|
2016-09-13 17:45:13 +05:30
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
describe '#merge_to_ref' do
|
|
|
|
let(:merge_request) do
|
|
|
|
create(:merge_request, source_branch: 'feature',
|
|
|
|
target_branch: 'master',
|
|
|
|
source_project: project)
|
|
|
|
end
|
|
|
|
|
2019-09-30 21:07:59 +05:30
|
|
|
it 'writes merge of source SHA and first parent ref to MR merge_ref_path' do
|
2019-07-07 11:18:12 +05:30
|
|
|
merge_commit_id = repository.merge_to_ref(user,
|
|
|
|
merge_request.diff_head_sha,
|
|
|
|
merge_request,
|
|
|
|
merge_request.merge_ref_path,
|
2019-09-30 21:07:59 +05:30
|
|
|
'Custom message',
|
|
|
|
merge_request.target_branch_ref)
|
2019-07-07 11:18:12 +05:30
|
|
|
|
|
|
|
merge_commit = repository.commit(merge_commit_id)
|
|
|
|
|
|
|
|
expect(merge_commit.message).to eq('Custom message')
|
|
|
|
expect(merge_commit.author_name).to eq(user.name)
|
|
|
|
expect(merge_commit.author_email).to eq(user.commit_email)
|
|
|
|
expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
|
|
|
|
end
|
|
|
|
end
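# Unlike #merge, #merge_to_ref only writes the merge commit to the merge
# request's merge_ref_path (conventionally refs/merge-requests/<iid>/merge)
# and leaves the target branch untouched, which is why the example above
# inspects the resulting commit directly instead of the 'master' branch.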
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#ff_merge' do
|
|
|
|
before do
|
|
|
|
repository.add_branch(user, 'ff-target', 'feature~5')
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
it 'merges the code and returns the commit id' do
|
|
|
|
merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'ff-target', source_project: project)
|
|
|
|
merge_commit_id = repository.ff_merge(user,
|
|
|
|
merge_request.diff_head_sha,
|
|
|
|
merge_request.target_branch,
|
|
|
|
merge_request: merge_request)
|
|
|
|
merge_commit = repository.commit(merge_commit_id)
|
|
|
|
|
|
|
|
expect(merge_commit).to be_present
|
|
|
|
expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
|
|
|
|
merge_request = create(:merge_request, source_branch: 'feature', target_branch: 'ff-target', source_project: project)
|
|
|
|
merge_commit_id = repository.ff_merge(user,
|
|
|
|
merge_request.diff_head_sha,
|
|
|
|
merge_request.target_branch,
|
|
|
|
merge_request: merge_request)
|
|
|
|
|
|
|
|
expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-04-02 18:10:28 +05:30
|
|
|
end
|
|
|
|
|
2019-07-31 22:56:46 +05:30
|
|
|
describe '#rebase' do
|
|
|
|
let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) }
|
|
|
|
|
|
|
|
shared_examples_for 'a method that can rebase successfully' do
|
|
|
|
it 'returns the rebase commit sha' do
|
|
|
|
rebase_commit_sha = repository.rebase(user, merge_request)
|
|
|
|
head_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
|
|
|
|
|
|
|
|
expect(rebase_commit_sha).to eq(head_sha)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'sets the `rebase_commit_sha` for the given merge request' do
|
|
|
|
rebase_commit_sha = repository.rebase(user, merge_request)
|
|
|
|
|
|
|
|
expect(rebase_commit_sha).not_to be_nil
|
|
|
|
expect(merge_request.rebase_commit_sha).to eq(rebase_commit_sha)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
it_behaves_like 'a method that can rebase successfully'
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
it 'executes the new Gitaly RPC' do
|
|
|
|
expect_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rebase)
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
repository.rebase(user, merge_request)
|
|
|
|
end
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
describe 'rolling back the `rebase_commit_sha`' do
|
|
|
|
let(:new_sha) { Digest::SHA1.hexdigest('foo') }
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
it 'does not rollback when there are no errors' do
|
|
|
|
second_response = double(pre_receive_error: nil, git_error: nil)
|
|
|
|
mock_gitaly(second_response)
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
repository.rebase(user, merge_request)
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
|
|
|
|
end
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
it 'rolls back when a PreReceiveError is encountered in the second step' do
|
|
|
|
second_response = double(pre_receive_error: 'my_error', git_error: nil)
|
|
|
|
mock_gitaly(second_response)
|
2020-01-01 13:55:28 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
expect do
|
|
|
|
repository.rebase(user, merge_request)
|
|
|
|
end.to raise_error(Gitlab::Git::PreReceiveError)
|
2020-01-01 13:55:28 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
expect(merge_request.reload.rebase_commit_sha).to be_nil
|
|
|
|
end
|
2020-01-01 13:55:28 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
it 'rolls back when a GitError is encountered in the second step' do
|
|
|
|
second_response = double(pre_receive_error: nil, git_error: 'git error')
|
|
|
|
mock_gitaly(second_response)
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
expect do
|
|
|
|
repository.rebase(user, merge_request)
|
|
|
|
end.to raise_error(Gitlab::Git::Repository::GitError)
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
expect(merge_request.reload.rebase_commit_sha).to be_nil
|
2019-07-31 22:56:46 +05:30
|
|
|
end
|
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
def mock_gitaly(second_response)
|
|
|
|
responses = [
|
|
|
|
double(rebase_sha: new_sha).as_null_object,
|
|
|
|
second_response
|
|
|
|
]
|
2019-07-31 22:56:46 +05:30
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
expect_any_instance_of(
|
|
|
|
Gitaly::OperationService::Stub
|
|
|
|
).to receive(:user_rebase_confirmable).and_return(responses.each)
|
2019-07-31 22:56:46 +05:30
|
|
|
end
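# mock_gitaly mimics the two-step UserRebaseConfirmable streaming RPC as seen
# by the client: the first response carries the computed rebase SHA, the
# second the result of applying it. Returning `responses.each` (an Enumerator)
# lets the client consume them in order, so the examples above can inject a
# pre_receive_error or git_error into the second step and assert that
# `rebase_commit_sha` is rolled back. (RPC shape as modelled by this stub;
# the authoritative definition lives in Gitaly.)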
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2016-06-02 11:05:42 +05:30
|
|
|
describe '#revert' do
|
2019-02-15 15:39:39 +05:30
|
|
|
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
|
|
|
|
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
|
|
|
|
let(:message) { 'revert message' }
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when there is a conflict' do
|
|
|
|
it 'raises an error' do
|
|
|
|
expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
|
2018-12-23 12:14:25 +05:30
|
|
|
end
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when commit was already reverted' do
|
|
|
|
it 'raises an error' do
|
|
|
|
repository.revert(user, update_image_commit, 'master', message)
|
2018-12-23 12:14:25 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
|
2018-12-23 12:14:25 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2019-01-03 12:48:30 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when commit can be reverted' do
|
|
|
|
it 'reverts the changes' do
|
|
|
|
expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy
|
|
|
|
end
|
2019-01-03 12:48:30 +05:30
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'reverting a merge commit' do
|
|
|
|
it 'reverts the changes' do
|
|
|
|
merge_commit
|
|
|
|
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present
|
|
|
|
|
|
|
|
repository.revert(user, merge_commit, 'master', message)
|
|
|
|
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present
|
|
|
|
end
|
2019-01-03 12:48:30 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#cherry_pick' do
|
2019-02-15 15:39:39 +05:30
|
|
|
let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
|
|
|
|
let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
|
|
|
|
let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
|
|
|
|
let(:message) { 'cherry-pick message' }
|
|
|
|
|
|
|
|
context 'when there is a conflict' do
|
|
|
|
it 'raises an error' do
|
|
|
|
expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when commit was already cherry-picked' do
|
|
|
|
it 'raises an error' do
|
|
|
|
repository.cherry_pick(user, pickable_commit, 'master', message)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'when commit can be cherry-picked' do
|
|
|
|
it 'cherry-picks the changes' do
|
|
|
|
expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2019-02-15 15:39:39 +05:30
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
context 'cherry-picking a merge commit' do
|
|
|
|
it 'cherry-picks the changes' do
|
|
|
|
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message)
|
|
|
|
cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil
|
|
|
|
expect(cherry_pick_commit_message).to eq(message)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
2016-02-05 20:25:01 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
describe '#before_delete' do
|
|
|
|
describe 'when a repository does not exist' do
|
|
|
|
before do
|
|
|
|
allow(repository).to receive(:exists?).and_return(false)
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'does not flush caches that depend on repository data' do
|
2016-06-16 23:09:34 +05:30
|
|
|
expect(repository).not_to receive(:expire_cache)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
it 'flushes the tags cache' do
|
|
|
|
expect(repository).to receive(:expire_tags_cache)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the branches cache' do
|
|
|
|
expect(repository).to receive(:expire_branches_cache)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
2016-06-02 11:05:42 +05:30
|
|
|
it 'flushes the root ref cache' do
|
|
|
|
expect(repository).to receive(:expire_root_ref_cache)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the emptiness caches' do
|
|
|
|
expect(repository).to receive(:expire_emptiness_caches)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the exists cache' do
|
|
|
|
expect(repository).to receive(:expire_exists_cache).twice
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe 'when a repository exists' do
|
|
|
|
before do
|
|
|
|
allow(repository).to receive(:exists?).and_return(true)
|
|
|
|
end
|
|
|
|
|
2016-08-24 12:49:21 +05:30
|
|
|
it 'flushes the tags cache' do
|
|
|
|
expect(repository).to receive(:expire_tags_cache)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the branches cache' do
|
|
|
|
expect(repository).to receive(:expire_branches_cache)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
2016-06-02 11:05:42 +05:30
|
|
|
it 'flushes the root ref cache' do
|
|
|
|
expect(repository).to receive(:expire_root_ref_cache)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the emptiness caches' do
|
|
|
|
expect(repository).to receive(:expire_emptiness_caches)
|
|
|
|
|
|
|
|
repository.before_delete
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#before_change_head' do
|
|
|
|
it 'flushes the branch cache' do
|
|
|
|
expect(repository).to receive(:expire_branch_cache)
|
|
|
|
|
|
|
|
repository.before_change_head
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the root ref cache' do
|
|
|
|
expect(repository).to receive(:expire_root_ref_cache)
|
|
|
|
|
|
|
|
repository.before_change_head
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#after_change_head' do
|
2018-11-08 19:23:39 +05:30
|
|
|
it 'flushes the method caches' do
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository).to receive(:expire_method_caches).with([
|
2018-11-08 19:23:39 +05:30
|
|
|
:size,
|
|
|
|
:commit_count,
|
|
|
|
:rendered_readme,
|
2019-02-15 15:39:39 +05:30
|
|
|
:readme_path,
|
2018-11-08 19:23:39 +05:30
|
|
|
:contribution_guide,
|
2017-08-17 22:00:37 +05:30
|
|
|
:changelog,
|
2018-11-08 19:23:39 +05:30
|
|
|
:license_blob,
|
|
|
|
:license_key,
|
2017-08-17 22:00:37 +05:30
|
|
|
:gitignore,
|
2018-11-08 19:23:39 +05:30
|
|
|
:gitlab_ci_yml,
|
|
|
|
:branch_names,
|
|
|
|
:tag_names,
|
|
|
|
:branch_count,
|
|
|
|
:tag_count,
|
2018-03-17 18:26:18 +05:30
|
|
|
:avatar,
|
2018-11-08 19:23:39 +05:30
|
|
|
:exists?,
|
|
|
|
:root_ref,
|
2020-03-09 13:42:32 +05:30
|
|
|
:merged_branch_names,
|
2018-11-08 19:23:39 +05:30
|
|
|
:has_visible_content?,
|
|
|
|
:issue_template_names,
|
|
|
|
:merge_request_template_names,
|
2019-07-31 22:56:46 +05:30
|
|
|
:metrics_dashboard_paths,
|
2018-11-08 19:23:39 +05:30
|
|
|
:xcode_project?
|
2017-08-17 22:00:37 +05:30
|
|
|
])
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
repository.after_change_head
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
describe '#expire_caches_for_tags' do
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'flushes the cache' do
|
|
|
|
expect(repository).to receive(:expire_statistics_caches)
|
|
|
|
expect(repository).to receive(:expire_emptiness_caches)
|
|
|
|
expect(repository).to receive(:expire_tags_cache)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
repository.expire_caches_for_tags
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#before_push_tag' do
|
|
|
|
it 'logs an event' do
|
|
|
|
expect(repository).not_to receive(:expire_statistics_caches)
|
|
|
|
expect(repository).not_to receive(:expire_emptiness_caches)
|
|
|
|
expect(repository).not_to receive(:expire_tags_cache)
|
|
|
|
expect(repository).to receive(:repository_event).with(:push_tag)
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
repository.before_push_tag
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#after_push_commit' do
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'expires statistics caches' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:expire_statistics_caches)
|
|
|
|
.and_call_original
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:expire_branch_cache)
|
|
|
|
.with('master')
|
|
|
|
.and_call_original
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
repository.after_push_commit('master')
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#after_create_branch' do
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'expires the branch caches' do
|
|
|
|
expect(repository).to receive(:expire_branches_cache)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
repository.after_create_branch
|
|
|
|
end
|
2019-10-12 21:52:04 +05:30
|
|
|
|
|
|
|
it 'does not expire the branch caches when specified' do
|
|
|
|
expect(repository).not_to receive(:expire_branches_cache)
|
|
|
|
|
|
|
|
repository.after_create_branch(expire_cache: false)
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#after_remove_branch' do
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'expires the branch caches' do
|
|
|
|
expect(repository).to receive(:expire_branches_cache)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
repository.after_remove_branch
|
|
|
|
end
|
2019-10-12 21:52:04 +05:30
|
|
|
|
|
|
|
it 'does not expire the branch caches when specified' do
|
|
|
|
expect(repository).not_to receive(:expire_branches_cache)
|
|
|
|
|
|
|
|
repository.after_remove_branch(expire_cache: false)
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
describe '#after_create' do
|
2019-10-12 21:52:04 +05:30
|
|
|
it 'calls expire_status_cache' do
|
|
|
|
expect(repository).to receive(:expire_status_cache)
|
|
|
|
|
|
|
|
repository.after_create
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'logs an event' do
|
|
|
|
expect(repository).to receive(:repository_event).with(:create_repository)
|
|
|
|
|
|
|
|
repository.after_create
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#expire_status_cache' do
|
2016-06-02 11:05:42 +05:30
|
|
|
it 'flushes the exists cache' do
|
|
|
|
expect(repository).to receive(:expire_exists_cache)
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
repository.expire_status_cache
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the root ref cache' do
|
|
|
|
expect(repository).to receive(:expire_root_ref_cache)
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
repository.expire_status_cache
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'flushes the emptiness caches' do
|
|
|
|
expect(repository).to receive(:expire_emptiness_caches)
|
|
|
|
|
2019-10-12 21:52:04 +05:30
|
|
|
repository.expire_status_cache
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe "#copy_gitattributes" do
|
|
|
|
it 'returns true with a valid ref' do
|
|
|
|
expect(repository.copy_gitattributes('master')).to be_truthy
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns false with an invalid ref' do
|
|
|
|
expect(repository.copy_gitattributes('invalid')).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#before_remove_tag' do
|
|
|
|
it 'flushes the tag cache' do
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository).to receive(:expire_tags_cache).and_call_original
|
|
|
|
expect(repository).to receive(:expire_statistics_caches).and_call_original
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
repository.before_remove_tag
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#branch_count' do
|
|
|
|
it 'returns the number of branches' do
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.branch_count).to be_an(Integer)
|
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
rugged_count = rugged_repo(repository).branches.count
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
expect(repository.branch_count).to eq(rugged_count)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#tag_count' do
|
|
|
|
it 'returns the number of tags' do
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.tag_count).to be_an(Integer)
|
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
rugged_count = rugged_repo(repository).tags.count
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
expect(repository.tag_count).to eq(rugged_count)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#expire_branches_cache' do
|
2016-06-02 11:05:42 +05:30
|
|
|
it 'expires the cache' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:expire_method_caches)
|
2020-03-09 13:42:32 +05:30
|
|
|
.with(%i(branch_names merged_branch_names branch_count has_visible_content?))
|
2017-09-10 17:25:29 +05:30
|
|
|
.and_call_original
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
repository.expire_branches_cache
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#expire_tags_cache' do
|
2016-06-02 11:05:42 +05:30
|
|
|
it 'expires the cache' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:expire_method_caches)
|
|
|
|
.with(%i(tag_names tag_count))
|
|
|
|
.and_call_original
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
repository.expire_tags_cache
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
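# Both expire_* specs above lean on expire_method_caches, which drops the
# per-method cache keys so the next call recomputes and re-caches the value.
# A hypothetical sketch of the producing side, assuming the model's
# `cache_method` helper (the declaration is not part of this spec):
#
#   cache_method :branch_names, fallback: []
#   cache_method :branch_count, fallback: 0
#
# Expiring then simply deletes the `branch_names` / `branch_count` cache keys.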
|
|
|
|
|
|
|
|
describe '#add_tag' do
|
2018-03-17 18:26:18 +05:30
|
|
|
let(:user) { build_stubbed(:user) }
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
context 'with a valid target' do
|
|
|
|
it 'creates the tag' do
|
|
|
|
repository.add_tag(user, '8.5', 'master', 'foo')
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
tag = repository.find_tag('8.5')
|
|
|
|
expect(tag).to be_present
|
|
|
|
expect(tag.message).to eq('foo')
|
|
|
|
expect(tag.dereferenced_target.id).to eq(repository.commit('master').id)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2017-01-15 13:20:01 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
it 'returns a Gitlab::Git::Tag object' do
|
2017-01-15 13:20:01 +05:30
|
|
|
tag = repository.add_tag(user, '8.5', 'master', 'foo')
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
expect(tag).to be_a(Gitlab::Git::Tag)
|
2017-01-15 13:20:01 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
context 'with an invalid target' do
|
|
|
|
it 'returns false' do
|
|
|
|
expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
2018-11-08 19:23:39 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe '#rm_branch' do
|
|
|
|
it 'removes a branch' do
|
|
|
|
expect(repository).to receive(:before_remove_branch)
|
|
|
|
expect(repository).to receive(:after_remove_branch)
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
repository.rm_branch(user, 'feature')
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
context 'when pre hooks failed' do
|
|
|
|
before do
|
|
|
|
allow_any_instance_of(Gitlab::GitalyClient::OperationService)
|
|
|
|
.to receive(:user_delete_branch).and_raise(Gitlab::Git::PreReceiveError)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
it 'gets an error and does not delete the branch' do
|
|
|
|
expect do
|
|
|
|
repository.rm_branch(user, 'feature')
|
|
|
|
end.to raise_error(Gitlab::Git::PreReceiveError)
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
expect(repository.find_branch('feature')).not_to be_nil
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#rm_tag' do
|
2018-11-08 19:23:39 +05:30
|
|
|
it 'removes a tag' do
|
|
|
|
expect(repository).to receive(:before_remove_tag)
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
repository.rm_tag(build_stubbed(:user), 'v1.1.0')
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
expect(repository.find_tag('v1.1.0')).to be_nil
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#avatar' do
|
|
|
|
it 'returns nil if repo does not exist' do
|
2018-03-17 18:26:18 +05:30
|
|
|
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2019-12-21 20:55:43 +05:30
|
|
|
expect(repository.avatar).to be_nil
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the first avatar file found in the repository' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:file_on_head)
|
|
|
|
.with(:avatar)
|
|
|
|
.and_return(double(:tree, path: 'logo.png'))
|
2016-06-02 11:05:42 +05:30
|
|
|
|
|
|
|
expect(repository.avatar).to eq('logo.png')
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'caches the output' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:file_on_head)
|
|
|
|
.with(:avatar)
|
|
|
|
.once
|
|
|
|
.and_return(double(:tree, path: 'logo.png'))
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
2.times { expect(repository.avatar).to eq('logo.png') }
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#expire_exists_cache' do
|
2016-06-02 11:05:42 +05:30
|
|
|
let(:cache) { repository.send(:cache) }
|
2018-12-05 23:21:45 +05:30
|
|
|
let(:request_store_cache) { repository.send(:request_store_cache) }
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'expires the cache' do
|
|
|
|
expect(cache).to receive(:expire).with(:exists?)
|
|
|
|
|
|
|
|
repository.expire_exists_cache
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2018-12-05 23:21:45 +05:30
|
|
|
|
|
|
|
it 'expires the request store cache', :request_store do
|
|
|
|
expect(request_store_cache).to receive(:expire).with(:exists?)
|
|
|
|
|
|
|
|
repository.expire_exists_cache
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe '#xcode_project?' do
|
|
|
|
before do
|
|
|
|
allow(repository).to receive(:tree).with(:head).and_return(double(:tree, trees: [tree]))
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the root contains a *.xcodeproj directory' do
|
|
|
|
let(:tree) { double(:tree, path: 'Foo.xcodeproj') }
|
|
|
|
|
|
|
|
it 'returns true' do
|
|
|
|
expect(repository.xcode_project?).to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the root contains a *.xcworkspace directory' do
|
|
|
|
let(:tree) { double(:tree, path: 'Foo.xcworkspace') }
|
|
|
|
|
|
|
|
it 'returns true' do
|
|
|
|
expect(repository.xcode_project?).to be_truthy
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when the root contains no Xcode config directory' do
|
|
|
|
let(:tree) { double(:tree, path: 'Foo') }
|
|
|
|
|
|
|
|
it 'returns false' do
|
|
|
|
expect(repository.xcode_project?).to be_falsey
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '#contribution_guide', :use_clean_rails_memory_store_caching do
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'returns and caches the output' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:file_on_head)
|
|
|
|
.with(:contributing)
|
|
|
|
.and_return(Gitlab::Git::Tree.new(path: 'CONTRIBUTING.md'))
|
|
|
|
.once
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
2.times do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository.contribution_guide)
|
|
|
|
.to be_an_instance_of(Gitlab::Git::Tree)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '#gitignore', :use_clean_rails_memory_store_caching do
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'returns and caches the output' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:file_on_head)
|
|
|
|
.with(:gitignore)
|
|
|
|
.and_return(Gitlab::Git::Tree.new(path: '.gitignore'))
|
|
|
|
.once
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
2.times do
|
|
|
|
expect(repository.gitignore).to be_an_instance_of(Gitlab::Git::Tree)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
describe '#readme', :use_clean_rails_memory_store_caching do
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'with a non-existing repository' do
|
2018-12-13 13:39:08 +05:30
|
|
|
let(:project) { create(:project) }
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2018-12-13 13:39:08 +05:30
|
|
|
it 'returns nil' do
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.readme).to be_nil
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'with an existing repository' do
|
2017-09-10 17:25:29 +05:30
|
|
|
context 'when no README exists' do
|
2018-12-13 13:39:08 +05:30
|
|
|
let(:project) { create(:project, :empty_repo) }
|
2017-09-10 17:25:29 +05:30
|
|
|
|
2018-12-13 13:39:08 +05:30
|
|
|
it 'returns nil' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository.readme).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when a README exists' do
|
2018-12-13 13:39:08 +05:30
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
2017-09-10 17:25:29 +05:30
|
|
|
it 'returns the README' do
|
|
|
|
expect(repository.readme).to be_an_instance_of(ReadmeBlob)
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
describe '#readme_path', :use_clean_rails_memory_store_caching do
|
|
|
|
context 'with a non-existing repository' do
|
|
|
|
let(:project) { create(:project) }
|
|
|
|
|
|
|
|
it 'returns nil' do
|
|
|
|
expect(repository.readme_path).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with an existing repository' do
|
|
|
|
context 'when no README exists' do
|
|
|
|
let(:project) { create(:project, :empty_repo) }
|
|
|
|
|
|
|
|
it 'returns nil' do
|
|
|
|
expect(repository.readme_path).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when a README exists' do
|
|
|
|
let(:project) { create(:project, :repository) }
|
|
|
|
|
|
|
|
it 'returns the README path' do
|
|
|
|
expect(repository.readme_path).to eq("README.md")
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'caches the response' do
|
|
|
|
expect(repository).to receive(:readme).and_call_original.once
|
|
|
|
|
|
|
|
2.times do
|
|
|
|
expect(repository.readme_path).to eq("README.md")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#expire_statistics_caches' do
|
|
|
|
it 'expires the caches' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:expire_method_caches)
|
|
|
|
.with(%i(size commit_count))
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
repository.expire_statistics_caches
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#expire_all_method_caches' do
|
|
|
|
it 'expires the caches of all methods' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:expire_method_caches)
|
|
|
|
.with(Repository::CACHED_METHODS)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
repository.expire_all_method_caches
|
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
|
|
|
|
it 'all cache_method definitions are in the lists of method caches' do
|
|
|
|
methods = repository.methods.map do |method|
|
|
|
|
match = /^_uncached_(.*)/.match(method)
|
|
|
|
match[1].to_sym if match
|
|
|
|
end.compact
|
|
|
|
|
2018-12-05 23:21:45 +05:30
|
|
|
expect(Repository::CACHED_METHODS + Repository::MEMOIZED_CACHED_METHODS).to include(*methods)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
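# The `_uncached_` scan above works because the caching helpers are assumed to
# alias the original implementation away before redefining the method, roughly:
#
#   alias_method :_uncached_branch_names, :branch_names
#   def branch_names
#     cache_fetch(:branch_names) { _uncached_branch_names }
#   end
#
# so every `_uncached_*` method corresponds to exactly one cached method, and
# any cached method missing from CACHED_METHODS / MEMOIZED_CACHED_METHODS makes
# the example fail. (`cache_fetch` above is illustrative only.)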
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#file_on_head' do
|
|
|
|
context 'with a non-existing repository' do
|
|
|
|
it 'returns nil' do
|
|
|
|
expect(repository).to receive(:tree).with(:head).and_return(nil)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.file_on_head(:readme)).to be_nil
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'with a repository that has no blobs' do
|
|
|
|
it 'returns nil' do
|
|
|
|
expect_any_instance_of(Tree).to receive(:blobs).and_return([])
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.file_on_head(:readme)).to be_nil
|
|
|
|
end
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'with an existing repository' do
|
|
|
|
it 'returns a Gitlab::Git::Tree' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository.file_on_head(:readme))
|
|
|
|
.to be_an_instance_of(Gitlab::Git::Tree)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#head_tree' do
|
|
|
|
context 'with an existing repository' do
|
|
|
|
it 'returns a Tree' do
|
|
|
|
expect(repository.head_tree).to be_an_instance_of(Tree)
|
|
|
|
end
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'with a non-existing repository' do
|
|
|
|
it 'returns nil' do
|
|
|
|
expect(repository).to receive(:head_commit).and_return(nil)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.head_tree).to be_nil
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
shared_examples '#tree' do
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'using a non-existing repository' do
|
|
|
|
before do
|
|
|
|
allow(repository).to receive(:head_commit).and_return(nil)
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'returns nil' do
|
|
|
|
expect(repository.tree(:head)).to be_nil
|
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
it 'returns nil when using a path' do
|
|
|
|
expect(repository.tree(:head, 'README.md')).to be_nil
|
|
|
|
end
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'using an existing repository' do
|
|
|
|
it 'returns a Tree' do
|
|
|
|
expect(repository.tree(:head)).to be_an_instance_of(Tree)
|
2020-03-09 13:42:32 +05:30
|
|
|
expect(repository.tree('v1.1.1')).to be_an_instance_of(Tree)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-09 13:42:32 +05:30
|
|
|
it_behaves_like '#tree'
|
|
|
|
|
|
|
|
describe '#tree with Rugged enabled', :enable_rugged do
|
|
|
|
it_behaves_like '#tree'
|
|
|
|
end
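# Running the same '#tree' shared examples under the :enable_rugged tag is the
# pattern this spec uses for methods that have a Rugged fast path: the
# behaviour must be identical whether the lookup goes through Gitaly or, with
# the corresponding feature flags enabled, through Rugged directly.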
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#size' do
|
|
|
|
context 'with a non-existing repository' do
|
|
|
|
it 'returns 0' do
|
|
|
|
expect(repository).to receive(:exists?).and_return(false)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.size).to eq(0.0)
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'with an existing repository' do
|
|
|
|
it 'returns the repository size as a Float' do
|
|
|
|
expect(repository.size).to be_an_instance_of(Float)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe '#local_branches' do
|
|
|
|
it 'returns the local branches' do
|
|
|
|
masterrev = repository.find_branch('master').dereferenced_target
|
|
|
|
create_remote_branch('joe', 'remote_branch', masterrev)
|
|
|
|
repository.add_branch(user, 'local_branch', masterrev.id)
|
|
|
|
|
|
|
|
expect(repository.local_branches.any? { |branch| branch.name == 'remote_branch' }).to eq(false)
|
|
|
|
expect(repository.local_branches.any? { |branch| branch.name == 'local_branch' }).to eq(true)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#commit_count' do
|
|
|
|
context 'with a non-existing repository' do
|
|
|
|
it 'returns 0' do
|
|
|
|
expect(repository).to receive(:root_ref).and_return(nil)
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository.commit_count).to eq(0)
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'with an existing repository' do
|
|
|
|
it 'returns the commit count' do
|
|
|
|
expect(repository.commit_count).to be_an(Integer)
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#commit_count_for_ref' do
|
2017-09-10 17:25:29 +05:30
|
|
|
let(:project) { create :project }
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
context 'with a non-existing repository' do
|
|
|
|
it 'returns 0' do
|
|
|
|
expect(project.repository.commit_count_for_ref('master')).to eq(0)
|
|
|
|
end
|
2016-08-24 12:49:21 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'with empty repository' do
|
|
|
|
it 'returns 0' do
|
|
|
|
project.create_repository
|
|
|
|
expect(project.repository.commit_count_for_ref('master')).to eq(0)
|
|
|
|
end
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
context 'when searching for the root ref' do
|
|
|
|
it 'returns the same count as #commit_count' do
|
|
|
|
expect(repository.commit_count_for_ref(repository.root_ref)).to eq(repository.commit_count)
|
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-06-02 11:05:42 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#refresh_method_caches' do
|
|
|
|
it 'refreshes the caches of the given types' do
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:expire_method_caches)
|
2019-02-15 15:39:39 +05:30
|
|
|
.with(%i(rendered_readme readme_path license_blob license_key license))
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository).to receive(:rendered_readme)
|
2019-02-15 15:39:39 +05:30
|
|
|
expect(repository).to receive(:readme_path)
|
2017-08-17 22:00:37 +05:30
|
|
|
expect(repository).to receive(:license_blob)
|
|
|
|
expect(repository).to receive(:license_key)
|
2017-09-10 17:25:29 +05:30
|
|
|
expect(repository).to receive(:license)
|
2017-08-17 22:00:37 +05:30
|
|
|
|
|
|
|
repository.refresh_method_caches(%i(readme license))
|
2016-09-29 09:46:39 +05:30
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
describe '#gitlab_ci_yml_for' do
|
|
|
|
before do
|
|
|
|
repository.create_file(User.last, '.gitlab-ci.yml', 'CONTENT', message: 'Add .gitlab-ci.yml', branch_name: 'master')
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there is a .gitlab-ci.yml at the commit' do
|
|
|
|
it 'returns the content' do
|
|
|
|
expect(repository.gitlab_ci_yml_for(repository.commit.sha)).to eq('CONTENT')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there is no .gitlab-ci.yml at the commit' do
|
|
|
|
it 'returns nil' do
|
|
|
|
expect(repository.gitlab_ci_yml_for(repository.commit.parent.sha)).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
describe '#route_map_for' do
|
|
|
|
before do
|
|
|
|
repository.create_file(User.last, '.gitlab/route-map.yml', 'CONTENT', message: 'Add .gitlab/route-map.yml', branch_name: 'master')
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there is a .gitlab/route-map.yml at the commit' do
|
|
|
|
it 'returns the content' do
|
|
|
|
expect(repository.route_map_for(repository.commit.sha)).to eq('CONTENT')
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'when there is no .gitlab/route-map.yml at the commit' do
|
|
|
|
it 'returns nil' do
|
|
|
|
expect(repository.route_map_for(repository.commit.parent.sha)).to be_nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-10-15 14:42:47 +05:30
|
|
|
def create_remote_branch(remote_name, branch_name, target)
|
2018-12-05 23:21:45 +05:30
|
|
|
rugged = rugged_repo(repository)
|
2018-10-15 14:42:47 +05:30
|
|
|
rugged.references.create("refs/remotes/#{remote_name}/#{branch_name}", target.id)
|
|
|
|
end
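# create_remote_branch writes a refs/remotes/<remote>/<branch> reference
# straight into the underlying repository via Rugged, simulating a
# remote-tracking branch without any network setup; #local_branches above uses
# it to prove that such refs are excluded from the local branch list.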
|
|
|
|
|
2019-05-03 19:53:19 +05:30
|
|
|
shared_examples '#ancestor?' do
|
2017-08-17 22:00:37 +05:30
|
|
|
let(:commit) { repository.commit }
|
|
|
|
let(:ancestor) { commit.parents.first }
|
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
it 'is an ancestor' do
|
2019-02-15 15:39:39 +05:30
|
|
|
expect(repository.ancestor?(ancestor.id, commit.id)).to eq(true)
|
|
|
|
end
|
2019-01-03 12:48:30 +05:30
|
|
|
|
2019-07-07 11:18:12 +05:30
|
|
|
it 'is not an ancestor' do
|
2019-02-15 15:39:39 +05:30
|
|
|
expect(repository.ancestor?(commit.id, ancestor.id)).to eq(false)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'returns false on nil-values' do
|
|
|
|
expect(repository.ancestor?(nil, commit.id)).to eq(false)
|
|
|
|
expect(repository.ancestor?(ancestor.id, nil)).to eq(false)
|
|
|
|
expect(repository.ancestor?(nil, nil)).to eq(false)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
|
2019-02-15 15:39:39 +05:30
|
|
|
it 'returns false for invalid commit IDs' do
|
|
|
|
expect(repository.ancestor?(commit.id, Gitlab::Git::BLANK_SHA)).to eq(false)
|
|
|
|
expect(repository.ancestor?(Gitlab::Git::BLANK_SHA, commit.id)).to eq(false)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-05-03 19:53:19 +05:30
|
|
|
describe '#ancestor? with Gitaly enabled' do
|
2020-01-01 13:55:28 +05:30
|
|
|
let(:commit) { repository.commit }
|
|
|
|
let(:ancestor) { commit.parents.first }
|
|
|
|
let(:cache_key) { "ancestor:#{ancestor.id}:#{commit.id}" }
|
|
|
|
|
|
|
|
it_behaves_like '#ancestor?'
|
|
|
|
|
|
|
|
context 'caching', :request_store, :clean_gitlab_redis_cache do
|
|
|
|
it 'only calls out to Gitaly once' do
|
|
|
|
expect(repository.raw_repository).to receive(:ancestor?).once
|
|
|
|
|
|
|
|
2.times { repository.ancestor?(commit.id, ancestor.id) }
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the value from the request store' do
|
|
|
|
repository.__send__(:request_store_cache).write(cache_key, "it's apparent")
|
|
|
|
|
|
|
|
expect(repository.ancestor?(ancestor.id, commit.id)).to eq("it's apparent")
|
|
|
|
end
|
|
|
|
|
|
|
|
it 'returns the value from the redis cache' do
|
|
|
|
expect(repository.__send__(:cache)).to receive(:fetch).with(cache_key).and_return("it's apparent")
|
|
|
|
|
|
|
|
expect(repository.ancestor?(ancestor.id, commit.id)).to eq("it's apparent")
|
|
|
|
end
|
|
|
|
end
|
2019-05-03 19:53:19 +05:30
|
|
|
end
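# The caching context above exercises the two cache layers behind #ancestor?:
# the per-request store answers repeat lookups within one request, and the
# Redis-backed cache (keyed as "ancestor:<ancestor>:<commit>") persists the
# answer across requests, so Gitaly is only consulted once per commit pair.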
|
|
|
|
|
|
|
|
describe '#ancestor? with Rugged enabled', :enable_rugged do
|
|
|
|
it 'calls out to the Rugged implementation' do
|
|
|
|
allow_any_instance_of(Rugged).to receive(:merge_base).with(repository.commit.id, Gitlab::Git::BLANK_SHA).and_call_original
|
|
|
|
|
|
|
|
repository.ancestor?(repository.commit.id, Gitlab::Git::BLANK_SHA)
|
|
|
|
end
|
|
|
|
|
|
|
|
it_behaves_like '#ancestor?'
|
|
|
|
end
|
|
|
|
|
2018-11-08 19:23:39 +05:30
|
|
|
describe '#archive_metadata' do
|
|
|
|
let(:ref) { 'master' }
|
|
|
|
let(:storage_path) { '/tmp' }
|
|
|
|
|
|
|
|
let(:prefix) { [project.path, ref].join('-') }
|
|
|
|
let(:filename) { prefix + '.tar.gz' }
|
|
|
|
|
|
|
|
subject(:result) { repository.archive_metadata(ref, storage_path, append_sha: false) }
|
|
|
|
|
|
|
|
context 'with hashed storage disabled' do
|
|
|
|
let(:project) { create(:project, :repository, :legacy_storage) }
|
|
|
|
|
|
|
|
it 'uses the project path to generate the filename' do
|
|
|
|
expect(result['ArchivePrefix']).to eq(prefix)
|
|
|
|
expect(File.basename(result['ArchivePath'])).to eq(filename)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
context 'with hashed storage enabled' do
|
|
|
|
it 'uses the project path to generate the filename' do
|
|
|
|
expect(result['ArchivePrefix']).to eq(prefix)
|
|
|
|
expect(File.basename(result['ArchivePath'])).to eq(filename)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe 'commit cache' do
|
2020-03-09 13:42:32 +05:30
|
|
|
let_it_be(:project) { create(:project, :repository) }
|
2018-03-17 18:26:18 +05:30
|
|
|
|
|
|
|
it 'caches based on SHA' do
|
|
|
|
# Gets the commit oid, and warms the cache
|
|
|
|
oid = project.commit.id
|
|
|
|
|
2019-09-30 21:07:59 +05:30
|
|
|
expect(Gitlab::Git::Commit).to receive(:find).once
|
2018-03-17 18:26:18 +05:30
|
|
|
|
2019-09-30 21:07:59 +05:30
|
|
|
2.times { project.commit_by(oid: oid) }
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
it 'caches nil values' do
|
|
|
|
expect(Gitlab::Git::Commit).to receive(:find).once
|
|
|
|
|
2019-09-30 21:07:59 +05:30
|
|
|
2.times { project.commit_by(oid: '1' * 40) }
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
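# The commit cache specs pin down that Project#commit_by memoizes lookups per
# OID, including misses (the 40-character dummy SHA), so repeated lookups of
# the same, possibly unknown, commit hit Gitlab::Git::Commit.find only once.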
|
|
|
|
|
|
|
|
describe '#raw_repository' do
|
|
|
|
subject { repository.raw_repository }
|
|
|
|
|
|
|
|
it 'returns a Gitlab::Git::Repository representation of the repository' do
|
|
|
|
expect(subject).to be_a(Gitlab::Git::Repository)
|
|
|
|
expect(subject.relative_path).to eq(project.disk_path + '.git')
|
|
|
|
expect(subject.gl_repository).to eq("project-#{project.id}")
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(subject.gl_project_path).to eq(project.full_path)
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
|
|
|
|
context 'with a wiki repository' do
|
|
|
|
let(:repository) { project.wiki.repository }
|
|
|
|
|
|
|
|
it 'creates a Gitlab::Git::Repository with the proper attributes' do
|
|
|
|
expect(subject).to be_a(Gitlab::Git::Repository)
|
|
|
|
expect(subject.relative_path).to eq(project.disk_path + '.wiki.git')
|
|
|
|
expect(subject.gl_repository).to eq("wiki-#{project.id}")
|
2019-03-02 22:35:43 +05:30
|
|
|
expect(subject.gl_project_path).to eq(project.full_path)
|
2017-08-17 22:00:37 +05:30
|
|
|
end
|
2018-03-17 18:26:18 +05:30
|
|
|
end
|
|
|
|
end
|
2017-08-17 22:00:37 +05:30
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
describe '#contributors' do
    let(:author_a) { build(:author, email: 'tiagonbotelho@hotmail.com', name: 'tiagonbotelho') }
    let(:author_b) { build(:author, email: 'gitlab@winniehell.de', name: 'Winnie') }
    let(:author_c) { build(:author, email: 'douwe@gitlab.com', name: 'Douwe Maan') }
    let(:stubbed_commits) do
      [build(:commit, author: author_a),
       build(:commit, author: author_a),
       build(:commit, author: author_b),
       build(:commit, author: author_c),
       build(:commit, author: author_c),
       build(:commit, author: author_c)]
    end
    let(:order_by) { nil }
    let(:sort) { nil }

    before do
      allow(repository).to receive(:commits).with(nil, limit: 2000, offset: 0, skip_merges: true).and_return(stubbed_commits)
    end

    subject { repository.contributors(order_by: order_by, sort: sort) }

    def expect_contributors(*contributors)
      expect(subject.map(&:email)).to eq(contributors.map(&:email))
    end

    it 'returns the array of Gitlab::Contributor for the repository' do
      expect_contributors(author_a, author_b, author_c)
    end

    context 'order_by email' do
      let(:order_by) { 'email' }

      context 'asc' do
        let(:sort) { 'asc' }

        it 'returns all the contributors ordered by email asc case insensitive' do
          expect_contributors(author_c, author_b, author_a)
        end
      end

      context 'desc' do
        let(:sort) { 'desc' }

        it 'returns all the contributors ordered by email desc case insensitive' do
          expect_contributors(author_a, author_b, author_c)
        end
      end
    end

    context 'order_by name' do
      let(:order_by) { 'name' }

      context 'asc' do
        let(:sort) { 'asc' }

        it 'returns all the contributors ordered by name asc case insensitive' do
          expect_contributors(author_c, author_a, author_b)
        end
      end

      context 'desc' do
        let(:sort) { 'desc' }

        it 'returns all the contributors ordered by name desc case insensitive' do
          expect_contributors(author_b, author_a, author_c)
        end
      end
    end

    context 'order_by commits' do
      let(:order_by) { 'commits' }

      context 'asc' do
        let(:sort) { 'asc' }

        it 'returns all the contributors ordered by commits asc' do
          expect_contributors(author_b, author_a, author_c)
        end
      end

      context 'desc' do
        let(:sort) { 'desc' }

        it 'returns all the contributors ordered by commits desc' do
          expect_contributors(author_c, author_a, author_b)
        end
      end
    end

    context 'invalid ordering' do
      let(:order_by) { 'unknown' }

      it 'returns the contributors unsorted' do
        expect_contributors(author_a, author_b, author_c)
      end
    end

    context 'invalid sorting' do
      let(:order_by) { 'name' }
      let(:sort) { 'unknown' }

      it 'returns the contributors unsorted' do
        expect_contributors(author_a, author_b, author_c)
      end
    end
  end

  describe '#merge_base' do
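    # Resolving the merge base of two refs should require only a single
    # Gitaly round trip.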
    let_it_be(:project) { create(:project, :repository) }
    subject(:repository) { project.repository }

    it 'only makes one gitaly call' do
      expect(Gitlab::GitalyClient).to receive(:call).once.and_call_original

      repository.merge_base('master', 'fix')
    end
  end

  describe '#create_if_not_exists' do
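    # #create_if_not_exists is expected to return true when it creates the
    # repository and nil when the repository already exists, even if the
    # existence cache is stale.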
    let(:project) { create(:project) }
    let(:repository) { project.repository }

    it 'creates the repository if it did not exist' do
      expect { repository.create_if_not_exists }.to change { repository.exists? }.from(false).to(true)
    end

    it 'returns true' do
      expect(repository.create_if_not_exists).to eq(true)
    end

    it 'calls out to the repository client to create a repo' do
      expect(repository.raw.gitaly_repository_client).to receive(:create_repository)

      repository.create_if_not_exists
    end

    context 'when the repository already exists' do
      let(:project) { create(:project, :repository) }

      it 'does nothing if the repository already existed' do
        expect(repository.raw.gitaly_repository_client).not_to receive(:create_repository)

        repository.create_if_not_exists
      end

      it 'returns nil' do
        expect(repository.create_if_not_exists).to be_nil
      end
    end

    context 'when the repository exists but the cache is not up to date' do
      let(:project) { create(:project, :repository) }

      it 'does not raise errors' do
        allow(repository).to receive(:exists?).and_return(false)
        expect(repository.raw).to receive(:create_repository).and_call_original

        expect { repository.create_if_not_exists }.not_to raise_error
      end

      it 'returns nil' do
        expect(repository.create_if_not_exists).to be_nil
      end
    end
  end

  describe '#create_from_bundle' do
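    # #create_from_bundle delegates to the raw repository and should only run
    # the after_create hooks when the bundle import succeeds.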
    let(:project) { create(:project) }
    let(:repository) { project.repository }
    let(:valid_bundle_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
    let(:raw_repository) { repository.raw }

    before do
      allow(raw_repository).to receive(:create_from_bundle).and_return({})
    end

    after do
      FileUtils.rm_rf(valid_bundle_path)
    end

    it 'calls out to the raw_repository to create a repo from bundle' do
      expect(raw_repository).to receive(:create_from_bundle)

      repository.create_from_bundle(valid_bundle_path)
    end

    it 'calls after_create' do
      expect(repository).to receive(:after_create)

      repository.create_from_bundle(valid_bundle_path)
    end

    context 'when an exception is raised' do
      before do
        allow(raw_repository).to receive(:create_from_bundle).and_raise(::Gitlab::Git::BundleFile::InvalidBundleError)
      end

      it 'does not call after_create' do
        expect(repository).not_to receive(:after_create)

        expect { repository.create_from_bundle(valid_bundle_path) }.to raise_error(::Gitlab::Git::BundleFile::InvalidBundleError)
      end
    end
  end

  describe "#blobs_metadata" do
    let_it_be(:project) { create(:project, :repository) }
    let(:repository) { project.repository }

    def expect_metadata_blob(thing)
      expect(thing).to be_a(Blob)
      expect(thing.data).to be_empty
    end

    it "returns blob metadata in batch for HEAD" do
      result = repository.blobs_metadata(["bar/branch-test.txt", "README.md", "does/not/exist"])

      expect_metadata_blob(result.first)
      expect_metadata_blob(result.second)
      expect(result.size).to eq(2)
    end

    it "returns blob metadata for a specified ref" do
      result = repository.blobs_metadata(["files/ruby/feature.rb"], "feature")

      expect_metadata_blob(result.first)
    end

    it "performs a single gitaly call", :request_store do
      expect { repository.blobs_metadata(["bar/branch-test.txt", "readme.txt", "does/not/exist"]) }
        .to change { Gitlab::GitalyClient.get_request_count }.by(1)
    end
  end

  describe '#submodule_links' do
    it 'returns an instance of Gitlab::SubmoduleLinks' do
      expect(repository.submodule_links).to be_a(Gitlab::SubmoduleLinks)
    end
  end
end