2015-04-26 12:48:37 +05:30
|
|
|
module Gitlab
|
2018-03-17 18:26:18 +05:30
|
|
|
module LegacyGithubImport
|
2015-04-26 12:48:37 +05:30
|
|
|
class Importer
|
2016-01-19 16:12:03 +05:30
|
|
|
include Gitlab::ShellAdapter
|
|
|
|
|
2018-03-17 18:26:18 +05:30
|
|
|
# Branch/tag refspec mapping used when mirroring the remote repository;
# delegated to the non-legacy GitHub importer so both stay in sync.
def self.refmap
  Gitlab::GithubImport.refmap
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
# Accumulated per-item import errors, plus the target project and the
# remote repo identifier/URL captured at construction time.
attr_reader :errors, :project, :repo, :repo_url
|
2015-04-26 12:48:37 +05:30
|
|
|
|
|
|
|
# Captures the target project plus the remote repo location it is being
# imported from, and initializes the error list and the label id cache
# (filled in later by cache_labels!).
def initialize(project)
  @project  = project
  @repo     = project.import_source
  @repo_url = project.import_url

  @errors = []
  @labels = {}
end
|
|
|
|
|
|
|
|
# Lazily builds and memoizes the API client from the stored import
# credentials.
#
# Raises Projects::ImportService::Error when the project has no usable
# import data. For Gitea imports the API host and version are derived
# from the import URL, since Gitea aims to be GitHub API compliant but
# lives at an arbitrary host.
def client
  return @client if defined?(@client)

  unless credentials
    raise Projects::ImportService::Error,
      "Unable to find project import data credentials for project ID: #{@project.id}"
  end

  options = {}

  # Gitea plan to be GitHub compliant
  if project.gitea_import?
    uri = URI.parse(project.import_url)
    # Strip the trailing "<owner>/<repo>.git" segment to get the API root.
    api_root = "#{uri.scheme}://#{uri.host}:#{uri.port}#{uri.path}".sub(%r{/?[\w-]+/[\w-]+\.git\z}, '')
    options = { host: api_root, api_version: 'v1' }
  end

  @client = Client.new(credentials[:user], options)
end
|
|
|
|
|
|
|
|
# Runs the full import pipeline, accumulating per-item failures, and
# returns true.
#
# The ordering of importing is important here due to the way GitHub
# structures their data:
# 1. Labels are required by other items while not having a dependency on
#    anything else, so they need to be first.
# 2. Pull requests must come before issues. Every pull request is also an
#    issue but not all issues are pull requests. Only the issue entity has
#    labels defined in GitHub. GitLab doesn't structure data like this, so
#    we need to make sure that we've created the MRs before we attempt to
#    add the labels defined in the GitHub issue for the related, already
#    imported, pull request.
def execute
  import_labels
  import_milestones
  import_pull_requests
  import_issues
  import_comments(:issues)
  import_comments(:pull_requests)
  import_wiki

  # Gitea doesn't have a Release API yet
  # See https://github.com/go-gitea/gitea/issues/330
  import_releases unless project.gitea_import?

  handle_errors

  true
end
|
|
|
|
|
|
|
|
private
|
|
|
|
|
2016-06-02 11:05:42 +05:30
|
|
|
# Memoized import credentials; nil when the project carries no import
# data. `defined?` is used (rather than `||=`) so a nil result is also
# cached and not re-computed.
def credentials
  return @credentials if defined?(@credentials)

  import_data = project.import_data
  @credentials = import_data ? import_data.credentials : nil
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
# Persists any accumulated errors on the project as JSON so they can be
# surfaced to the user; does nothing when the import completed cleanly.
def handle_errors
  return if errors.empty?

  error_payload = {
    message: 'The remote data could not be fully imported.',
    errors: errors
  }

  project.update_column(:import_error, error_payload.to_json)
end
|
|
|
|
|
2016-06-02 11:05:42 +05:30
|
|
|
# Imports every label from the remote repository, recording failures,
# then caches the resulting label ids for later use by apply_labels.
def import_labels
  fetch_resources(:labels, repo, per_page: 100) do |labels|
    labels.each do |raw|
      begin
        label = LabelFormatter.new(project, raw)
        label.create!
      rescue => e
        errors << { type: :label, url: Gitlab::UrlSanitizer.sanitize(label.url), errors: e.message }
      end
    end
  end

  cache_labels!
end
|
|
|
|
|
|
|
|
# Imports all milestones (open and closed) from the remote repository,
# recording any per-item failure instead of aborting the run.
def import_milestones
  fetch_resources(:milestones, repo, state: :all, per_page: 100) do |milestones|
    milestones.each do |raw|
      begin
        milestone = MilestoneFormatter.new(project, raw)
        milestone.create!
      rescue => e
        errors << { type: :milestone, url: Gitlab::UrlSanitizer.sanitize(milestone.url), errors: e.message }
      end
    end
  end
end
|
|
|
|
|
2016-01-14 18:37:52 +05:30
|
|
|
# Imports issues in creation order. An "issue" that is really a pull
# request is looked up as its already-imported merge request (PRs are
# imported before issues) so the GitHub labels on the issue entity can
# still be applied to it.
def import_issues
  fetch_resources(:issues, repo, state: :all, sort: :created, direction: :asc, per_page: 100) do |issues|
    issues.each do |raw|
      gh_issue = IssueFormatter.new(project, raw, client)

      begin
        issuable =
          if gh_issue.pull_request?
            # The MR was created during import_pull_requests; find it by iid.
            MergeRequest.find_by(target_project_id: project.id, iid: gh_issue.number)
          else
            gh_issue.create!
          end

        apply_labels(issuable, raw)
      rescue => e
        errors << { type: :issue, url: Gitlab::UrlSanitizer.sanitize(gh_issue.url), errors: e.message }
      end
    end
  end
end
|
|
|
|
|
|
|
|
# Imports every pull request, recreating any source/target branch that no
# longer exists so the merge request can be built, and removing those
# temporary branches again afterwards (see clean_up_restored_branches).
def import_pull_requests
  fetch_resources(:pull_requests, repo, state: :all, sort: :created, direction: :asc, per_page: 100) do |pull_requests|
    pull_requests.each do |raw|
      gh_pull_request = PullRequestFormatter.new(project, raw, client)

      next unless gh_pull_request.valid?

      begin
        restore_source_branch(gh_pull_request) unless gh_pull_request.source_branch_exists?
        restore_target_branch(gh_pull_request) unless gh_pull_request.target_branch_exists?

        merge_request = gh_pull_request.create!

        # Gitea doesn't return PR in the Issue API endpoint, so labels must be assigned at this stage
        apply_labels(merge_request, raw) if project.gitea_import?
      rescue => e
        errors << { type: :pull_request, url: Gitlab::UrlSanitizer.sanitize(gh_pull_request.url), errors: e.message }
      ensure
        # Always drop any branches we restored, even when creation failed.
        clean_up_restored_branches(gh_pull_request)
      end
    end
  end

  # Branches were created/deleted behind the repository's back; refresh
  # its cached branch state.
  project.repository.after_remove_branch
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
# Recreates the PR's source branch at its recorded head SHA so the merge
# request diff can be computed.
def restore_source_branch(pull_request)
  project.repository.create_branch(pull_request.source_branch_name, pull_request.source_branch_sha)
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
# Recreates the PR's target branch at its recorded head SHA so the merge
# request diff can be computed.
def restore_target_branch(pull_request)
  project.repository.create_branch(pull_request.target_branch_name, pull_request.target_branch_sha)
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
# Deletes a temporary branch, recording (rather than raising) a failure
# so the overall import can continue.
def remove_branch(name)
  project.repository.delete_branch(name)
rescue Gitlab::Git::Repository::DeleteBranchFailed
  errors << { type: :remove_branch, name: name }
end
|
|
|
|
|
2016-09-13 17:45:13 +05:30
|
|
|
# Removes branches that were recreated purely for the import once the PR
# is closed/merged; open PRs keep their branches. Branches that genuinely
# exist (were not restored by us) are left alone.
def clean_up_restored_branches(pull_request)
  return if pull_request.opened?

  remove_branch(pull_request.source_branch_name) unless pull_request.source_branch_exists?
  remove_branch(pull_request.target_branch_name) unless pull_request.target_branch_exists?
end
|
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
# Assigns to the given issue/merge request the ids of the already-imported
# labels (see cache_labels!) whose names appear on the raw resource.
# Label names with no cached id are silently skipped.
def apply_labels(issuable, raw)
  return unless raw.labels.count > 0

  label_ids = raw.labels.each_with_object([]) do |attrs, ids|
    id = @labels[attrs.name]
    ids << id if id
  end

  issuable.update_attribute(:label_ids, label_ids)
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
# Imports comments for the given issuable type (:issues or :pull_requests).
#
# Two notes here:
# 1. We don't have a distinctive attribute for comments (unlike issues
#    iid), so we fetch the last inserted note, compare it against every
#    comment in the current imported page until we find a match, and
#    that's where we start importing.
# 2. GH returns comments for _both_ issues and PRs through the
#    issues_comments API, while pull_requests_comments returns only
#    comments on diffs, so the last note is selected not by noteable_type
#    but by line_code.
def import_comments(issuable_type)
  resource_type = "#{issuable_type}_comments".to_sym

  line_code_is = issuable_type == :pull_requests ? 'NOT NULL' : 'NULL'
  last_note = project.notes.where("line_code IS #{line_code_is}").last

  fetch_resources(resource_type, repo, per_page: 100) do |comments|
    if last_note
      # Only the first fetched page can contain already-imported comments.
      discard_inserted_comments(comments, last_note)
      last_note = nil
    end

    create_comments(comments)
  end
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
# Creates notes for a page of raw comments, attaching each one to the
# issue or merge request its HTML URL points at. Comments whose parent
# issuable cannot be found are skipped; other failures are recorded.
def create_comments(comments)
  ActiveRecord::Base.no_touching do
    comments.each do |raw|
      begin
        comment = CommentFormatter.new(project, raw, client)

        # GH does not return info about comment's parent, so we guess it by checking its URL!
        *_, parent, iid = URI(raw.html_url).path.split('/')
        issuable =
          if parent == 'issues'
            Issue.find_by(project_id: project.id, iid: iid)
          else
            MergeRequest.find_by(target_project_id: project.id, iid: iid)
          end

        next unless issuable

        issuable.notes.create!(comment.attributes)
      rescue => e
        errors << { type: :comment, url: Gitlab::UrlSanitizer.sanitize(raw.url), errors: e.message }
      end
    end
  end
end
|
2016-01-19 16:12:03 +05:30
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
# Mutates +comments+ in place, dropping everything up to and including the
# entry that matches +last_note+, so already-imported comments are not
# inserted twice when resuming a page.
def discard_inserted_comments(comments, last_note)
  cached_note_attrs = nil

  cut_off_index = comments.find_index do |raw|
    attrs = CommentFormatter.new(project, raw).attributes
    # Compare only on the attribute keys the formatter produces.
    cached_note_attrs ||= last_note.slice(*attrs.keys)

    attrs.with_indifferent_access == cached_note_attrs
  end

  # No matching resource in the collection, which means we got halted right
  # on the end of the last page, so all good.
  return unless cut_off_index

  # Otherwise, remove the resources we've already inserted.
  comments.shift(cut_off_index + 1)
end
|
|
|
|
|
2016-01-19 16:12:03 +05:30
|
|
|
# Imports the remote wiki repository unless the project already has one.
#
# GitHub reports "repository not exported" when the wiki feature is
# enabled but there are no pages yet; that case is safe to ignore, every
# other shell error is recorded.
def import_wiki
  return if project.wiki.repository_exists?

  wiki = WikiFormatter.new(project)
  gitlab_shell.import_repository(project.repository_storage_path, wiki.disk_path, wiki.import_url)
rescue Gitlab::Shell::Error => e
  errors << { type: :wiki, errors: e.message } if e.message !~ /repository not exported/
end
|
2016-09-29 09:46:39 +05:30
|
|
|
|
|
|
|
# Imports releases from the remote repository, skipping entries the
# formatter deems invalid and recording any creation failures.
def import_releases
  fetch_resources(:releases, repo, per_page: 100) do |releases|
    releases.each do |raw|
      begin
        release = ReleaseFormatter.new(project, raw)
        release.create! if release.valid?
      rescue => e
        errors << { type: :release, url: Gitlab::UrlSanitizer.sanitize(release.url), errors: e.message }
      end
    end
  end
end
|
2016-11-03 12:29:30 +05:30
|
|
|
|
2017-08-17 22:00:37 +05:30
|
|
|
# Builds the title => id lookup used by apply_labels, selecting only the
# two columns it needs from the project's labels.
def cache_labels!
  project.labels.select(:id, :title).find_each do |label|
    @labels[label.title] = label.id
  end
end
|
|
|
|
|
2016-11-03 12:29:30 +05:30
|
|
|
# Fetches paginated resources of the given type via the API client,
# yielding each page to the caller and tracking progress in the cache so
# an interrupted import can resume where it left off.
#
# NOTE(review): assumes the trailing element of +opts+ is the options
# hash — all callers in this file pass one.
def fetch_resources(resource_type, *opts)
  return if imported?(resource_type)

  # Resume from the page recorded for this resource type.
  opts.last[:page] = current_page(resource_type)

  client.public_send(resource_type, *opts) do |resources| # rubocop:disable GitlabSecurity/PublicSend
    yield resources
    increment_page(resource_type)
  end

  imported!(resource_type)
end
|
|
|
|
|
|
|
|
# True when this resource type has already been fully imported (flag set
# by imported!).
def imported?(resource_type)
  Rails.cache.read("#{cache_key_prefix}:#{resource_type}:imported")
end
|
|
|
|
|
|
|
|
# Marks a resource type as fully imported, expiring after a day.
#
# FIX: the original passed `ex: 1.day`, which is a Redis SET option, not
# an ActiveSupport cache option — Rails.cache.write ignored it, so the
# flag never expired. `expires_in:` is the documented option.
def imported!(resource_type)
  Rails.cache.write("#{cache_key_prefix}:#{resource_type}:imported", true, expires_in: 1.day)
end
|
|
|
|
|
|
|
|
# Advances and persists the current page number for a resource type,
# returning the new page.
def increment_page(resource_type)
  key = "#{cache_key_prefix}:#{resource_type}:current-page"

  # Rails.cache.increment calls INCRBY directly on the value stored under
  # the key, which is a serialized ActiveSupport::Cache::Entry, so Redis
  # would return an error — hence this read/modify/write work-around.
  next_page = Rails.cache.read(key) + 1
  Rails.cache.write(key, next_page)

  next_page
end
|
|
|
|
|
|
|
|
# Returns the page to fetch next for a resource type, defaulting to the
# first page and expiring after a day.
#
# FIX: the original passed `ex: 1.day` — a Redis option that
# Rails.cache.fetch does not understand — so the key never expired.
# `expires_in:` is the supported ActiveSupport cache option.
def current_page(resource_type)
  Rails.cache.fetch("#{cache_key_prefix}:#{resource_type}:current-page", expires_in: 1.day) { 1 }
end
|
|
|
|
|
|
|
|
# Memoized namespace for every cache key used by this import run, scoped
# to the project id.
def cache_key_prefix
  @cache_key_prefix ||= "github-import:#{project.id}"
end
|
2015-04-26 12:48:37 +05:30
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|