Unverified Commit 73e2cf53 authored by Nick Thomas

Remove the GitlabProjects implementation

parent f8f31489
@@ -77,41 +77,6 @@ To install gitlab-shell you also need a Go compiler version 1.8 or newer. https:
./bin/check
## Repos
Add repo:
    ./bin/gitlab-projects add-project gitlab/gitlab-ci.git
Remove repo:
    ./bin/gitlab-projects rm-project gitlab/gitlab-ci.git
List repos:
    ./bin/gitlab-projects list-projects
Import repo:
    # Default timeout is 2 minutes
    ./bin/gitlab-projects import-project randx/six.git https://github.com/randx/six.git
    # Override timeout in seconds
    ./bin/gitlab-projects import-project randx/six.git https://github.com/randx/six.git 90
Fork repo:
    ./bin/gitlab-projects fork-project gitlab/gitlab-ci.git randx
Create tag (lightweight & annotated):
    ./bin/gitlab-projects create-tag gitlab/gitlab-ci.git v3.0.0 3-0-stable
    ./bin/gitlab-projects create-tag gitlab/gitlab-ci.git v3.0.0 3-0-stable 'annotated message goes here'
Gc repo:
    ./bin/gitlab-projects gc gitlab/gitlab-ci.git
## Keys
Add key:
@@ -5,16 +5,38 @@
# This script is used when restoring a GitLab backup.
require_relative '../lib/gitlab_init'
require File.join(ROOT_PATH, 'lib', 'gitlab_projects')
require File.join(ROOT_PATH, 'lib', 'gitlab_metrics')
def create_hooks(path)
global_hooks_directory = File.join(ROOT_PATH, 'hooks')
local_hooks_directory = File.join(path, 'hooks')
real_local_hooks_directory = :not_found
begin
real_local_hooks_directory = File.realpath(local_hooks_directory)
rescue Errno::ENOENT
# real_local_hooks_directory == :not_found
end
if real_local_hooks_directory != File.realpath(global_hooks_directory)
if File.exist?(local_hooks_directory)
$logger.info "Moving existing hooks directory and symlinking global hooks directory for #{path}."
FileUtils.mv(local_hooks_directory, "#{local_hooks_directory}.old.#{Time.now.to_i}")
end
FileUtils.ln_sf(global_hooks_directory, local_hooks_directory)
else
$logger.info "Hooks already exist for #{path}."
true
end
end
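# Each argument to this script is a repository storage root; hooks are
# re-created for every bare repository (*.git) found beneath it.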
repository_storage_paths = ARGV
repository_storage_paths.each do |repo_path|
Dir["#{repo_path.chomp('/')}/**/*.git"].each do |repo|
begin
GitlabMetrics.measure('command-create-hooks') do
GitlabProjects.create_hooks(repo)
create_hooks(repo)
end
rescue Errno::ENOENT
# The user must have deleted their repository. Ignore.
#!/usr/bin/env ruby
require_relative '../lib/gitlab_init'
#
# GitLab Projects shell. Add/remove projects from /home/git/repositories
#
# Ex.
# /bin/gitlab-projects add-project gitlab/gitlab-ci.git
#
# /bin/gitlab-projects rm-project gitlab/gitlab-ci.git
#
# /bin/gitlab-projects list-projects
#
# /bin/gitlab-projects mv-project gitlab/gitlab-ci.git randx/fork.git
#
# /bin/gitlab-projects fork-project gitlab/gitlab-ci.git randx
#
# /bin/gitlab-projects import-project randx/six.git https://github.com/randx/six.git
#
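# In this version the second positional argument is the repository storage
# path (it is shifted into @repos_path by GitlabProjects#initialize); an
# illustrative invocation:
#
#   /bin/gitlab-projects add-project /home/git/repositories gitlab/gitlab-ci.git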
require File.join(ROOT_PATH, 'lib', 'gitlab_projects')
# Return non-zero if command execution was not successful
if GitlabProjects.new.exec
exit 0
else
exit 1
end
require 'fileutils'
require 'tempfile'
require 'timeout'
require 'open3'
require_relative 'gitlab_config'
require_relative 'gitlab_logger'
require_relative 'gitlab_metrics'
class GitlabProjects
GLOBAL_HOOKS_DIRECTORY = File.join(ROOT_PATH, 'hooks')
# Project name is a directory name for the repository, with .git at the end
# It may be namespaced or not. Like repo.git or gitlab/repo.git
attr_reader :project_name
# Absolute path to the directory where repositories are stored
# By default it is /home/git/repositories
attr_reader :repos_path
# Full path is an absolute path to the repository
# Ex /home/git/repositories/test.git
attr_reader :full_path
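# Symlinks the global hooks directory into a repository. Illustrative effect
# (paths here are examples) of create_hooks('/home/git/repositories/gitlab/gitlab-ci.git'):
#
#   gitlab-ci.git/hooks -> <ROOT_PATH>/hooks
#   gitlab-ci.git/hooks.old.<timestamp>   (previous hooks directory, if one existed)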
def self.create_hooks(path)
local_hooks_directory = File.join(path, 'hooks')
real_local_hooks_directory = :not_found
begin
real_local_hooks_directory = File.realpath(local_hooks_directory)
rescue Errno::ENOENT
# real_local_hooks_directory == :not_found
end
if real_local_hooks_directory != File.realpath(GLOBAL_HOOKS_DIRECTORY)
if File.exist?(local_hooks_directory)
$logger.info "Moving existing hooks directory and symlinking global hooks directory for #{path}."
FileUtils.mv(local_hooks_directory, "#{local_hooks_directory}.old.#{Time.now.to_i}")
end
FileUtils.ln_sf(GLOBAL_HOOKS_DIRECTORY, local_hooks_directory)
else
$logger.info "Hooks already exist for #{path}."
true
end
end
def initialize
@command = ARGV.shift
@repos_path = ARGV.shift
@project_name = ARGV.shift
@full_path = File.join(@repos_path, @project_name) unless @project_name.nil?
end
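# Illustrative ARGV mapping (mirrors the :initialize spec further down):
#
#   ARGV: add-project /home/git/repositories gitlab/gitlab-ci.git
#   # @command == 'add-project', @repos_path == '/home/git/repositories',
#   # @project_name == 'gitlab/gitlab-ci.git',
#   # @full_path == '/home/git/repositories/gitlab/gitlab-ci.git'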
def exec
GitlabMetrics.measure("command-#{@command}") do
case @command
when 'create-tag';
create_tag
when 'add-project';
add_project
when 'list-projects';
puts list_projects
when 'rm-project';
rm_project
when 'mv-project';
mv_project
when 'mv-storage';
mv_storage
when 'import-project';
import_project
when 'fork-project';
fork_project
when 'fork-repository';
fork_repository
when 'fetch-remote';
fetch_remote
when 'push-branches';
push_branches
when 'delete-remote-branches';
delete_remote_branches
when 'list-remote-tags';
list_remote_tags
when 'gc';
gc
else
$logger.warn "Attempt to execute invalid gitlab-projects command #{@command.inspect}."
puts 'not allowed'
false
end
end
end
protected
def list_remote_tags
remote_name = ARGV.shift
tag_list, exit_code, error = nil
cmd = %W(git --git-dir=#{full_path} ls-remote --tags #{remote_name})
Open3.popen3(*cmd) do |stdin, stdout, stderr, wait_thr|
tag_list = stdout.read
error = stderr.read
exit_code = wait_thr.value.exitstatus
end
if exit_code.zero?
puts tag_list
true
else
puts error
false
end
end
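# On success the raw `git ls-remote --tags` output is printed; each line has
# the form `<object sha>\trefs/tags/<tag name>` (illustrative), with an extra
# `refs/tags/<tag name>^{}` line for annotated tags.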
def push_branches
remote_name = ARGV.shift
# timeout for push
timeout = (ARGV.shift || 120).to_i
# push with --force?
forced = ARGV.delete('--force') if ARGV.include?('--force')
$logger.info "Pushing branches from #{full_path} to remote #{remote_name}: #{ARGV}"
cmd = %W(git --git-dir=#{full_path} push)
cmd << forced if forced
cmd += %W(-- #{remote_name}).concat(ARGV)
pid = Process.spawn(*cmd)
begin
Timeout.timeout(timeout) do
Process.wait(pid)
end
$?.exitstatus.zero?
rescue => exception
$logger.error "Pushing branches to remote #{remote_name} failed due to: #{exception.message}."
Process.kill('KILL', pid)
Process.wait
false
end
end
def delete_remote_branches
remote_name = ARGV.shift
branches = ARGV.map { |branch_name| ":#{branch_name}" }
$logger.info "Pushing deleted branches from #{full_path} to remote #{remote_name}: #{ARGV}"
cmd = %W(git --git-dir=#{full_path} push -- #{remote_name}).concat(branches)
pid = Process.spawn(*cmd)
begin
Process.wait(pid)
$?.exitstatus.zero?
rescue => exception
$logger.error "Pushing deleted branches to remote #{remote_name} failed due to: #{exception.message}"
Process.kill('KILL', pid)
Process.wait
false
end
end
def create_tag
tag_name = ARGV.shift
ref = ARGV.shift || "HEAD"
cmd = %W(git --git-dir=#{full_path} tag)
if ARGV.size > 0
msg = ARGV.shift
cmd += %W(-a -m #{msg})
end
cmd += %W(-- #{tag_name} #{ref})
system(*cmd)
end
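# The two command shapes built above (paths and names are examples):
#
#   git --git-dir=/home/git/repositories/gitlab/gitlab-ci.git tag -- v3.0.0 3-0-stable
#   git --git-dir=/home/git/repositories/gitlab/gitlab-ci.git tag -a -m 'message' -- v3.0.0 3-0-stable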
def add_project
$logger.info "Adding project #{@project_name} at <#{full_path}>."
FileUtils.mkdir_p(full_path, mode: 0770)
cmd = %W(git --git-dir=#{full_path} init --bare)
system(*cmd) && self.class.create_hooks(full_path)
end
def list_projects
$logger.info 'Listing projects'
Dir.chdir(repos_path) do
next Dir.glob('**/*.git')
end
end
def rm_project
$logger.info "Removing project #{@project_name} from <#{full_path}>."
FileUtils.rm_rf(full_path)
end
def mask_password_in_url(url)
result = URI(url)
result.password = "*****" unless result.password.nil?
result.user = "*****" unless result.user.nil? # also masked because an OAuth access_token may be passed as the user
result
rescue
url
end
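# Illustrative example (URL is made up):
#
#   mask_password_in_url('https://user:secret@example.com/group/repo.git').to_s
#   # => "https://*****:*****@example.com/group/repo.git"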
def fetch_remote
@name = ARGV.shift
# timeout for fetch
timeout = (ARGV.shift || 120).to_i
# fetch with --force ?
forced = ARGV.include?('--force')
# fetch with --tags or --no-tags
tags_option = ARGV.include?('--no-tags') ? '--no-tags' : '--tags'
$logger.info "Fetching remote #{@name} for project #{@project_name}."
cmd = %W(git --git-dir=#{full_path} fetch #{@name} --prune --quiet)
cmd << '--force' if forced
cmd << tags_option
setup_ssh_auth do |env|
pid = Process.spawn(env, *cmd)
begin
_, status = Timeout.timeout(timeout) do
Process.wait2(pid)
end
status.success?
rescue => exception
$logger.error "Fetching remote #{@name} for project #{@project_name} failed due to: #{exception.message}."
Process.kill('KILL', pid)
Process.wait
false
end
end
end
def remove_origin_in_repo
cmd = %W(git --git-dir=#{full_path} remote rm origin)
pid = Process.spawn(*cmd)
Process.wait(pid)
end
# Import project via git clone --bare
# URL must be publicly cloneable
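# e.g. (illustrative invocation, matching the import-project specs below):
#   ./bin/gitlab-projects import-project /home/git/repositories randx/six.git https://github.com/randx/six.git 90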
def import_project
# Skip import if repo already exists
return false if File.exists?(full_path)
@source = ARGV.shift
masked_source = mask_password_in_url(@source)
# timeout for clone
timeout = (ARGV.shift || 120).to_i
$logger.info "Importing project #{@project_name} from <#{masked_source}> to <#{full_path}>."
cmd = %W(git clone --bare -- #{@source} #{full_path})
pid = Process.spawn(*cmd)
begin
Timeout.timeout(timeout) do
Process.wait(pid)
end
return false unless $?.exitstatus.zero?
rescue Timeout::Error
$logger.error "Importing project #{@project_name} from <#{masked_source}> failed due to timeout."
Process.kill('KILL', pid)
Process.wait
FileUtils.rm_rf(full_path)
return false
end
self.class.create_hooks(full_path)
# The project was imported successfully.
# Remove the origin URL since it may contain a password.
remove_origin_in_repo
true
end
# Move repository from one directory to another
#
# Ex.
# gitlab.git -> gitlabhq.git
# gitlab/gitlab-ci.git -> randx/six.git
#
# Won't work if the target namespace directory does not exist
#
def mv_project
new_path = ARGV.shift
unless new_path
$logger.error "mv-project failed: no destination path provided."
return false
end
new_full_path = File.join(repos_path, new_path)
# verify that the source repo exists
unless File.exists?(full_path)
$logger.error "mv-project failed: source path <#{full_path}> does not exist."
return false
end
# ...and that the target repo does not exist
if File.exists?(new_full_path)
$logger.error "mv-project failed: destination path <#{new_full_path}> already exists."
return false
end
$logger.info "Moving project #{@project_name} from <#{full_path}> to <#{new_full_path}>."
FileUtils.mv(full_path, new_full_path)
end
# Move repository from one storage path to another
#
# Won't work if the target namespace directory does not exist in the new storage path
#
def mv_storage
new_storage = ARGV.shift
unless new_storage
$logger.error "mv-storage failed: no destination storage path provided."
return false
end
new_full_path = File.join(new_storage, project_name)
# verify that the source repo exists
unless File.exists?(full_path)
$logger.error "mv-storage failed: source path <#{full_path}> does not exist."
return false
end
# Make sure the destination directory exists
FileUtils.mkdir_p(new_full_path)
# Make sure the source path ends with a slash so that rsync copies the
# contents of the directory, as opposed to copying the directory by name
source_path = File.join(full_path, '')
if wait_for_pushes
$logger.info "Syncing project #{@project_name} from <#{full_path}> to <#{new_full_path}>."
# Set a low IO priority with ionice to not choke the server on moves
if rsync(source_path, new_full_path, 'ionice -c2 -n7 rsync')
true
else
# If the command fails with `ionice` (maybe because we're on an OS X
# development machine), try again without `ionice`.
rsync(source_path, new_full_path)
end
else
$logger.error "mv-storage failed: source path <#{full_path}> is waiting for pushes to finish."
false
end
end
def fork_repository
from_path = full_path
new_repos_path = ARGV.shift
new_full_path = ARGV.shift
unless new_repos_path && new_full_path
$logger.error "fork-repository failed: no destination repository path provided."
return false
end
to_path = File.join(new_repos_path, new_full_path)
# The repository cannot already exist
if File.exists?(to_path)
$logger.error "fork-repository failed: destination repository <#{to_path}> already exists."
return false
end
# Ensure the namespace / hashed storage directory exists
FileUtils.mkdir_p(File.dirname(to_path), mode: 0770)
$logger.info "Forking repository from <#{from_path}> to <#{to_path}>."
cmd = %W(git clone --bare --no-local -- #{from_path} #{to_path})
system(*cmd) && self.class.create_hooks(to_path)
end
# DEPRECATED in favour of fork_repository, which takes a source and destination
# repository path and so can work with hashed storage. Remove in v6.0.0
def fork_project
destination_repos_path = ARGV.shift
unless destination_repos_path
$logger.error "fork-project failed: no destination repository path provided."
return false
end
new_namespace = ARGV.shift
# destination namespace must be provided
unless new_namespace
$logger.error "fork-project failed: no destination namespace provided."
return false
end
# destination namespace must exist
namespaced_path = File.join(destination_repos_path, new_namespace)
unless File.exists?(namespaced_path)
$logger.error "fork-project failed: destination namespace <#{namespaced_path}> does not exist."
return false
end
# a project of the same name cannot already be within the destination namespace
full_destination_path = File.join(namespaced_path, project_name.split('/')[-1])
if File.exists?(full_destination_path)
$logger.error "fork-project failed: destination repository <#{full_destination_path}> already exists."
return false
end
$logger.info "Forking project from <#{full_path}> to <#{full_destination_path}>."
cmd = %W(git clone --bare --no-local -- #{full_path} #{full_destination_path})
system(*cmd) && self.class.create_hooks(full_destination_path)
end
def gc
$logger.info "Running git gc for <#{full_path}>."
unless File.exists?(full_path)
$logger.error "gc failed: destination path <#{full_path}> does not exist."
return false
end
cmd = %W(git --git-dir=#{full_path} gc)
system(*cmd)
end
def wait_for_pushes
# Try for 30 seconds, polling every 10
3.times do
return true if gitlab_reference_counter.value == 0
sleep 10
end
false
end
# Builds a small shell script that can be used to execute SSH with a set of
# custom options.
#
# Options are expanded as `'-oKey="Value"'`, so SSH will correctly interpret
# paths with spaces in them. We trust the user not to embed single or double
# quotes in the key or value.
def custom_ssh_script(options = {})
args = options.map { |k, v| "'-o#{k}=\"#{v}\"'" }.join(' ')
[
"#!/bin/sh",
"exec ssh #{args} \"$@\""
].join("\n")
end
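# Illustrative output for custom_ssh_script('IdentityFile' => '/tmp/key',
# 'IdentitiesOnly' => 'yes') -- the option values here are examples:
#
#   #!/bin/sh
#   exec ssh '-oIdentityFile="/tmp/key"' '-oIdentitiesOnly="yes"' "$@"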
# Known hosts data and private keys can be passed to gitlab-shell in the
# environment. If present, this method puts them into temporary files, writes
# a script that can substitute as `ssh`, setting the options to respect those
# files, and yields: { "GIT_SSH" => "/tmp/myScript" }
def setup_ssh_auth
options = {}
if ENV.key?('GITLAB_SHELL_SSH_KEY')
key_file = Tempfile.new('gitlab-shell-key-file')
key_file.chmod(0o400)
key_file.write(ENV['GITLAB_SHELL_SSH_KEY'])
key_file.close
options['IdentityFile'] = key_file.path
options['IdentitiesOnly'] = 'yes'
end
if ENV.key?('GITLAB_SHELL_KNOWN_HOSTS')
known_hosts_file = Tempfile.new('gitlab-shell-known-hosts')
known_hosts_file.chmod(0o400)
known_hosts_file.write(ENV['GITLAB_SHELL_KNOWN_HOSTS'])
known_hosts_file.close
options['StrictHostKeyChecking'] = 'yes'
options['UserKnownHostsFile'] = known_hosts_file.path
end
return yield({}) if options.empty?
script = Tempfile.new('gitlab-shell-ssh-wrapper')
script.chmod(0o755)
script.write(custom_ssh_script(options))
script.close
yield('GIT_SSH' => script.path)
ensure
key_file.close! unless key_file.nil?
known_hosts_file.close! unless known_hosts_file.nil?
script.close! unless script.nil?
end
def gitlab_reference_counter
@gitlab_reference_counter ||= begin
# Defer loading because this pulls in gitlab_net, which takes 100-200 ms
# to load
require_relative 'gitlab_reference_counter'
GitlabReferenceCounter.new(full_path)
end
end
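# Illustrative command built by rsync() when given the ionice prefix (paths
# are examples; compare the mv-storage specs below):
#
#   ionice -c2 -n7 rsync -a --delete --rsync-path="ionice -c2 -n7 rsync" /src/repo.git/ /dest/repo.git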
def rsync(src, dest, rsync_path = 'rsync')
command = rsync_path.split + %W(-a --delete --rsync-path="#{rsync_path}" #{src} #{dest})
system(*command)
end
end
require_relative 'spec_helper'
require_relative '../lib/gitlab_projects'
require_relative '../lib/gitlab_reference_counter'
describe GitlabProjects do
before do
FileUtils.mkdir_p(tmp_repos_path)
$logger = double('logger').as_null_object
end
after do
FileUtils.rm_rf(tmp_repos_path)
end
describe :create_hooks do
let(:repo_path) { File.join(tmp_repos_path, 'hook-test.git') }
let(:hooks_dir) { File.join(repo_path, 'hooks') }
let(:target_hooks_dir) { File.join(ROOT_PATH, 'hooks') }
let(:existing_target) { File.join(repo_path, 'foobar') }
before do
FileUtils.rm_rf(repo_path)
FileUtils.mkdir_p(repo_path)
end
context 'hooks is a directory' do
let(:existing_file) { File.join(hooks_dir, 'my-file') }
before do
FileUtils.mkdir_p(hooks_dir)
FileUtils.touch(existing_file)
GitlabProjects.create_hooks(repo_path)
end
it { File.readlink(hooks_dir).should == target_hooks_dir }
it { Dir[File.join(repo_path, "hooks.old.*/my-file")].count.should == 1 }
end
context 'hooks is a valid symlink' do
before do
FileUtils.mkdir_p existing_target
File.symlink(existing_target, hooks_dir)
GitlabProjects.create_hooks(repo_path)
end
it { File.readlink(hooks_dir).should == target_hooks_dir }
end
context 'hooks is a broken symlink' do
before do
FileUtils.rm_f(existing_target)
File.symlink(existing_target, hooks_dir)
GitlabProjects.create_hooks(repo_path)
end
it { File.readlink(hooks_dir).should == target_hooks_dir }
end
end
describe :initialize do
before do
argv('add-project', tmp_repos_path, repo_name)
@gl_projects = GitlabProjects.new
end
it { @gl_projects.project_name.should == repo_name }
it { @gl_projects.repos_path.should == tmp_repos_path }
it { @gl_projects.full_path.should == "#{tmp_repos_path}/gitlab-ci.git" }
it { @gl_projects.instance_variable_get(:@command).should == 'add-project' }
end
describe :create_tag do
let(:gl_projects_create) {
build_gitlab_projects('import-project', tmp_repos_path, repo_name, 'https://github.com/randx/six.git')
}
context "lightweight tag" do
let(:gl_projects) { build_gitlab_projects('create-tag', tmp_repos_path, repo_name, 'test_tag', 'master') }
it "should create a tag" do
gl_projects_create.exec
gl_projects.exec
tag_ref = capture_in_tmp_repo(%W(git rev-parse test_tag))
master_ref = capture_in_tmp_repo(%W(git rev-parse master))
tag_ref.should == master_ref
end
end
context "annotated tag" do
msg = 'some message'
tag_name = 'test_annotated_tag'
let(:gl_projects) { build_gitlab_projects('create-tag', tmp_repos_path, repo_name, tag_name, 'master', msg) }
it "should create an annotated tag" do
gl_projects_create.exec
system(*%W(git --git-dir=#{tmp_repo_path} config user.name Joe))
system(*%W(git --git-dir=#{tmp_repo_path} config user.email joe@smith.com))
gl_projects.exec
tag_ref = capture_in_tmp_repo(%W(git rev-parse #{tag_name}^{}))
master_ref = capture_in_tmp_repo(%W(git rev-parse master))
tag_msg = capture_in_tmp_repo(%W(git tag -l -n1 #{tag_name}))
tag_ref.should == master_ref
tag_msg.should == tag_name + ' ' + msg
end
end
end
describe :add_project do
let(:gl_projects) { build_gitlab_projects('add-project', tmp_repos_path, repo_name) }
it "should create a directory" do
gl_projects.stub(system: true)
GitlabProjects.stub(create_hooks: true)
gl_projects.exec
File.exists?(tmp_repo_path).should be_true
end
it "should receive valid cmd" do
valid_cmd = ['git', "--git-dir=#{tmp_repo_path}", 'init', '--bare']
gl_projects.should_receive(:system).with(*valid_cmd).and_return(true)
GitlabProjects.should_receive(:create_hooks).with(tmp_repo_path)
gl_projects.exec
end
it "should log an add-project event" do
$logger.should_receive(:info).with("Adding project #{repo_name} at <#{tmp_repo_path}>.")
gl_projects.exec
end
end
describe :list_projects do
let(:gl_projects) do
build_gitlab_projects('add-project', tmp_repos_path, "list_test/#{repo_name}")
end
before do
FileUtils.mkdir_p(tmp_repos_path)
end
it 'should create projects and list them' do
GitlabProjects.stub(create_hooks: true)
gl_projects.exec
gl_projects.send(:list_projects).should == ["list_test/#{repo_name}"]
end
end
describe :mv_project do
let(:gl_projects) { build_gitlab_projects('mv-project', tmp_repos_path, repo_name, 'repo.git') }
let(:new_repo_path) { File.join(tmp_repos_path, 'repo.git') }
before do
FileUtils.mkdir_p(tmp_repo_path)
end
it "should move a repo directory" do
File.exists?(tmp_repo_path).should be_true
gl_projects.exec
File.exists?(tmp_repo_path).should be_false
File.exists?(new_repo_path).should be_true
end
it "should fail if no destination path is provided" do
incomplete = build_gitlab_projects('mv-project', tmp_repos_path, repo_name)
$logger.should_receive(:error).with("mv-project failed: no destination path provided.")
incomplete.exec.should be_false
end
it "should fail if the source path doesn't exist" do
bad_source = build_gitlab_projects('mv-project', tmp_repos_path, 'bad-src.git', 'dest.git')
$logger.should_receive(:error).with("mv-project failed: source path <#{tmp_repos_path}/bad-src.git> does not exist.")
bad_source.exec.should be_false
end
it "should fail if the destination path already exists" do
FileUtils.mkdir_p(File.join(tmp_repos_path, 'already-exists.git'))
bad_dest = build_gitlab_projects('mv-project', tmp_repos_path, repo_name, 'already-exists.git')
message = "mv-project failed: destination path <#{tmp_repos_path}/already-exists.git> already exists."
$logger.should_receive(:error).with(message)
bad_dest.exec.should be_false
end
it "should log an mv-project event" do
message = "Moving project #{repo_name} from <#{tmp_repo_path}> to <#{new_repo_path}>."
$logger.should_receive(:info).with(message)
gl_projects.exec
end
end
describe :rm_project do
let(:gl_projects) { build_gitlab_projects('rm-project', tmp_repos_path, repo_name) }
before do
FileUtils.mkdir_p(tmp_repo_path)
end
it "should remove a repo directory" do
File.exists?(tmp_repo_path).should be_true
gl_projects.exec
File.exists?(tmp_repo_path).should be_false
end
it "should log an rm-project event" do
$logger.should_receive(:info).with("Removing project #{repo_name} from <#{tmp_repo_path}>.")
gl_projects.exec
end
end
describe :mv_storage do
let(:alternative_storage_path) { File.join(ROOT_PATH, 'tmp', 'alternative') }
let(:gl_projects) { build_gitlab_projects('mv-storage', tmp_repos_path, repo_name, alternative_storage_path) }
let(:new_repo_path) { File.join(alternative_storage_path, repo_name) }
before do
FileUtils.mkdir_p(tmp_repo_path)
FileUtils.mkdir_p(File.join(tmp_repo_path, 'hooks')) # Add some contents to copy
FileUtils.mkdir_p(alternative_storage_path)
allow_any_instance_of(GitlabReferenceCounter).to receive(:value).and_return(0)
end
after { FileUtils.rm_rf(alternative_storage_path) }
it "should rsync a repo directory" do
File.exists?(tmp_repo_path).should be_true
gl_projects.exec
File.exists?(new_repo_path).should be_true
# Make sure the target directory isn't empty (i.e. contents were copied)
FileUtils.cd(new_repo_path) { Dir['**/*'].length.should_not be(0) }
end
it "should attempt rsync with ionice first" do
expect(gl_projects).to receive(:system).with(
'ionice', '-c2', '-n7', 'rsync', '-a', '--delete', '--rsync-path="ionice -c2 -n7 rsync"',
"#{tmp_repo_path}/", new_repo_path
).and_return(true)
gl_projects.exec.should be_true
end
it "should attempt rsync without ionice if with ionice fails" do
expect(gl_projects).to receive(:system).with(
'ionice', '-c2', '-n7', 'rsync', '-a', '--delete', '--rsync-path="ionice -c2 -n7 rsync"',
"#{tmp_repo_path}/", new_repo_path
).and_return(false)
expect(gl_projects).to receive(:system).with(
'rsync', '-a', '--delete', '--rsync-path="rsync"', "#{tmp_repo_path}/", new_repo_path
).and_return(true)
gl_projects.exec.should be_true
end
it "should fail if both rsync attempts fail" do
expect(gl_projects).to receive(:system).with(
'ionice', '-c2', '-n7', 'rsync', '-a', '--delete', '--rsync-path="ionice -c2 -n7 rsync"',
"#{tmp_repo_path}/", new_repo_path
).and_return(false)
expect(gl_projects).to receive(:system).with(
'rsync', '-a', '--delete', '--rsync-path="rsync"', "#{tmp_repo_path}/", new_repo_path
).and_return(false)
gl_projects.exec.should be_false
end
it "should fail if no destination path is provided" do
incomplete = build_gitlab_projects('mv-storage', tmp_repos_path, repo_name)
$logger.should_receive(:error).with("mv-storage failed: no destination storage path provided.")
incomplete.exec.should be_false
end
it "should fail if the source path doesn't exist" do
bad_source = build_gitlab_projects('mv-storage', tmp_repos_path, 'bad-src.git', alternative_storage_path)
$logger.should_receive(:error).with("mv-storage failed: source path <#{tmp_repos_path}/bad-src.git> does not exist.")
bad_source.exec.should be_false
end
it "should fail if there are pushes ongoing" do
allow_any_instance_of(GitlabReferenceCounter).to receive(:value).and_return(1)
$logger.should_receive(:error).with("mv-storage failed: source path <#{tmp_repo_path}> is waiting for pushes to finish.")
gl_projects.exec.should be_false
end
it "should log an mv-storage event" do
message = "Syncing project #{repo_name} from <#{tmp_repo_path}> to <#{new_repo_path}>."
$logger.should_receive(:info).with(message)
gl_projects.exec
end
end
describe :push_branches do
let(:repos_path) { 'current/storage' }
let(:project_name) { 'project/path.git' }
let(:full_path) { File.join(repos_path, project_name) }
let(:remote_name) { 'new/storage' }
let(:pid) { 1234 }
let(:branch_name) { 'master' }
let(:cmd) { %W(git --git-dir=#{full_path} push -- #{remote_name} #{branch_name}) }
let(:gl_projects) { build_gitlab_projects('push-branches', repos_path, project_name, remote_name, '600', 'master') }
it 'executes the command' do
expect(Process).to receive(:spawn).with(*cmd).and_return(pid)
expect(Process).to receive(:wait).with(pid)
expect(gl_projects.exec).to be true
end
it 'raises timeout' do
expect(Timeout).to receive(:timeout).with(600).and_raise(Timeout::Error)
expect(Process).to receive(:spawn).with(*cmd).and_return(pid)
expect(Process).to receive(:wait)
expect(Process).to receive(:kill).with('KILL', pid)
expect(gl_projects.exec).to be false
end
context 'with --force' do
let(:cmd) { %W(git --git-dir=#{full_path} push --force -- #{remote_name} #{branch_name}) }
let(:gl_projects) { build_gitlab_projects('push-branches', repos_path, project_name, remote_name, '600', '--force', 'master') }
it 'executes the command' do
expect(Process).to receive(:spawn).with(*cmd).and_return(pid)
expect(Process).to receive(:wait).with(pid)
expect(gl_projects.exec).to be true
end
end
end
describe :fetch_remote do
let(:repos_path) { 'current/storage' }
let(:project_name) { 'project.git' }
let(:full_path) { File.join(repos_path, project_name) }
let(:remote_name) { 'new/storage' }
let(:pid) { 1234 }
let(:branch_name) { 'master' }
def stub_spawn(*args, wait: true, success: true)
expect(Process).to receive(:spawn).with(*args).and_return(pid)
expect(Process).to receive(:wait2).with(pid).and_return([pid, double(success?: success)]) if wait
end
def stub_env(args = {})
original = ENV.to_h
args.each { |k, v| ENV[k] = v }
yield
ensure
ENV.replace(original)
end
def stub_tempfile(name, filename, opts = {})
chmod = opts.delete(:chmod)
file = StringIO.new
allow(file).to receive(:close!)
allow(file).to receive(:path).and_return(name)
expect(Tempfile).to receive(:new).with(filename).and_return(file)
expect(file).to receive(:chmod).with(chmod) if chmod
file
end
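# The helpers above keep these specs hermetic: stub_spawn fakes Process.spawn
# and Process.wait2, stub_env temporarily overrides ENV, and stub_tempfile
# swaps Tempfile for a StringIO so the generated SSH wrapper script and key
# contents can be asserted on directly.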
describe 'with default args' do
let(:gl_projects) { build_gitlab_projects('fetch-remote', repos_path, project_name, remote_name, '600') }
let(:cmd) { %W(git --git-dir=#{full_path} fetch #{remote_name} --prune --quiet --tags) }
it 'executes the command' do
stub_spawn({}, *cmd)
expect(gl_projects.exec).to be true
end
it 'raises timeout' do
stub_spawn({}, *cmd, wait: false)
expect(Timeout).to receive(:timeout).with(600).and_raise(Timeout::Error)
expect(Process).to receive(:kill).with('KILL', pid)
expect(gl_projects.exec).to be false
end
end
describe 'with --force' do
let(:gl_projects) { build_gitlab_projects('fetch-remote', repos_path, project_name, remote_name, '600', '--force') }
let(:env) { {} }
let(:cmd) { %W(git --git-dir=#{full_path} fetch #{remote_name} --prune --quiet --force --tags) }
it 'executes the command with forced option' do
stub_spawn({}, *cmd)
expect(gl_projects.exec).to be true
end
end
describe 'with --no-tags' do
let(:gl_projects) { build_gitlab_projects('fetch-remote', repos_path, project_name, remote_name, '600', '--no-tags') }
let(:cmd) { %W(git --git-dir=#{full_path} fetch #{remote_name} --prune --quiet --no-tags) }
it 'executes the command' do
stub_spawn({}, *cmd)
expect(gl_projects.exec).to be true
end
end
describe 'with GITLAB_SHELL_SSH_KEY' do
let(:gl_projects) { build_gitlab_projects('fetch-remote', repos_path, project_name, remote_name, '600') }
let(:cmd) { %W(git --git-dir=#{full_path} fetch #{remote_name} --prune --quiet --tags) }
around(:each) do |example|
stub_env('GITLAB_SHELL_SSH_KEY' => 'SSH KEY') { example.run }
end
it 'sets GIT_SSH to a custom script' do
script = stub_tempfile('scriptFile', 'gitlab-shell-ssh-wrapper', chmod: 0o755)
key = stub_tempfile('/tmp files/keyFile', 'gitlab-shell-key-file', chmod: 0o400)
stub_spawn({ 'GIT_SSH' => 'scriptFile' }, *cmd)
expect(gl_projects.exec).to be true
expect(script.string).to eq("#!/bin/sh\nexec ssh '-oIdentityFile=\"/tmp files/keyFile\"' '-oIdentitiesOnly=\"yes\"' \"$@\"")
expect(key.string).to eq('SSH KEY')
end
end
describe 'with GITLAB_SHELL_KNOWN_HOSTS' do
let(:gl_projects) { build_gitlab_projects('fetch-remote', repos_path, project_name, remote_name, '600') }
let(:cmd) { %W(git --git-dir=#{full_path} fetch #{remote_name} --prune --quiet --tags) }
around(:each) do |example|
stub_env('GITLAB_SHELL_KNOWN_HOSTS' => 'KNOWN HOSTS') { example.run }
end
it 'sets GIT_SSH to a custom script' do
script = stub_tempfile('scriptFile', 'gitlab-shell-ssh-wrapper', chmod: 0o755)
key = stub_tempfile('/tmp files/knownHosts', 'gitlab-shell-known-hosts', chmod: 0o400)
stub_spawn({ 'GIT_SSH' => 'scriptFile' }, *cmd)
expect(gl_projects.exec).to be true
expect(script.string).to eq("#!/bin/sh\nexec ssh '-oStrictHostKeyChecking=\"yes\"' '-oUserKnownHostsFile=\"/tmp files/knownHosts\"' \"$@\"")
expect(key.string).to eq('KNOWN HOSTS')
end
end
end
describe :import_project do
context 'success import' do
let(:gl_projects) { build_gitlab_projects('import-project', tmp_repos_path, repo_name, 'https://github.com/randx/six.git') }
it { gl_projects.exec.should be_true }
it "should import a repo" do
gl_projects.exec
File.exists?(File.join(tmp_repo_path, 'HEAD')).should be_true
end
it "should log an import-project event" do
message = "Importing project #{repo_name} from <https://github.com/randx/six.git> to <#{tmp_repo_path}>."
$logger.should_receive(:info).with(message)
gl_projects.exec
end
end
context 'already exists' do
let(:gl_projects) { build_gitlab_projects('import-project', tmp_repos_path, repo_name, 'https://github.com/randx/six.git') }
it 'should import only once' do
gl_projects.exec.should be_true
gl_projects.exec.should be_false
end
end
context 'timeout' do
let(:gl_projects) { build_gitlab_projects('import-project', tmp_repos_path, repo_name, 'https://github.com/gitlabhq/gitlabhq.git', '1') }
it { gl_projects.exec.should be_false }
it "should not import a repo" do
gl_projects.exec
File.exists?(File.join(tmp_repo_path, 'HEAD')).should be_false
end
it "should log an import-project event" do
message = "Importing project #{repo_name} from <https://github.com/gitlabhq/gitlabhq.git> failed due to timeout."
$logger.should_receive(:error).with(message)
gl_projects.exec
end
end
end
describe :fork_repository do
let(:source_repos_path) { tmp_repos_path }
let(:dest_repos_path) { tmp_repos_path }
let(:source_repo_name) { File.join('source-namespace', repo_name) }
let(:dest_repo_name) { File.join('@hashed', 'aa', 'bb', 'xyz.git') }
let(:dest_repo) { File.join(dest_repos_path, dest_repo_name) }
let(:dest_namespace) { File.dirname(dest_repo) }
let(:gl_repo_fork) { build_gitlab_projects('fork-repository', source_repos_path, source_repo_name, dest_repos_path, dest_repo_name) }
let(:gl_projects_import) { build_gitlab_projects('import-project', source_repos_path, source_repo_name, 'https://gitlab.com/gitlab-org/gitlab-test.git') }
before do
FileUtils.mkdir_p(dest_repos_path)
gl_projects_import.exec
end
after do
FileUtils.rm_rf(dest_repos_path)
end
it "should not fork without a source repository path" do
missing_arg = build_gitlab_projects('fork-repository', tmp_repos_path, source_repo_name)
expect($logger).to receive(:error).with("fork-repository failed: no destination repository path provided.")
expect(missing_arg.exec).to be_false
end
it "should not fork without a destination repository path" do
missing_arg = build_gitlab_projects('fork-repository', tmp_repos_path, source_repo_name, tmp_repos_path)
$logger.should_receive(:error).with("fork-repository failed: no destination repository path provided.")
expect(missing_arg.exec).to be_false
end
it "should fork the repository" do
expect(gl_repo_fork.exec).to be_true
expect(File.exists?(dest_repo)).to be_true
expect(File.exists?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_true
expect(File.exists?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_true
end
it "should not fork if a project of the same name already exists" do
# create a fake project at the intended destination
FileUtils.mkdir_p(dest_repo)
# trying to fork again should fail as the repo already exists
message = "fork-repository failed: destination repository <#{dest_repo}> already exists."
expect($logger).to receive(:error).with(message)
expect(gl_repo_fork.exec).to be_false
end
it "should log a fork-project event" do
message = "Forking repository from <#{File.join(tmp_repos_path, source_repo_name)}> to <#{dest_repo}>."
expect($logger).to receive(:info).with(message)
expect(gl_repo_fork.exec).to be_true
end
context 'different storages' do
let(:dest_repos_path) { File.join(ROOT_PATH, 'tmp', 'alternative') }
it "should fork the repo" do
expect(gl_repo_fork.exec).to be_true
expect(File.exists?(dest_repo)).to be_true
expect(File.exists?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_true
expect(File.exists?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_true
end
end
end
describe :fork_project do
let(:source_repo_name) { File.join('source-namespace', repo_name) }
let(:dest_repo) { File.join(tmp_repos_path, 'forked-to-namespace', repo_name) }
let(:gl_projects_fork) { build_gitlab_projects('fork-project', tmp_repos_path, source_repo_name, tmp_repos_path, 'forked-to-namespace') }
let(:gl_projects_import) { build_gitlab_projects('import-project', tmp_repos_path, source_repo_name, 'https://github.com/randx/six.git') }
before do
gl_projects_import.exec
end
it "should not fork without a source repository path" do
missing_arg = build_gitlab_projects('fork-project', tmp_repos_path, source_repo_name)
$logger.should_receive(:error).with("fork-project failed: no destination repository path provided.")
missing_arg.exec.should be_false
end
it "should not fork without a destination namespace" do
missing_arg = build_gitlab_projects('fork-project', tmp_repos_path, source_repo_name, tmp_repos_path)
$logger.should_receive(:error).with("fork-project failed: no destination namespace provided.")
missing_arg.exec.should be_false
end
it "should not fork into a namespace that doesn't exist" do
message = "fork-project failed: destination namespace <#{tmp_repos_path}/forked-to-namespace> does not exist."
$logger.should_receive(:error).with(message)
gl_projects_fork.exec.should be_false
end
it "should fork the repo" do
# create destination namespace
FileUtils.mkdir_p(File.join(tmp_repos_path, 'forked-to-namespace'))
gl_projects_fork.exec.should be_true
File.exists?(dest_repo).should be_true
File.exists?(File.join(dest_repo, '/hooks/pre-receive')).should be_true
File.exists?(File.join(dest_repo, '/hooks/post-receive')).should be_true
end
it "should not fork if a project of the same name already exists" do
# create a fake project at the intended destination
FileUtils.mkdir_p(File.join(tmp_repos_path, 'forked-to-namespace', repo_name))
# trying to fork again should fail as the repo already exists
message = "fork-project failed: destination repository <#{tmp_repos_path}/forked-to-namespace/#{repo_name}> "
message << "already exists."
$logger.should_receive(:error).with(message)
gl_projects_fork.exec.should be_false
end
it "should log a fork-project event" do
message = "Forking project from <#{File.join(tmp_repos_path, source_repo_name)}> to <#{dest_repo}>."
$logger.should_receive(:info).with(message)
# create destination namespace
FileUtils.mkdir_p(File.join(tmp_repos_path, 'forked-to-namespace'))
gl_projects_fork.exec.should be_true
end
context 'different storages' do
let(:alternative_repos_path) { File.join(ROOT_PATH, 'tmp', 'alternative') }
let(:dest_repo) { File.join(alternative_repos_path, 'forked-to-namespace', repo_name) }
let(:gl_projects_fork) { build_gitlab_projects('fork-project', tmp_repos_path, source_repo_name, alternative_repos_path, 'forked-to-namespace') }
before do
FileUtils.mkdir_p(alternative_repos_path)
end
after do
FileUtils.rm_rf(alternative_repos_path)
end
it "should fork the repo" do
# create destination namespace
FileUtils.mkdir_p(File.join(alternative_repos_path, 'forked-to-namespace'))
gl_projects_fork.exec.should be_true
File.exists?(dest_repo).should be_true
File.exists?(File.join(dest_repo, '/hooks/pre-receive')).should be_true
File.exists?(File.join(dest_repo, '/hooks/post-receive')).should be_true
end
end
end
describe :exec do
it 'should puts message if unknown command arg' do
gitlab_projects = build_gitlab_projects('edit-project', tmp_repos_path, repo_name)
gitlab_projects.should_receive(:puts).with('not allowed')
gitlab_projects.exec
end
it 'should log a warning for unknown commands' do
gitlab_projects = build_gitlab_projects('hurf-durf', tmp_repos_path, repo_name)
$logger.should_receive(:warn).with('Attempt to execute invalid gitlab-projects command "hurf-durf".')
gitlab_projects.exec
end
end
def build_gitlab_projects(*args)
argv(*args)
GitlabProjects.new
end
def argv(*args)
args.each_with_index do |arg, i|
ARGV[i] = arg
end
end
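# argv writes into the real ARGV, which GitlabProjects#initialize then shifts,
# so build_gitlab_projects is effectively an in-process invocation of
# bin/gitlab-projects with those arguments.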
def tmp_repos_path
File.join(ROOT_PATH, 'tmp', 'repositories')
end
def tmp_repo_path
File.join(tmp_repos_path, repo_name)
end
def repo_name
'gitlab-ci.git'
end
def capture_in_tmp_repo(cmd)
IO.popen([*cmd, {chdir: tmp_repo_path}]).read.strip
end
end