New upstream version 15.5.4+ds3

Pirate Praveen 2022-12-04 17:31:26 +05:30
parent 786b3c7d00
commit fb1e825bdf
1708 changed files with 63355 additions and 34554 deletions

View file

@ -1 +0,0 @@
15.0.0-rc1

View file

@ -1,20 +0,0 @@
//go:build static && system_libgit2
// +build static,system_libgit2
package main
import (
"context"
"encoding/gob"
"flag"
"gitlab.com/gitlab-org/gitaly/v14/cmd/gitaly-git2go-v14/commit"
)
type commitSubcommand struct{}
func (commitSubcommand) Flags() *flag.FlagSet { return flag.NewFlagSet("commit", flag.ExitOnError) }
func (commitSubcommand) Run(ctx context.Context, decoder *gob.Decoder, encoder *gob.Encoder) error {
return commit.Run(ctx, decoder, encoder)
}

View file

@ -1,48 +0,0 @@
//go:build static && system_libgit2
// +build static,system_libgit2
package testhelper
import (
"testing"
"time"
git "github.com/libgit2/git2go/v33"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/cmd/gitaly-git2go-v14/git2goutil"
)
// DefaultAuthor is the author used by BuildCommit
var DefaultAuthor = git.Signature{
Name: "Foo",
Email: "foo@example.com",
When: time.Date(2020, 1, 1, 1, 1, 1, 0, time.FixedZone("", 2*60*60)),
}
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func BuildCommit(t testing.TB, repoPath string, parents []*git.Oid, fileContents map[string]string) *git.Oid {
repo, err := git2goutil.OpenRepository(repoPath)
require.NoError(t, err)
defer repo.Free()
odb, err := repo.Odb()
require.NoError(t, err)
treeBuilder, err := repo.TreeBuilder()
require.NoError(t, err)
for file, contents := range fileContents {
oid, err := odb.Write([]byte(contents), git.ObjectBlob)
require.NoError(t, err)
require.NoError(t, treeBuilder.Insert(file, oid, git.FilemodeBlob))
}
tree, err := treeBuilder.Write()
require.NoError(t, err)
var commit *git.Oid
commit, err = repo.CreateCommitFromIds("", &DefaultAuthor, &DefaultAuthor, "Message", tree, parents...)
require.NoError(t, err)
return commit
}

View file

@ -1,21 +0,0 @@
//go:build static && system_libgit2
// +build static,system_libgit2
package main
import (
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git2go"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func buildExecutor(tb testing.TB, cfg config.Cfg) *git2go.Executor {
return git2go.NewExecutor(cfg, gittest.NewCommandFactory(tb, cfg), config.NewLocator(cfg))
}

View file

@ -1,42 +0,0 @@
# gitaly-hooks
`gitaly-hooks` is the binary that serves as the single point of entry for Git hooks that run through Gitaly.
## How is it invoked?
`gitaly-hooks` has the following subcommands:
| subcommand | purpose | arguments | stdin |
|--------------|-------------------------------------------------|--------------------------------------|---------------------------------------------|
| `check` | checks if the hooks can reach the gitlab server | none | none |
| `pre-receive` | used as the git pre-receive hook | none | `<old-value>` SP `<new-value>` SP `<ref-name>` LF |
| `update` | used as the git update hook | `<ref-name>` `<old-object>` `<new-object>` | none |
| `post-receive` | used as the git post-receive hook | none | `<old-value>` SP `<new-value>` SP `<ref-name>` LF |
| `git` | used as the git pack-objects hook | `pack-objects` `[--stdout]` `[--shallow-file]` | `<object-list>` |
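
For illustration, here is a minimal, hypothetical sketch (not Gitaly's actual implementation) of parsing the `<old-value>` SP `<new-value>` SP `<ref-name>` lines that git writes to the `pre-receive` and `post-receive` hooks on stdin:

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	// git writes one line per updated reference:
	// `<old-value> SP <new-value> SP <ref-name> LF`.
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		fields := strings.SplitN(scanner.Text(), " ", 3)
		if len(fields) != 3 {
			fmt.Fprintf(os.Stderr, "malformed hook input line: %q\n", scanner.Text())
			os.Exit(1)
		}
		oldValue, newValue, refName := fields[0], fields[1], fields[2]
		fmt.Printf("ref %s: %s -> %s\n", refName, oldValue, newValue)
	}
	if err := scanner.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```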
## Where is it invoked from?
There are three main code paths that call `gitaly-hooks`.
### git receive-pack (SSH & HTTP)
We have two RPCs that perform the `git receive-pack` function, [SSHReceivePack](https://gitlab.com/gitlab-org/gitaly/-/blob/master/internal/service/ssh/receive_pack.go) and [PostReceivePack](https://gitlab.com/gitlab-org/gitaly/-/blob/master/internal/service/smarthttp/receive_pack.go).
Both of these RPCs, when executing `git receive-pack`, set `core.hooksPath` to the path of the `gitaly-hooks` binary. [That happens here in `ReceivePackConfig`](https://gitlab.com/gitlab-org/gitaly/-/blob/master/internal/git/receivepack.go).
### Operations service RPCs
The [operations service](https://gitlab.com/gitlab-org/gitaly/-/tree/master/internal/service/operations) contains RPCs that call out to `gitaly-ruby`, which in turn performs operations that execute Git hooks.
This is accomplished through the `with_hooks` method [here](https://gitlab.com/gitlab-org/gitaly/-/blob/master/ruby/lib/gitlab/git/operation_service.rb). Eventually [`hook.rb`](https://gitlab.com/gitlab-org/gitaly/-/blob/master/ruby/lib/gitlab/git/hook.rb) is
called, which invokes the `gitaly-hooks` binary. This path doesn't rely on git to run the hooks: the arguments and standard input for the
hooks are built in Ruby, which then shells out to `gitaly-hooks`, as sketched below.
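
As a rough illustration of that flow, here is a hypothetical Go sketch of what "building the arguments and input and shelling out" amounts to. The binary path, environment variable value, and change line are made up for the example; the real values come from the RPC request:

```go
package main

import (
	"log"
	"os"
	"os/exec"
	"strings"
)

func main() {
	// Hypothetical values; in the real code path they are assembled in Ruby
	// from the RPC request rather than provided by git.
	hooksBinary := "/path/to/gitaly-hooks"
	changes := "<old-value> <new-value> refs/heads/main\n"

	cmd := exec.Command(hooksBinary, "pre-receive")
	cmd.Env = append(os.Environ(), "GL_REPOSITORY=project-1") // hypothetical value
	cmd.Stdin = strings.NewReader(changes)
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr

	if err := cmd.Run(); err != nil {
		log.Fatalf("pre-receive hook rejected the change: %v", err)
	}
}
```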
### git upload-pack (SSH & HTTP)
This path is only used when the pack-objects cache is enabled in Gitaly's configuration file.
SSHUploadPack and PostUploadPack, when executing `git upload-pack`, set `uploadpack.packObjectsHook` to the path of the `gitaly-hooks` binary. Afterward, when `git upload-pack` needs packfile data, it invokes the `gitaly-hooks` binary instead of `git pack-objects`. [That happens here in `WithPackObjectsHookEnv`](https://gitlab.com/gitlab-org/gitaly/-/blob/47164700a1ea086c5e8ca0d02feefe4e68bf4f81/internal/git/hooks_options.go#L54).
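
Both this path and the receive-pack path above boil down to a per-invocation git configuration override. A hypothetical sketch of the upload-pack variant (the paths are made up; this is not Gitaly's actual code):

```go
package main

import (
	"log"
	"os"
	"os/exec"
)

func main() {
	// Hypothetical paths, for illustration only.
	gitalyHooks := "/path/to/gitaly-hooks"
	repoPath := "/path/to/repo.git"

	// With this override, whenever upload-pack needs packfile data it executes
	// the configured hook (`gitaly-hooks pack-objects ...`) instead of spawning
	// `git pack-objects` itself. The receive-pack RPCs use the same mechanism
	// with `core.hooksPath`.
	cmd := exec.Command("git",
		"-c", "uploadpack.packObjectsHook="+gitalyHooks,
		"upload-pack", repoPath,
	)
	cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr

	if err := cmd.Run(); err != nil {
		log.Fatalf("git upload-pack: %v", err)
	}
}
```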
## What does gitaly-hooks do?
`gitaly-hooks` takes its arguments and standard input and makes the corresponding RPC call to `PreReceiveHook`, `UpdateHook`, `PostReceiveHook`, or `PackObjectsHook`.
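
Schematically, the dispatch looks like the following simplified, hypothetical sketch. The real binary streams stdin, stdout, and the arguments over the corresponding RPC instead of just printing the mapping:

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: gitaly-hooks <subcommand> [args...]")
		os.Exit(1)
	}

	// Map each subcommand to the RPC it is handled by.
	var rpc string
	switch os.Args[1] {
	case "pre-receive":
		rpc = "PreReceiveHook"
	case "update":
		rpc = "UpdateHook"
	case "post-receive":
		rpc = "PostReceiveHook"
	case "git": // invoked as the pack-objects hook
		rpc = "PackObjectsHook"
	default:
		fmt.Fprintf(os.Stderr, "unknown subcommand %q\n", os.Args[1])
		os.Exit(1)
	}

	fmt.Printf("would call %s with arguments %v\n", rpc, os.Args[2:])
}
```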

View file

@ -1,141 +0,0 @@
package main
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/url"
"path/filepath"
"github.com/git-lfs/git-lfs/lfs"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/prometheus"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitlab"
gitalylog "gitlab.com/gitlab-org/gitaly/v14/internal/log"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/labkit/tracing"
)
type configProvider interface {
Get(key string) string
}
func initLogging(p configProvider) (io.Closer, error) {
path := p.Get(gitalylog.GitalyLogDirEnvKey)
if path == "" {
return nil, nil
}
filepath := filepath.Join(path, "gitaly_lfs_smudge.log")
return log.Initialize(
log.WithFormatter("json"),
log.WithLogLevel("info"),
log.WithOutputName(filepath),
)
}
func smudge(to io.Writer, from io.Reader, cfgProvider configProvider) error {
// Since the environment is sanitized at the moment, we're only
// using this to extract the correlation ID. The finished() call
// to clean up the tracing will be a NOP here.
ctx, finished := tracing.ExtractFromEnv(context.Background())
defer finished()
output, err := handleSmudge(ctx, to, from, cfgProvider)
if err != nil {
log.WithError(err).Error(err)
return err
}
defer func() {
if err := output.Close(); err != nil {
log.ContextLogger(ctx).WithError(err).Error("closing LFS object: %w", err)
}
}()
_, copyErr := io.Copy(to, output)
if copyErr != nil {
log.WithError(copyErr).Error(copyErr)
return copyErr
}
return nil
}
func handleSmudge(ctx context.Context, to io.Writer, from io.Reader, config configProvider) (io.ReadCloser, error) {
logger := log.ContextLogger(ctx)
ptr, contents, err := lfs.DecodeFrom(from)
if err != nil {
// This isn't a valid LFS pointer. Just copy the existing pointer data.
return io.NopCloser(contents), nil
}
logger.WithField("oid", ptr.Oid).Debug("decoded LFS OID")
glCfg, tlsCfg, glRepository, err := loadConfig(config)
if err != nil {
return io.NopCloser(contents), err
}
logger.WithField("gitlab_config", glCfg).
WithField("gitaly_tls_config", tlsCfg).
Debug("loaded GitLab API config")
client, err := gitlab.NewHTTPClient(logger, glCfg, tlsCfg, prometheus.Config{})
if err != nil {
return io.NopCloser(contents), err
}
qs := url.Values{}
qs.Set("oid", ptr.Oid)
qs.Set("gl_repository", glRepository)
u := url.URL{Path: "/lfs", RawQuery: qs.Encode()}
response, err := client.Get(ctx, u.String())
if err != nil {
return io.NopCloser(contents), fmt.Errorf("error loading LFS object: %v", err)
}
if response.StatusCode == 200 {
return response.Body, nil
}
if err := response.Body.Close(); err != nil {
logger.WithError(err).Error("closing LFS pointer body: %w", err)
}
return io.NopCloser(contents), nil
}
func loadConfig(cfgProvider configProvider) (config.Gitlab, config.TLS, string, error) {
var cfg config.Gitlab
var tlsCfg config.TLS
glRepository := cfgProvider.Get("GL_REPOSITORY")
if glRepository == "" {
return cfg, tlsCfg, "", fmt.Errorf("error loading project: GL_REPOSITORY is not defined")
}
u := cfgProvider.Get("GL_INTERNAL_CONFIG")
if u == "" {
return cfg, tlsCfg, glRepository, fmt.Errorf("unable to retrieve GL_INTERNAL_CONFIG")
}
if err := json.Unmarshal([]byte(u), &cfg); err != nil {
return cfg, tlsCfg, glRepository, fmt.Errorf("unable to unmarshal GL_INTERNAL_CONFIG: %v", err)
}
u = cfgProvider.Get("GITALY_TLS")
if u == "" {
return cfg, tlsCfg, glRepository, errors.New("unable to retrieve GITALY_TLS")
}
if err := json.Unmarshal([]byte(u), &tlsCfg); err != nil {
return cfg, tlsCfg, glRepository, fmt.Errorf("unable to unmarshal GITALY_TLS: %w", err)
}
return cfg, tlsCfg, glRepository, nil
}

View file

@ -1,256 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"net/http"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitlab"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
const (
lfsOid = "3ea5dd307f195f449f0e08234183b82e92c3d5f4cff11c2a6bb014f9e0de12aa"
lfsPointer = `version https://git-lfs.github.com/spec/v1
oid sha256:3ea5dd307f195f449f0e08234183b82e92c3d5f4cff11c2a6bb014f9e0de12aa
size 177735
`
lfsPointerWithCRLF = `version https://git-lfs.github.com/spec/v1
oid sha256:3ea5dd307f195f449f0e08234183b82e92c3d5f4cff11c2a6bb014f9e0de12aa` + "\r\nsize 177735"
invalidLfsPointer = `version https://git-lfs.github.com/spec/v1
oid sha256:3ea5dd307f195f449f0e08234183b82e92c3d5f4cff11c2a6bb014f9e0de12aa&gl_repository=project-51
size 177735
`
invalidLfsPointerWithNonHex = `version https://git-lfs.github.com/spec/v1
oid sha256:3ea5dd307f195f449f0e08234183b82e92c3d5f4cff11c2a6bb014f9e0de12z-
size 177735`
glRepository = "project-1"
secretToken = "topsecret"
testData = "hello world"
certPath = "../../internal/gitlab/testdata/certs/server.crt"
keyPath = "../../internal/gitlab/testdata/certs/server.key"
)
var defaultOptions = gitlab.TestServerOptions{
SecretToken: secretToken,
LfsBody: testData,
LfsOid: lfsOid,
GlRepository: glRepository,
ClientCACertPath: certPath,
ServerCertPath: certPath,
ServerKeyPath: keyPath,
}
type mapConfig struct {
env map[string]string
}
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func (m *mapConfig) Get(key string) string {
return m.env[key]
}
func runTestServer(t *testing.T, options gitlab.TestServerOptions) (config.Gitlab, func()) {
tempDir := testhelper.TempDir(t)
gitlab.WriteShellSecretFile(t, tempDir, secretToken)
secretFilePath := filepath.Join(tempDir, ".gitlab_shell_secret")
serverURL, serverCleanup := gitlab.NewTestServer(t, options)
c := config.Gitlab{URL: serverURL, SecretFile: secretFilePath, HTTPSettings: config.HTTPSettings{CAFile: certPath}}
return c, func() {
serverCleanup()
}
}
func TestSuccessfulLfsSmudge(t *testing.T) {
testCases := []struct {
desc string
data string
}{
{
desc: "regular LFS pointer",
data: lfsPointer,
},
{
desc: "LFS pointer with CRLF",
data: lfsPointerWithCRLF,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
var b bytes.Buffer
reader := strings.NewReader(tc.data)
c, cleanup := runTestServer(t, defaultOptions)
defer cleanup()
cfg, err := json.Marshal(c)
require.NoError(t, err)
tlsCfg, err := json.Marshal(config.TLS{
CertPath: certPath,
KeyPath: keyPath,
})
require.NoError(t, err)
tmpDir := testhelper.TempDir(t)
env := map[string]string{
"GL_REPOSITORY": "project-1",
"GL_INTERNAL_CONFIG": string(cfg),
"GITALY_LOG_DIR": tmpDir,
"GITALY_TLS": string(tlsCfg),
}
cfgProvider := &mapConfig{env: env}
_, err = initLogging(cfgProvider)
require.NoError(t, err)
err = smudge(&b, reader, cfgProvider)
require.NoError(t, err)
require.Equal(t, testData, b.String())
logFilename := filepath.Join(tmpDir, "gitaly_lfs_smudge.log")
require.FileExists(t, logFilename)
data := testhelper.MustReadFile(t, logFilename)
require.NoError(t, err)
d := string(data)
require.Contains(t, d, `"msg":"Finished HTTP request"`)
require.Contains(t, d, `"status":200`)
require.Contains(t, d, `"content_length_bytes":`)
})
}
}
func TestUnsuccessfulLfsSmudge(t *testing.T) {
testCases := []struct {
desc string
data string
missingEnv string
tlsCfg config.TLS
expectedError bool
options gitlab.TestServerOptions
expectedLogMessage string
expectedGitalyTLS string
}{
{
desc: "bad LFS pointer",
data: "test data",
options: defaultOptions,
expectedError: false,
},
{
desc: "invalid LFS pointer",
data: invalidLfsPointer,
options: defaultOptions,
expectedError: false,
},
{
desc: "invalid LFS pointer with non-hex characters",
data: invalidLfsPointerWithNonHex,
options: defaultOptions,
expectedError: false,
},
{
desc: "missing GL_REPOSITORY",
data: lfsPointer,
missingEnv: "GL_REPOSITORY",
options: defaultOptions,
expectedError: true,
expectedLogMessage: "GL_REPOSITORY is not defined",
},
{
desc: "missing GL_INTERNAL_CONFIG",
data: lfsPointer,
missingEnv: "GL_INTERNAL_CONFIG",
options: defaultOptions,
expectedError: true,
expectedLogMessage: "unable to retrieve GL_INTERNAL_CONFIG",
},
{
desc: "failed HTTP response",
data: lfsPointer,
options: gitlab.TestServerOptions{
SecretToken: secretToken,
LfsBody: testData,
LfsOid: lfsOid,
GlRepository: glRepository,
LfsStatusCode: http.StatusInternalServerError,
},
expectedError: true,
expectedLogMessage: "error loading LFS object",
},
{
desc: "invalid TLS paths",
data: lfsPointer,
options: defaultOptions,
tlsCfg: config.TLS{CertPath: "fake-path", KeyPath: "not-real"},
expectedError: true,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
c, cleanup := runTestServer(t, tc.options)
defer cleanup()
cfg, err := json.Marshal(c)
require.NoError(t, err)
tlsCfg, err := json.Marshal(tc.tlsCfg)
require.NoError(t, err)
tmpDir := testhelper.TempDir(t)
env := map[string]string{
"GL_REPOSITORY": "project-1",
"GL_INTERNAL_CONFIG": string(cfg),
"GITALY_LOG_DIR": tmpDir,
"GITALY_TLS": string(tlsCfg),
}
if tc.missingEnv != "" {
delete(env, tc.missingEnv)
}
cfgProvider := &mapConfig{env: env}
var b bytes.Buffer
reader := strings.NewReader(tc.data)
_, err = initLogging(cfgProvider)
require.NoError(t, err)
err = smudge(&b, reader, cfgProvider)
if tc.expectedError {
require.Error(t, err)
} else {
require.NoError(t, err)
require.Equal(t, tc.data, b.String())
}
logFilename := filepath.Join(tmpDir, "gitaly_lfs_smudge.log")
require.FileExists(t, logFilename)
data := testhelper.MustReadFile(t, logFilename)
if tc.expectedLogMessage != "" {
require.Contains(t, string(data), tc.expectedLogMessage)
}
})
}
}

View file

@ -1,43 +0,0 @@
package main
import (
"fmt"
"os"
)
type envConfig struct{}
func (e *envConfig) Get(key string) string {
return os.Getenv(key)
}
func requireStdin(msg string) {
var out string
stat, err := os.Stdin.Stat()
if err != nil {
out = fmt.Sprintf("Cannot read from STDIN. %s (%s)", msg, err)
} else if (stat.Mode() & os.ModeCharDevice) != 0 {
out = fmt.Sprintf("Cannot read from STDIN. %s", msg)
}
if len(out) > 0 {
fmt.Println(out)
os.Exit(1)
}
}
func main() {
requireStdin("This command should be run by the Git 'smudge' filter")
closer, err := initLogging(&envConfig{})
if err != nil {
fmt.Fprintf(os.Stderr, "error initializing log file for gitaly-lfs-smudge: %v", err)
}
if closer != nil {
	defer closer.Close()
}
err = smudge(os.Stdout, os.Stdin, &envConfig{})
if err != nil {
os.Exit(1)
}
}

View file

@ -1,3 +0,0 @@
source 'https://rubygems.org'
gem 'gitlab-dangerfiles', '~> 3.1.0', require: false

View file

@ -1,101 +0,0 @@
GEM
remote: https://rubygems.org/
specs:
addressable (2.8.0)
public_suffix (>= 2.0.2, < 5.0)
claide (1.1.0)
claide-plugins (0.9.2)
cork
nap
open4 (~> 1.3)
colored2 (3.1.2)
cork (0.3.0)
colored2 (~> 3.1)
danger (8.6.0)
claide (~> 1.0)
claide-plugins (>= 0.9.2)
colored2 (~> 3.1)
cork (~> 0.1)
faraday (>= 0.9.0, < 2.0)
faraday-http-cache (~> 2.0)
git (~> 1.7)
kramdown (~> 2.3)
kramdown-parser-gfm (~> 1.0)
no_proxy_fix
octokit (~> 4.7)
terminal-table (>= 1, < 4)
danger-gitlab (8.0.0)
danger
gitlab (~> 4.2, >= 4.2.0)
faraday (1.10.0)
faraday-em_http (~> 1.0)
faraday-em_synchrony (~> 1.0)
faraday-excon (~> 1.1)
faraday-httpclient (~> 1.0)
faraday-multipart (~> 1.0)
faraday-net_http (~> 1.0)
faraday-net_http_persistent (~> 1.0)
faraday-patron (~> 1.0)
faraday-rack (~> 1.0)
faraday-retry (~> 1.0)
ruby2_keywords (>= 0.0.4)
faraday-em_http (1.0.0)
faraday-em_synchrony (1.0.0)
faraday-excon (1.1.0)
faraday-http-cache (2.2.0)
faraday (>= 0.8)
faraday-httpclient (1.0.1)
faraday-multipart (1.0.3)
multipart-post (>= 1.2, < 3)
faraday-net_http (1.0.1)
faraday-net_http_persistent (1.2.0)
faraday-patron (1.0.0)
faraday-rack (1.0.0)
faraday-retry (1.0.3)
git (1.11.0)
rchardet (~> 1.8)
gitlab (4.18.0)
httparty (~> 0.18)
terminal-table (>= 1.5.1)
gitlab-dangerfiles (3.1.0)
danger (>= 8.4.5)
danger-gitlab (>= 8.0.0)
rake
httparty (0.20.0)
mime-types (~> 3.0)
multi_xml (>= 0.5.2)
kramdown (2.3.2)
rexml
kramdown-parser-gfm (1.1.0)
kramdown (~> 2.0)
mime-types (3.4.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2022.0105)
multi_xml (0.6.0)
multipart-post (2.1.1)
nap (1.1.0)
no_proxy_fix (0.1.2)
octokit (4.22.0)
faraday (>= 0.9)
sawyer (~> 0.8.0, >= 0.5.3)
open4 (1.3.4)
public_suffix (4.0.7)
rake (13.0.6)
rchardet (1.8.0)
rexml (3.2.5)
ruby2_keywords (0.0.5)
sawyer (0.8.2)
addressable (>= 2.3.5)
faraday (> 0.8, < 2.0)
terminal-table (3.0.2)
unicode-display_width (>= 1.1.1, < 3)
unicode-display_width (2.1.0)
PLATFORMS
ruby
DEPENDENCIES
gitlab-dangerfiles (~> 3.1.0)
BUNDLED WITH
2.2.33

View file

@ -1,53 +0,0 @@
module gitlab.com/gitlab-org/gitaly/v14
exclude (
// grpc-go versions v1.34.0 and v1.35.0-dev have a bug that affects unix domain socket
// dialing. They should be avoided until we upgrade to a newer, fixed
// version. More details:
// https://github.com/grpc/grpc-go/issues/3990
github.com/grpc/grpc-go v1.34.0
github.com/grpc/grpc-go v1.35.0-dev
)
require (
github.com/beevik/ntp v0.3.0
github.com/cloudflare/tableflip v1.2.2
github.com/containerd/cgroups v0.0.0-20201118023556-2819c83ced99
github.com/getsentry/sentry-go v0.10.0
github.com/git-lfs/git-lfs v1.5.1-0.20210304194248-2e1d981afbe3
github.com/go-enry/go-license-detector/v4 v4.3.0
github.com/go-git/go-git/v5 v5.3.0 // indirect
github.com/google/go-cmp v0.5.5
github.com/google/uuid v1.2.0
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
github.com/hashicorp/golang-lru v0.5.4
github.com/hashicorp/yamux v0.0.0-20210316155119-a95892c5f864
github.com/jackc/pgconn v1.10.1
github.com/jackc/pgtype v1.9.1
github.com/jackc/pgx/v4 v4.14.1
github.com/kelseyhightower/envconfig v1.3.0
github.com/libgit2/git2go/v33 v33.0.9
github.com/olekukonko/tablewriter v0.0.2
github.com/opencontainers/runtime-spec v1.0.2
github.com/opentracing/opentracing-go v1.2.0
github.com/pelletier/go-toml v1.8.1
github.com/prometheus/client_golang v1.10.0
github.com/prometheus/client_model v0.2.0
github.com/rubenv/sql-migrate v0.0.0-20191213152630-06338513c237
github.com/sirupsen/logrus v1.8.1
github.com/stretchr/testify v1.7.0
github.com/uber/jaeger-client-go v2.27.0+incompatible
gitlab.com/gitlab-org/gitlab-shell v1.9.8-0.20210720163109-50da611814d2
gitlab.com/gitlab-org/labkit v1.5.0
go.uber.org/goleak v1.1.10
gocloud.dev v0.23.0
golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5 // indirect
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
golang.org/x/sys v0.0.0-20211102192858-4dd72447c267
golang.org/x/time v0.0.0-20220224211638-0e9765cccd65
google.golang.org/grpc v1.38.0
google.golang.org/protobuf v1.26.0
)
go 1.16

View file

@ -1,23 +0,0 @@
//go:build boringcrypto
// +build boringcrypto
package boring
import (
"crypto/boring"
"gitlab.com/gitlab-org/labkit/log"
)
// CheckBoring checks whether FIPS crypto has been enabled. For the FIPS Go
// compiler in https://github.com/golang-fips/go, this requires that:
//
// 1. The kernel has FIPS enabled (e.g. `/proc/sys/crypto/fips_enabled` is 1).
// 2. A system OpenSSL can be dynamically loaded via dlopen().
func CheckBoring() {
if boring.Enabled() {
log.Info("FIPS mode is enabled. Using an external SSL library.")
return
}
log.Info("Gitaly was compiled with FIPS mode, but an external SSL library was not enabled.")
}

View file

@ -1,9 +0,0 @@
//go:build !boringcrypto
// +build !boringcrypto
package boring
// CheckBoring does nothing when the boringcrypto tag is not in the
// build.
func CheckBoring() {
}

View file

@ -1,32 +0,0 @@
package cgroups
import (
"github.com/prometheus/client_golang/prometheus"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/cgroups"
)
// Manager supplies an interface for interacting with cgroups
type Manager interface {
// Setup creates cgroups and assigns configured limitations.
// It is expected to be called once at Gitaly startup from any
// instance of the Manager.
Setup() error
// AddCommand adds a Command to a cgroup
AddCommand(*command.Command) error
// Cleanup cleans up cgroups created in Setup.
// It is expected to be called once at Gitaly shutdown from any
// instance of the Manager.
Cleanup() error
Describe(ch chan<- *prometheus.Desc)
Collect(ch chan<- prometheus.Metric)
}
// NewManager returns the appropriate Cgroups manager
func NewManager(cfg cgroups.Config) Manager {
if cfg.Count > 0 {
return newV1Manager(cfg)
}
return &NoopManager{}
}
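// exampleManagerLifecycle is an illustrative usage sketch (not part of the
// original file) that follows the doc comments above: Setup is called once at
// Gitaly startup, AddCommand for every spawned command that should be
// confined, and Cleanup once at shutdown.
func exampleManagerLifecycle(cfg cgroups.Config, cmd *command.Command) error {
	manager := NewManager(cfg)
	// Create the configured cgroups and apply their limits.
	if err := manager.Setup(); err != nil {
		return err
	}
	// Confine the spawned command to one of the cgroups.
	if err := manager.AddCommand(cmd); err != nil {
		return err
	}
	// Remove the created cgroups again.
	return manager.Cleanup()
}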

View file

@ -1,20 +0,0 @@
package cgroups
import (
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/cgroups"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func TestNewManager(t *testing.T) {
cfg := cgroups.Config{Count: 10}
require.IsType(t, &CGroupV1Manager{}, &CGroupV1Manager{cfg: cfg})
require.IsType(t, &NoopManager{}, NewManager(cgroups.Config{}))
}

View file

@ -1,189 +0,0 @@
package cgroups
import (
"fmt"
"hash/crc32"
"os"
"strings"
"github.com/containerd/cgroups"
specs "github.com/opencontainers/runtime-spec/specs-go"
"github.com/prometheus/client_golang/prometheus"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
cgroupscfg "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/cgroups"
"gitlab.com/gitlab-org/gitaly/v14/internal/log"
)
// CGroupV1Manager is the manager for cgroups v1
type CGroupV1Manager struct {
cfg cgroupscfg.Config
hierarchy func() ([]cgroups.Subsystem, error)
memoryFailedTotal, cpuUsage *prometheus.GaugeVec
procs *prometheus.GaugeVec
}
func newV1Manager(cfg cgroupscfg.Config) *CGroupV1Manager {
return &CGroupV1Manager{
cfg: cfg,
hierarchy: func() ([]cgroups.Subsystem, error) {
return defaultSubsystems(cfg.Mountpoint)
},
memoryFailedTotal: prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "gitaly_cgroup_memory_failed_total",
Help: "Number of memory usage hits limits",
},
[]string{"path"},
),
cpuUsage: prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "gitaly_cgroup_cpu_usage",
Help: "CPU Usage of Cgroup",
},
[]string{"path", "type"},
),
procs: prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "gitaly_cgroup_procs_total",
Help: "Total number of procs",
},
[]string{"path", "subsystem"},
),
}
}
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func (cg *CGroupV1Manager) Setup() error {
resources := &specs.LinuxResources{}
if cg.cfg.CPU.Enabled {
resources.CPU = &specs.LinuxCPU{
Shares: &cg.cfg.CPU.Shares,
}
}
if cg.cfg.Memory.Enabled {
resources.Memory = &specs.LinuxMemory{
Limit: &cg.cfg.Memory.Limit,
}
}
for i := 0; i < int(cg.cfg.Count); i++ {
_, err := cgroups.New(cg.hierarchy, cgroups.StaticPath(cg.cgroupPath(i)), resources)
if err != nil {
return fmt.Errorf("failed creating cgroup: %w", err)
}
}
return nil
}
// AddCommand adds the given command to one of the CGroup's buckets. The bucket used for the command
// is determined by hashing the command's arguments. No error is returned if the command has already
// exited.
func (cg *CGroupV1Manager) AddCommand(cmd *command.Command) error {
checksum := crc32.ChecksumIEEE([]byte(strings.Join(cmd.Args(), "")))
groupID := uint(checksum) % cg.cfg.Count
cgroupPath := cg.cgroupPath(int(groupID))
control, err := cgroups.Load(cg.hierarchy, cgroups.StaticPath(cgroupPath))
if err != nil {
return fmt.Errorf("failed loading %s cgroup: %w", cgroupPath, err)
}
if err := control.Add(cgroups.Process{Pid: cmd.Pid()}); err != nil {
// The command could finish before we manage to add it to a cgroup, so
// we don't consider that an error.
if strings.Contains(err.Error(), "no such process") {
return nil
}
return fmt.Errorf("failed adding process to cgroup: %w", err)
}
cmd.SetCgroupPath(cgroupPath)
return nil
}
// Collect collects metrics from the cgroups controller
func (cg *CGroupV1Manager) Collect(ch chan<- prometheus.Metric) {
path := cg.currentProcessCgroup()
logger := log.Default().WithField("cgroup_path", path)
control, err := cgroups.Load(cg.hierarchy, cgroups.StaticPath(path))
if err != nil {
logger.WithError(err).Warn("unable to load cgroup controller")
return
}
if metrics, err := control.Stat(); err != nil {
logger.WithError(err).Warn("unable to get cgroup stats")
} else {
memoryMetric := cg.memoryFailedTotal.WithLabelValues(path)
memoryMetric.Set(float64(metrics.Memory.Usage.Failcnt))
ch <- memoryMetric
cpuUserMetric := cg.cpuUsage.WithLabelValues(path, "user")
cpuUserMetric.Set(float64(metrics.CPU.Usage.User))
ch <- cpuUserMetric
cpuKernelMetric := cg.cpuUsage.WithLabelValues(path, "kernel")
cpuKernelMetric.Set(float64(metrics.CPU.Usage.Kernel))
ch <- cpuKernelMetric
}
if subsystems, err := cg.hierarchy(); err != nil {
logger.WithError(err).Warn("unable to get cgroup hierarchy")
} else {
for _, subsystem := range subsystems {
processes, err := control.Processes(subsystem.Name(), true)
if err != nil {
logger.WithField("subsystem", subsystem.Name()).
WithError(err).
Warn("unable to get process list")
continue
}
procsMetric := cg.procs.WithLabelValues(path, string(subsystem.Name()))
procsMetric.Set(float64(len(processes)))
ch <- procsMetric
}
}
}
// Describe describes the cgroup metrics that Collect provides
func (cg *CGroupV1Manager) Describe(ch chan<- *prometheus.Desc) {
prometheus.DescribeByCollect(cg, ch)
}
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func (cg *CGroupV1Manager) Cleanup() error {
processCgroupPath := cg.currentProcessCgroup()
control, err := cgroups.Load(cg.hierarchy, cgroups.StaticPath(processCgroupPath))
if err != nil {
return fmt.Errorf("failed loading cgroup %s: %w", processCgroupPath, err)
}
if err := control.Delete(); err != nil {
return fmt.Errorf("failed cleaning up cgroup %s: %w", processCgroupPath, err)
}
return nil
}
func (cg *CGroupV1Manager) cgroupPath(groupID int) string {
return fmt.Sprintf("/%s/shard-%d", cg.currentProcessCgroup(), groupID)
}
func (cg *CGroupV1Manager) currentProcessCgroup() string {
return fmt.Sprintf("/%s/gitaly-%d", cg.cfg.HierarchyRoot, os.Getpid())
}
func defaultSubsystems(root string) ([]cgroups.Subsystem, error) {
subsystems := []cgroups.Subsystem{
cgroups.NewMemory(root, cgroups.OptionalSwap()),
cgroups.NewCpu(root),
}
return subsystems, nil
}

View file

@ -1,183 +0,0 @@
package cgroups
import (
"bytes"
"fmt"
"hash/crc32"
"os"
"os/exec"
"path/filepath"
"strconv"
"strings"
"testing"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/prometheus/client_golang/prometheus/testutil"
"github.com/sirupsen/logrus"
"github.com/sirupsen/logrus/hooks/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/cgroups"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func defaultCgroupsConfig() cgroups.Config {
return cgroups.Config{
Count: 3,
HierarchyRoot: "gitaly",
CPU: cgroups.CPU{
Enabled: true,
Shares: 256,
},
Memory: cgroups.Memory{
Enabled: true,
Limit: 1024000,
},
}
}
func TestSetup(t *testing.T) {
mock := newMock(t)
v1Manager := &CGroupV1Manager{
cfg: defaultCgroupsConfig(),
hierarchy: mock.hierarchy,
}
require.NoError(t, v1Manager.Setup())
for i := 0; i < 3; i++ {
memoryPath := filepath.Join(
mock.root, "memory", "gitaly", fmt.Sprintf("gitaly-%d", os.Getpid()), fmt.Sprintf("shard-%d", i), "memory.limit_in_bytes",
)
memoryContent := readCgroupFile(t, memoryPath)
require.Equal(t, string(memoryContent), "1024000")
cpuPath := filepath.Join(
mock.root, "cpu", "gitaly", fmt.Sprintf("gitaly-%d", os.Getpid()), fmt.Sprintf("shard-%d", i), "cpu.shares",
)
cpuContent := readCgroupFile(t, cpuPath)
require.Equal(t, string(cpuContent), "256")
}
}
func TestAddCommand(t *testing.T) {
mock := newMock(t)
config := defaultCgroupsConfig()
v1Manager1 := &CGroupV1Manager{
cfg: config,
hierarchy: mock.hierarchy,
}
require.NoError(t, v1Manager1.Setup())
ctx := testhelper.Context(t)
cmd1 := exec.Command("ls", "-hal", ".")
cmd2, err := command.New(ctx, cmd1, nil, nil, nil)
require.NoError(t, err)
require.NoError(t, cmd2.Wait())
v1Manager2 := &CGroupV1Manager{
cfg: config,
hierarchy: mock.hierarchy,
}
require.NoError(t, v1Manager2.AddCommand(cmd2))
checksum := crc32.ChecksumIEEE([]byte(strings.Join(cmd2.Args(), "")))
groupID := uint(checksum) % config.Count
for _, s := range mock.subsystems {
path := filepath.Join(mock.root, string(s.Name()), "gitaly", fmt.Sprintf("gitaly-%d", os.Getpid()), fmt.Sprintf("shard-%d", groupID), "cgroup.procs")
content := readCgroupFile(t, path)
pid, err := strconv.Atoi(string(content))
require.NoError(t, err)
require.Equal(t, cmd2.Pid(), pid)
}
}
func TestCleanup(t *testing.T) {
mock := newMock(t)
v1Manager := &CGroupV1Manager{
cfg: defaultCgroupsConfig(),
hierarchy: mock.hierarchy,
}
require.NoError(t, v1Manager.Setup())
require.NoError(t, v1Manager.Cleanup())
for i := 0; i < 3; i++ {
memoryPath := filepath.Join(mock.root, "memory", "gitaly", fmt.Sprintf("gitaly-%d", os.Getpid()), fmt.Sprintf("shard-%d", i))
cpuPath := filepath.Join(mock.root, "cpu", "gitaly", fmt.Sprintf("gitaly-%d", os.Getpid()), fmt.Sprintf("shard-%d", i))
require.NoDirExists(t, memoryPath)
require.NoDirExists(t, cpuPath)
}
}
func TestMetrics(t *testing.T) {
mock := newMock(t)
config := defaultCgroupsConfig()
v1Manager1 := newV1Manager(config)
v1Manager1.hierarchy = mock.hierarchy
mock.setupMockCgroupFiles(t, v1Manager1, 2)
require.NoError(t, v1Manager1.Setup())
ctx := testhelper.Context(t)
logger, hook := test.NewNullLogger()
logger.SetLevel(logrus.DebugLevel)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd1 := exec.Command("ls", "-hal", ".")
cmd2, err := command.New(ctx, cmd1, nil, nil, nil)
require.NoError(t, err)
require.NoError(t, v1Manager1.AddCommand(cmd2))
require.NoError(t, cmd2.Wait())
processCgroupPath := v1Manager1.currentProcessCgroup()
expected := bytes.NewBufferString(fmt.Sprintf(`# HELP gitaly_cgroup_cpu_usage CPU Usage of Cgroup
# TYPE gitaly_cgroup_cpu_usage gauge
gitaly_cgroup_cpu_usage{path="%s",type="kernel"} 0
gitaly_cgroup_cpu_usage{path="%s",type="user"} 0
# HELP gitaly_cgroup_memory_failed_total Number of memory usage hits limits
# TYPE gitaly_cgroup_memory_failed_total gauge
gitaly_cgroup_memory_failed_total{path="%s"} 2
# HELP gitaly_cgroup_procs_total Total number of procs
# TYPE gitaly_cgroup_procs_total gauge
gitaly_cgroup_procs_total{path="%s",subsystem="memory"} 1
gitaly_cgroup_procs_total{path="%s",subsystem="cpu"} 1
`, processCgroupPath, processCgroupPath, processCgroupPath, processCgroupPath, processCgroupPath))
assert.NoError(t, testutil.CollectAndCompare(
v1Manager1,
expected,
"gitaly_cgroup_memory_failed_total",
"gitaly_cgroup_cpu_usage",
"gitaly_cgroup_procs_total"))
logEntry := hook.LastEntry()
assert.Contains(
t,
logEntry.Data["command.cgroup_path"],
processCgroupPath,
"log field includes a cgroup path that is a subdirectory of the current process' cgroup path",
)
}
func readCgroupFile(t *testing.T, path string) []byte {
t.Helper()
// The cgroups package creates files with permission 0 because it expects them to already exist (the kernel creates
// the file), and its own tests override the private permission variable to something sensible. Hence we have to
// chmod the file ourselves so that we can read it.
require.NoError(t, os.Chmod(path, 0o666))
return testhelper.MustReadFile(t, path)
}

View file

@ -1,386 +0,0 @@
package command
import (
"bytes"
"context"
"fmt"
"io"
"os/exec"
"regexp"
"runtime"
"strings"
"testing"
"time"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/sirupsen/logrus"
"github.com/sirupsen/logrus/hooks/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func TestNewCommandExtraEnv(t *testing.T) {
ctx := testhelper.Context(t)
extraVar := "FOOBAR=123456"
buff := &bytes.Buffer{}
cmd, err := New(ctx, exec.Command("/usr/bin/env"), nil, buff, nil, extraVar)
require.NoError(t, err)
require.NoError(t, cmd.Wait())
require.Contains(t, strings.Split(buff.String(), "\n"), extraVar)
}
func TestNewCommandExportedEnv(t *testing.T) {
ctx := testhelper.Context(t)
testCases := []struct {
key string
value string
}{
{
key: "HOME",
value: "/home/git",
},
{
key: "PATH",
value: "/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin",
},
{
key: "LD_LIBRARY_PATH",
value: "/path/to/your/lib",
},
{
key: "TZ",
value: "foobar",
},
{
key: "GIT_TRACE",
value: "true",
},
{
key: "GIT_TRACE_PACK_ACCESS",
value: "true",
},
{
key: "GIT_TRACE_PACKET",
value: "true",
},
{
key: "GIT_TRACE_PERFORMANCE",
value: "true",
},
{
key: "GIT_TRACE_SETUP",
value: "true",
},
{
key: "all_proxy",
value: "http://localhost:4000",
},
{
key: "http_proxy",
value: "http://localhost:5000",
},
{
key: "HTTP_PROXY",
value: "http://localhost:6000",
},
{
key: "https_proxy",
value: "https://localhost:5000",
},
{
key: "HTTPS_PROXY",
value: "https://localhost:6000",
},
{
key: "no_proxy",
value: "https://excluded:5000",
},
{
key: "NO_PROXY",
value: "https://excluded:5000",
},
}
for _, tc := range testCases {
t.Run(tc.key, func(t *testing.T) {
if tc.key == "LD_LIBRARY_PATH" && runtime.GOOS == "darwin" {
t.Skip("System Integrity Protection prevents using dynamic linker (dyld) environment variables on macOS. https://apple.co/2XDH4iC")
}
testhelper.ModifyEnvironment(t, tc.key, tc.value)
buff := &bytes.Buffer{}
cmd, err := New(ctx, exec.Command("/usr/bin/env"), nil, buff, nil)
require.NoError(t, err)
require.NoError(t, cmd.Wait())
expectedEnv := fmt.Sprintf("%s=%s", tc.key, tc.value)
require.Contains(t, strings.Split(buff.String(), "\n"), expectedEnv)
})
}
}
func TestNewCommandUnexportedEnv(t *testing.T) {
ctx := testhelper.Context(t)
unexportedEnvKey, unexportedEnvVal := "GITALY_UNEXPORTED_ENV", "foobar"
testhelper.ModifyEnvironment(t, unexportedEnvKey, unexportedEnvVal)
buff := &bytes.Buffer{}
cmd, err := New(ctx, exec.Command("/usr/bin/env"), nil, buff, nil)
require.NoError(t, err)
require.NoError(t, cmd.Wait())
require.NotContains(t, strings.Split(buff.String(), "\n"), fmt.Sprintf("%s=%s", unexportedEnvKey, unexportedEnvVal))
}
func TestRejectEmptyContextDone(t *testing.T) {
defer func() {
p := recover()
if p == nil {
t.Error("expected panic, got none")
return
}
if _, ok := p.(contextWithoutDonePanic); !ok {
panic(p)
}
}()
_, err := New(testhelper.ContextWithoutCancel(), exec.Command("true"), nil, nil, nil)
require.NoError(t, err)
}
func TestNewCommandTimeout(t *testing.T) {
ctx := testhelper.Context(t)
defer func(ch chan struct{}, t time.Duration) {
spawnTokens = ch
spawnConfig.Timeout = t
}(spawnTokens, spawnConfig.Timeout)
// This unbuffered channel will behave like a full/blocked buffered channel.
spawnTokens = make(chan struct{})
// Speed up the test by lowering the timeout
spawnTimeout := 200 * time.Millisecond
spawnConfig.Timeout = spawnTimeout
testDeadline := time.After(1 * time.Second)
tick := time.After(spawnTimeout / 2)
errCh := make(chan error)
go func() {
_, err := New(ctx, exec.Command("true"), nil, nil, nil)
errCh <- err
}()
var err error
timePassed := false
wait:
for {
select {
case err = <-errCh:
break wait
case <-tick:
timePassed = true
case <-testDeadline:
t.Fatal("test timed out")
}
}
require.True(t, timePassed, "time must have passed")
require.Error(t, err)
require.Contains(t, err.Error(), "process spawn timed out after")
}
func TestCommand_Wait_interrupts_after_context_cancellation(t *testing.T) {
ctx, cancel := context.WithCancel(testhelper.Context(t))
cmd, err := New(ctx, exec.CommandContext(ctx, "sleep", "1h"), nil, nil, nil)
require.NoError(t, err)
// Cancel the command early.
go cancel()
err = cmd.Wait()
require.Error(t, err)
s, ok := ExitStatus(err)
require.True(t, ok)
require.Equal(t, -1, s)
}
func TestNewCommandWithSetupStdin(t *testing.T) {
ctx := testhelper.Context(t)
value := "Test value"
output := bytes.NewBuffer(nil)
cmd, err := New(ctx, exec.Command("cat"), SetupStdin, nil, nil)
require.NoError(t, err)
_, err = fmt.Fprintf(cmd, "%s", value)
require.NoError(t, err)
// The output of the `cat` subprocess should exactly match its input
_, err = io.CopyN(output, cmd, int64(len(value)))
require.NoError(t, err)
require.Equal(t, value, output.String())
require.NoError(t, cmd.Wait())
}
func TestNewCommandNullInArg(t *testing.T) {
ctx := testhelper.Context(t)
_, err := New(ctx, exec.Command("sh", "-c", "hello\x00world"), nil, nil, nil)
require.Error(t, err)
require.EqualError(t, err, `detected null byte in command argument "hello\x00world"`)
}
func TestNewNonExistent(t *testing.T) {
ctx := testhelper.Context(t)
cmd, err := New(ctx, exec.Command("command-non-existent"), nil, nil, nil)
require.Nil(t, cmd)
require.Error(t, err)
}
func TestCommandStdErr(t *testing.T) {
ctx := testhelper.Context(t)
var stdout, stderr bytes.Buffer
expectedMessage := `hello world\nhello world\nhello world\nhello world\nhello world\n`
logger := logrus.New()
logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_script.sh"), nil, &stdout, nil)
require.NoError(t, err)
require.Error(t, cmd.Wait())
assert.Empty(t, stdout.Bytes())
require.Equal(t, expectedMessage, extractLastMessage(stderr.String()))
}
func TestCommandStdErrLargeOutput(t *testing.T) {
ctx := testhelper.Context(t)
var stdout, stderr bytes.Buffer
logger := logrus.New()
logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_many_lines.sh"), nil, &stdout, nil)
require.NoError(t, err)
require.Error(t, cmd.Wait())
assert.Empty(t, stdout.Bytes())
msg := strings.ReplaceAll(extractLastMessage(stderr.String()), "\\n", "\n")
require.LessOrEqual(t, len(msg), maxStderrBytes)
}
func TestCommandStdErrBinaryNullBytes(t *testing.T) {
ctx := testhelper.Context(t)
var stdout, stderr bytes.Buffer
logger := logrus.New()
logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_binary_null.sh"), nil, &stdout, nil)
require.NoError(t, err)
require.Error(t, cmd.Wait())
assert.Empty(t, stdout.Bytes())
msg := strings.SplitN(extractLastMessage(stderr.String()), "\\n", 2)[0]
require.Equal(t, strings.Repeat("\\x00", maxStderrLineLength), msg)
}
func TestCommandStdErrLongLine(t *testing.T) {
ctx := testhelper.Context(t)
var stdout, stderr bytes.Buffer
logger := logrus.New()
logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_repeat_a.sh"), nil, &stdout, nil)
require.NoError(t, err)
require.Error(t, cmd.Wait())
assert.Empty(t, stdout.Bytes())
require.Contains(t, stderr.String(), fmt.Sprintf("%s\\n%s", strings.Repeat("a", maxStderrLineLength), strings.Repeat("b", maxStderrLineLength)))
}
func TestCommandStdErrMaxBytes(t *testing.T) {
ctx := testhelper.Context(t)
var stdout, stderr bytes.Buffer
logger := logrus.New()
logger.SetOutput(&stderr)
ctx = ctxlogrus.ToContext(ctx, logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("./testdata/stderr_max_bytes_edge_case.sh"), nil, &stdout, nil)
require.NoError(t, err)
require.Error(t, cmd.Wait())
assert.Empty(t, stdout.Bytes())
message := extractLastMessage(stderr.String())
require.Equal(t, maxStderrBytes, len(strings.ReplaceAll(message, "\\n", "\n")))
}
var logMsgRegex = regexp.MustCompile(`msg="(.+?)"`)
func extractLastMessage(logMessage string) string {
subMatchesAll := logMsgRegex.FindAllStringSubmatch(logMessage, -1)
if len(subMatchesAll) < 1 {
return ""
}
subMatches := subMatchesAll[len(subMatchesAll)-1]
if len(subMatches) != 2 {
return ""
}
return subMatches[1]
}
func TestCommand_logMessage(t *testing.T) {
logger, hook := test.NewNullLogger()
logger.SetLevel(logrus.DebugLevel)
ctx := ctxlogrus.ToContext(testhelper.Context(t), logrus.NewEntry(logger))
cmd, err := New(ctx, exec.Command("echo", "hello world"), nil, nil, nil)
require.NoError(t, err)
cgroupPath := "/sys/fs/cgroup/1"
cmd.SetCgroupPath(cgroupPath)
require.NoError(t, cmd.Wait())
logEntry := hook.LastEntry()
assert.Equal(t, cmd.Pid(), logEntry.Data["pid"])
assert.Equal(t, []string{"echo", "hello world"}, logEntry.Data["args"])
assert.Equal(t, 0, logEntry.Data["command.exitCode"])
assert.Equal(t, cgroupPath, logEntry.Data["command.cgroup_path"])
}

View file

@ -1,4 +0,0 @@
#!/bin/bash
dd if=/dev/zero bs=1000 count=1000 >&2
exit 1

View file

@ -1,9 +0,0 @@
#!/bin/bash
let x=0
while [ $x -lt 100010 ]
do
let x=x+1
printf '%06d zzzzzzzzzz\n' $x >&2
done
exit 1

View file

@ -1,20 +0,0 @@
#!/bin/bash
# This script is used to test that a command writes at most maxStderrBytes to stderr. It simulates the
# edge case where the log writer has already written maxStderrBytes-1 (9999) bytes.
# To simulate this, stderr_max_bytes_edge_case writes 4 lines of the following format:
# line1: 3333 bytes long
# line2: 3331 bytes
# line3: 3331 bytes
# line4: 1 byte
# The first 3 lines sum up to 9999 bytes written, since we write a 2-byte escaped `\n` for each \n we see.
# The 4th line can be any data.
printf 'a%.0s' {1..3333} >&2
printf '\n' >&2
printf 'a%.0s' {1..3331} >&2
printf '\n' >&2
printf 'a%.0s' {1..3331} >&2
printf '\na\n' >&2
exit 1

View file

@ -1,6 +0,0 @@
#!/bin/bash
printf 'a%.0s' {1..8192} >&2
printf '\n' >&2
printf 'b%.0s' {1..8192} >&2
exit 1

View file

@ -1,7 +0,0 @@
#!/bin/bash
for i in {1..5}
do
echo 'hello world' 1>&2
done
exit 1

View file

@ -1,78 +0,0 @@
package catfile
import (
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/metadata"
)
func TestGetCommit(t *testing.T) {
ctx := testhelper.Context(t)
_, objectReader, _ := setupObjectReader(t, ctx)
ctx = metadata.NewIncomingContext(ctx, metadata.MD{})
const commitSha = "2d1db523e11e777e49377cfb22d368deec3f0793"
const commitMsg = "Correct test_env.rb path for adding branch\n"
const blobSha = "c60514b6d3d6bf4bec1030f70026e34dfbd69ad5"
testCases := []struct {
desc string
revision string
errStr string
}{
{
desc: "commit",
revision: commitSha,
},
{
desc: "not existing commit",
revision: "not existing revision",
errStr: "object not found",
},
{
desc: "blob sha",
revision: blobSha,
errStr: "object not found",
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
c, err := GetCommit(ctx, objectReader, git.Revision(tc.revision))
if tc.errStr == "" {
require.NoError(t, err)
require.Equal(t, commitMsg, string(c.Body))
} else {
require.EqualError(t, err, tc.errStr)
}
})
}
}
func TestGetCommitWithTrailers(t *testing.T) {
ctx := testhelper.Context(t)
cfg, objectReader, testRepo := setupObjectReader(t, ctx)
ctx = metadata.NewIncomingContext(ctx, metadata.MD{})
commit, err := GetCommitWithTrailers(ctx, gittest.NewCommandFactory(t, cfg), testRepo,
objectReader, "5937ac0a7beb003549fc5fd26fc247adbce4a52e")
require.NoError(t, err)
require.Equal(t, commit.Trailers, []*gitalypb.CommitTrailer{
{
Key: []byte("Signed-off-by"),
Value: []byte("Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>"),
},
})
}

View file

@ -1,69 +0,0 @@
package git
import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
)
func TestCommandDescriptions_revListPositionalArgs(t *testing.T) {
revlist, ok := commandDescriptions["rev-list"]
require.True(t, ok)
require.NotNil(t, revlist.validatePositionalArgs)
for _, tc := range []struct {
desc string
args []string
expectedErr error
}{
{
desc: "normal reference",
args: []string{
"master",
},
},
{
desc: "reference with leading dash",
args: []string{
"-master",
},
expectedErr: fmt.Errorf("rev-list: %w",
fmt.Errorf("positional arg \"-master\" cannot start with dash '-': %w", ErrInvalidArg),
),
},
{
desc: "revisions and pseudo-revisions",
args: []string{
"master --not --all",
},
},
} {
t.Run(tc.desc, func(t *testing.T) {
err := revlist.validatePositionalArgs(tc.args)
require.Equal(t, tc.expectedErr, err)
})
}
}
func TestThreadsConfigValue(t *testing.T) {
t.Parallel()
for _, tt := range []struct {
cpus int
threads string
}{
{1, "1"},
{2, "1"},
{3, "1"},
{4, "2"},
{8, "3"},
{9, "3"},
{13, "3"},
{16, "4"},
{27, "4"},
{32, "5"},
} {
actualThreads := threadsConfigValue(tt.cpus)
require.Equal(t, tt.threads, actualThreads)
}
}

View file

@ -1,187 +0,0 @@
package gitpipe
import (
"errors"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
)
const (
lfsPointer1 = "0c304a93cb8430108629bbbcaa27db3343299bc0"
lfsPointer2 = "f78df813119a79bfbe0442ab92540a61d3ab7ff3"
lfsPointer3 = "bab31d249f78fba464d1b75799aad496cc07fa3b"
lfsPointer4 = "125fcc9f6e33175cb278b9b2809154d2535fe19f"
)
func TestCatfileInfo(t *testing.T) {
cfg := testcfg.Build(t)
repoProto, _ := gittest.CloneRepo(t, cfg, cfg.Storages[0])
repo := localrepo.NewTestRepo(t, cfg, repoProto)
for _, tc := range []struct {
desc string
revlistInputs []RevisionResult
opts []CatfileInfoOption
expectedResults []CatfileInfoResult
expectedErr error
}{
{
desc: "single blob",
revlistInputs: []RevisionResult{
{OID: lfsPointer1},
},
expectedResults: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}},
},
},
{
desc: "multiple blobs",
revlistInputs: []RevisionResult{
{OID: lfsPointer1},
{OID: lfsPointer2},
{OID: lfsPointer3},
{OID: lfsPointer4},
},
expectedResults: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}},
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer2, Type: "blob", Size: 127}},
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer3, Type: "blob", Size: 127}},
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer4, Type: "blob", Size: 129}},
},
},
{
desc: "object name",
revlistInputs: []RevisionResult{
{OID: "b95c0fad32f4361845f91d9ce4c1721b52b82793"},
{OID: "93e123ac8a3e6a0b600953d7598af629dec7b735", ObjectName: []byte("branch-test.txt")},
},
expectedResults: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: "b95c0fad32f4361845f91d9ce4c1721b52b82793", Type: "tree", Size: 43}},
{ObjectInfo: &catfile.ObjectInfo{Oid: "93e123ac8a3e6a0b600953d7598af629dec7b735", Type: "blob", Size: 59}, ObjectName: []byte("branch-test.txt")},
},
},
{
desc: "invalid object ID",
revlistInputs: []RevisionResult{
{OID: "invalidobjectid"},
},
expectedErr: errors.New("retrieving object info for \"invalidobjectid\": object not found"),
},
{
desc: "mixed valid and invalid revision",
revlistInputs: []RevisionResult{
{OID: lfsPointer1},
{OID: "invalidobjectid"},
{OID: lfsPointer2},
},
expectedResults: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}},
},
expectedErr: errors.New("retrieving object info for \"invalidobjectid\": object not found"),
},
{
desc: "skip everything",
revlistInputs: []RevisionResult{
{OID: lfsPointer1},
{OID: lfsPointer2},
},
opts: []CatfileInfoOption{
WithSkipCatfileInfoResult(func(*catfile.ObjectInfo) bool { return true }),
},
},
{
desc: "skip one",
revlistInputs: []RevisionResult{
{OID: lfsPointer1},
{OID: lfsPointer2},
},
opts: []CatfileInfoOption{
WithSkipCatfileInfoResult(func(objectInfo *catfile.ObjectInfo) bool {
return objectInfo.Oid == lfsPointer1
}),
},
expectedResults: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer2, Type: "blob", Size: 127}},
},
},
{
desc: "skip nothing",
revlistInputs: []RevisionResult{
{OID: lfsPointer1},
{OID: lfsPointer2},
},
opts: []CatfileInfoOption{
WithSkipCatfileInfoResult(func(*catfile.ObjectInfo) bool { return false }),
},
expectedResults: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}},
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer2, Type: "blob", Size: 127}},
},
},
} {
t.Run(tc.desc, func(t *testing.T) {
ctx := testhelper.Context(t)
catfileCache := catfile.NewCache(cfg)
defer catfileCache.Stop()
objectInfoReader, err := catfileCache.ObjectInfoReader(ctx, repo)
require.NoError(t, err)
it, err := CatfileInfo(ctx, objectInfoReader, NewRevisionIterator(tc.revlistInputs), tc.opts...)
require.NoError(t, err)
var results []CatfileInfoResult
for it.Next() {
results = append(results, it.Result())
}
// We're converting the error here to a plain un-nested error such
// that we don't have to replicate the complete error's structure.
err = it.Err()
if err != nil {
err = errors.New(err.Error())
}
require.Equal(t, tc.expectedErr, err)
require.Equal(t, tc.expectedResults, results)
})
}
}
func TestCatfileInfoAllObjects(t *testing.T) {
cfg := testcfg.Build(t)
ctx := testhelper.Context(t)
repoProto, repoPath := gittest.InitRepo(t, cfg, cfg.Storages[0])
repo := localrepo.NewTestRepo(t, cfg, repoProto)
blob1 := gittest.WriteBlob(t, cfg, repoPath, []byte("foobar"))
blob2 := gittest.WriteBlob(t, cfg, repoPath, []byte("barfoo"))
tree := gittest.WriteTree(t, cfg, repoPath, []gittest.TreeEntry{
{Path: "foobar", Mode: "100644", OID: blob1},
})
commit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithParents())
it := CatfileInfoAllObjects(ctx, repo)
var results []CatfileInfoResult
for it.Next() {
results = append(results, it.Result())
}
require.NoError(t, it.Err())
require.ElementsMatch(t, []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: blob1, Type: "blob", Size: 6}},
{ObjectInfo: &catfile.ObjectInfo{Oid: blob2, Type: "blob", Size: 6}},
{ObjectInfo: &catfile.ObjectInfo{Oid: tree, Type: "tree", Size: 34}},
{ObjectInfo: &catfile.ObjectInfo{Oid: commit, Type: "commit", Size: 177}},
}, results)
}

View file

@ -1,117 +0,0 @@
package gitpipe
import (
"errors"
"io"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
)
func TestCatfileObject(t *testing.T) {
cfg := testcfg.Build(t)
repoProto, _ := gittest.CloneRepo(t, cfg, cfg.Storages[0])
repo := localrepo.NewTestRepo(t, cfg, repoProto)
for _, tc := range []struct {
desc string
catfileInfoInputs []CatfileInfoResult
expectedResults []CatfileObjectResult
expectedErr error
}{
{
desc: "single blob",
catfileInfoInputs: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}},
},
expectedResults: []CatfileObjectResult{
{Object: &catfile.Object{ObjectInfo: catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}}},
},
},
{
desc: "multiple blobs",
catfileInfoInputs: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}},
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer2, Type: "blob", Size: 127}},
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer3, Type: "blob", Size: 127}},
{ObjectInfo: &catfile.ObjectInfo{Oid: lfsPointer4, Type: "blob", Size: 129}},
},
expectedResults: []CatfileObjectResult{
{Object: &catfile.Object{ObjectInfo: catfile.ObjectInfo{Oid: lfsPointer1, Type: "blob", Size: 133}}},
{Object: &catfile.Object{ObjectInfo: catfile.ObjectInfo{Oid: lfsPointer2, Type: "blob", Size: 127}}},
{Object: &catfile.Object{ObjectInfo: catfile.ObjectInfo{Oid: lfsPointer3, Type: "blob", Size: 127}}},
{Object: &catfile.Object{ObjectInfo: catfile.ObjectInfo{Oid: lfsPointer4, Type: "blob", Size: 129}}},
},
},
{
desc: "revlist result with object names",
catfileInfoInputs: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: "b95c0fad32f4361845f91d9ce4c1721b52b82793", Type: "tree", Size: 43}},
{ObjectInfo: &catfile.ObjectInfo{Oid: "93e123ac8a3e6a0b600953d7598af629dec7b735", Type: "blob", Size: 59}, ObjectName: []byte("branch-test.txt")},
},
expectedResults: []CatfileObjectResult{
{Object: &catfile.Object{ObjectInfo: catfile.ObjectInfo{Oid: "b95c0fad32f4361845f91d9ce4c1721b52b82793", Type: "tree", Size: 43}}},
{Object: &catfile.Object{ObjectInfo: catfile.ObjectInfo{Oid: "93e123ac8a3e6a0b600953d7598af629dec7b735", Type: "blob", Size: 59}}, ObjectName: []byte("branch-test.txt")},
},
},
{
desc: "invalid object ID",
catfileInfoInputs: []CatfileInfoResult{
{ObjectInfo: &catfile.ObjectInfo{Oid: "invalidobjectid", Type: "blob"}},
},
expectedErr: errors.New("requesting object: object not found"),
},
} {
t.Run(tc.desc, func(t *testing.T) {
ctx := testhelper.Context(t)
catfileCache := catfile.NewCache(cfg)
defer catfileCache.Stop()
objectReader, err := catfileCache.ObjectReader(ctx, repo)
require.NoError(t, err)
it, err := CatfileObject(ctx, objectReader, NewCatfileInfoIterator(tc.catfileInfoInputs))
require.NoError(t, err)
var results []CatfileObjectResult
for it.Next() {
result := it.Result()
objectData, err := io.ReadAll(result)
require.NoError(t, err)
require.Len(t, objectData, int(result.ObjectSize()))
// We only really want to compare the publicly visible fields
// containing info about the object itself, and not the object's
// private state. We thus need to reconstruct the objects here.
results = append(results, CatfileObjectResult{
Object: &catfile.Object{
ObjectInfo: catfile.ObjectInfo{
Oid: result.ObjectID(),
Type: result.ObjectType(),
Size: result.ObjectSize(),
},
},
ObjectName: result.ObjectName,
})
}
// We're converting the error here to a plain un-nested error such
// that we don't have to replicate the complete error's structure.
err = it.Err()
if err != nil {
err = errors.New(err.Error())
}
require.Equal(t, tc.expectedErr, err)
require.Equal(t, tc.expectedResults, results)
})
}
}

View file

@ -1,11 +0,0 @@
package gitpipe
import (
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}

View file

@ -1,18 +0,0 @@
package gittest
import (
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
)
// NewCommandFactory creates a new Git command factory.
func NewCommandFactory(tb testing.TB, cfg config.Cfg, opts ...git.ExecCommandFactoryOption) git.CommandFactory {
tb.Helper()
factory, cleanup, err := git.NewExecCommandFactory(cfg, opts...)
require.NoError(tb, err)
tb.Cleanup(cleanup)
return factory
}

View file

@ -1,179 +0,0 @@
package gittest
import (
"bytes"
"fmt"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
const (
committerName = "Scrooge McDuck"
committerEmail = "scrooge@mcduck.com"
)
type writeCommitConfig struct {
branch string
parents []git.ObjectID
committerName string
message string
treeEntries []TreeEntry
alternateObjectDir string
}
// WriteCommitOption is an option which can be passed to WriteCommit.
type WriteCommitOption func(*writeCommitConfig)
// WithBranch is an option for WriteCommit which will cause it to update the given branch name to point to
// the new commit.
func WithBranch(branch string) WriteCommitOption {
return func(cfg *writeCommitConfig) {
cfg.branch = branch
}
}
// WithMessage is an option for WriteCommit which will set the commit message.
func WithMessage(message string) WriteCommitOption {
return func(cfg *writeCommitConfig) {
cfg.message = message
}
}
// WithParents is an option for WriteCommit which will set the parent OIDs of the resulting commit.
func WithParents(parents ...git.ObjectID) WriteCommitOption {
return func(cfg *writeCommitConfig) {
if parents != nil {
cfg.parents = parents
} else {
// We're explicitly initializing parents here such that we can discern the
// case where the commit should be created with no parents.
cfg.parents = []git.ObjectID{}
}
}
}
// WithTreeEntries is an option for WriteCommit which will cause it to create a new tree and use it
// as root tree of the resulting commit.
func WithTreeEntries(entries ...TreeEntry) WriteCommitOption {
return func(cfg *writeCommitConfig) {
cfg.treeEntries = entries
}
}
// WithCommitterName is an option for WriteCommit which will set the committer name.
func WithCommitterName(name string) WriteCommitOption {
return func(cfg *writeCommitConfig) {
cfg.committerName = name
}
}
// WithAlternateObjectDirectory will cause the commit to be written into the given alternate object
// directory. This can either be an absolute path or a relative path. In the latter case the path
// is considered to be relative to the repository path.
func WithAlternateObjectDirectory(alternateObjectDir string) WriteCommitOption {
return func(cfg *writeCommitConfig) {
cfg.alternateObjectDir = alternateObjectDir
}
}
// WriteCommit writes a new commit into the target repository.
func WriteCommit(t testing.TB, cfg config.Cfg, repoPath string, opts ...WriteCommitOption) git.ObjectID {
t.Helper()
var writeCommitConfig writeCommitConfig
for _, opt := range opts {
opt(&writeCommitConfig)
}
message := "message"
if writeCommitConfig.message != "" {
message = writeCommitConfig.message
}
stdin := bytes.NewBufferString(message)
// The ID of an arbitrary commit known to exist in the test repository.
parents := []git.ObjectID{"1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"}
if writeCommitConfig.parents != nil {
parents = writeCommitConfig.parents
}
var tree string
if len(writeCommitConfig.treeEntries) > 0 {
tree = WriteTree(t, cfg, repoPath, writeCommitConfig.treeEntries).String()
} else if len(parents) == 0 {
// If there are no parents, then we set the root tree to the empty tree.
tree = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
} else {
tree = parents[0].String() + "^{tree}"
}
if writeCommitConfig.committerName == "" {
writeCommitConfig.committerName = committerName
}
// Use 'commit-tree' instead of 'commit' because we are in a bare
// repository. What we do here is the same as "commit -m message
// --allow-empty".
commitArgs := []string{
"-c", fmt.Sprintf("user.name=%s", writeCommitConfig.committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"-C", repoPath,
"commit-tree", "-F", "-", tree,
}
var env []string
if writeCommitConfig.alternateObjectDir != "" {
require.True(t, filepath.IsAbs(writeCommitConfig.alternateObjectDir),
"alternate object directory must be an absolute path")
require.NoError(t, os.MkdirAll(writeCommitConfig.alternateObjectDir, 0o755))
env = append(env,
fmt.Sprintf("GIT_OBJECT_DIRECTORY=%s", writeCommitConfig.alternateObjectDir),
fmt.Sprintf("GIT_ALTERNATE_OBJECT_DIRECTORIES=%s", filepath.Join(repoPath, "objects")),
)
}
for _, parent := range parents {
commitArgs = append(commitArgs, "-p", parent.String())
}
stdout := ExecOpts(t, cfg, ExecConfig{
Stdin: stdin,
Env: env,
}, commitArgs...)
oid, err := git.NewObjectIDFromHex(text.ChompBytes(stdout))
require.NoError(t, err)
if writeCommitConfig.branch != "" {
ExecOpts(t, cfg, ExecConfig{
Env: env,
}, "-C", repoPath, "update-ref", "refs/heads/"+writeCommitConfig.branch, oid.String())
}
return oid
}
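// writeExampleCommit is a hypothetical usage sketch, not part of the original
// helpers: it shows how the WriteCommit options above compose. The branch name,
// message, and file contents are illustrative assumptions.
func writeExampleCommit(t testing.TB, cfg config.Cfg, repoPath string) git.ObjectID {
return WriteCommit(t, cfg, repoPath,
WithBranch("example-branch"),
WithMessage("example subject\n\nexample body"),
WithTreeEntries(TreeEntry{Mode: "100644", Path: "file", Content: "content"}),
)
}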
func authorEqualIgnoringDate(t testing.TB, expected *gitalypb.CommitAuthor, actual *gitalypb.CommitAuthor) {
t.Helper()
require.Equal(t, expected.GetName(), actual.GetName(), "author name does not match")
require.Equal(t, expected.GetEmail(), actual.GetEmail(), "author mail does not match")
}
// CommitEqual tests if two `GitCommit`s are equal
func CommitEqual(t testing.TB, expected, actual *gitalypb.GitCommit) {
t.Helper()
authorEqualIgnoringDate(t, expected.GetAuthor(), actual.GetAuthor())
authorEqualIgnoringDate(t, expected.GetCommitter(), actual.GetCommitter())
require.Equal(t, expected.GetBody(), actual.GetBody(), "body does not match")
require.Equal(t, expected.GetSubject(), actual.GetSubject(), "subject does not match")
require.Equal(t, expected.GetId(), actual.GetId(), "object ID does not match")
require.Equal(t, expected.GetParentIds(), actual.GetParentIds(), "parent IDs do not match")
}

View file

@ -1,175 +0,0 @@
package gittest_test
import (
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func TestWriteCommit(t *testing.T) {
cfg, repoProto, repoPath := testcfg.BuildWithRepo(t)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
ctx := testhelper.Context(t)
catfileCache := catfile.NewCache(cfg)
defer catfileCache.Stop()
objectReader, err := catfileCache.ObjectReader(ctx, repo)
require.NoError(t, err)
defaultCommitter := &gitalypb.CommitAuthor{
Name: []byte("Scrooge McDuck"),
Email: []byte("scrooge@mcduck.com"),
}
defaultParentID := "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"
revisions := map[git.Revision]git.ObjectID{
"refs/heads/master": "",
"refs/heads/master~": "",
}
for revision := range revisions {
oid, err := repo.ResolveRevision(ctx, revision)
require.NoError(t, err)
revisions[revision] = oid
}
for _, tc := range []struct {
desc string
opts []gittest.WriteCommitOption
expectedCommit *gitalypb.GitCommit
expectedTreeEntries []gittest.TreeEntry
expectedRevUpdate git.Revision
}{
{
desc: "no options",
expectedCommit: &gitalypb.GitCommit{
Author: defaultCommitter,
Committer: defaultCommitter,
Subject: []byte("message"),
Body: []byte("message"),
Id: "cab056fb7bfc5a4d024c2c5b9b445b80f212fdcd",
ParentIds: []string{
defaultParentID,
},
},
},
{
desc: "with commit message",
opts: []gittest.WriteCommitOption{
gittest.WithMessage("my custom message\n\nfoobar\n"),
},
expectedCommit: &gitalypb.GitCommit{
Author: defaultCommitter,
Committer: defaultCommitter,
Subject: []byte("my custom message"),
Body: []byte("my custom message\n\nfoobar\n"),
Id: "7b7e8876f7df27ab99e46678acbf9ae3d29264ba",
ParentIds: []string{
defaultParentID,
},
},
},
{
desc: "with no parents",
opts: []gittest.WriteCommitOption{
gittest.WithParents(),
},
expectedCommit: &gitalypb.GitCommit{
Author: defaultCommitter,
Committer: defaultCommitter,
Subject: []byte("message"),
Body: []byte("message"),
Id: "549090fbeacc6607bc70648d3ba554c355e670c5",
ParentIds: nil,
},
},
{
desc: "with multiple parents",
opts: []gittest.WriteCommitOption{
gittest.WithParents(revisions["refs/heads/master"], revisions["refs/heads/master~"]),
},
expectedCommit: &gitalypb.GitCommit{
Author: defaultCommitter,
Committer: defaultCommitter,
Subject: []byte("message"),
Body: []byte("message"),
Id: "650084693e5ca9c0b05a21fc5ac21ad1805c758b",
ParentIds: []string{
revisions["refs/heads/master"].String(),
revisions["refs/heads/master~"].String(),
},
},
},
{
desc: "with branch",
opts: []gittest.WriteCommitOption{
gittest.WithBranch("foo"),
},
expectedCommit: &gitalypb.GitCommit{
Author: defaultCommitter,
Committer: defaultCommitter,
Subject: []byte("message"),
Body: []byte("message"),
Id: "cab056fb7bfc5a4d024c2c5b9b445b80f212fdcd",
ParentIds: []string{
defaultParentID,
},
},
expectedRevUpdate: "refs/heads/foo",
},
{
desc: "with tree entry",
opts: []gittest.WriteCommitOption{
gittest.WithTreeEntries(gittest.TreeEntry{
Content: "foobar",
Mode: "100644",
Path: "file",
}),
},
expectedCommit: &gitalypb.GitCommit{
Author: defaultCommitter,
Committer: defaultCommitter,
Subject: []byte("message"),
Body: []byte("message"),
Id: "12da4907ed3331f4991ba6817317a3a90801288e",
ParentIds: []string{
defaultParentID,
},
},
expectedTreeEntries: []gittest.TreeEntry{
{
Content: "foobar",
Mode: "100644",
Path: "file",
},
},
},
} {
t.Run(tc.desc, func(t *testing.T) {
oid := gittest.WriteCommit(t, cfg, repoPath, tc.opts...)
commit, err := catfile.GetCommit(ctx, objectReader, oid.Revision())
require.NoError(t, err)
gittest.CommitEqual(t, tc.expectedCommit, commit)
if tc.expectedTreeEntries != nil {
gittest.RequireTree(t, cfg, repoPath, oid.String(), tc.expectedTreeEntries)
}
if tc.expectedRevUpdate != "" {
updatedOID, err := repo.ResolveRevision(ctx, tc.expectedRevUpdate)
require.NoError(t, err)
require.Equal(t, oid, updatedOID)
}
})
}
}

View file

@ -1,77 +0,0 @@
package gittest
import (
"crypto/rand"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
)
// TestDeltaIslands is based on the tests in
// https://github.com/git/git/blob/master/t/t5320-delta-islands.sh .
func TestDeltaIslands(t *testing.T, cfg config.Cfg, repoPath string, repack func() error) {
t.Helper()
// Create blobs that we expect Git to use delta compression on.
blob1, err := io.ReadAll(io.LimitReader(rand.Reader, 100000))
require.NoError(t, err)
blob2 := append(blob1, "\nblob 2"...)
// Assume Git prefers the largest blob as the delta base.
badBlob := append(blob2, "\nbad blob"...)
blob1ID := commitBlob(t, cfg, repoPath, "refs/heads/branch1", blob1)
blob2ID := commitBlob(t, cfg, repoPath, "refs/tags/tag2", blob2)
// The bad blob will only be reachable via a non-standard ref. Because of
// that it should be excluded from delta chains in the main island.
badBlobID := commitBlob(t, cfg, repoPath, "refs/bad/ref3", badBlob)
// So far we have created blobs and commits, but they will be in loose
// object files; we want them to be delta compressed. Run repack to make
// that happen.
Exec(t, cfg, "-C", repoPath, "repack", "-ad")
assert.Equal(t, badBlobID, deltaBase(t, cfg, repoPath, blob1ID), "expect blob 1 delta base to be bad blob after test setup")
assert.Equal(t, badBlobID, deltaBase(t, cfg, repoPath, blob2ID), "expect blob 2 delta base to be bad blob after test setup")
require.NoError(t, repack(), "repack after delta island setup")
assert.Equal(t, blob2ID, deltaBase(t, cfg, repoPath, blob1ID), "blob 1 delta base should be blob 2 after repack")
// blob2 is the bigger of the two so it should be the delta base
assert.Equal(t, git.ZeroOID.String(), deltaBase(t, cfg, repoPath, blob2ID), "blob 2 should not be delta compressed after repack")
}
func commitBlob(t *testing.T, cfg config.Cfg, repoPath, ref string, content []byte) string {
blobID := WriteBlob(t, cfg, repoPath, content)
// No parents, which means this will be an initial commit. Not very
// realistic, but it doesn't matter for delta compression.
commitID := WriteCommit(t, cfg, repoPath,
WithTreeEntries(TreeEntry{
Mode: "100644", OID: blobID, Path: "file",
}),
WithParents(),
)
Exec(t, cfg, "-C", repoPath, "update-ref", ref, commitID.String())
return blobID.String()
}
func deltaBase(t *testing.T, cfg config.Cfg, repoPath string, blobID string) string {
catfileOut := ExecOpts(t, cfg, ExecConfig{Stdin: strings.NewReader(blobID)},
"-C", repoPath, "cat-file", "--batch-check=%(deltabase)",
)
return chompToString(catfileOut)
}
func chompToString(s []byte) string { return strings.TrimSuffix(string(s), "\n") }

View file

@ -1,60 +0,0 @@
package gittest
import (
"bytes"
"fmt"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestInterceptingCommandFactory(t *testing.T) {
cfg, repoProto, repoPath := setup(t)
ctx := testhelper.Context(t)
factory := NewInterceptingCommandFactory(ctx, t, cfg, func(execEnv git.ExecutionEnvironment) string {
return fmt.Sprintf(
`#!/usr/bin/env bash
%q rev-parse --sq-quote 'Hello, world!'
`, execEnv.BinaryPath)
})
expectedString := " 'Hello, world'\\!''\n"
t.Run("New", func(t *testing.T) {
var stdout bytes.Buffer
cmd, err := factory.New(ctx, repoProto, git.SubCmd{
Name: "rev-parse",
Args: []string{"something"},
}, git.WithStdout(&stdout))
require.NoError(t, err)
require.NoError(t, cmd.Wait())
require.Equal(t, expectedString, stdout.String())
})
t.Run("NewWithDir", func(t *testing.T) {
var stdout bytes.Buffer
cmd, err := factory.NewWithDir(ctx, repoPath, git.SubCmd{
Name: "rev-parse",
Args: []string{"something"},
}, git.WithStdout(&stdout))
require.NoError(t, err)
require.NoError(t, cmd.Wait())
require.Equal(t, expectedString, stdout.String())
})
t.Run("NewWithoutRepo", func(t *testing.T) {
var stdout bytes.Buffer
cmd, err := factory.NewWithoutRepo(ctx, git.SubCmd{
Name: "rev-parse",
Args: []string{"something"},
Flags: []git.Option{
git.ValueFlag{Name: "-C", Value: repoPath},
},
}, git.WithStdout(&stdout))
require.NoError(t, err)
require.NoError(t, cmd.Wait())
require.Equal(t, expectedString, stdout.String())
})
}

View file

@ -1,83 +0,0 @@
package gittest
import (
"bytes"
"os/exec"
"path/filepath"
"strconv"
"strings"
"testing"
"time"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/text"
)
// RequireObjectExists asserts that the given repository does contain an object with the specified
// object ID.
func RequireObjectExists(t testing.TB, cfg config.Cfg, repoPath string, objectID git.ObjectID) {
requireObjectExists(t, cfg, repoPath, objectID, true)
}
// RequireObjectNotExists asserts that the given repository does not contain an object with the
// specified object ID.
func RequireObjectNotExists(t testing.TB, cfg config.Cfg, repoPath string, objectID git.ObjectID) {
requireObjectExists(t, cfg, repoPath, objectID, false)
}
func requireObjectExists(t testing.TB, cfg config.Cfg, repoPath string, objectID git.ObjectID, exists bool) {
cmd := NewCommand(t, cfg, "-C", repoPath, "cat-file", "-e", objectID.String())
cmd.Env = []string{
"GIT_ALLOW_PROTOCOL=", // To prevent partial clone reaching remote repo over SSH
}
if exists {
require.NoError(t, cmd.Run(), "checking for object should succeed")
return
}
require.Error(t, cmd.Run(), "checking for object should fail")
}
// GetGitPackfileDirSize gets the number of 1k blocks of a git object directory
func GetGitPackfileDirSize(t testing.TB, repoPath string) int64 {
return getGitDirSize(t, repoPath, "objects", "pack")
}
func getGitDirSize(t testing.TB, repoPath string, subdirs ...string) int64 {
cmd := exec.Command("du", "-s", "-k", filepath.Join(append([]string{repoPath}, subdirs...)...))
output, err := cmd.Output()
require.NoError(t, err)
if len(output) < 2 {
t.Error("invalid output of du -s -k")
}
outputSplit := strings.SplitN(string(output), "\t", 2)
blocks, err := strconv.ParseInt(outputSplit[0], 10, 64)
require.NoError(t, err)
return blocks
}
// WriteBlobs writes n distinct blobs into the git repository's object
// database. Each object has the current time in nanoseconds as contents.
func WriteBlobs(t testing.TB, cfg config.Cfg, testRepoPath string, n int) []string {
var blobIDs []string
for i := 0; i < n; i++ {
contents := []byte(strconv.Itoa(time.Now().Nanosecond()))
blobIDs = append(blobIDs, WriteBlob(t, cfg, testRepoPath, contents).String())
}
return blobIDs
}
// WriteBlob writes the given contents as a blob into the repository and returns its OID.
func WriteBlob(t testing.TB, cfg config.Cfg, testRepoPath string, contents []byte) git.ObjectID {
hex := text.ChompBytes(ExecOpts(t, cfg, ExecConfig{Stdin: bytes.NewReader(contents)},
"-C", testRepoPath, "hash-object", "-w", "--stdin",
))
oid, err := git.NewObjectIDFromHex(hex)
require.NoError(t, err)
return oid
}
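// writeAndCheckBlobExample is a hypothetical sketch, not part of the original
// file: it writes a blob via the helper above and then asserts its presence via
// cat-file. The blob contents are an illustrative assumption.
func writeAndCheckBlobExample(t testing.TB, cfg config.Cfg, repoPath string) {
oid := WriteBlob(t, cfg, repoPath, []byte("example contents"))
RequireObjectExists(t, cfg, repoPath, oid)
}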

View file

@ -1,25 +0,0 @@
package gittest
import (
"io"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/pktline"
)
// WritePktlineString writes the pktline-formatted data into the writer.
func WritePktlineString(t *testing.T, writer io.Writer, data string) {
_, err := pktline.WriteString(writer, data)
require.NoError(t, err)
}
// WritePktlineFlush writes the pktline-formatted flush into the writer.
func WritePktlineFlush(t *testing.T, writer io.Writer) {
require.NoError(t, pktline.WriteFlush(writer))
}
// WritePktlineDelim writes the pktline-formatted delimiter into the writer.
func WritePktlineDelim(t *testing.T, writer io.Writer) {
require.NoError(t, pktline.WriteDelim(writer))
}

View file

@ -1,35 +0,0 @@
package gittest
import (
"context"
"fmt"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
// NewProtocolDetectingCommandFactory creates a new intercepting Git command factory that allows the
// protocol to be tested. It returns this factory and a function to read the GIT_PROTOCOL
// environment variable created by the wrapper script.
func NewProtocolDetectingCommandFactory(ctx context.Context, t testing.TB, cfg config.Cfg) (git.CommandFactory, func() string) {
envPath := filepath.Join(testhelper.TempDir(t), "git-env")
gitCmdFactory := NewInterceptingCommandFactory(ctx, t, cfg, func(execEnv git.ExecutionEnvironment) string {
return fmt.Sprintf(
`#!/usr/bin/env bash
env | grep ^GIT_PROTOCOL= >>%q
exec %q "$@"
`, envPath, execEnv.BinaryPath)
})
return gitCmdFactory, func() string {
data, err := os.ReadFile(envPath)
require.NoError(t, err)
return string(data)
}
}
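// protocolExample is a hypothetical sketch, not part of the original file: the
// factory is used to spawn Git commands (typically upload-pack or receive-pack),
// and the returned function reports which GIT_PROTOCOL value those commands saw.
func protocolExample(ctx context.Context, t testing.TB, cfg config.Cfg) func() string {
gitCmdFactory, readProtocol := NewProtocolDetectingCommandFactory(ctx, t, cfg)
_ = gitCmdFactory // the factory would be passed to the code under test here
return readProtocol
}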

View file

@ -1,13 +0,0 @@
package gittest
import (
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
)
// WriteRef writes a reference into the repository pointing to the given object ID.
func WriteRef(t testing.TB, cfg config.Cfg, repoPath string, ref git.ReferenceName, oid git.ObjectID) {
Exec(t, cfg, "-C", repoPath, "update-ref", ref.String(), oid.String())
}

View file

@ -1,378 +0,0 @@
package gittest
import (
"bytes"
"context"
"crypto/sha256"
"os"
"path/filepath"
"runtime"
"testing"
log "github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
gitalyauth "gitlab.com/gitlab-org/gitaly/v14/auth"
"gitlab.com/gitlab-org/gitaly/v14/client"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/proto"
)
const (
// GlRepository is the default repository name for newly created test
// repos.
GlRepository = "project-1"
// GlProjectPath is the default project path for newly created test
// repos.
GlProjectPath = "gitlab-org/gitlab-test"
// SeedGitLabTest is the path of the gitlab-test.git repository in _build/testrepos
SeedGitLabTest = "gitlab-test.git"
// SeedGitLabTestMirror is the path of the gitlab-test-mirror.git repository in _build/testrepos
SeedGitLabTestMirror = "gitlab-test-mirror.git"
)
// InitRepoDir creates a temporary directory for a repo, without initializing it
func InitRepoDir(t testing.TB, storagePath, relativePath string) *gitalypb.Repository {
repoPath := filepath.Join(storagePath, relativePath, "..")
require.NoError(t, os.MkdirAll(repoPath, 0o755), "making repo parent dir")
return &gitalypb.Repository{
StorageName: "default",
RelativePath: relativePath,
GlRepository: GlRepository,
GlProjectPath: GlProjectPath,
}
}
// NewObjectPoolName returns a random pool repository name in format
// '@pools/[0-9a-z]{2}/[0-9a-z]{2}/[0-9a-z]{64}.git'.
func NewObjectPoolName(t testing.TB) string {
return filepath.Join("@pools", newDiskHash(t)+".git")
}
// NewRepositoryName returns a random repository hash
// in format '@hashed/[0-9a-f]{2}/[0-9a-f]{2}/[0-9a-f]{64}(.git)?'.
func NewRepositoryName(t testing.TB, bare bool) string {
suffix := ""
if bare {
suffix = ".git"
}
return filepath.Join("@hashed", newDiskHash(t)+suffix)
}
// newDiskHash generates a random directory path following the Rails app's
// approach in the hashed storage module, formatted as '[0-9a-f]{2}/[0-9a-f]{2}/[0-9a-f]{64}'.
// https://gitlab.com/gitlab-org/gitlab/-/blob/f5c7d8eb1dd4eee5106123e04dec26d277ff6a83/app/models/storage/hashed.rb#L38-43
func newDiskHash(t testing.TB) string {
// rails app calculates a sha256 and uses its hex representation
// as the directory path
b, err := text.RandomHex(sha256.Size)
require.NoError(t, err)
return filepath.Join(b[0:2], b[2:4], b)
}
// CreateRepositoryConfig allows for configuring how the repository is created.
type CreateRepositoryConfig struct {
// ClientConn is the connection used to create the repository. If unset, the config is used to
// dial the service.
ClientConn *grpc.ClientConn
// Storage determines the storage the repository is created in. If unset, the first storage
// from the config is used.
Storage config.Storage
// RelativePath sets the relative path of the repository in the storage. If unset,
// the relative path is set to a randomly generated hashed storage path
RelativePath string
// Seed determines which repository is used to seed the created repository. If unset, the repository
// is just created. The value should be one of the test repositories in _build/testrepos.
Seed string
}
func dialService(ctx context.Context, t testing.TB, cfg config.Cfg) *grpc.ClientConn {
dialOptions := []grpc.DialOption{}
if cfg.Auth.Token != "" {
dialOptions = append(dialOptions, grpc.WithPerRPCCredentials(gitalyauth.RPCCredentialsV2(cfg.Auth.Token)))
}
conn, err := client.DialContext(ctx, cfg.SocketPath, dialOptions)
require.NoError(t, err)
return conn
}
// CreateRepository creates a new repository and returns it and its absolute path.
func CreateRepository(ctx context.Context, t testing.TB, cfg config.Cfg, configs ...CreateRepositoryConfig) (*gitalypb.Repository, string) {
t.Helper()
require.Less(t, len(configs), 2, "you must either pass no or exactly one option")
opts := CreateRepositoryConfig{}
if len(configs) == 1 {
opts = configs[0]
}
conn := opts.ClientConn
if conn == nil {
conn = dialService(ctx, t, cfg)
t.Cleanup(func() { conn.Close() })
}
client := gitalypb.NewRepositoryServiceClient(conn)
storage := cfg.Storages[0]
if (opts.Storage != config.Storage{}) {
storage = opts.Storage
}
relativePath := newDiskHash(t)
if opts.RelativePath != "" {
relativePath = opts.RelativePath
}
repository := &gitalypb.Repository{
StorageName: storage.Name,
RelativePath: relativePath,
GlRepository: GlRepository,
GlProjectPath: GlProjectPath,
}
if opts.Seed != "" {
_, err := client.CreateRepositoryFromURL(ctx, &gitalypb.CreateRepositoryFromURLRequest{
Repository: repository,
Url: testRepositoryPath(t, opts.Seed),
Mirror: true,
})
require.NoError(t, err)
} else {
_, err := client.CreateRepository(ctx, &gitalypb.CreateRepositoryRequest{
Repository: repository,
})
require.NoError(t, err)
}
t.Cleanup(func() {
// The ctx parameter would be canceled by now as the tests defer the cancellation.
_, err := client.RemoveRepository(context.TODO(), &gitalypb.RemoveRepositoryRequest{
Repository: repository,
})
if st, ok := status.FromError(err); ok && st.Code() == codes.NotFound {
// The tests may delete the repository, so this is not a failure.
return
}
require.NoError(t, err)
})
// Return a cloned repository so the above clean up function still targets the correct repository
// if the tests modify the returned repository.
clonedRepo := proto.Clone(repository).(*gitalypb.Repository)
return clonedRepo, filepath.Join(storage.Path, getReplicaPath(ctx, t, conn, repository))
}
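// createSeededRepositoryExample is a hypothetical usage sketch, not part of the
// original file: it creates a repository seeded from the local gitlab-test.git
// clone using the defaults defined above.
func createSeededRepositoryExample(ctx context.Context, t testing.TB, cfg config.Cfg) (*gitalypb.Repository, string) {
return CreateRepository(ctx, t, cfg, CreateRepositoryConfig{
Seed: SeedGitLabTest,
})
}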
// GetReplicaPath retrieves the repository's replica path if the test has been
// run with Praefect in front of it. This is necessary if the test creates a repository
// through Praefect and peeks into the filesystem afterwards. Conn should be pointing to
// Praefect.
func GetReplicaPath(ctx context.Context, t testing.TB, cfg config.Cfg, repo repository.GitRepo) string {
conn := dialService(ctx, t, cfg)
defer conn.Close()
return getReplicaPath(ctx, t, conn, repo)
}
func getReplicaPath(ctx context.Context, t testing.TB, conn *grpc.ClientConn, repo repository.GitRepo) string {
metadata, err := gitalypb.NewPraefectInfoServiceClient(conn).GetRepositoryMetadata(
ctx, &gitalypb.GetRepositoryMetadataRequest{
Query: &gitalypb.GetRepositoryMetadataRequest_Path_{
Path: &gitalypb.GetRepositoryMetadataRequest_Path{
VirtualStorage: repo.GetStorageName(),
RelativePath: repo.GetRelativePath(),
},
},
})
if status, ok := status.FromError(err); ok && status.Code() == codes.Unimplemented && status.Message() == "unknown service gitaly.PraefectInfoService" {
// The repository is stored at relative path if the test is running without Praefect in front.
return repo.GetRelativePath()
}
require.NoError(t, err)
return metadata.ReplicaPath
}
// RewrittenRepository returns the repository as it would be received by a Gitaly after being rewritten by Praefect.
// This should be used when the repository is being accessed through the filesystem to ensure the access path is
// correct. If the test is not running with Praefect in front, it returns an unaltered copy of the repository.
func RewrittenRepository(ctx context.Context, t testing.TB, cfg config.Cfg, repository *gitalypb.Repository) *gitalypb.Repository {
// Don't modify the original repository.
rewritten := proto.Clone(repository).(*gitalypb.Repository)
rewritten.RelativePath = GetReplicaPath(ctx, t, cfg, repository)
return rewritten
}
// InitRepoOpts contains options for InitRepo.
type InitRepoOpts struct {
// WithWorktree determines whether the resulting Git repository should have a worktree or
// not.
WithWorktree bool
// WithRelativePath determines the relative path of this repository.
WithRelativePath string
}
// InitRepo creates a new empty repository in the given storage. You can either pass no or exactly
// one InitRepoOpts.
func InitRepo(t testing.TB, cfg config.Cfg, storage config.Storage, opts ...InitRepoOpts) (*gitalypb.Repository, string) {
require.Less(t, len(opts), 2, "you must either pass no or exactly one option")
opt := InitRepoOpts{}
if len(opts) == 1 {
opt = opts[0]
}
relativePath := opt.WithRelativePath
if relativePath == "" {
relativePath = NewRepositoryName(t, !opt.WithWorktree)
}
repoPath := filepath.Join(storage.Path, relativePath)
args := []string{"init"}
if !opt.WithWorktree {
args = append(args, "--bare")
}
Exec(t, cfg, append(args, repoPath)...)
repo := InitRepoDir(t, storage.Path, relativePath)
repo.StorageName = storage.Name
if opt.WithWorktree {
repo.RelativePath = filepath.Join(repo.RelativePath, ".git")
}
t.Cleanup(func() { require.NoError(t, os.RemoveAll(repoPath)) })
return repo, repoPath
}
// CloneRepoOpts is an option for CloneRepo.
type CloneRepoOpts struct {
// RelativePath determines the relative path of newly created Git repository. If unset, the
// relative path is computed via NewRepositoryName.
RelativePath string
// WithWorktree determines whether the resulting Git repository should have a worktree or
// not.
WithWorktree bool
// SourceRepo determines the name of the source repository which shall be cloned. The source
// repository is assumed to be relative to "_build/testrepos". If unset, defaults to
// "gitlab-test.git".
SourceRepo string
}
// CloneRepo clones a new copy of test repository under a subdirectory in the storage root. You can
// either pass no or exactly one CloneRepoOpts.
func CloneRepo(t testing.TB, cfg config.Cfg, storage config.Storage, opts ...CloneRepoOpts) (*gitalypb.Repository, string) {
require.Less(t, len(opts), 2, "you must either pass no or exactly one option")
opt := CloneRepoOpts{}
if len(opts) == 1 {
opt = opts[0]
}
relativePath := opt.RelativePath
if relativePath == "" {
relativePath = NewRepositoryName(t, !opt.WithWorktree)
}
sourceRepo := opt.SourceRepo
if sourceRepo == "" {
sourceRepo = "gitlab-test.git"
}
repo := InitRepoDir(t, storage.Path, relativePath)
repo.StorageName = storage.Name
args := []string{"clone", "--no-hardlinks", "--dissociate"}
if !opt.WithWorktree {
args = append(args, "--bare")
} else {
// For non-bare repos the relative path is the .git folder inside the path
repo.RelativePath = filepath.Join(relativePath, ".git")
}
absolutePath := filepath.Join(storage.Path, relativePath)
Exec(t, cfg, append(args, testRepositoryPath(t, sourceRepo), absolutePath)...)
Exec(t, cfg, "-C", absolutePath, "remote", "remove", "origin")
t.Cleanup(func() { require.NoError(t, os.RemoveAll(absolutePath)) })
return repo, absolutePath
}
// BundleTestRepo creates a bundle of a local test repo. E.g.
// `gitlab-test.git`. `patterns` define the bundle contents as per
// `git-rev-list-args`. If there are no patterns then `--all` is assumed.
func BundleTestRepo(t testing.TB, cfg config.Cfg, sourceRepo, bundlePath string, patterns ...string) {
if len(patterns) == 0 {
patterns = []string{"--all"}
}
repoPath := testRepositoryPath(t, sourceRepo)
Exec(t, cfg, append([]string{"-C", repoPath, "bundle", "create", bundlePath}, patterns...)...)
}
// ChecksumTestRepo calculates the checksum of a local test repo. E.g.
// `gitlab-test.git`.
func ChecksumTestRepo(t testing.TB, cfg config.Cfg, sourceRepo string) *git.Checksum {
var checksum git.Checksum
repoPath := testRepositoryPath(t, sourceRepo)
lines := bytes.Split(Exec(t, cfg, "-C", repoPath, "show-ref", "--head"), []byte("\n"))
for _, line := range lines {
checksum.AddBytes(line)
}
return &checksum
}
// testRepositoryPath returns the absolute path of the local 'gitlab-org/gitlab-test.git' clone.
// It is cloned to that path by the test preparation step of make.
func testRepositoryPath(t testing.TB, repo string) string {
_, currentFile, _, ok := runtime.Caller(0)
if !ok {
require.Fail(t, "could not get caller info")
}
path := filepath.Join(filepath.Dir(currentFile), "..", "..", "..", "_build", "testrepos", repo)
if !isValidRepoPath(path) {
makePath := filepath.Join(filepath.Dir(currentFile), "..", "..", "..")
makeTarget := "prepare-test-repos"
log.Printf("local clone of test repository %q not found in %q, running `make %v`", repo, path, makeTarget)
testhelper.MustRunCommand(t, nil, "make", "-C", makePath, makeTarget)
}
return path
}
// isValidRepoPath checks whether a valid git repository exists at the given path.
func isValidRepoPath(absolutePath string) bool {
if _, err := os.Stat(filepath.Join(absolutePath, "objects")); err != nil {
return false
}
return true
}
// AddWorktreeArgs returns git command arguments for adding a worktree at the
// specified repo
func AddWorktreeArgs(repoPath, worktreeName string) []string {
return []string{"-C", repoPath, "worktree", "add", "--detach", worktreeName}
}
// AddWorktree creates a worktree in the repository path for tests
func AddWorktree(t testing.TB, cfg config.Cfg, repoPath string, worktreeName string) {
Exec(t, cfg, AddWorktreeArgs(repoPath, worktreeName)...)
}

View file

@ -1,56 +0,0 @@
package gittest
import (
"os"
"path/filepath"
"runtime"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
// setup sets up a test configuration and repository. Ideally we'd use our central test helpers to
// do this, but because of an import cycle we can't.
func setup(t testing.TB) (config.Cfg, *gitalypb.Repository, string) {
t.Helper()
rootDir := testhelper.TempDir(t)
var cfg config.Cfg
cfg.SocketPath = "it is a stub to bypass Validate method"
cfg.Storages = []config.Storage{
{
Name: "default",
Path: filepath.Join(rootDir, "storage.d"),
},
}
require.NoError(t, os.Mkdir(cfg.Storages[0].Path, 0o755))
_, currentFile, _, ok := runtime.Caller(0)
require.True(t, ok, "could not get caller info")
cfg.Ruby.Dir = filepath.Join(filepath.Dir(currentFile), "../../../ruby")
cfg.GitlabShell.Dir = filepath.Join(rootDir, "shell.d")
require.NoError(t, os.Mkdir(cfg.GitlabShell.Dir, 0o755))
cfg.BinDir = filepath.Join(rootDir, "bin.d")
require.NoError(t, os.Mkdir(cfg.BinDir, 0o755))
cfg.RuntimeDir = filepath.Join(rootDir, "run.d")
require.NoError(t, os.Mkdir(cfg.RuntimeDir, 0o700))
require.NoError(t, cfg.Validate())
repo, repoPath := CloneRepo(t, cfg, cfg.Storages[0])
return cfg, repo, repoPath
}

View file

@ -1,44 +0,0 @@
package git
import (
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func TestValidateRevision(t *testing.T) {
testCases := []struct {
rev string
ok bool
}{
{rev: "foo/bar", ok: true},
{rev: "-foo/bar", ok: false},
{rev: "foo bar", ok: false},
{rev: "foo\x00bar", ok: false},
{rev: "foo/bar:baz", ok: false},
}
for _, tc := range testCases {
t.Run(tc.rev, func(t *testing.T) {
err := ValidateRevision([]byte(tc.rev))
if tc.ok {
require.NoError(t, err)
} else {
require.Error(t, err)
}
})
}
}
func newCommandFactory(tb testing.TB, cfg config.Cfg, opts ...ExecCommandFactoryOption) *ExecCommandFactory {
gitCmdFactory, cleanup, err := NewExecCommandFactory(cfg, opts...)
require.NoError(tb, err)
tb.Cleanup(cleanup)
return gitCmdFactory
}

View file

@ -1,57 +0,0 @@
package git_test
import (
"strings"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
)
func TestWithRefHook(t *testing.T) {
cfg, repo, _ := testcfg.BuildWithRepo(t)
ctx := testhelper.Context(t)
opt := git.WithRefTxHook(repo)
subCmd := git.SubCmd{Name: "update-ref", Args: []string{"refs/heads/master", git.ZeroOID.String()}}
for _, tt := range []struct {
name string
fn func() (*command.Command, error)
}{
{
name: "NewCommand",
fn: func() (*command.Command, error) {
return gittest.NewCommandFactory(t, cfg, git.WithSkipHooks()).New(ctx, repo, subCmd, opt)
},
},
} {
t.Run(tt.name, func(t *testing.T) {
cmd, err := tt.fn()
require.NoError(t, err)
require.NoError(t, cmd.Wait())
var actualEnvVars []string
for _, env := range cmd.Env() {
kv := strings.SplitN(env, "=", 2)
require.Len(t, kv, 2)
key, val := kv[0], kv[1]
if strings.HasPrefix(key, "GL_") || strings.HasPrefix(key, "GITALY_") {
require.NotEmptyf(t, strings.TrimSpace(val),
"env var %s value should not be empty string", key)
actualEnvVars = append(actualEnvVars, key)
}
}
require.EqualValues(t, []string{
"GITALY_HOOKS_PAYLOAD",
"GITALY_LOG_DIR",
}, actualEnvVars)
})
}
}

View file

@ -1,64 +0,0 @@
package housekeeping
import (
"bytes"
"context"
"errors"
"fmt"
"os"
"path/filepath"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/stats"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
)
// WriteCommitGraph updates the commit-graph in the given repository. The commit-graph is updated
// incrementally, except when it doesn't exist yet or when the existing commit-graph is
// detected to be missing Bloom filters.
func WriteCommitGraph(ctx context.Context, repo *localrepo.Repo) error {
repoPath, err := repo.Path()
if err != nil {
return err
}
missingBloomFilters := true
if _, err := os.Stat(filepath.Join(repoPath, stats.CommitGraphRelPath)); err != nil {
if !errors.Is(err, os.ErrNotExist) {
return helper.ErrInternal(fmt.Errorf("remove commit graph file: %w", err))
}
// The objects/info/commit-graph file doesn't exist, so
// check whether a commit-graph chain exists and includes Bloom filters.
if missingBloomFilters, err = stats.IsMissingBloomFilters(repoPath); err != nil {
return helper.ErrInternal(fmt.Errorf("should remove commit graph chain: %w", err))
}
}
flags := []git.Option{
git.Flag{Name: "--reachable"},
git.Flag{Name: "--changed-paths"}, // enables Bloom filters
git.ValueFlag{Name: "--size-multiple", Value: "4"},
}
if missingBloomFilters {
// If the commit-graph doesn't use Bloom filters, we instruct the operation to replace
// the existing commit-graph with a new one:
// https://git-scm.com/docs/git-commit-graph#Documentation/git-commit-graph.txt-emwriteem
flags = append(flags, git.Flag{Name: "--split=replace"})
} else {
flags = append(flags, git.Flag{Name: "--split"})
}
var stderr bytes.Buffer
if err := repo.ExecAndWait(ctx, git.SubSubCmd{
Name: "commit-graph",
Action: "write",
Flags: flags,
}, git.WithStderr(&stderr)); err != nil {
return helper.ErrInternalf("writing commit-graph: %s: %v", err, stderr.String())
}
return nil
}
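// writeCommitGraphExample is a hypothetical sketch, not part of the original
// file: housekeeping callers invoke WriteCommitGraph with an already-constructed
// *localrepo.Repo; a full rewrite of the graph only happens when it is missing
// or lacks Bloom filters.
func writeCommitGraphExample(ctx context.Context, repo *localrepo.Repo) error {
return WriteCommitGraph(ctx, repo)
}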

View file

@ -1,25 +0,0 @@
package housekeeping
import (
"regexp"
"strings"
)
// railsPoolDirRegexp is used to validate object pool directory structure and name as generated by Rails.
var railsPoolDirRegexp = regexp.MustCompile(`^@pools/([0-9a-f]{2})/([0-9a-f]{2})/([0-9a-f]{64})\.git$`)
// IsRailsPoolPath returns whether the relative path indicates this is a pool path generated by Rails.
func IsRailsPoolPath(relativePath string) bool {
matches := railsPoolDirRegexp.FindStringSubmatch(relativePath)
if matches == nil || !strings.HasPrefix(matches[3], matches[1]+matches[2]) {
return false
}
return true
}
// IsPoolPath returns whether the relative path indicates the repository is an object
// pool.
func IsPoolPath(relativePath string) bool {
return IsRailsPoolPath(relativePath)
}
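// isPoolPathExample is a hypothetical sketch, not part of the original file: for
// a path to qualify, the two leading subdirectories must repeat the first four
// hex characters of the 64-character hash.
func isPoolPathExample() bool {
hash := strings.Repeat("ab", 32) // 64 hex characters beginning with "abab"
return IsPoolPath("@pools/ab/ab/" + hash + ".git")
}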

View file

@ -1,37 +0,0 @@
package housekeeping
import (
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
)
func TestIsPoolPath(t *testing.T) {
for _, tc := range []struct {
desc string
relativePath string
isPoolPath bool
}{
{
desc: "rails pool directory",
relativePath: gittest.NewObjectPoolName(t),
isPoolPath: true,
},
{
desc: "empty string",
},
{
desc:             "rails path where the first two subdirs don't match the full hash",
relativePath: "@pools/aa/bb/ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff.git",
},
{
desc:             "normal repos don't match",
relativePath: "@hashed/" + gittest.NewRepositoryName(t, true),
},
} {
t.Run(tc.desc, func(t *testing.T) {
require.Equal(t, tc.isPoolPath, IsPoolPath(tc.relativePath))
})
}
}

View file

@ -1,11 +0,0 @@
package housekeeping
import (
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}

View file

@ -1,73 +0,0 @@
package git
import (
"encoding/hex"
"errors"
"fmt"
"regexp"
)
const (
// EmptyTreeOID is the Git tree object hash that corresponds to an empty tree (directory)
EmptyTreeOID = ObjectID("4b825dc642cb6eb9a060e54bf8d69288fbee4904")
// ZeroOID is the special value that Git uses to signal a ref or object does not exist
ZeroOID = ObjectID("0000000000000000000000000000000000000000")
)
var (
// ErrInvalidObjectID is returned in case an object ID's string
// representation is not a valid one.
ErrInvalidObjectID = errors.New("invalid object ID")
objectIDRegex = regexp.MustCompile(`\A[0-9a-f]{40}\z`)
)
// ObjectID represents an object ID.
type ObjectID string
// NewObjectIDFromHex constructs a new ObjectID from the given hex
// representation of the object ID. Returns ErrInvalidObjectID if the given
// OID is not valid.
func NewObjectIDFromHex(hex string) (ObjectID, error) {
if err := ValidateObjectID(hex); err != nil {
return "", err
}
return ObjectID(hex), nil
}
// String returns the hex representation of the ObjectID.
func (oid ObjectID) String() string {
return string(oid)
}
// Bytes returns the byte representation of the ObjectID.
func (oid ObjectID) Bytes() ([]byte, error) {
decoded, err := hex.DecodeString(string(oid))
if err != nil {
return nil, err
}
return decoded, nil
}
// Revision returns a revision of the ObjectID. This directly returns the hex
// representation as every object ID is a valid revision.
func (oid ObjectID) Revision() Revision {
return Revision(oid.String())
}
// ValidateObjectID checks if id is a syntactically correct object ID. Abbreviated
// object IDs are not deemed to be valid. Returns an ErrInvalidObjectID if the
// id is not valid.
func ValidateObjectID(id string) error {
if objectIDRegex.MatchString(id) {
return nil
}
return fmt.Errorf("%w: %q", ErrInvalidObjectID, id)
}
// IsZeroOID is a shortcut for `something == git.ZeroOID.String()`
func (oid ObjectID) IsZeroOID() bool {
return string(oid) == string(ZeroOID)
}
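// parseObjectIDExample is a hypothetical sketch, not part of the original file:
// it shows the intended flow of validating the textual form first and then
// working with the typed ObjectID.
func parseObjectIDExample(hexOID string) (Revision, error) {
oid, err := NewObjectIDFromHex(hexOID) // rejects anything but 40 lowercase hex characters
if err != nil {
return "", fmt.Errorf("parsing object ID: %w", err)
}
return oid.Revision(), nil
}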

View file

@ -1,163 +0,0 @@
package git
import (
"bytes"
"encoding/hex"
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
func TestValidateObjectID(t *testing.T) {
for _, tc := range []struct {
desc string
oid string
valid bool
}{
{
desc: "valid object ID",
oid: "356e7793f9654d51dfb27312a1464062bceb9fa3",
valid: true,
},
{
desc: "object ID with non-hex characters fails",
oid: "x56e7793f9654d51dfb27312a1464062bceb9fa3",
valid: false,
},
{
desc: "object ID with upper-case letters fails",
oid: "356E7793F9654D51DFB27312A1464062BCEB9FA3",
valid: false,
},
{
desc: "too short object ID fails",
oid: "356e7793f9654d51dfb27312a1464062bceb9fa",
valid: false,
},
{
desc: "too long object ID fails",
oid: "356e7793f9654d51dfb27312a1464062bceb9fa33",
valid: false,
},
{
desc: "empty string fails",
oid: "",
valid: false,
},
} {
t.Run(tc.desc, func(t *testing.T) {
err := ValidateObjectID(tc.oid)
if tc.valid {
require.NoError(t, err)
} else {
require.Error(t, err)
require.EqualError(t, err, fmt.Sprintf("invalid object ID: %q", tc.oid))
}
})
}
}
func TestNewObjectIDFromHex(t *testing.T) {
for _, tc := range []struct {
desc string
oid string
valid bool
}{
{
desc: "valid object ID",
oid: "356e7793f9654d51dfb27312a1464062bceb9fa3",
valid: true,
},
{
desc: "object ID with non-hex characters fails",
oid: "x56e7793f9654d51dfb27312a1464062bceb9fa3",
valid: false,
},
{
desc: "object ID with upper-case letters fails",
oid: "356E7793F9654D51DFB27312A1464062BCEB9FA3",
valid: false,
},
{
desc: "too short object ID fails",
oid: "356e7793f9654d51dfb27312a1464062bceb9fa",
valid: false,
},
{
desc: "too long object ID fails",
oid: "356e7793f9654d51dfb27312a1464062bceb9fa33",
valid: false,
},
{
desc: "empty string fails",
oid: "",
valid: false,
},
} {
t.Run(tc.desc, func(t *testing.T) {
oid, err := NewObjectIDFromHex(tc.oid)
if tc.valid {
require.NoError(t, err)
require.Equal(t, tc.oid, oid.String())
} else {
require.Error(t, err)
}
})
}
}
func TestObjectID_Bytes(t *testing.T) {
for _, tc := range []struct {
desc string
oid ObjectID
expectedBytes []byte
expectedErr error
}{
{
desc: "zero OID",
oid: ZeroOID,
expectedBytes: bytes.Repeat([]byte{0}, 20),
},
{
desc: "valid object ID",
oid: ObjectID(strings.Repeat("8", 40)),
expectedBytes: bytes.Repeat([]byte{0x88}, 20),
},
{
desc: "invalid object ID",
oid: ObjectID(strings.Repeat("8", 39) + "x"),
expectedErr: hex.InvalidByteError('x'),
},
} {
t.Run(tc.desc, func(t *testing.T) {
actualBytes, err := tc.oid.Bytes()
require.Equal(t, tc.expectedErr, err)
require.Equal(t, tc.expectedBytes, actualBytes)
})
}
}
func TestIsZeroOID(t *testing.T) {
for _, tc := range []struct {
desc string
oid ObjectID
isZero bool
}{
{
desc: "zero object ID",
oid: ZeroOID,
isZero: true,
},
{
desc:   "empty tree object ID",
oid: EmptyTreeOID,
isZero: false,
},
} {
t.Run(tc.desc, func(t *testing.T) {
require.Equal(t, tc.isZero, tc.oid.IsZeroOID())
})
}
}

View file

@ -1,121 +0,0 @@
package localrepo
import (
"bytes"
"context"
"fmt"
"strconv"
"strings"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/storage"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
// Repo represents a local Git repository.
type Repo struct {
repository.GitRepo
locator storage.Locator
gitCmdFactory git.CommandFactory
catfileCache catfile.Cache
}
// New creates a new Repo from its protobuf representation.
func New(locator storage.Locator, gitCmdFactory git.CommandFactory, catfileCache catfile.Cache, repo repository.GitRepo) *Repo {
return &Repo{
GitRepo: repo,
locator: locator,
gitCmdFactory: gitCmdFactory,
catfileCache: catfileCache,
}
}
// NewTestRepo constructs a Repo. It is intended as a helper function for tests which assembles
// dependencies ad-hoc from the given config.
func NewTestRepo(t testing.TB, cfg config.Cfg, repo repository.GitRepo, factoryOpts ...git.ExecCommandFactoryOption) *Repo {
t.Helper()
if cfg.SocketPath != testcfg.UnconfiguredSocketPath {
repo = gittest.RewrittenRepository(testhelper.Context(t), t, cfg, &gitalypb.Repository{
StorageName: repo.GetStorageName(),
RelativePath: repo.GetRelativePath(),
GitObjectDirectory: repo.GetGitObjectDirectory(),
GitAlternateObjectDirectories: repo.GetGitAlternateObjectDirectories(),
})
}
gitCmdFactory, cleanup, err := git.NewExecCommandFactory(cfg, factoryOpts...)
t.Cleanup(cleanup)
require.NoError(t, err)
catfileCache := catfile.NewCache(cfg)
t.Cleanup(catfileCache.Stop)
locator := config.NewLocator(cfg)
return New(locator, gitCmdFactory, catfileCache, repo)
}
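// newRepoExample is a hypothetical sketch, not part of the original file: it
// shows how the dependencies are wired up outside of tests, mirroring what
// NewTestRepo does above. All inputs are assumed to be pre-constructed.
func newRepoExample(cfg config.Cfg, gitCmdFactory git.CommandFactory, catfileCache catfile.Cache, repoProto repository.GitRepo) *Repo {
// The locator translates the storage name in the repository proto into an on-disk path.
return New(config.NewLocator(cfg), gitCmdFactory, catfileCache, repoProto)
}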
// Exec creates a git command with the given args and Repo, executed in the
// Repo. It validates the arguments in the command before executing.
func (repo *Repo) Exec(ctx context.Context, cmd git.Cmd, opts ...git.CmdOpt) (*command.Command, error) {
return repo.gitCmdFactory.New(ctx, repo, cmd, opts...)
}
// ExecAndWait is similar to Exec, but waits for the command to exit before
// returning.
func (repo *Repo) ExecAndWait(ctx context.Context, cmd git.Cmd, opts ...git.CmdOpt) error {
command, err := repo.Exec(ctx, cmd, opts...)
if err != nil {
return err
}
return command.Wait()
}
// GitVersion returns the Git version in use.
func (repo *Repo) GitVersion(ctx context.Context) (git.Version, error) {
return repo.gitCmdFactory.GitVersion(ctx)
}
func errorWithStderr(err error, stderr []byte) error {
if len(stderr) == 0 {
return err
}
return fmt.Errorf("%w, stderr: %q", err, stderr)
}
// Size calculates the size of all reachable objects in bytes
func (repo *Repo) Size(ctx context.Context) (int64, error) {
var stdout bytes.Buffer
if err := repo.ExecAndWait(ctx,
git.SubCmd{
Name: "rev-list",
Flags: []git.Option{
git.Flag{Name: "--all"},
git.Flag{Name: "--objects"},
git.Flag{Name: "--use-bitmap-index"},
git.Flag{Name: "--disk-usage"},
},
},
git.WithStdout(&stdout),
); err != nil {
return -1, err
}
size, err := strconv.ParseInt(strings.TrimSuffix(stdout.String(), "\n"), 10, 64)
if err != nil {
return -1, err
}
return size, nil
}

View file

@ -1,173 +0,0 @@
package localrepo
import (
"bytes"
"context"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func TestRepo(t *testing.T) {
cfg := testcfg.Build(t)
gittest.TestRepository(t, cfg, func(ctx context.Context, t testing.TB, seeded bool) (git.Repository, string) {
t.Helper()
var (
pbRepo *gitalypb.Repository
repoPath string
)
if seeded {
pbRepo, repoPath = gittest.CloneRepo(t, cfg, cfg.Storages[0])
} else {
pbRepo, repoPath = gittest.InitRepo(t, cfg, cfg.Storages[0])
}
gitCmdFactory := gittest.NewCommandFactory(t, cfg)
catfileCache := catfile.NewCache(cfg)
t.Cleanup(catfileCache.Stop)
return New(config.NewLocator(cfg), gitCmdFactory, catfileCache, pbRepo), repoPath
})
}
func TestSize(t *testing.T) {
cfg := testcfg.Build(t)
gitCmdFactory := gittest.NewCommandFactory(t, cfg)
catfileCache := catfile.NewCache(cfg)
t.Cleanup(catfileCache.Stop)
testCases := []struct {
desc string
setup func(repoPath string, t *testing.T)
expectedSize int64
}{
{
desc: "empty repository",
expectedSize: 0,
},
{
desc: "one committed file",
setup: func(repoPath string, t *testing.T) {
require.NoError(t, os.WriteFile(
filepath.Join(repoPath, "file"),
bytes.Repeat([]byte("a"), 1000),
0o644,
))
cmd := gittest.NewCommand(t, cfg, "-C", repoPath, "add", "file")
require.NoError(t, cmd.Run())
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "commit", "-m", "initial")
require.NoError(t, cmd.Run())
},
expectedSize: 202,
},
{
desc: "one large loose blob",
setup: func(repoPath string, t *testing.T) {
require.NoError(t, os.WriteFile(
filepath.Join(repoPath, "file"),
bytes.Repeat([]byte("a"), 1000),
0o644,
))
cmd := gittest.NewCommand(t, cfg, "-C", repoPath, "checkout", "-b", "branch-a")
require.NoError(t, cmd.Run())
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "add", "file")
require.NoError(t, cmd.Run())
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "commit", "-m", "initial")
require.NoError(t, cmd.Run())
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "update-ref", "-d", "refs/heads/branch-a")
require.NoError(t, cmd.Run())
},
expectedSize: 0,
},
{
desc: "modification to blob without repack",
setup: func(repoPath string, t *testing.T) {
require.NoError(t, os.WriteFile(
filepath.Join(repoPath, "file"),
bytes.Repeat([]byte("a"), 1000),
0o644,
))
cmd := gittest.NewCommand(t, cfg, "-C", repoPath, "add", "file")
require.NoError(t, cmd.Run())
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "commit", "-m", "initial")
require.NoError(t, cmd.Run())
f, err := os.OpenFile(
filepath.Join(repoPath, "file"),
os.O_APPEND|os.O_WRONLY,
0o644)
require.NoError(t, err)
defer f.Close()
_, err = f.WriteString("a")
assert.NoError(t, err)
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "commit", "-am", "modification")
require.NoError(t, cmd.Run())
},
expectedSize: 437,
},
{
desc: "modification to blob after repack",
setup: func(repoPath string, t *testing.T) {
require.NoError(t, os.WriteFile(
filepath.Join(repoPath, "file"),
bytes.Repeat([]byte("a"), 1000),
0o644,
))
cmd := gittest.NewCommand(t, cfg, "-C", repoPath, "add", "file")
require.NoError(t, cmd.Run())
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "commit", "-m", "initial")
require.NoError(t, cmd.Run())
f, err := os.OpenFile(
filepath.Join(repoPath, "file"),
os.O_APPEND|os.O_WRONLY,
0o644)
require.NoError(t, err)
defer f.Close()
_, err = f.WriteString("a")
assert.NoError(t, err)
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "commit", "-am", "modification")
require.NoError(t, cmd.Run())
cmd = gittest.NewCommand(t, cfg, "-C", repoPath, "repack", "-a", "-d")
require.NoError(t, cmd.Run())
},
expectedSize: 391,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
pbRepo, repoPath := gittest.InitRepo(t, cfg, cfg.Storages[0], gittest.InitRepoOpts{
WithWorktree: true,
})
repo := New(config.NewLocator(cfg), gitCmdFactory, catfileCache, pbRepo)
if tc.setup != nil {
tc.setup(repoPath, t)
}
ctx := testhelper.Context(t)
size, err := repo.Size(ctx)
require.NoError(t, err)
assert.Equal(t, tc.expectedSize, size)
})
}
}

View file

@ -1,109 +0,0 @@
package lstree
import (
"io"
"os"
"testing"
"github.com/stretchr/testify/require"
)
func TestParser(t *testing.T) {
testCases := []struct {
desc string
filename string
entries Entries
}{
{
desc: "regular entries",
filename: "testdata/z-lstree.txt",
entries: Entries{
{
Mode: []byte("100644"),
Type: Blob,
ObjectID: "dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82",
Path: ".gitignore",
},
{
Mode: []byte("100644"),
Type: Blob,
ObjectID: "0792c58905eff3432b721f8c4a64363d8e28d9ae",
Path: ".gitmodules",
},
{
Mode: []byte("040000"),
Type: Tree,
ObjectID: "3c122d2b7830eca25235131070602575cf8b41a1",
Path: "encoding",
},
{
Mode: []byte("160000"),
Type: Submodule,
ObjectID: "79bceae69cb5750d6567b223597999bfa91cb3b9",
Path: "gitlab-shell",
},
},
},
{
desc: "irregular path",
filename: "testdata/z-lstree-irregular.txt",
entries: Entries{
{
Mode: []byte("100644"),
Type: Blob,
ObjectID: "dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82",
Path: ".gitignore",
},
{
Mode: []byte("100644"),
Type: Blob,
ObjectID: "0792c58905eff3432b721f8c4a64363d8e28d9ae",
Path: ".gitmodules",
},
{
Mode: []byte("040000"),
Type: Tree,
ObjectID: "3c122d2b7830eca25235131070602575cf8b41a1",
Path: "some encoding",
},
{
Mode: []byte("160000"),
Type: Submodule,
ObjectID: "79bceae69cb5750d6567b223597999bfa91cb3b9",
Path: "gitlab-shell",
},
},
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
file, err := os.Open(testCase.filename)
require.NoError(t, err)
defer file.Close()
parsedEntries := Entries{}
parser := NewParser(file)
for {
entry, err := parser.NextEntry()
if err == io.EOF {
break
}
require.NoError(t, err)
parsedEntries = append(parsedEntries, *entry)
}
expectedEntries := testCase.entries
require.Len(t, expectedEntries, len(parsedEntries))
for index, parsedEntry := range parsedEntries {
expectedEntry := expectedEntries[index]
require.Equal(t, expectedEntry, parsedEntry)
}
})
}
}

View file

@ -1,40 +0,0 @@
package objectpool
import (
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestClone(t *testing.T) {
ctx := testhelper.Context(t)
cfg, pool, repoProto := setupObjectPool(t, ctx)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
require.NoError(t, pool.clone(ctx, repo))
defer func() {
require.NoError(t, pool.Remove(ctx))
}()
require.DirExists(t, pool.FullPath())
require.DirExists(t, filepath.Join(pool.FullPath(), "objects"))
}
func TestCloneExistingPool(t *testing.T) {
ctx := testhelper.Context(t)
cfg, pool, repoProto := setupObjectPool(t, ctx)
repo := localrepo.NewTestRepo(t, cfg, repoProto)
require.NoError(t, pool.clone(ctx, repo))
defer func() {
require.NoError(t, pool.Remove(ctx))
}()
// re-cloning into the existing directory must fail
require.Error(t, pool.clone(ctx, repo))
}

View file

@ -1,255 +0,0 @@
package objectpool
import (
"bufio"
"bytes"
"context"
"fmt"
"io"
"os/exec"
"path/filepath"
"strconv"
"strings"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
)
const sourceRefNamespace = "refs/remotes/origin"
// FetchFromOrigin initializes the pool and fetches the objects from its origin repository
func (o *ObjectPool) FetchFromOrigin(ctx context.Context, origin *localrepo.Repo) error {
if err := o.Init(ctx); err != nil {
return err
}
originPath, err := origin.Path()
if err != nil {
return err
}
if err := o.housekeepingManager.CleanStaleData(ctx, o.Repo); err != nil {
return err
}
if err := o.logStats(ctx, "before fetch"); err != nil {
return err
}
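// Map every ref of the origin repository into the pool's refs/remotes/origin/ namespace.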
refSpec := fmt.Sprintf("+refs/*:%s/*", sourceRefNamespace)
var stderr bytes.Buffer
if err := o.Repo.ExecAndWait(ctx,
git.SubCmd{
Name: "fetch",
Flags: []git.Option{
git.Flag{Name: "--quiet"},
git.Flag{Name: "--atomic"},
// We already fetch tags via our refspec, so we don't
// want to fetch them a second time via Git's default
// tag refspec.
git.Flag{Name: "--no-tags"},
// We don't need FETCH_HEAD, and it can potentially be hundreds of
// megabytes when doing a mirror-sync of repos with huge numbers of
// references.
git.Flag{Name: "--no-write-fetch-head"},
},
Args: []string{originPath, refSpec},
},
git.WithRefTxHook(o.Repo),
git.WithStderr(&stderr),
); err != nil {
return helper.ErrInternalf("fetch into object pool: %w, stderr: %q", err,
stderr.String())
}
if err := o.rescueDanglingObjects(ctx); err != nil {
return err
}
if err := o.logStats(ctx, "after fetch"); err != nil {
return err
}
if err := o.Repo.ExecAndWait(ctx, git.SubCmd{
Name: "pack-refs",
Flags: []git.Option{git.Flag{Name: "--all"}},
}); err != nil {
return err
}
return o.repackPool(ctx, o)
}
const danglingObjectNamespace = "refs/dangling/"
// rescueDanglingObjects creates refs for all dangling objects it finds
// with `git fsck`, which converts those objects from "dangling" to
// "not-dangling". This guards against any object ever being deleted from
// a pool repository. This is a defense in depth against accidental use
// of `git prune`, which could remove Git objects that a pool member
// relies on. There is currently no way for us to reliably determine if
// an object is still used anywhere, so the only safe thing to do is to
// assume that every object _is_ used.
func (o *ObjectPool) rescueDanglingObjects(ctx context.Context) error {
fsck, err := o.Repo.Exec(ctx, git.SubCmd{
Name: "fsck",
Flags: []git.Option{git.Flag{Name: "--connectivity-only"}, git.Flag{Name: "--dangling"}},
})
if err != nil {
return err
}
updater, err := updateref.New(ctx, o.Repo, updateref.WithDisabledTransactions())
if err != nil {
return err
}
scanner := bufio.NewScanner(fsck)
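// git fsck prints unreachable objects as lines of the form "dangling <type> <oid>".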
for scanner.Scan() {
split := strings.SplitN(scanner.Text(), " ", 3)
if len(split) != 3 {
continue
}
if split[0] != "dangling" {
continue
}
ref := git.ReferenceName(danglingObjectNamespace + split[2])
if err := updater.Create(ref, split[2]); err != nil {
return err
}
}
if err := scanner.Err(); err != nil {
return err
}
if err := fsck.Wait(); err != nil {
return fmt.Errorf("git fsck: %v", err)
}
return updater.Commit()
}
func (o *ObjectPool) repackPool(ctx context.Context, pool repository.GitRepo) error {
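// Delta islands: each "pack.island" value is a regex that groups matching refs into an
// island, and objects belonging to one island are not delta-compressed against objects
// outside of it. Both patterns below capture the literal "a", so heads and tags share a
// single island named "a", whose objects "pack.islandCore" orders first in the pack.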
config := []git.ConfigPair{
{Key: "pack.island", Value: sourceRefNamespace + "/he(a)ds"},
{Key: "pack.island", Value: sourceRefNamespace + "/t(a)gs"},
{Key: "pack.islandCore", Value: "a"},
{Key: "pack.writeBitmapHashCache", Value: "true"},
}
if err := o.Repo.ExecAndWait(ctx, git.SubCmd{
Name: "repack",
Flags: []git.Option{
git.Flag{Name: "-aidb"},
// This can be removed as soon as we have upstreamed a
// `repack.updateServerInfo` config option. See gitlab-org/git#105 for more
// details.
git.Flag{Name: "-n"},
},
}, git.WithConfig(config...)); err != nil {
return err
}
return nil
}
func (o *ObjectPool) logStats(ctx context.Context, when string) error {
fields := logrus.Fields{
"when": when,
}
for key, dir := range map[string]string{
"poolObjectsSize": "objects",
"poolRefsSize": "refs",
} {
var err error
fields[key], err = sizeDir(ctx, filepath.Join(o.FullPath(), dir))
if err != nil {
return err
}
}
forEachRef, err := o.Repo.Exec(ctx, git.SubCmd{
Name: "for-each-ref",
Flags: []git.Option{git.Flag{Name: "--format=%(objecttype)%00%(refname)"}},
Args: []string{"refs/"},
})
if err != nil {
return err
}
danglingRefsByType := make(map[string]int)
normalRefsByType := make(map[string]int)
scanner := bufio.NewScanner(forEachRef)
for scanner.Scan() {
line := bytes.SplitN(scanner.Bytes(), []byte{0}, 2)
if len(line) != 2 {
continue
}
objectType := string(line[0])
refname := string(line[1])
if strings.HasPrefix(refname, danglingObjectNamespace) {
danglingRefsByType[objectType]++
} else {
normalRefsByType[objectType]++
}
}
if err := scanner.Err(); err != nil {
return err
}
if err := forEachRef.Wait(); err != nil {
return err
}
for _, key := range []string{"blob", "commit", "tag", "tree"} {
fields["dangling."+key+".ref"] = danglingRefsByType[key]
fields["normal."+key+".ref"] = normalRefsByType[key]
}
ctxlogrus.Extract(ctx).WithFields(fields).Info("pool dangling ref stats")
return nil
}
func sizeDir(ctx context.Context, dir string) (int64, error) {
// du -sk reports the total size in KiB (1024-byte blocks)
cmd, err := command.New(ctx, exec.Command("du", "-sk", dir), nil, nil, nil)
if err != nil {
return 0, err
}
sizeLine, err := io.ReadAll(cmd)
if err != nil {
return 0, err
}
if err := cmd.Wait(); err != nil {
return 0, err
}
sizeParts := bytes.Split(sizeLine, []byte("\t"))
if len(sizeParts) != 2 {
return 0, fmt.Errorf("malformed du output: %q", sizeLine)
}
size, err := strconv.ParseInt(string(sizeParts[0]), 10, 0)
if err != nil {
return 0, err
}
// Convert KB to B
return size * 1024, nil
}

View file

@ -1,288 +0,0 @@
package updateref
import (
"context"
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func setupUpdater(t *testing.T, ctx context.Context) (config.Cfg, *localrepo.Repo, *Updater) {
t.Helper()
cfg, protoRepo, _ := testcfg.BuildWithRepo(t)
repo := localrepo.NewTestRepo(t, cfg, protoRepo, git.WithSkipHooks())
updater, err := New(ctx, repo)
require.NoError(t, err)
return cfg, repo, updater
}
func TestCreate(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, updater := setupUpdater(t, ctx)
headCommit, err := repo.ReadCommit(ctx, "HEAD")
require.NoError(t, err)
ref := git.ReferenceName("refs/heads/_create")
sha := headCommit.Id
require.NoError(t, updater.Create(ref, sha))
require.NoError(t, updater.Commit())
// check the ref was created
commit, logErr := repo.ReadCommit(ctx, ref.Revision())
require.NoError(t, logErr)
require.Equal(t, commit.Id, sha, "reference was created with the wrong SHA")
}
func TestUpdate(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, updater := setupUpdater(t, ctx)
headCommit, err := repo.ReadCommit(ctx, "HEAD")
require.NoError(t, err)
ref := git.ReferenceName("refs/heads/feature")
sha := headCommit.Id
// Sanity check: ensure the ref exists before we start
commit, logErr := repo.ReadCommit(ctx, ref.Revision())
require.NoError(t, logErr)
require.NotEqual(t, commit.Id, sha, "%s points to HEAD: %s in the test repository", ref.String(), sha)
require.NoError(t, updater.Update(ref, sha, ""))
require.NoError(t, updater.Prepare())
require.NoError(t, updater.Commit())
// check the ref was updated
commit, logErr = repo.ReadCommit(ctx, ref.Revision())
require.NoError(t, logErr)
require.Equal(t, commit.Id, sha, "reference was not updated")
// since ref has been updated to HEAD, we know that it does not point to HEAD^. So, HEAD^ is an invalid "old value" for updating ref
parentCommit, err := repo.ReadCommit(ctx, "HEAD^")
require.NoError(t, err)
require.Error(t, updater.Update(ref, parentCommit.Id, parentCommit.Id))
// check the ref was not updated
commit, logErr = repo.ReadCommit(ctx, ref.Revision())
require.NoError(t, logErr)
require.NotEqual(t, commit.Id, parentCommit.Id, "reference was updated when it shouldn't have been")
}
func TestDelete(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, updater := setupUpdater(t, ctx)
ref := git.ReferenceName("refs/heads/feature")
require.NoError(t, updater.Delete(ref))
require.NoError(t, updater.Commit())
// check the ref was removed
_, err := repo.ReadCommit(ctx, ref.Revision())
require.Equal(t, localrepo.ErrObjectNotFound, err, "expected 'not found' error got %v", err)
}
func TestUpdater_prepareLocksTransaction(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, updater := setupUpdater(t, ctx)
commit, logErr := repo.ReadCommit(ctx, "refs/heads/master")
require.NoError(t, logErr)
require.NoError(t, updater.Update("refs/heads/feature", commit.Id, ""))
require.NoError(t, updater.Prepare())
require.NoError(t, updater.Update("refs/heads/feature", commit.Id, ""))
err := updater.Commit()
require.Error(t, err, "cannot update after prepare")
require.Contains(t, err.Error(), "fatal: prepared transactions can only be closed")
}
func TestUpdater_concurrentLocking(t *testing.T) {
t.Parallel()
ctx := testhelper.Context(t)
cfg, protoRepo, _ := testcfg.BuildWithRepo(t)
repo := localrepo.NewTestRepo(t, cfg, protoRepo, git.WithSkipHooks())
commit, logErr := repo.ReadCommit(ctx, "refs/heads/master")
require.NoError(t, logErr)
firstUpdater, err := New(ctx, repo)
require.NoError(t, err)
require.NoError(t, firstUpdater.Update("refs/heads/master", "", commit.Id))
require.NoError(t, firstUpdater.Prepare())
secondUpdater, err := New(ctx, repo)
require.NoError(t, err)
require.NoError(t, secondUpdater.Update("refs/heads/master", "", commit.Id))
// With flushing, we're able to detect concurrent locking at prepare time already instead of
// at commit time.
if gitSupportsStatusFlushing(t, ctx, cfg) {
err := secondUpdater.Prepare()
require.Error(t, err)
require.Contains(t, err.Error(), "fatal: prepare: cannot lock ref 'refs/heads/master'")
require.NoError(t, firstUpdater.Commit())
} else {
require.NoError(t, secondUpdater.Prepare())
require.NoError(t, firstUpdater.Commit())
err := secondUpdater.Commit()
require.Error(t, err)
require.Contains(t, err.Error(), "fatal: prepare: cannot lock ref 'refs/heads/master'")
}
}
func TestBulkOperation(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, updater := setupUpdater(t, ctx)
headCommit, err := repo.ReadCommit(ctx, "HEAD")
require.NoError(t, err)
for i := 0; i < 1000; i++ {
ref := fmt.Sprintf("refs/head/_test_%d", i)
require.NoError(t, updater.Create(git.ReferenceName(ref), headCommit.Id), "Failed to create ref %d", i)
}
require.NoError(t, updater.Commit())
refs, err := repo.GetReferences(ctx, "refs/")
require.NoError(t, err)
require.Greater(t, len(refs), 1000, "At least 1000 refs should be present")
}
func TestContextCancelAbortsRefChanges(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, _ := setupUpdater(t, ctx)
headCommit, err := repo.ReadCommit(ctx, "HEAD")
require.NoError(t, err)
childCtx, childCancel := context.WithCancel(ctx)
localRepo := localrepo.NewTestRepo(t, cfg, repo)
updater, err := New(childCtx, localRepo)
require.NoError(t, err)
ref := git.ReferenceName("refs/heads/_shouldnotexist")
require.NoError(t, updater.Create(ref, headCommit.Id))
// Force the update-ref process to terminate early
childCancel()
require.Error(t, updater.Commit())
// check the ref doesn't exist
_, err = repo.ReadCommit(ctx, ref.Revision())
require.Equal(t, localrepo.ErrObjectNotFound, err, "expected 'not found' error got %v", err)
}
func TestUpdater_cancel(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, updater := setupUpdater(t, ctx)
require.NoError(t, updater.Delete(git.ReferenceName("refs/heads/master")))
require.NoError(t, updater.Prepare())
// A concurrent update shouldn't be allowed.
concurrentUpdater, err := New(ctx, repo)
require.NoError(t, err)
require.NoError(t, concurrentUpdater.Delete(git.ReferenceName("refs/heads/master")))
err = concurrentUpdater.Commit()
require.NotNil(t, err)
require.Contains(t, err.Error(), "fatal: commit: cannot lock ref 'refs/heads/master'")
// We now cancel the initial updater. Afterwards, it should be possible again to update the
// ref because locks should have been released.
require.NoError(t, updater.Cancel())
concurrentUpdater, err = New(ctx, repo)
require.NoError(t, err)
require.NoError(t, concurrentUpdater.Delete(git.ReferenceName("refs/heads/master")))
require.NoError(t, concurrentUpdater.Commit())
}
func TestUpdater_closingStdinAbortsChanges(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, updater := setupUpdater(t, ctx)
headCommit, err := repo.ReadCommit(ctx, "HEAD")
require.NoError(t, err)
ref := git.ReferenceName("refs/heads/shouldnotexist")
require.NoError(t, updater.Create(ref, headCommit.Id))
// Note that we call `Wait()` on the command, not on the updater. This
// circumvents our usual semantics of sending "commit" and thus
// emulates that the command somehow terminates correctly without us
// terminating it intentionally. Previous to our use of the "start"
// verb, this would've caused the reference to be created...
require.NoError(t, updater.cmd.Wait())
// ... but as we now use explicit transactional behaviour, this is no
// longer the case.
_, err = repo.ReadCommit(ctx, ref.Revision())
require.Equal(t, localrepo.ErrObjectNotFound, err, "expected 'not found' error got %v", err)
}
func TestUpdater_capturesStderr(t *testing.T) {
t.Parallel()
ctx := testhelper.Context(t)
cfg, _, updater := setupUpdater(t, ctx)
ref := "refs/heads/a"
newValue := strings.Repeat("1", 40)
oldValue := git.ZeroOID.String()
require.NoError(t, updater.Update(git.ReferenceName(ref), newValue, oldValue))
var expectedErr string
if gitSupportsStatusFlushing(t, ctx, cfg) {
expectedErr = fmt.Sprintf("state update to \"commit\" failed: EOF, stderr: \"fatal: commit: cannot update ref '%s': "+
"trying to write ref '%s' with nonexistent object %s\\n\"", ref, ref, newValue)
} else {
expectedErr = fmt.Sprintf("git update-ref: exit status 128, stderr: "+
"\"fatal: commit: cannot update ref '%s': "+
"trying to write ref '%s' with nonexistent object %s\\n\"", ref, ref, newValue)
}
err := updater.Commit()
require.NotNil(t, err)
require.Equal(t, err.Error(), expectedErr)
}
func gitSupportsStatusFlushing(t *testing.T, ctx context.Context, cfg config.Cfg) bool {
version, err := gittest.NewCommandFactory(t, cfg).GitVersion(ctx)
require.NoError(t, err)
return version.FlushesUpdaterefStatus()
}
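At its core, the Updater exercised by these tests wraps `git update-ref --stdin` and its transactional verbs (`start`, `prepare`, `commit`). The following is a minimal, standalone sketch of that underlying protocol rather than of Gitaly's own implementation; the repository path and object ID are placeholders, and it assumes a Git version that supports the transaction verbs.

```go
package main

import (
	"fmt"
	"log"
	"os/exec"
)

func main() {
	// Placeholder repository path; the object ID below must exist in that repository.
	cmd := exec.Command("git", "-C", "/path/to/repo.git", "update-ref", "--stdin")

	stdin, err := cmd.StdinPipe()
	if err != nil {
		log.Fatal(err)
	}
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}

	// "start" opens an explicit transaction, "prepare" takes the reference locks,
	// and "commit" applies all queued updates atomically.
	fmt.Fprintln(stdin, "start")
	fmt.Fprintln(stdin, "create refs/heads/example 1e292f8fedd741b75372e19097c76d327140c312")
	fmt.Fprintln(stdin, "prepare")
	fmt.Fprintln(stdin, "commit")

	if err := stdin.Close(); err != nil {
		log.Fatal(err)
	}
	if err := cmd.Wait(); err != nil {
		log.Fatalf("update-ref transaction failed: %v", err)
	}
}
```

Closing stdin without ever writing `commit` leaves the queued updates unapplied, which is the behaviour `TestUpdater_closingStdinAbortsChanges` above relies on.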

View file

@ -1,32 +0,0 @@
package cgroups
// Config is a struct for cgroups config
type Config struct {
// Count is the number of cgroups to be created at startup
Count uint `toml:"count"`
// Mountpoint is where the cgroup filesystem is mounted, usually under /sys/fs/cgroup/
Mountpoint string `toml:"mountpoint"`
// HierarchyRoot is the parent cgroup under which Gitaly creates <Count> cgroups.
// A system administrator is expected to create such cgroup/directory under <Mountpoint>/memory
// and/or <Mountpoint>/cpu depending on which resource is enabled. HierarchyRoot is expected to
// be owned by the user and group Gitaly runs as.
HierarchyRoot string `toml:"hierarchy_root"`
// CPU holds CPU resource configurations
CPU CPU `toml:"cpu"`
// Memory holds memory resource configurations
Memory Memory `toml:"memory"`
}
// Memory is a struct storing cgroups memory config
type Memory struct {
Enabled bool `toml:"enabled"`
// Limit is the memory limit in bytes. Could be -1 to indicate unlimited memory.
Limit int64 `toml:"limit"`
}
// CPU is a struct storing cgroups CPU config
type CPU struct {
Enabled bool `toml:"enabled"`
// Shares is the number of CPU shares (relative weight (ratio) vs. other cgroups with CPU shares).
Shares uint64 `toml:"shares"`
}

View file

@ -1,82 +0,0 @@
package config
import (
"fmt"
"path/filepath"
"time"
)
// Ruby contains settings for Ruby worker processes
type Ruby struct {
Dir string `toml:"dir"`
MaxRSS int `toml:"max_rss"`
GracefulRestartTimeout Duration `toml:"graceful_restart_timeout"`
RestartDelay Duration `toml:"restart_delay"`
NumWorkers int `toml:"num_workers"`
LinguistLanguagesPath string `toml:"linguist_languages_path"`
RuggedGitConfigSearchPath string `toml:"rugged_git_config_search_path"`
}
// Duration is a trick to let our TOML library parse durations from strings.
type Duration time.Duration
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func (d *Duration) Duration() time.Duration {
if d != nil {
return time.Duration(*d)
}
return 0
}
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func (d *Duration) UnmarshalText(text []byte) error {
td, err := time.ParseDuration(string(text))
if err == nil {
*d = Duration(td)
}
return err
}
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func (d Duration) MarshalText() ([]byte, error) {
return []byte(time.Duration(d).String()), nil
}
// ConfigureRuby validates the gitaly-ruby configuration and sets default values.
func (cfg *Cfg) ConfigureRuby() error {
if cfg.Ruby.GracefulRestartTimeout.Duration() == 0 {
cfg.Ruby.GracefulRestartTimeout = Duration(10 * time.Minute)
}
if cfg.Ruby.MaxRSS == 0 {
cfg.Ruby.MaxRSS = 200 * 1024 * 1024
}
if cfg.Ruby.RestartDelay.Duration() == 0 {
cfg.Ruby.RestartDelay = Duration(5 * time.Minute)
}
if len(cfg.Ruby.Dir) == 0 {
return fmt.Errorf("gitaly-ruby.dir: is not set")
}
minWorkers := 2
if cfg.Ruby.NumWorkers < minWorkers {
cfg.Ruby.NumWorkers = minWorkers
}
var err error
cfg.Ruby.Dir, err = filepath.Abs(cfg.Ruby.Dir)
if err != nil {
return err
}
if len(cfg.Ruby.RuggedGitConfigSearchPath) != 0 {
cfg.Ruby.RuggedGitConfigSearchPath, err = filepath.Abs(cfg.Ruby.RuggedGitConfigSearchPath)
if err != nil {
return err
}
}
return validateIsDirectory(cfg.Ruby.Dir, "gitaly-ruby.dir")
}

View file

@ -1,186 +0,0 @@
package linguist
import (
"context"
"crypto/sha256"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
)
var exportedEnvVars = []string{"HOME", "PATH", "GEM_HOME", "BUNDLE_PATH", "BUNDLE_APP_CONFIG", "BUNDLE_USER_CONFIG"}
// Language is used to parse Linguist's languages.json file.
type Language struct {
Color string `json:"color"`
}
// ByteCountPerLanguage represents a counter value (bytes) per language.
type ByteCountPerLanguage map[string]uint64
// Instance is a holder of the language settings defined in the system.
type Instance struct {
cfg config.Cfg
colorMap map[string]Language
gitCmdFactory git.CommandFactory
}
// New loads the name->color map from the Linguist gem and returns an initialised
// instance to the caller, or an error.
func New(cfg config.Cfg, gitCmdFactory git.CommandFactory) (*Instance, error) {
jsonReader, err := openLanguagesJSON(cfg)
if err != nil {
return nil, err
}
defer jsonReader.Close()
var colorMap map[string]Language
if err := json.NewDecoder(jsonReader).Decode(&colorMap); err != nil {
return nil, err
}
return &Instance{
cfg: cfg,
gitCmdFactory: gitCmdFactory,
colorMap: colorMap,
}, nil
}
// Stats returns the repository's language stats as reported by 'git-linguist'.
func (inst *Instance) Stats(ctx context.Context, repoPath string, commitID string) (ByteCountPerLanguage, error) {
cmd, err := inst.startGitLinguist(ctx, repoPath, commitID, "stats")
if err != nil {
return nil, fmt.Errorf("starting linguist: %w", err)
}
data, err := io.ReadAll(cmd)
if err != nil {
return nil, fmt.Errorf("reading linguist output: %w", err)
}
if err := cmd.Wait(); err != nil {
return nil, fmt.Errorf("waiting for linguist: %w", err)
}
stats := make(ByteCountPerLanguage)
if err := json.Unmarshal(data, &stats); err != nil {
return nil, fmt.Errorf("unmarshaling stats: %w", err)
}
return stats, nil
}
// Color returns the color Linguist has assigned to language.
func (inst *Instance) Color(language string) string {
if color := inst.colorMap[language].Color; color != "" {
return color
}
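// Fall back to a stable color derived from the SHA256 hash of the language name.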
colorSha := sha256.Sum256([]byte(language))
return fmt.Sprintf("#%x", colorSha[0:3])
}
func (inst *Instance) startGitLinguist(ctx context.Context, repoPath string, commitID string, linguistCommand string) (*command.Command, error) {
bundle, err := exec.LookPath("bundle")
if err != nil {
return nil, fmt.Errorf("finding bundle executable: %w", err)
}
args := []string{
bundle,
"exec",
"bin/ruby-cd",
repoPath,
"git-linguist",
"--commit=" + commitID,
linguistCommand,
}
gitExecEnv := inst.gitCmdFactory.GetExecutionEnvironment(ctx)
// This is a horrible hack. git-linguist will execute `git rev-parse
// --git-dir` to check whether it is in a Git directory or not. We don't
// want to use the one provided by PATH, but instead the one specified
// via the configuration. git-linguist doesn't specify any way to choose
// a different Git implementation, so we need to prepend the configured
// Git's directory to PATH. But as our internal command interface will
// overwrite PATH even if we pass it in here, we need to work around it
// and instead execute the command with `env PATH=$GITDIR:$PATH`.
gitDir := filepath.Dir(gitExecEnv.BinaryPath)
if path, ok := os.LookupEnv("PATH"); ok && gitDir != "." {
args = append([]string{
"env", fmt.Sprintf("PATH=%s:%s", gitDir, path),
}, args...)
}
cmd := exec.Command(args[0], args[1:]...)
cmd.Dir = inst.cfg.Ruby.Dir
internalCmd, err := command.New(ctx, cmd, nil, nil, nil, exportEnvironment()...)
if err != nil {
return nil, fmt.Errorf("creating command: %w", err)
}
internalCmd.SetMetricsCmd("git-linguist")
internalCmd.SetMetricsSubCmd(linguistCommand)
return internalCmd, nil
}
func openLanguagesJSON(cfg config.Cfg) (io.ReadCloser, error) {
if jsonPath := cfg.Ruby.LinguistLanguagesPath; jsonPath != "" {
// This is a fallback for environments where dynamic discovery of the
// linguist path via Bundler is not working for some reason, for example
// https://gitlab.com/gitlab-org/gitaly/issues/1119.
return os.Open(jsonPath)
}
linguistPathSymlink, err := os.CreateTemp("", "gitaly-linguist-path")
if err != nil {
return nil, err
}
defer func() { _ = os.Remove(linguistPathSymlink.Name()) }()
if err := linguistPathSymlink.Close(); err != nil {
return nil, err
}
// We use a symlink because we cannot trust Bundler to not print garbage
// on its stdout.
rubyScript := `FileUtils.ln_sf(Bundler.rubygems.find_name('github-linguist').first.full_gem_path, ARGV.first)`
cmd := exec.Command("bundle", "exec", "ruby", "-rfileutils", "-e", rubyScript, linguistPathSymlink.Name())
cmd.Dir = cfg.Ruby.Dir
// We have learned that in practice the command we are about to run is a
// canary for Ruby/Bundler configuration problems. Including stderr and
// stdout in the gitaly log is useful for debugging such problems.
cmd.Stderr = os.Stderr
cmd.Stdout = os.Stdout
if err := cmd.Run(); err != nil {
if exitError, ok := err.(*exec.ExitError); ok {
err = fmt.Errorf("%v; stderr: %q", exitError, exitError.Stderr)
}
return nil, err
}
return os.Open(filepath.Join(linguistPathSymlink.Name(), "lib", "linguist", "languages.json"))
}
func exportEnvironment() []string {
var env []string
for _, envVarName := range exportedEnvVars {
if val, ok := os.LookupEnv(envVarName); ok {
env = append(env, fmt.Sprintf("%s=%s", envVarName, val))
}
}
return env
}
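For context, `Stats` above decodes git-linguist's stdout, which is a flat JSON object mapping language names to byte counts (the `"Ruby": 2943` figure appears again in the tests that follow). A minimal sketch of just the decoding step, using made-up sample output:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Sample git-linguist "stats" output: language name -> byte count.
	raw := []byte(`{"Ruby": 2943, "Go": 1024}`)

	stats := make(map[string]uint64)
	if err := json.Unmarshal(raw, &stats); err != nil {
		panic(err)
	}
	fmt.Println(stats["Ruby"]) // 2943
}
```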

View file

@ -1,67 +0,0 @@
package linguist
import (
"encoding/json"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func TestInstance_Stats_successful(t *testing.T) {
ctx := testhelper.Context(t)
cfg, _, repoPath := testcfg.BuildWithRepo(t)
ling, err := New(cfg, gittest.NewCommandFactory(t, cfg))
require.NoError(t, err)
counts, err := ling.Stats(ctx, repoPath, "1e292f8fedd741b75372e19097c76d327140c312")
require.NoError(t, err)
require.Equal(t, uint64(2943), counts["Ruby"])
}
func TestInstance_Stats_unmarshalJSONError(t *testing.T) {
cfg := testcfg.Build(t)
ctx := testhelper.Context(t)
ling, err := New(cfg, gittest.NewCommandFactory(t, cfg))
require.NoError(t, err)
// When an error occurs, this used to trigger JSON marshalling of a plain string.
// The new behaviour shouldn't do that and should instead return a command error.
_, err = ling.Stats(ctx, "/var/empty", "deadbeef")
require.Error(t, err)
_, ok := err.(*json.SyntaxError)
require.False(t, ok, "expected the error not be a json Syntax Error")
}
func TestNew(t *testing.T) {
cfg := testcfg.Build(t, testcfg.WithRealLinguist())
ling, err := New(cfg, gittest.NewCommandFactory(t, cfg))
require.NoError(t, err)
require.Equal(t, "#701516", ling.Color("Ruby"), "color value for 'Ruby'")
}
func TestNew_loadLanguagesCustomPath(t *testing.T) {
jsonPath, err := filepath.Abs("testdata/fake-languages.json")
require.NoError(t, err)
cfg := testcfg.Build(t, testcfg.WithBase(config.Cfg{Ruby: config.Ruby{LinguistLanguagesPath: jsonPath}}))
ling, err := New(cfg, gittest.NewCommandFactory(t, cfg))
require.NoError(t, err)
require.Equal(t, "foo color", ling.Color("FooBar"))
}

View file

@ -1,131 +0,0 @@
package rubyserver
import (
"context"
"fmt"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/auth"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/log"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/storage"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/version"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/v14/streamio"
"google.golang.org/grpc/codes"
)
func TestStopSafe(t *testing.T) {
badServers := []*Server{
nil,
New(config.Cfg{}, nil),
}
for _, bs := range badServers {
bs.Stop()
}
}
func TestSetHeaders(t *testing.T) {
cfg, repo, _ := testcfg.BuildWithRepo(t)
ctx := testhelper.Context(t)
locator := config.NewLocator(cfg)
testCases := []struct {
desc string
repo *gitalypb.Repository
errType codes.Code
setter func(context.Context, storage.Locator, *gitalypb.Repository) (context.Context, error)
}{
{
desc: "SetHeaders invalid storage",
repo: &gitalypb.Repository{StorageName: "foo", RelativePath: "bar.git"},
errType: codes.InvalidArgument,
setter: SetHeaders,
},
{
desc: "SetHeaders invalid rel path",
repo: &gitalypb.Repository{StorageName: repo.StorageName, RelativePath: "bar.git"},
errType: codes.NotFound,
setter: SetHeaders,
},
{
desc: "SetHeaders OK",
repo: repo,
errType: codes.OK,
setter: SetHeaders,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
clientCtx, err := tc.setter(ctx, locator, tc.repo)
if tc.errType != codes.OK {
testhelper.RequireGrpcCode(t, err, tc.errType)
assert.Nil(t, clientCtx)
} else {
assert.NoError(t, err)
assert.NotNil(t, clientCtx)
}
})
}
}
type mockGitCommandFactory struct {
git.CommandFactory
}
func (mockGitCommandFactory) GetExecutionEnvironment(context.Context) git.ExecutionEnvironment {
return git.ExecutionEnvironment{
BinaryPath: "/something",
EnvironmentVariables: []string{
"FOO=bar",
},
}
}
func (mockGitCommandFactory) HooksPath(context.Context) string {
return "custom_hooks_path"
}
func TestSetupEnv(t *testing.T) {
cfg := config.Cfg{
BinDir: "/bin/dit",
InternalSocketDir: "/gitaly",
Logging: config.Logging{
Config: log.Config{
Dir: "/log/dir",
},
RubySentryDSN: "testDSN",
Sentry: config.Sentry{
Environment: "testEnvironment",
},
},
Auth: auth.Config{Token: "paswd"},
Ruby: config.Ruby{RuggedGitConfigSearchPath: "/bin/rugged"},
}
env := setupEnv(cfg, mockGitCommandFactory{})
require.Contains(t, env, "FOO=bar")
require.Contains(t, env, "GITALY_LOG_DIR=/log/dir")
require.Contains(t, env, "GITALY_RUBY_GIT_BIN_PATH=/something")
require.Contains(t, env, fmt.Sprintf("GITALY_RUBY_WRITE_BUFFER_SIZE=%d", streamio.WriteBufferSize))
require.Contains(t, env, fmt.Sprintf("GITALY_RUBY_MAX_COMMIT_OR_TAG_MESSAGE_SIZE=%d", helper.MaxCommitOrTagMessageSize))
require.Contains(t, env, "GITALY_RUBY_GITALY_BIN_DIR=/bin/dit")
require.Contains(t, env, "GITALY_VERSION="+version.GetVersion())
require.Contains(t, env, fmt.Sprintf("GITALY_SOCKET=%s", cfg.GitalyInternalSocketPath()))
require.Contains(t, env, "GITALY_TOKEN=paswd")
require.Contains(t, env, "GITALY_RUGGED_GIT_CONFIG_SEARCH_PATH=/bin/rugged")
require.Contains(t, env, "SENTRY_DSN=testDSN")
require.Contains(t, env, "SENTRY_ENVIRONMENT=testEnvironment")
require.Contains(t, env, "GITALY_GIT_HOOKS_DIR=custom_hooks_path")
}

View file

@ -1,110 +0,0 @@
package commit
import (
"context"
"io"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/chunk"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/protobuf/proto"
)
func (s *server) CheckObjectsExist(
stream gitalypb.CommitService_CheckObjectsExistServer,
) error {
ctx := stream.Context()
request, err := stream.Recv()
if err != nil {
return err
}
if err := validateCheckObjectsExistRequest(request); err != nil {
return err
}
objectInfoReader, err := s.catfileCache.ObjectInfoReader(
ctx,
s.localrepo(request.GetRepository()),
)
if err != nil {
return err
}
chunker := chunk.New(&checkObjectsExistSender{stream: stream})
for {
request, err := stream.Recv()
if err != nil {
if err == io.EOF {
return chunker.Flush()
}
return err
}
if err = checkObjectsExist(ctx, request, objectInfoReader, chunker); err != nil {
return err
}
}
}
type checkObjectsExistSender struct {
stream gitalypb.CommitService_CheckObjectsExistServer
revisions []*gitalypb.CheckObjectsExistResponse_RevisionExistence
}
func (c *checkObjectsExistSender) Send() error {
return c.stream.Send(&gitalypb.CheckObjectsExistResponse{
Revisions: c.revisions,
})
}
func (c *checkObjectsExistSender) Reset() {
c.revisions = make([]*gitalypb.CheckObjectsExistResponse_RevisionExistence, 0)
}
func (c *checkObjectsExistSender) Append(m proto.Message) {
c.revisions = append(c.revisions, m.(*gitalypb.CheckObjectsExistResponse_RevisionExistence))
}
func checkObjectsExist(
ctx context.Context,
request *gitalypb.CheckObjectsExistRequest,
objectInfoReader catfile.ObjectInfoReader,
chunker *chunk.Chunker,
) error {
revisions := request.GetRevisions()
for _, revision := range revisions {
revisionExistence := gitalypb.CheckObjectsExistResponse_RevisionExistence{
Name: revision,
Exists: true,
}
_, err := objectInfoReader.Info(ctx, git.Revision(revision))
if err != nil {
if catfile.IsNotFound(err) {
revisionExistence.Exists = false
} else {
return err
}
}
if err := chunker.Send(&revisionExistence); err != nil {
return err
}
}
return nil
}
func validateCheckObjectsExistRequest(in *gitalypb.CheckObjectsExistRequest) error {
for _, revision := range in.GetRevisions() {
if err := git.ValidateRevision(revision); err != nil {
return helper.ErrInvalidArgument(err)
}
}
return nil
}

View file

@ -1,121 +0,0 @@
package commit
import (
"io"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
func TestCheckObjectsExist(t *testing.T) {
t.Parallel()
ctx := testhelper.Context(t)
cfg, repo, repoPath, client := setupCommitServiceWithRepo(ctx, t, true)
// write a few commitIDs we can use
commitID1 := gittest.WriteCommit(t, cfg, repoPath)
commitID2 := gittest.WriteCommit(t, cfg, repoPath)
commitID3 := gittest.WriteCommit(t, cfg, repoPath)
// remove a ref from the repository so we know it doesn't exist
gittest.Exec(t, cfg, "-C", repoPath, "update-ref", "-d", "refs/heads/many_files")
nonexistingObject := "abcdefg"
cmd := gittest.NewCommand(t, cfg, "-C", repoPath, "rev-parse", nonexistingObject)
require.Error(t, cmd.Wait(), "ensure the object doesn't exist")
testCases := []struct {
desc string
input [][]byte
revisionsExistence map[string]bool
returnCode codes.Code
}{
{
desc: "commit ids and refs that exist",
input: [][]byte{
[]byte(commitID1),
[]byte("master"),
[]byte(commitID2),
[]byte(commitID3),
[]byte("feature"),
},
revisionsExistence: map[string]bool{
"master": true,
commitID2.String(): true,
commitID3.String(): true,
"feature": true,
},
returnCode: codes.OK,
},
{
desc: "ref and objects missing",
input: [][]byte{
[]byte(commitID1),
[]byte("master"),
[]byte(commitID2),
[]byte(commitID3),
[]byte("feature"),
[]byte("many_files"),
[]byte(nonexistingObject),
},
revisionsExistence: map[string]bool{
"master": true,
commitID2.String(): true,
commitID3.String(): true,
"feature": true,
"many_files": false,
nonexistingObject: false,
},
returnCode: codes.OK,
},
{
desc: "empty input",
input: [][]byte{},
returnCode: codes.OK,
revisionsExistence: map[string]bool{},
},
{
desc: "invalid input",
input: [][]byte{[]byte("-not-a-rev")},
returnCode: codes.InvalidArgument,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
c, err := client.CheckObjectsExist(ctx)
require.NoError(t, err)
require.NoError(t, c.Send(
&gitalypb.CheckObjectsExistRequest{
Repository: repo,
Revisions: tc.input,
},
))
require.NoError(t, c.CloseSend())
for {
resp, err := c.Recv()
if tc.returnCode != codes.OK {
testhelper.RequireGrpcCode(t, err, tc.returnCode)
break
} else if err != nil {
require.Error(t, err, io.EOF)
break
}
actualRevisionsExistence := make(map[string]bool)
for _, revisionExistence := range resp.GetRevisions() {
actualRevisionsExistence[string(revisionExistence.GetName())] = revisionExistence.GetExists()
}
assert.Equal(t, tc.revisionsExistence, actualRevisionsExistence)
}
})
}
}

View file

@ -1,121 +0,0 @@
package commit
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
func TestCommitStatsSuccess(t *testing.T) {
t.Parallel()
ctx := testhelper.Context(t)
_, repo, _, client := setupCommitServiceWithRepo(ctx, t, true)
tests := []struct {
desc string
revision string
oid string
additions, deletions int32
}{
{
desc: "multiple changes, multiple files",
revision: "test-do-not-touch",
oid: "899d3d27b04690ac1cd9ef4d8a74fde0667c57f1",
additions: 27,
deletions: 59,
},
{
desc: "multiple changes, multiple files, reference by commit ID",
revision: "899d3d27b04690ac1cd9ef4d8a74fde0667c57f1",
oid: "899d3d27b04690ac1cd9ef4d8a74fde0667c57f1",
additions: 27,
deletions: 59,
},
{
desc: "merge commit",
revision: "60ecb67",
oid: "60ecb67744cb56576c30214ff52294f8ce2def98",
additions: 1,
deletions: 0,
},
{
desc: "binary file",
revision: "ae73cb0",
oid: "ae73cb07c9eeaf35924a10f713b364d32b2dd34f",
additions: 0,
deletions: 0,
},
{
desc: "initial commit",
revision: "1a0b36b3",
oid: "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863",
additions: 43,
deletions: 0,
},
}
for _, tc := range tests {
t.Run(tc.desc, func(t *testing.T) {
resp, err := client.CommitStats(ctx, &gitalypb.CommitStatsRequest{
Repository: repo,
Revision: []byte(tc.revision),
})
require.NoError(t, err)
assert.Equal(t, tc.oid, resp.GetOid())
assert.Equal(t, tc.additions, resp.GetAdditions())
assert.Equal(t, tc.deletions, resp.GetDeletions())
})
}
}
func TestCommitStatsFailure(t *testing.T) {
t.Parallel()
ctx := testhelper.Context(t)
_, repo, _, client := setupCommitServiceWithRepo(ctx, t, true)
tests := []struct {
desc string
repo *gitalypb.Repository
revision []byte
err codes.Code
}{
{
desc: "repo not found",
repo: &gitalypb.Repository{StorageName: repo.GetStorageName(), RelativePath: "bar.git"},
revision: []byte("test-do-not-touch"),
err: codes.NotFound,
},
{
desc: "storage not found",
repo: &gitalypb.Repository{StorageName: "foo", RelativePath: "bar.git"},
revision: []byte("test-do-not-touch"),
err: codes.InvalidArgument,
},
{
desc: "ref not found",
repo: repo,
revision: []byte("non/existing"),
err: codes.Internal,
},
{
desc: "invalid revision",
repo: repo,
revision: []byte("--outpu=/meow"),
err: codes.InvalidArgument,
},
}
for _, tc := range tests {
t.Run(tc.desc, func(t *testing.T) {
_, err := client.CommitStats(ctx, &gitalypb.CommitStatsRequest{Repository: tc.repo, Revision: tc.revision})
testhelper.RequireGrpcCode(t, err, tc.err)
})
}
}

View file

@ -1,101 +0,0 @@
package conflicts
import (
"context"
"path/filepath"
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/commit"
hookservice "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/hook"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/ssh"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func SetupConflictsService(ctx context.Context, t testing.TB, bare bool, hookManager hook.Manager) (config.Cfg, *gitalypb.Repository, string, gitalypb.ConflictsServiceClient) {
cfg := testcfg.Build(t)
testcfg.BuildGitalyGit2Go(t, cfg)
serverSocketPath := runConflictsServer(t, cfg, hookManager)
cfg.SocketPath = serverSocketPath
client, conn := NewConflictsClient(t, serverSocketPath)
t.Cleanup(func() { conn.Close() })
repo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
if !bare {
gittest.AddWorktree(t, cfg, repoPath, "worktree")
repoPath = filepath.Join(repoPath, "worktree")
// AddWorktree creates a detached worktree. Checkout master here so the
// branch pointer moves as we later commit.
gittest.Exec(t, cfg, "-C", repoPath, "checkout", "master")
}
return cfg, repo, repoPath, client
}
func runConflictsServer(t testing.TB, cfg config.Cfg, hookManager hook.Manager) string {
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
gitalypb.RegisterConflictsServiceServer(srv, NewServer(
deps.GetHookManager(),
deps.GetLocator(),
deps.GetGitCmdFactory(),
deps.GetCatfileCache(),
deps.GetConnsPool(),
deps.GetGit2goExecutor(),
deps.GetUpdaterWithHooks(),
))
gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
deps.GetCfg(),
deps.GetRubyServer(),
deps.GetLocator(),
deps.GetTxManager(),
deps.GetGitCmdFactory(),
deps.GetCatfileCache(),
deps.GetConnsPool(),
deps.GetGit2goExecutor(),
deps.GetHousekeepingManager(),
))
gitalypb.RegisterSSHServiceServer(srv, ssh.NewServer(
deps.GetLocator(),
deps.GetGitCmdFactory(),
deps.GetTxManager(),
))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(deps.GetHookManager(), deps.GetGitCmdFactory(), deps.GetPackObjectsCache()))
gitalypb.RegisterCommitServiceServer(srv, commit.NewServer(
deps.GetLocator(),
deps.GetGitCmdFactory(),
deps.GetLinguist(),
deps.GetCatfileCache(),
))
}, testserver.WithHookManager(hookManager))
}
func NewConflictsClient(t testing.TB, serverSocketPath string) (gitalypb.ConflictsServiceClient, *grpc.ClientConn) {
connOpts := []grpc.DialOption{
grpc.WithInsecure(),
}
conn, err := grpc.Dial(serverSocketPath, connOpts...)
if err != nil {
t.Fatal(err)
}
return gitalypb.NewConflictsServiceClient(conn), conn
}

View file

@ -1,155 +0,0 @@
package diff
import (
"bufio"
"context"
"fmt"
"io"
"strings"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/chunk"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/proto"
)
const (
numStatDelimiter = 0
)
func (s *server) FindChangedPaths(in *gitalypb.FindChangedPathsRequest, stream gitalypb.DiffService_FindChangedPathsServer) error {
if err := s.validateFindChangedPathsRequestParams(stream.Context(), in); err != nil {
return err
}
diffChunker := chunk.New(&findChangedPathsSender{stream: stream})
cmd, err := s.gitCmdFactory.New(stream.Context(), in.Repository, git.SubCmd{
Name: "diff-tree",
Flags: []git.Option{
git.Flag{Name: "-z"},
git.Flag{Name: "--stdin"},
git.Flag{Name: "-m"},
git.Flag{Name: "-r"},
git.Flag{Name: "--name-status"},
git.Flag{Name: "--no-renames"},
git.Flag{Name: "--no-commit-id"},
git.Flag{Name: "--diff-filter=AMDTC"},
},
}, git.WithStdin(strings.NewReader(strings.Join(in.GetCommits(), "\n")+"\n")))
if err != nil {
if _, ok := status.FromError(err); ok {
return fmt.Errorf("FindChangedPaths Stdin Err: %w", err)
}
return status.Errorf(codes.Internal, "FindChangedPaths: Cmd Err: %v", err)
}
if err := parsePaths(bufio.NewReader(cmd), diffChunker); err != nil {
return fmt.Errorf("FindChangedPaths Parsing Err: %w", err)
}
if err := cmd.Wait(); err != nil {
return status.Errorf(codes.Unavailable, "FindChangedPaths: Cmd Wait Err: %v", err)
}
return diffChunker.Flush()
}
func parsePaths(reader *bufio.Reader, chunker *chunk.Chunker) error {
for {
path, err := nextPath(reader)
if err != nil {
if err == io.EOF {
break
}
return fmt.Errorf("FindChangedPaths Next Path Err: %w", err)
}
if err := chunker.Send(path); err != nil {
return fmt.Errorf("FindChangedPaths: err sending to chunker: %v", err)
}
}
return nil
}
func nextPath(reader *bufio.Reader) (*gitalypb.ChangedPaths, error) {
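// With -z and --name-status, diff-tree emits records of the form "<status>NUL<path>NUL",
// which the two ReadBytes calls below consume one field at a time.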
pathStatus, err := reader.ReadBytes(numStatDelimiter)
if err != nil {
return nil, err
}
path, err := reader.ReadBytes(numStatDelimiter)
if err != nil {
return nil, err
}
statusTypeMap := map[string]gitalypb.ChangedPaths_Status{
"M": gitalypb.ChangedPaths_MODIFIED,
"D": gitalypb.ChangedPaths_DELETED,
"T": gitalypb.ChangedPaths_TYPE_CHANGE,
"C": gitalypb.ChangedPaths_COPIED,
"A": gitalypb.ChangedPaths_ADDED,
}
parsedPath, ok := statusTypeMap[string(pathStatus[:len(pathStatus)-1])]
if !ok {
return nil, status.Errorf(codes.Internal, "FindChangedPaths: Unknown changed paths returned: %v", string(pathStatus))
}
changedPath := &gitalypb.ChangedPaths{
Status: parsedPath,
Path: path[:len(path)-1],
}
return changedPath, nil
}
// This sender implements the chunk.Sender interface used by the chunker
type findChangedPathsSender struct {
paths []*gitalypb.ChangedPaths
stream gitalypb.DiffService_FindChangedPathsServer
}
func (t *findChangedPathsSender) Reset() {
t.paths = nil
}
func (t *findChangedPathsSender) Append(m proto.Message) {
t.paths = append(t.paths, m.(*gitalypb.ChangedPaths))
}
func (t *findChangedPathsSender) Send() error {
return t.stream.Send(&gitalypb.FindChangedPathsResponse{
Paths: t.paths,
})
}
func (s *server) validateFindChangedPathsRequestParams(ctx context.Context, in *gitalypb.FindChangedPathsRequest) error {
repo := in.GetRepository()
if _, err := s.locator.GetRepoPath(repo); err != nil {
return err
}
gitRepo := s.localrepo(in.GetRepository())
for _, commit := range in.GetCommits() {
if commit == "" {
return status.Errorf(codes.InvalidArgument, "FindChangedPaths: commits cannot contain an empty commit")
}
containsRef, err := gitRepo.HasRevision(ctx, git.Revision(commit+"^{commit}"))
if err != nil {
return fmt.Errorf("contains ref err: %w", err)
}
if !containsRef {
return status.Errorf(codes.NotFound, "FindChangedPaths: commit: %v can not be found", commit)
}
}
return nil
}

View file

@ -1,163 +0,0 @@
package diff
import (
"io"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
func TestFindChangedPathsRequest_success(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := setupDiffService(ctx, t)
testCases := []struct {
desc string
commits []string
expectedPaths []*gitalypb.ChangedPaths
}{
{
"Returns the expected results without a merge commit",
[]string{"e4003da16c1c2c3fc4567700121b17bf8e591c6c", "57290e673a4c87f51294f5216672cbc58d485d25", "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab", "d59c60028b053793cecfb4022de34602e1a9218e"},
[]*gitalypb.ChangedPaths{
{
Status: gitalypb.ChangedPaths_MODIFIED,
Path: []byte("CONTRIBUTING.md"),
},
{
Status: gitalypb.ChangedPaths_MODIFIED,
Path: []byte("MAINTENANCE.md"),
},
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("gitaly/テスト.txt"),
},
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("gitaly/deleted-file"),
},
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("gitaly/file-with-multiple-chunks"),
},
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("gitaly/mode-file"),
},
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("gitaly/mode-file-with-mods"),
},
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("gitaly/named-file"),
},
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("gitaly/named-file-with-mods"),
},
{
Status: gitalypb.ChangedPaths_DELETED,
Path: []byte("files/js/commit.js.coffee"),
},
},
},
{
"Returns the expected results with a merge commit",
[]string{"7975be0116940bf2ad4321f79d02a55c5f7779aa", "55bc176024cfa3baaceb71db584c7e5df900ea65"},
[]*gitalypb.ChangedPaths{
{
Status: gitalypb.ChangedPaths_ADDED,
Path: []byte("files/images/emoji.png"),
},
{
Status: gitalypb.ChangedPaths_MODIFIED,
Path: []byte(".gitattributes"),
},
},
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
rpcRequest := &gitalypb.FindChangedPathsRequest{Repository: repo, Commits: tc.commits}
stream, err := client.FindChangedPaths(ctx, rpcRequest)
require.NoError(t, err)
var paths []*gitalypb.ChangedPaths
for {
fetchedPaths, err := stream.Recv()
if err == io.EOF {
break
}
require.NoError(t, err)
paths = append(paths, fetchedPaths.GetPaths()...)
}
require.Equal(t, tc.expectedPaths, paths)
})
}
}
func TestFindChangedPathsRequest_failing(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, _, client := setupDiffService(ctx, t, testserver.WithDisablePraefect())
tests := []struct {
desc string
repo *gitalypb.Repository
commits []string
err error
}{
{
desc: "Repo not found",
repo: &gitalypb.Repository{StorageName: repo.GetStorageName(), RelativePath: "bar.git"},
commits: []string{"e4003da16c1c2c3fc4567700121b17bf8e591c6c", "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"},
err: status.Errorf(codes.NotFound, "GetRepoPath: not a git repository: %q", filepath.Join(cfg.Storages[0].Path, "bar.git")),
},
{
desc: "Storage not found",
repo: &gitalypb.Repository{StorageName: "foo", RelativePath: "bar.git"},
commits: []string{"e4003da16c1c2c3fc4567700121b17bf8e591c6c", "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"},
err: status.Error(codes.InvalidArgument, "GetStorageByName: no such storage: \"foo\""),
},
{
desc: "Commits cannot contain an empty commit",
repo: repo,
commits: []string{""},
err: status.Error(codes.InvalidArgument, "FindChangedPaths: commits cannot contain an empty commit"),
},
{
desc: "Invalid commit",
repo: repo,
commits: []string{"invalidinvalidinvalid", "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"},
err: status.Error(codes.NotFound, "FindChangedPaths: commit: invalidinvalidinvalid can not be found"),
},
{
desc: "Commit not found",
repo: repo,
commits: []string{"z4003da16c1c2c3fc4567700121b17bf8e591c6c", "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"},
err: status.Error(codes.NotFound, "FindChangedPaths: commit: z4003da16c1c2c3fc4567700121b17bf8e591c6c can not be found"),
},
}
for _, tc := range tests {
rpcRequest := &gitalypb.FindChangedPathsRequest{Repository: tc.repo, Commits: tc.commits}
stream, err := client.FindChangedPaths(ctx, rpcRequest)
require.NoError(t, err)
t.Run(tc.desc, func(t *testing.T) {
_, err := stream.Recv()
testhelper.RequireGrpcError(t, tc.err, err)
})
}
}

View file

@ -1,209 +0,0 @@
package diff
import (
"fmt"
"io"
"regexp"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"gitlab.com/gitlab-org/gitaly/v14/streamio"
"google.golang.org/grpc/codes"
)
func TestSuccessfulRawDiffRequest(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, repoPath, client := setupDiffService(ctx, t)
rightCommit := "e395f646b1499e8e0279445fc99a0596a65fab7e"
leftCommit := "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"
rpcRequest := &gitalypb.RawDiffRequest{Repository: repo, RightCommitId: rightCommit, LeftCommitId: leftCommit}
c, err := client.RawDiff(ctx, rpcRequest)
require.NoError(t, err)
_, sandboxRepoPath := gittest.CloneRepo(t, cfg, cfg.Storages[0], gittest.CloneRepoOpts{
WithWorktree: true,
})
reader := streamio.NewReader(func() ([]byte, error) {
response, err := c.Recv()
return response.GetData(), err
})
committerName := "Scrooge McDuck"
committerEmail := "scrooge@mcduck.com"
gittest.Exec(t, cfg, "-C", sandboxRepoPath, "reset", "--hard", leftCommit)
gittest.ExecOpts(t, cfg, gittest.ExecConfig{Stdin: reader}, "-C", sandboxRepoPath, "apply")
gittest.Exec(t, cfg, "-C", sandboxRepoPath, "add", ".")
gittest.Exec(t, cfg, "-C", sandboxRepoPath,
"-c", fmt.Sprintf("user.name=%s", committerName),
"-c", fmt.Sprintf("user.email=%s", committerEmail),
"commit", "-m", "Applying received raw diff")
expectedTreeStructure := gittest.Exec(t, cfg, "-C", repoPath, "ls-tree", "-r", rightCommit)
actualTreeStructure := gittest.Exec(t, cfg, "-C", sandboxRepoPath, "ls-tree", "-r", "HEAD")
require.Equal(t, expectedTreeStructure, actualTreeStructure)
}
func TestFailedRawDiffRequestDueToValidations(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := setupDiffService(ctx, t)
testCases := []struct {
desc string
request *gitalypb.RawDiffRequest
code codes.Code
}{
{
desc: "empty left commit",
request: &gitalypb.RawDiffRequest{
Repository: repo,
LeftCommitId: "",
RightCommitId: "e395f646b1499e8e0279445fc99a0596a65fab7e",
},
code: codes.InvalidArgument,
},
{
desc: "empty right commit",
request: &gitalypb.RawDiffRequest{
Repository: repo,
RightCommitId: "",
LeftCommitId: "e395f646b1499e8e0279445fc99a0596a65fab7e",
},
code: codes.InvalidArgument,
},
{
desc: "empty repo",
request: &gitalypb.RawDiffRequest{
Repository: nil,
RightCommitId: "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab",
LeftCommitId: "e395f646b1499e8e0279445fc99a0596a65fab7e",
},
code: codes.InvalidArgument,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
c, _ := client.RawDiff(ctx, testCase.request)
testhelper.RequireGrpcCode(t, drainRawDiffResponse(c), testCase.code)
})
}
}
func TestSuccessfulRawPatchRequest(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, repoPath, client := setupDiffService(ctx, t)
rightCommit := "e395f646b1499e8e0279445fc99a0596a65fab7e"
leftCommit := "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"
rpcRequest := &gitalypb.RawPatchRequest{Repository: repo, RightCommitId: rightCommit, LeftCommitId: leftCommit}
c, err := client.RawPatch(ctx, rpcRequest)
require.NoError(t, err)
reader := streamio.NewReader(func() ([]byte, error) {
response, err := c.Recv()
return response.GetData(), err
})
_, sandboxRepoPath := gittest.CloneRepo(t, cfg, cfg.Storages[0], gittest.CloneRepoOpts{
WithWorktree: true,
})
gittest.Exec(t, cfg, "-C", sandboxRepoPath, "reset", "--hard", leftCommit)
gittest.ExecOpts(t, cfg, gittest.ExecConfig{Stdin: reader}, "-C", sandboxRepoPath, "am")
expectedTreeStructure := gittest.Exec(t, cfg, "-C", repoPath, "ls-tree", "-r", rightCommit)
actualTreeStructure := gittest.Exec(t, cfg, "-C", sandboxRepoPath, "ls-tree", "-r", "HEAD")
require.Equal(t, expectedTreeStructure, actualTreeStructure)
}
func TestFailedRawPatchRequestDueToValidations(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := setupDiffService(ctx, t)
testCases := []struct {
desc string
request *gitalypb.RawPatchRequest
code codes.Code
}{
{
desc: "empty left commit",
request: &gitalypb.RawPatchRequest{
Repository: repo,
LeftCommitId: "",
RightCommitId: "e395f646b1499e8e0279445fc99a0596a65fab7e",
},
code: codes.InvalidArgument,
},
{
desc: "empty right commit",
request: &gitalypb.RawPatchRequest{
Repository: repo,
RightCommitId: "",
LeftCommitId: "e395f646b1499e8e0279445fc99a0596a65fab7e",
},
code: codes.InvalidArgument,
},
{
desc: "empty repo",
request: &gitalypb.RawPatchRequest{
Repository: nil,
RightCommitId: "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab",
LeftCommitId: "e395f646b1499e8e0279445fc99a0596a65fab7e",
},
code: codes.InvalidArgument,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
c, _ := client.RawPatch(ctx, testCase.request)
testhelper.RequireGrpcCode(t, drainRawPatchResponse(c), testCase.code)
})
}
}
func TestRawPatchContainsGitLabSignature(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := setupDiffService(ctx, t)
rightCommit := "e395f646b1499e8e0279445fc99a0596a65fab7e"
leftCommit := "8a0f2ee90d940bfb0ba1e14e8214b0649056e4ab"
rpcRequest := &gitalypb.RawPatchRequest{Repository: repo, RightCommitId: rightCommit, LeftCommitId: leftCommit}
c, err := client.RawPatch(ctx, rpcRequest)
require.NoError(t, err)
reader := streamio.NewReader(func() ([]byte, error) {
response, err := c.Recv()
return response.GetData(), err
})
patch, err := io.ReadAll(reader)
require.NoError(t, err)
require.Regexp(t, regexp.MustCompile(`\n-- \nGitLab\s+$`), string(patch))
}
func drainRawDiffResponse(c gitalypb.DiffService_RawDiffClient) error {
var err error
for err == nil {
_, err = c.Recv()
}
return err
}
func drainRawPatchResponse(c gitalypb.DiffService_RawPatchClient) error {
var err error
for err == nil {
_, err = c.Recv()
}
return err
}

View file

@ -1,55 +0,0 @@
package diff
import (
"context"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func setupDiffService(ctx context.Context, t testing.TB, opt ...testserver.GitalyServerOpt) (config.Cfg, *gitalypb.Repository, string, gitalypb.DiffServiceClient) {
cfg := testcfg.Build(t)
addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
gitalypb.RegisterDiffServiceServer(srv, NewServer(
deps.GetLocator(),
deps.GetGitCmdFactory(),
deps.GetCatfileCache(),
))
gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
cfg,
deps.GetRubyServer(),
deps.GetLocator(),
deps.GetTxManager(),
deps.GetGitCmdFactory(),
deps.GetCatfileCache(),
deps.GetConnsPool(),
deps.GetGit2goExecutor(),
deps.GetHousekeepingManager(),
))
}, opt...)
cfg.SocketPath = addr
conn, err := grpc.Dial(addr, grpc.WithInsecure())
require.NoError(t, err)
t.Cleanup(func() { testhelper.MustClose(t, conn) })
repo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
return cfg, repo, repoPath, gitalypb.NewDiffServiceClient(conn)
}

View file

@ -1,26 +0,0 @@
package hook
import (
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
gitalyhook "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/v14/internal/streamcache"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
type server struct {
gitalypb.UnimplementedHookServiceServer
manager gitalyhook.Manager
gitCmdFactory git.CommandFactory
packObjectsCache streamcache.Cache
}
// NewServer creates a new instance of a gRPC hook server
func NewServer(manager gitalyhook.Manager, gitCmdFactory git.CommandFactory, packObjectsCache streamcache.Cache) gitalypb.HookServiceServer {
srv := &server{
manager: manager,
gitCmdFactory: gitCmdFactory,
packObjectsCache: packObjectsCache,
}
return srv
}

View file

@ -1,82 +0,0 @@
package hook
import (
"context"
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
gitalyhook "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/hook"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func setupHookService(ctx context.Context, t testing.TB) (config.Cfg, *gitalypb.Repository, string, gitalypb.HookServiceClient) {
t.Helper()
cfg := testcfg.Build(t)
cfg.SocketPath = runHooksServer(t, cfg, nil)
client, conn := newHooksClient(t, cfg.SocketPath)
t.Cleanup(func() { conn.Close() })
repo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
return cfg, repo, repoPath, client
}
func newHooksClient(t testing.TB, serverSocketPath string) (gitalypb.HookServiceClient, *grpc.ClientConn) {
t.Helper()
connOpts := []grpc.DialOption{
grpc.WithInsecure(),
}
conn, err := grpc.Dial(serverSocketPath, connOpts...)
if err != nil {
t.Fatal(err)
}
return gitalypb.NewHookServiceClient(conn), conn
}
type serverOption func(*server)
func runHooksServer(t testing.TB, cfg config.Cfg, opts []serverOption, serverOpts ...testserver.GitalyServerOpt) string {
t.Helper()
serverOpts = append(serverOpts, testserver.WithDisablePraefect())
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
hookServer := NewServer(
gitalyhook.NewManager(deps.GetCfg(), deps.GetLocator(), deps.GetGitCmdFactory(), deps.GetTxManager(), deps.GetGitlabClient()),
deps.GetGitCmdFactory(),
deps.GetPackObjectsCache(),
)
for _, opt := range opts {
opt(hookServer.(*server))
}
gitalypb.RegisterHookServiceServer(srv, hookServer)
gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
cfg,
deps.GetRubyServer(),
deps.GetLocator(),
deps.GetTxManager(),
deps.GetGitCmdFactory(),
deps.GetCatfileCache(),
deps.GetConnsPool(),
deps.GetGit2goExecutor(),
deps.GetHousekeepingManager(),
))
}, serverOpts...)
}

View file

@ -1,29 +0,0 @@
package namespace
import (
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc"
)
func setupNamespaceService(t testing.TB, opts ...testserver.GitalyServerOpt) (config.Cfg, gitalypb.NamespaceServiceClient) {
cfgBuilder := testcfg.NewGitalyCfgBuilder(testcfg.WithStorages("default", "other"))
cfg := cfgBuilder.Build(t)
addr := testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
gitalypb.RegisterNamespaceServiceServer(srv, NewServer(deps.GetLocator()))
}, opts...)
conn, err := grpc.Dial(addr, grpc.WithInsecure())
require.NoError(t, err)
t.Cleanup(func() { testhelper.MustClose(t, conn) })
return cfg, gitalypb.NewNamespaceServiceClient(conn)
}

View file

@ -1,252 +0,0 @@
package objectpool
import (
"os"
"path/filepath"
"testing"
"time"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/sirupsen/logrus/hooks/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/backchannel"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/catfile"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/housekeeping"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"gitlab.com/gitlab-org/labkit/log"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
func TestFetchIntoObjectPool_Success(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, repoPath, locator, client := setup(ctx, t)
repoCommit := gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch(t.Name()))
pool := initObjectPool(t, cfg, cfg.Storages[0])
_, err := client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
ObjectPool: pool.ToProto(),
Origin: repo,
})
require.NoError(t, err)
req := &gitalypb.FetchIntoObjectPoolRequest{
ObjectPool: pool.ToProto(),
Origin: repo,
Repack: true,
}
_, err = client.FetchIntoObjectPool(ctx, req)
require.NoError(t, err)
pool = rewrittenObjectPool(ctx, t, cfg, pool)
require.True(t, pool.IsValid(), "ensure underlying repository is valid")
// git-fsck should report no problems in the pool repository
gittest.Exec(t, cfg, "-C", pool.FullPath(), "fsck")
packFiles, err := filepath.Glob(filepath.Join(pool.FullPath(), "objects", "pack", "pack-*.pack"))
require.NoError(t, err)
require.Len(t, packFiles, 1, "ensure commits got packed")
packContents := gittest.Exec(t, cfg, "-C", pool.FullPath(), "verify-pack", "-v", packFiles[0])
require.Contains(t, string(packContents), repoCommit)
_, err = client.FetchIntoObjectPool(ctx, req)
require.NoError(t, err, "calling FetchIntoObjectPool twice should be OK")
require.True(t, pool.IsValid(), "ensure that pool is valid")
// Simulate a broken ref
poolPath, err := locator.GetRepoPath(pool)
require.NoError(t, err)
brokenRef := filepath.Join(poolPath, "refs", "heads", "broken")
require.NoError(t, os.MkdirAll(filepath.Dir(brokenRef), 0o755))
require.NoError(t, os.WriteFile(brokenRef, []byte{}, 0o777))
oldTime := time.Now().Add(-25 * time.Hour)
require.NoError(t, os.Chtimes(brokenRef, oldTime, oldTime))
_, err = client.FetchIntoObjectPool(ctx, req)
require.NoError(t, err)
_, err = os.Stat(brokenRef)
require.Error(t, err, "Expected refs/heads/broken to be deleted")
}
func TestFetchIntoObjectPool_hooks(t *testing.T) {
cfg := testcfg.Build(t)
gitCmdFactory := gittest.NewCommandFactory(t, cfg, git.WithHooksPath(testhelper.TempDir(t)))
cfg.SocketPath = runObjectPoolServer(t, cfg, config.NewLocator(cfg), testhelper.NewDiscardingLogger(t), testserver.WithGitCommandFactory(gitCmdFactory))
ctx := testhelper.Context(t)
repo, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
conn, err := grpc.Dial(cfg.SocketPath, grpc.WithInsecure())
require.NoError(t, err)
defer testhelper.MustClose(t, conn)
client := gitalypb.NewObjectPoolServiceClient(conn)
pool := initObjectPool(t, cfg, cfg.Storages[0])
_, err = client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
ObjectPool: pool.ToProto(),
Origin: repo,
})
require.NoError(t, err)
// Set up a custom reference-transaction hook which simply exits with failure. This asserts
// that the RPC doesn't invoke any reference transactions.
testhelper.WriteExecutable(t, filepath.Join(gitCmdFactory.HooksPath(ctx), "reference-transaction"), []byte("#!/bin/sh\nexit 1\n"))
req := &gitalypb.FetchIntoObjectPoolRequest{
ObjectPool: pool.ToProto(),
Origin: repo,
Repack: true,
}
_, err = client.FetchIntoObjectPool(ctx, req)
testhelper.RequireGrpcError(t, status.Error(codes.Internal, "fetch into object pool: exit status 128, stderr: \"fatal: ref updates aborted by hook\\n\""), err)
}
func TestFetchIntoObjectPool_CollectLogStatistics(t *testing.T) {
cfg := testcfg.Build(t)
testcfg.BuildGitalyHooks(t, cfg)
locator := config.NewLocator(cfg)
logger, hook := test.NewNullLogger()
cfg.SocketPath = runObjectPoolServer(t, cfg, locator, logger)
ctx := testhelper.Context(t)
ctx = ctxlogrus.ToContext(ctx, log.WithField("test", "logging"))
repo, _ := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
conn, err := grpc.Dial(cfg.SocketPath, grpc.WithInsecure())
require.NoError(t, err)
t.Cleanup(func() { testhelper.MustClose(t, conn) })
client := gitalypb.NewObjectPoolServiceClient(conn)
pool := initObjectPool(t, cfg, cfg.Storages[0])
_, err = client.CreateObjectPool(ctx, &gitalypb.CreateObjectPoolRequest{
ObjectPool: pool.ToProto(),
Origin: repo,
})
require.NoError(t, err)
req := &gitalypb.FetchIntoObjectPoolRequest{
ObjectPool: pool.ToProto(),
Origin: repo,
Repack: true,
}
_, err = client.FetchIntoObjectPool(ctx, req)
require.NoError(t, err)
const key = "count_objects"
for _, logEntry := range hook.AllEntries() {
if stats, ok := logEntry.Data[key]; ok {
require.IsType(t, map[string]interface{}{}, stats)
var keys []string
for key := range stats.(map[string]interface{}) {
keys = append(keys, key)
}
require.ElementsMatch(t, []string{
"count",
"garbage",
"in-pack",
"packs",
"prune-packable",
"size",
"size-garbage",
"size-pack",
}, keys)
return
}
}
require.FailNow(t, "no info about statistics")
}
func TestFetchIntoObjectPool_Failure(t *testing.T) {
cfgBuilder := testcfg.NewGitalyCfgBuilder()
cfg, repos := cfgBuilder.BuildWithRepoAt(t, t.Name())
locator := config.NewLocator(cfg)
gitCmdFactory := gittest.NewCommandFactory(t, cfg)
catfileCache := catfile.NewCache(cfg)
t.Cleanup(catfileCache.Stop)
txManager := transaction.NewManager(cfg, backchannel.NewRegistry())
server := NewServer(
locator,
gitCmdFactory,
catfileCache,
txManager,
housekeeping.NewManager(cfg.Prometheus, txManager),
)
ctx := testhelper.Context(t)
pool := initObjectPool(t, cfg, cfg.Storages[0])
poolWithDifferentStorage := pool.ToProto()
poolWithDifferentStorage.Repository.StorageName = "some other storage"
testCases := []struct {
description string
request *gitalypb.FetchIntoObjectPoolRequest
code codes.Code
errMsg string
}{
{
description: "empty origin",
request: &gitalypb.FetchIntoObjectPoolRequest{
ObjectPool: pool.ToProto(),
},
code: codes.InvalidArgument,
errMsg: "origin is empty",
},
{
description: "empty pool",
request: &gitalypb.FetchIntoObjectPoolRequest{
Origin: repos[0],
},
code: codes.InvalidArgument,
errMsg: "object pool is empty",
},
{
description: "origin and pool do not share the same storage",
request: &gitalypb.FetchIntoObjectPoolRequest{
Origin: repos[0],
ObjectPool: poolWithDifferentStorage,
},
code: codes.InvalidArgument,
errMsg: "origin has different storage than object pool",
},
}
for _, tc := range testCases {
t.Run(tc.description, func(t *testing.T) {
_, err := server.FetchIntoObjectPool(ctx, tc.request)
require.Error(t, err)
testhelper.RequireGrpcCode(t, err, tc.code)
assert.Contains(t, err.Error(), tc.errMsg)
})
}
}

View file

@ -1,127 +0,0 @@
package operations
import (
"context"
"errors"
"fmt"
"time"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/updateref"
"gitlab.com/gitlab-org/gitaly/v14/internal/git2go"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
//nolint: revive,stylecheck // This is unintentionally missing documentation.
func (s *Server) UserCherryPick(ctx context.Context, req *gitalypb.UserCherryPickRequest) (*gitalypb.UserCherryPickResponse, error) {
if err := validateCherryPickOrRevertRequest(req); err != nil {
return nil, status.Errorf(codes.InvalidArgument, "UserCherryPick: %v", err)
}
quarantineDir, quarantineRepo, err := s.quarantinedRepo(ctx, req.GetRepository())
if err != nil {
return nil, err
}
startRevision, err := s.fetchStartRevision(ctx, quarantineRepo, req)
if err != nil {
return nil, err
}
repoHadBranches, err := quarantineRepo.HasBranches(ctx)
if err != nil {
return nil, err
}
repoPath, err := quarantineRepo.Path()
if err != nil {
return nil, err
}
var mainline uint
if len(req.Commit.ParentIds) > 1 {
mainline = 1
}
committerDate := time.Now()
if req.Timestamp != nil {
committerDate = req.Timestamp.AsTime()
}
newrev, err := s.git2goExecutor.CherryPick(ctx, quarantineRepo, git2go.CherryPickCommand{
Repository: repoPath,
CommitterName: string(req.User.Name),
CommitterMail: string(req.User.Email),
CommitterDate: committerDate,
Message: string(req.Message),
Commit: req.Commit.Id,
Ours: startRevision.String(),
Mainline: mainline,
})
if err != nil {
switch {
case errors.As(err, &git2go.HasConflictsError{}):
return &gitalypb.UserCherryPickResponse{
CreateTreeError: err.Error(),
CreateTreeErrorCode: gitalypb.UserCherryPickResponse_CONFLICT,
}, nil
case errors.As(err, &git2go.EmptyError{}):
return &gitalypb.UserCherryPickResponse{
CreateTreeError: err.Error(),
CreateTreeErrorCode: gitalypb.UserCherryPickResponse_EMPTY,
}, nil
case errors.Is(err, git2go.ErrInvalidArgument):
return nil, helper.ErrInvalidArgument(err)
default:
return nil, helper.ErrInternalf("cherry-pick command: %w", err)
}
}
referenceName := git.NewReferenceNameFromBranchName(string(req.BranchName))
branchCreated := false
oldrev, err := quarantineRepo.ResolveRevision(ctx, referenceName.Revision()+"^{commit}")
if errors.Is(err, git.ErrReferenceNotFound) {
branchCreated = true
oldrev = git.ZeroOID
} else if err != nil {
return nil, helper.ErrInvalidArgumentf("resolve ref: %w", err)
}
if req.DryRun {
newrev = startRevision
}
if !branchCreated {
ancestor, err := quarantineRepo.IsAncestor(ctx, oldrev.Revision(), newrev.Revision())
if err != nil {
return nil, err
}
if !ancestor {
return &gitalypb.UserCherryPickResponse{
CommitError: "Branch diverged",
}, nil
}
}
if err := s.updateReferenceWithHooks(ctx, req.GetRepository(), req.User, quarantineDir, referenceName, newrev, oldrev); err != nil {
if errors.As(err, &updateref.HookError{}) {
return &gitalypb.UserCherryPickResponse{
PreReceiveError: err.Error(),
}, nil
}
return nil, fmt.Errorf("update reference with hooks: %w", err)
}
return &gitalypb.UserCherryPickResponse{
BranchUpdate: &gitalypb.OperationBranchUpdate{
CommitId: newrev.String(),
BranchCreated: branchCreated,
RepoCreated: !repoHadBranches,
},
}, nil
}

View file

@ -1,33 +0,0 @@
package repository
import (
"context"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
const fullPathKey = "gitlab.fullpath"
// SetFullPath writes the provided path value into the repository's gitconfig under the
// "gitlab.fullpath" key.
func (s *server) SetFullPath(
ctx context.Context,
request *gitalypb.SetFullPathRequest,
) (*gitalypb.SetFullPathResponse, error) {
if request.GetRepository() == nil {
return nil, helper.ErrInvalidArgumentf("empty Repository")
}
if len(request.GetPath()) == 0 {
return nil, helper.ErrInvalidArgumentf("no path provided")
}
repo := s.localrepo(request.GetRepository())
if err := repo.SetConfig(ctx, fullPathKey, request.GetPath(), s.txManager); err != nil {
return nil, helper.ErrInternalf("setting config: %w", err)
}
return &gitalypb.SetFullPathResponse{}, nil
}

View file

@ -1,162 +0,0 @@
package repository
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"regexp"
"strings"
"github.com/go-enry/go-license-detector/v4/licensedb"
"github.com/go-enry/go-license-detector/v4/licensedb/filer"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/lstree"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
"gitlab.com/gitlab-org/gitaly/v14/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func (s *server) FindLicense(ctx context.Context, req *gitalypb.FindLicenseRequest) (*gitalypb.FindLicenseResponse, error) {
if featureflag.GoFindLicense.IsEnabled(ctx) {
repo := localrepo.New(s.locator, s.gitCmdFactory, s.catfileCache, req.GetRepository())
hasHeadRevision, err := repo.HasRevision(ctx, "HEAD")
if err != nil {
return nil, helper.ErrInternalf("cannot check HEAD revision: %v", err)
}
if !hasHeadRevision {
return &gitalypb.FindLicenseResponse{}, nil
}
repoFiler := &gitFiler{ctx, repo, false}
licenses, err := licensedb.Detect(repoFiler)
if err != nil {
if errors.Is(err, licensedb.ErrNoLicenseFound) {
licenseShortName := ""
if repoFiler.foundLicense {
// The Ruby implementation of FindLicense returned 'other' when a license file
// was found and '' when no license file was found. The `Detect` method returns ErrNoLicenseFound
// if it doesn't identify the license. To retain backwards compatibility, the repoFiler records
// whether it encountered any license files. That information is used here to determine
// whether we need to send back 'other'.
licenseShortName = "other"
}
return &gitalypb.FindLicenseResponse{LicenseShortName: licenseShortName}, nil
}
return nil, helper.ErrInternal(fmt.Errorf("FindLicense: Err: %w", err))
}
var result string
var bestConfidence float32
for candidate, match := range licenses {
if match.Confidence > bestConfidence {
result = candidate
bestConfidence = match.Confidence
}
}
return &gitalypb.FindLicenseResponse{LicenseShortName: strings.ToLower(result)}, nil
}
client, err := s.ruby.RepositoryServiceClient(ctx)
if err != nil {
return nil, err
}
clientCtx, err := rubyserver.SetHeaders(ctx, s.locator, req.GetRepository())
if err != nil {
return nil, err
}
return client.FindLicense(clientCtx, req)
}
var readmeRegexp = regexp.MustCompile(`(readme|guidelines)(\.md|\.rst|\.html|\.txt)?$`)
type gitFiler struct {
ctx context.Context
repo *localrepo.Repo
foundLicense bool
}
func (f *gitFiler) ReadFile(path string) ([]byte, error) {
var stdout, stderr bytes.Buffer
if err := f.repo.ExecAndWait(f.ctx, git.SubCmd{
Name: "cat-file",
Args: []string{"blob", fmt.Sprintf("HEAD:%s", path)},
}, git.WithStdout(&stdout), git.WithStderr(&stderr)); err != nil {
return nil, fmt.Errorf("cat-file failed: %w, stderr: %q", err, stderr.String())
}
// `licensedb.Detect` only opens files that look like licenses. Failing that, it will
// also open readme files to try to identify license files. The RPC handler needs to
// know whether any license files were encountered, so we filter out the
// readme files as defined in licensedb.Detect:
// https://github.com/go-enry/go-license-detector/blob/4f2ca6af2ab943d9b5fa3a02782eebc06f79a5f4/licensedb/internal/investigation.go#L61
//
// This doesn't filter out possible license files identified from the readme files, which may in fact not
// be licenses.
if !f.foundLicense {
f.foundLicense = !readmeRegexp.MatchString(strings.ToLower(path))
}
return stdout.Bytes(), nil
}
func (f *gitFiler) ReadDir(string) ([]filer.File, error) {
// We're doing a recursive listing returning all files at once such that we do not have to
// call git-ls-tree(1) multiple times.
var stderr bytes.Buffer
cmd, err := f.repo.Exec(f.ctx, git.SubCmd{
Name: "ls-tree",
Flags: []git.Option{
git.Flag{Name: "--full-tree"},
git.Flag{Name: "-z"},
},
Args: []string{"HEAD"},
}, git.WithStderr(&stderr))
if err != nil {
return nil, err
}
tree := lstree.NewParser(cmd)
var files []filer.File
for {
entry, err := tree.NextEntry()
if err != nil {
if err == io.EOF {
break
}
return nil, err
}
// Given that we're doing a recursive listing, we skip over all types which aren't
// blobs.
if entry.Type != lstree.Blob {
continue
}
files = append(files, filer.File{
Name: entry.Path,
IsDir: false,
})
}
if err := cmd.Wait(); err != nil {
return nil, fmt.Errorf("ls-tree failed: %w, stderr: %q", err, stderr.String())
}
return files, nil
}
func (f *gitFiler) Close() {}
func (f *gitFiler) PathsAreAlwaysSlash() bool {
// git ls-tree uses unix slash `/`
return true
}

View file

@ -1,120 +0,0 @@
package repository
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/rubyserver"
"gitlab.com/gitlab-org/gitaly/v14/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func testSuccessfulFindLicenseRequest(t *testing.T, cfg config.Cfg, client gitalypb.RepositoryServiceClient, rubySrv *rubyserver.Server) {
testhelper.NewFeatureSets(featureflag.GoFindLicense).Run(t, func(t *testing.T, ctx context.Context) {
for _, tc := range []struct {
desc string
nonExistentRepository bool
files map[string]string
expectedLicense string
errorContains string
}{
{
desc: "repository does not exist",
nonExistentRepository: true,
errorContains: "rpc error: code = NotFound desc = GetRepoPath: not a git repository",
},
{
desc: "empty if no license file in repo",
files: map[string]string{
"README.md": "readme content",
},
expectedLicense: "",
},
{
desc: "high confidence mit result and less confident mit-0 result",
files: map[string]string{
"LICENSE": `MIT License
Copyright (c) [year] [fullname]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.`,
},
expectedLicense: "mit",
},
{
desc: "unknown license",
files: map[string]string{
"LICENSE.md": "this doesn't match any known license",
},
expectedLicense: "other",
},
} {
t.Run(tc.desc, func(t *testing.T) {
repo, repoPath := gittest.CreateRepository(ctx, t, cfg)
var treeEntries []gittest.TreeEntry
for file, content := range tc.files {
treeEntries = append(treeEntries, gittest.TreeEntry{
Mode: "100644",
Path: file,
Content: content,
})
}
gittest.WriteCommit(t, cfg, repoPath, gittest.WithBranch("main"), gittest.WithTreeEntries(treeEntries...), gittest.WithParents())
if tc.nonExistentRepository {
require.NoError(t, os.RemoveAll(repoPath))
}
resp, err := client.FindLicense(ctx, &gitalypb.FindLicenseRequest{Repository: repo})
if tc.errorContains != "" {
require.Error(t, err)
require.Contains(t, err.Error(), tc.errorContains)
return
}
require.NoError(t, err)
testhelper.ProtoEqual(t, &gitalypb.FindLicenseResponse{
LicenseShortName: tc.expectedLicense,
}, resp)
})
}
})
}
func testFindLicenseRequestEmptyRepo(t *testing.T, cfg config.Cfg, client gitalypb.RepositoryServiceClient, rubySrv *rubyserver.Server) {
testhelper.NewFeatureSets(featureflag.GoFindLicense).Run(t, func(t *testing.T, ctx context.Context) {
repo, repoPath := gittest.InitRepo(t, cfg, cfg.Storages[0])
require.NoError(t, os.RemoveAll(repoPath))
_, err := client.CreateRepository(ctx, &gitalypb.CreateRepositoryRequest{Repository: repo})
require.NoError(t, err)
resp, err := client.FindLicense(ctx, &gitalypb.FindLicenseRequest{Repository: repo})
require.NoError(t, err)
require.Empty(t, resp.GetLicenseShortName())
})
}

View file

@ -1,82 +0,0 @@
package repository
import (
"bytes"
"context"
"fmt"
"io"
"os/exec"
"strconv"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/metadata/featureflag"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func (s *server) RepositorySize(ctx context.Context, in *gitalypb.RepositorySizeRequest) (*gitalypb.RepositorySizeResponse, error) {
repo := s.localrepo(in.GetRepository())
var size int64
var err error
if featureflag.RevlistForRepoSize.IsEnabled(ctx) {
size, err = repo.Size(ctx)
if err != nil {
return nil, err
}
// Return the size in kilobytes to stay consistent with the du-based code path below.
size = size / 1024
} else {
path, err := repo.Path()
if err != nil {
return nil, err
}
size = getPathSize(ctx, path)
}
return &gitalypb.RepositorySizeResponse{Size: size}, nil
}
func (s *server) GetObjectDirectorySize(ctx context.Context, in *gitalypb.GetObjectDirectorySizeRequest) (*gitalypb.GetObjectDirectorySizeResponse, error) {
repo := s.localrepo(in.GetRepository())
path, err := repo.ObjectDirectoryPath()
if err != nil {
return nil, err
}
return &gitalypb.GetObjectDirectorySizeResponse{Size: getPathSize(ctx, path)}, nil
}
func getPathSize(ctx context.Context, path string) int64 {
cmd, err := command.New(ctx, exec.Command("du", "-sk", path), nil, nil, nil)
if err != nil {
ctxlogrus.Extract(ctx).WithError(err).Warn("ignoring du command error")
return 0
}
sizeLine, err := io.ReadAll(cmd)
if err != nil {
ctxlogrus.Extract(ctx).WithError(err).Warn("ignoring command read error")
return 0
}
if err := cmd.Wait(); err != nil {
ctxlogrus.Extract(ctx).WithError(err).Warn("ignoring du wait error")
return 0
}
sizeParts := bytes.Split(sizeLine, []byte("\t"))
if len(sizeParts) != 2 {
ctxlogrus.Extract(ctx).Warn(fmt.Sprintf("ignoring du malformed output: %q", sizeLine))
return 0
}
size, err := strconv.ParseInt(string(sizeParts[0]), 10, 0)
if err != nil {
ctxlogrus.Extract(ctx).WithError(err).Warn("ignoring parsing size error")
return 0
}
return size
}
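
For context, `du -sk` prints the size in kilobytes followed by a tab and the path, which is what the tab split and integer parse above rely on. A minimal, self-contained sketch of that parsing step (the path shown is made up for illustration):

```go
package main

import (
	"bytes"
	"fmt"
	"strconv"
)

func main() {
	// Typical `du -sk <path>` output: "<kilobytes>\t<path>\n".
	sizeLine := []byte("5120\t/var/opt/gitlab/git-data/repo.git\n")

	// Splitting on the tab yields the size field and the path field.
	sizeParts := bytes.Split(sizeLine, []byte("\t"))

	size, err := strconv.ParseInt(string(sizeParts[0]), 10, 0)
	fmt.Println(size, err) // 5120 <nil>
}
```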

View file

@ -1,158 +0,0 @@
package repository
import (
"bytes"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/transaction"
"gitlab.com/gitlab-org/gitaly/v14/internal/metadata"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/internal/transaction/txinfo"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
func TestWriteRefSuccessful(t *testing.T) {
txManager := transaction.NewTrackingManager()
cfg, repo, repoPath, client := setupRepositoryService(testhelper.Context(t), t, testserver.WithTransactionManager(txManager))
testCases := []struct {
desc string
req *gitalypb.WriteRefRequest
expectedVotes int
}{
{
desc: "shell update HEAD to refs/heads/master",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("HEAD"),
Revision: []byte("refs/heads/master"),
},
expectedVotes: 2,
},
{
desc: "shell update refs/heads/master",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("refs/heads/master"),
Revision: []byte("b83d6e391c22777fca1ed3012fce84f633d7fed0"),
},
expectedVotes: 2,
},
{
desc: "shell update refs/heads/master w/ validation",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("refs/heads/master"),
Revision: []byte("498214de67004b1da3d820901307bed2a68a8ef6"),
OldRevision: []byte("b83d6e391c22777fca1ed3012fce84f633d7fed0"),
},
expectedVotes: 2,
},
}
ctx, err := txinfo.InjectTransaction(testhelper.Context(t), 1, "node", true)
require.NoError(t, err)
ctx = metadata.IncomingToOutgoing(ctx)
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
txManager.Reset()
_, err = client.WriteRef(ctx, tc.req)
require.NoError(t, err)
require.Len(t, txManager.Votes(), tc.expectedVotes)
if bytes.Equal(tc.req.Ref, []byte("HEAD")) {
content := testhelper.MustReadFile(t, filepath.Join(repoPath, "HEAD"))
refRevision := bytes.Join([][]byte{[]byte("ref: "), tc.req.Revision, []byte("\n")}, nil)
require.EqualValues(t, refRevision, content)
return
}
rev := gittest.Exec(t, cfg, "--git-dir", repoPath, "log", "--pretty=%H", "-1", string(tc.req.Ref))
rev = bytes.Replace(rev, []byte("\n"), nil, 1)
require.Equal(t, string(tc.req.Revision), string(rev))
})
}
}
func TestWriteRefValidationError(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := setupRepositoryService(ctx, t)
testCases := []struct {
desc string
req *gitalypb.WriteRefRequest
}{
{
desc: "empty revision",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("refs/heads/master"),
},
},
{
desc: "empty ref name",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Revision: []byte("498214de67004b1da3d820901307bed2a68a8ef6"),
},
},
{
desc: "non-prefixed ref name for shell",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("master"),
Revision: []byte("498214de67004b1da3d820901307bed2a68a8ef6"),
},
},
{
desc: "revision contains \\x00",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("refs/heads/master"),
Revision: []byte("012301230123\x001243"),
},
},
{
desc: "ref contains \\x00",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("refs/head\x00s/master\x00"),
Revision: []byte("0123012301231243"),
},
},
{
desc: "ref contains whitespace",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("refs/heads /master"),
Revision: []byte("0123012301231243"),
},
},
{
desc: "invalid revision",
req: &gitalypb.WriteRefRequest{
Repository: repo,
Ref: []byte("refs/heads/master"),
Revision: []byte("--output=/meow"),
},
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
_, err := client.WriteRef(ctx, tc.req)
testhelper.RequireGrpcCode(t, err, codes.InvalidArgument)
})
}
}

View file

@ -1,17 +0,0 @@
package server
import (
"context"
"time"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
func (s *server) ClockSynced(_ context.Context, req *gitalypb.ClockSyncedRequest) (*gitalypb.ClockSyncedResponse, error) {
synced, err := helper.CheckClockSync(req.NtpHost, time.Duration(req.DriftThresholdMillis*int64(time.Millisecond)))
if err != nil {
return nil, err
}
return &gitalypb.ClockSyncedResponse{Synced: synced}, nil
}

View file

@ -1,56 +0,0 @@
package ssh
import (
"testing"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service"
hookservice "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/hook"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/service/repository"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc"
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
func runSSHServer(t *testing.T, cfg config.Cfg, serverOpts ...testserver.GitalyServerOpt) string {
return runSSHServerWithOptions(t, cfg, nil, serverOpts...)
}
func runSSHServerWithOptions(t *testing.T, cfg config.Cfg, opts []ServerOpt, serverOpts ...testserver.GitalyServerOpt) string {
return testserver.RunGitalyServer(t, cfg, nil, func(srv *grpc.Server, deps *service.Dependencies) {
gitalypb.RegisterSSHServiceServer(srv, NewServer(
deps.GetLocator(),
deps.GetGitCmdFactory(),
deps.GetTxManager(),
opts...))
gitalypb.RegisterHookServiceServer(srv, hookservice.NewServer(deps.GetHookManager(), deps.GetGitCmdFactory(), deps.GetPackObjectsCache()))
gitalypb.RegisterRepositoryServiceServer(srv, repository.NewServer(
cfg,
deps.GetRubyServer(),
deps.GetLocator(),
deps.GetTxManager(),
deps.GetGitCmdFactory(),
deps.GetCatfileCache(),
deps.GetConnsPool(),
deps.GetGit2goExecutor(),
deps.GetHousekeepingManager(),
))
}, serverOpts...)
}
func newSSHClient(t *testing.T, serverSocketPath string) (gitalypb.SSHServiceClient, *grpc.ClientConn) {
connOpts := []grpc.DialOption{
grpc.WithInsecure(),
}
conn, err := grpc.Dial(serverSocketPath, connOpts...)
if err != nil {
t.Fatal(err)
}
return gitalypb.NewSSHServiceClient(conn), conn
}

View file

@ -1,668 +0,0 @@
package ssh
import (
"bytes"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
"time"
"github.com/prometheus/client_golang/prometheus"
promtest "github.com/prometheus/client_golang/prometheus/testutil"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/text"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testserver"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
"google.golang.org/protobuf/encoding/protojson"
)
type cloneCommand struct {
command git.Cmd
repository *gitalypb.Repository
server string
featureFlags []string
gitConfig string
gitProtocol string
cfg config.Cfg
sidechannel bool
}
func runTestWithAndWithoutConfigOptions(t *testing.T, tf func(t *testing.T, opts ...testcfg.Option), opts ...testcfg.Option) {
t.Run("no config options", func(t *testing.T) { tf(t) })
if len(opts) > 0 {
t.Run("with config options", func(t *testing.T) {
tf(t, opts...)
})
}
}
func (cmd cloneCommand) execute(t *testing.T) error {
req := &gitalypb.SSHUploadPackRequest{
Repository: cmd.repository,
GitProtocol: cmd.gitProtocol,
}
if cmd.gitConfig != "" {
req.GitConfigOptions = strings.Split(cmd.gitConfig, " ")
}
payload, err := protojson.Marshal(req)
require.NoError(t, err)
var flagPairs []string
for _, flag := range cmd.featureFlags {
flagPairs = append(flagPairs, fmt.Sprintf("%s:true", flag))
}
ctx := testhelper.Context(t)
env := []string{
fmt.Sprintf("GITALY_ADDRESS=%s", cmd.server),
fmt.Sprintf("GITALY_PAYLOAD=%s", payload),
fmt.Sprintf("GITALY_FEATUREFLAGS=%s", strings.Join(flagPairs, ",")),
fmt.Sprintf("PATH=.:%s", os.Getenv("PATH")),
fmt.Sprintf(`GIT_SSH_COMMAND=%s upload-pack`, filepath.Join(cmd.cfg.BinDir, "gitaly-ssh")),
}
if cmd.sidechannel {
env = append(env, "GITALY_USE_SIDECHANNEL=1")
}
var output bytes.Buffer
gitCommand, err := gittest.NewCommandFactory(t, cmd.cfg).NewWithoutRepo(ctx,
cmd.command, git.WithStdout(&output), git.WithStderr(&output), git.WithEnv(env...), git.WithDisabledHooks(),
)
require.NoError(t, err)
if err := gitCommand.Wait(); err != nil {
return fmt.Errorf("Failed to run `git clone`: %q", output.Bytes())
}
return nil
}
func (cmd cloneCommand) test(t *testing.T, cfg config.Cfg, repoPath string, localRepoPath string) (string, string, string, string) {
t.Helper()
defer func() { require.NoError(t, os.RemoveAll(localRepoPath)) }()
err := cmd.execute(t)
require.NoError(t, err)
remoteHead := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "rev-parse", "master"))
localHead := text.ChompBytes(gittest.Exec(t, cfg, "-C", localRepoPath, "rev-parse", "master"))
remoteTags := text.ChompBytes(gittest.Exec(t, cfg, "-C", repoPath, "tag"))
localTags := text.ChompBytes(gittest.Exec(t, cfg, "-C", localRepoPath, "tag"))
return localHead, remoteHead, localTags, remoteTags
}
func TestFailedUploadPackRequestDueToTimeout(t *testing.T) {
t.Parallel()
runTestWithAndWithoutConfigOptions(t, testFailedUploadPackRequestDueToTimeout, testcfg.WithPackObjectsCacheEnabled())
}
func testFailedUploadPackRequestDueToTimeout(t *testing.T, opts ...testcfg.Option) {
cfg := testcfg.Build(t, opts...)
cfg.SocketPath = runSSHServerWithOptions(t, cfg, []ServerOpt{WithUploadPackRequestTimeout(10 * time.Microsecond)})
repo, _ := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
client, conn := newSSHClient(t, cfg.SocketPath)
defer conn.Close()
ctx := testhelper.Context(t)
stream, err := client.SSHUploadPack(ctx)
require.NoError(t, err)
// The first request is not limited by timeout, but also not under attacker control
require.NoError(t, stream.Send(&gitalypb.SSHUploadPackRequest{Repository: repo}))
// Because the client says nothing, the server would block. Because of
// the timeout, it won't block forever, but will instead return with a
// non-zero exit code.
requireFailedSSHStream(t, func() (int32, error) {
resp, err := stream.Recv()
if err != nil {
return 0, err
}
var code int32
if status := resp.GetExitStatus(); status != nil {
code = status.Value
}
return code, nil
})
}
func requireFailedSSHStream(t *testing.T, recv func() (int32, error)) {
done := make(chan struct{})
var code int32
var err error
go func() {
for err == nil {
code, err = recv()
}
close(done)
}()
select {
case <-done:
testhelper.RequireGrpcCode(t, err, codes.Internal)
require.NotEqual(t, 0, code, "exit status")
case <-time.After(10 * time.Second):
t.Fatal("timeout waiting for SSH stream")
}
}
func TestFailedUploadPackRequestDueToValidationError(t *testing.T) {
t.Parallel()
cfg := testcfg.Build(t)
serverSocketPath := runSSHServer(t, cfg)
client, conn := newSSHClient(t, serverSocketPath)
defer conn.Close()
tests := []struct {
Desc string
Req *gitalypb.SSHUploadPackRequest
Code codes.Code
}{
{
Desc: "Repository.RelativePath is empty",
Req: &gitalypb.SSHUploadPackRequest{Repository: &gitalypb.Repository{StorageName: cfg.Storages[0].Name, RelativePath: ""}},
Code: codes.InvalidArgument,
},
{
Desc: "Repository is nil",
Req: &gitalypb.SSHUploadPackRequest{Repository: nil},
Code: codes.InvalidArgument,
},
{
Desc: "Data exists on first request",
Req: &gitalypb.SSHUploadPackRequest{Repository: &gitalypb.Repository{StorageName: cfg.Storages[0].Name, RelativePath: "path/to/repo"}, Stdin: []byte("Fail")},
Code: func() codes.Code {
if testhelper.IsPraefectEnabled() {
return codes.NotFound
}
return codes.InvalidArgument
}(),
},
}
for _, test := range tests {
t.Run(test.Desc, func(t *testing.T) {
ctx := testhelper.Context(t)
stream, err := client.SSHUploadPack(ctx)
if err != nil {
t.Fatal(err)
}
if err = stream.Send(test.Req); err != nil {
t.Fatal(err)
}
require.NoError(t, stream.CloseSend())
err = testPostUploadPackFailedResponse(t, stream)
testhelper.RequireGrpcCode(t, err, test.Code)
})
}
}
func TestUploadPackCloneSuccess(t *testing.T) {
t.Parallel()
runTestWithAndWithoutConfigOptions(t, testUploadPackCloneSuccess, testcfg.WithPackObjectsCacheEnabled())
}
func testUploadPackCloneSuccess(t *testing.T, opts ...testcfg.Option) {
testUploadPackCloneSuccess2(t, false, opts...)
}
func TestUploadPackWithSidechannelCloneSuccess(t *testing.T) {
t.Parallel()
runTestWithAndWithoutConfigOptions(t, testUploadPackWithSidechannelCloneSuccess, testcfg.WithPackObjectsCacheEnabled())
}
func testUploadPackWithSidechannelCloneSuccess(t *testing.T, opts ...testcfg.Option) {
testUploadPackCloneSuccess2(t, true, opts...)
}
func testUploadPackCloneSuccess2(t *testing.T, sidechannel bool, opts ...testcfg.Option) {
cfg := testcfg.Build(t, opts...)
testcfg.BuildGitalyHooks(t, cfg)
testcfg.BuildGitalySSH(t, cfg)
negotiationMetrics := prometheus.NewCounterVec(prometheus.CounterOpts{}, []string{"feature"})
cfg.SocketPath = runSSHServerWithOptions(t, cfg, []ServerOpt{WithPackfileNegotiationMetrics(negotiationMetrics)})
repo, repoPath := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
localRepoPath := testhelper.TempDir(t)
tests := []struct {
cmd git.Cmd
desc string
deepen float64
}{
{
cmd: git.SubCmd{
Name: "clone",
Args: []string{"git@localhost:test/test.git", localRepoPath},
},
desc: "full clone",
deepen: 0,
},
{
cmd: git.SubCmd{
Name: "clone",
Flags: []git.Option{
git.ValueFlag{Name: "--depth", Value: "1"},
},
Args: []string{"git@localhost:test/test.git", localRepoPath},
},
desc: "shallow clone",
deepen: 1,
},
}
for _, tc := range tests {
t.Run(tc.desc, func(t *testing.T) {
negotiationMetrics.Reset()
cmd := cloneCommand{
repository: repo,
command: tc.cmd,
server: cfg.SocketPath,
cfg: cfg,
sidechannel: sidechannel,
}
lHead, rHead, _, _ := cmd.test(t, cfg, repoPath, localRepoPath)
require.Equal(t, lHead, rHead, "local and remote head not equal")
metric, err := negotiationMetrics.GetMetricWithLabelValues("deepen")
require.NoError(t, err)
require.Equal(t, tc.deepen, promtest.ToFloat64(metric))
})
}
}
func TestUploadPackWithPackObjectsHook(t *testing.T) {
t.Parallel()
cfg := testcfg.Build(t, testcfg.WithPackObjectsCacheEnabled())
filterDir := testhelper.TempDir(t)
outputPath := filepath.Join(filterDir, "output")
cfg.BinDir = filterDir
testcfg.BuildGitalySSH(t, cfg)
// We're using a custom pack-objects hook for git-upload-pack. In order
// to assure that it's getting executed as expected, we're writing a
// custom script which replaces the hook binary. It doesn't do anything
// special, but writes a marker file to prove it was invoked and then
// executes the real Git binary so that the clone still succeeds.
testhelper.WriteExecutable(t, filepath.Join(filterDir, "gitaly-hooks"), []byte(fmt.Sprintf(
`#!/bin/bash
set -eo pipefail
echo 'I was invoked' >'%s'
shift
exec git "$@"
`, outputPath)))
cfg.SocketPath = runSSHServer(t, cfg)
repo, _ := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
localRepoPath := testhelper.TempDir(t)
err := cloneCommand{
repository: repo,
command: git.SubCmd{
Name: "clone", Args: []string{"git@localhost:test/test.git", localRepoPath},
},
server: cfg.SocketPath,
cfg: cfg,
}.execute(t)
require.NoError(t, err)
require.Equal(t, []byte("I was invoked\n"), testhelper.MustReadFile(t, outputPath))
}
func TestUploadPackWithoutSideband(t *testing.T) {
t.Parallel()
runTestWithAndWithoutConfigOptions(t, testUploadPackWithoutSideband, testcfg.WithPackObjectsCacheEnabled())
}
func testUploadPackWithoutSideband(t *testing.T, opts ...testcfg.Option) {
cfg := testcfg.Build(t, opts...)
testcfg.BuildGitalySSH(t, cfg)
testcfg.BuildGitalyHooks(t, cfg)
cfg.SocketPath = runSSHServer(t, cfg)
repo, _ := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
// While Git knows the side-band-64k capability, some other clients don't. There is no way
// though to have Git not use that capability, so we're instead manually crafting a packfile
// negotiation without that capability and sending it along.
negotiation := bytes.NewBuffer([]byte{})
gittest.WritePktlineString(t, negotiation, "want 1e292f8fedd741b75372e19097c76d327140c312 multi_ack_detailed thin-pack include-tag ofs-delta agent=git/2.29.1")
gittest.WritePktlineString(t, negotiation, "want 1e292f8fedd741b75372e19097c76d327140c312")
gittest.WritePktlineFlush(t, negotiation)
gittest.WritePktlineString(t, negotiation, "done")
request := &gitalypb.SSHUploadPackRequest{
Repository: repo,
}
payload, err := protojson.Marshal(request)
require.NoError(t, err)
// As we're not using the sideband, the remote process will write both to stdout and stderr.
// Those simultaneous writes to both stdout and stderr created a race, as we could've invoked
// two concurrent `SendMsg`s on the gRPC stream. And given that `SendMsg` is not thread-safe,
// a deadlock would result.
uploadPack := exec.Command(filepath.Join(cfg.BinDir, "gitaly-ssh"), "upload-pack", "dontcare", "dontcare")
uploadPack.Env = []string{
fmt.Sprintf("GITALY_ADDRESS=%s", cfg.SocketPath),
fmt.Sprintf("GITALY_PAYLOAD=%s", payload),
fmt.Sprintf("PATH=.:%s", os.Getenv("PATH")),
}
uploadPack.Stdin = negotiation
out, err := uploadPack.CombinedOutput()
require.NoError(t, err)
require.True(t, uploadPack.ProcessState.Success())
require.Contains(t, string(out), "refs/heads/master")
require.Contains(t, string(out), "Counting objects")
require.Contains(t, string(out), "PACK")
}
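
For readers unfamiliar with the negotiation crafted above: each pkt-line is framed with a four-digit hexadecimal length prefix that includes the prefix itself, and "0000" is the flush packet. The helpers `gittest.WritePktlineString` and `gittest.WritePktlineFlush` presumably emit this framing; the following is a minimal sketch of the format, not the actual helpers:

```go
package main

import (
	"bytes"
	"fmt"
)

// writePktline frames a payload as a Git pkt-line: a 4-digit hex length
// (covering the prefix and the payload) followed by the payload itself.
func writePktline(buf *bytes.Buffer, payload string) {
	fmt.Fprintf(buf, "%04x%s", 4+len(payload), payload)
}

// writePktlineFlush writes the flush packet which terminates a section.
func writePktlineFlush(buf *bytes.Buffer) {
	buf.WriteString("0000")
}

func main() {
	var negotiation bytes.Buffer
	writePktline(&negotiation, "want 1e292f8fedd741b75372e19097c76d327140c312\n")
	writePktlineFlush(&negotiation)
	writePktline(&negotiation, "done\n")
	fmt.Printf("%q\n", negotiation.String())
}
```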
func TestUploadPackCloneWithPartialCloneFilter(t *testing.T) {
t.Parallel()
runTestWithAndWithoutConfigOptions(t, testUploadPackCloneWithPartialCloneFilter, testcfg.WithPackObjectsCacheEnabled())
}
func testUploadPackCloneWithPartialCloneFilter(t *testing.T, opts ...testcfg.Option) {
cfg := testcfg.Build(t, opts...)
testcfg.BuildGitalySSH(t, cfg)
testcfg.BuildGitalyHooks(t, cfg)
cfg.SocketPath = runSSHServer(t, cfg)
repo, _ := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
// Ruby file which is ~1kB in size and not present in HEAD
blobLessThanLimit := git.ObjectID("6ee41e85cc9bf33c10b690df09ca735b22f3790f")
// Image which is ~100kB in size and not present in HEAD
blobGreaterThanLimit := git.ObjectID("18079e308ff9b3a5e304941020747e5c39b46c88")
tests := []struct {
desc string
repoTest func(t *testing.T, repoPath string)
cmd git.SubCmd
}{
{
desc: "full_clone",
repoTest: func(t *testing.T, repoPath string) {
gittest.RequireObjectExists(t, cfg, repoPath, blobGreaterThanLimit)
},
cmd: git.SubCmd{
Name: "clone",
},
},
{
desc: "partial_clone",
repoTest: func(t *testing.T, repoPath string) {
gittest.RequireObjectNotExists(t, cfg, repoPath, blobGreaterThanLimit)
},
cmd: git.SubCmd{
Name: "clone",
Flags: []git.Option{
git.ValueFlag{Name: "--filter", Value: "blob:limit=2048"},
},
},
},
}
for _, tc := range tests {
t.Run(tc.desc, func(t *testing.T) {
// Run the clone for both cases. The only difference is that the
// partial_clone case passes a blob-size filter while the full_clone
// case does not.
localPath := testhelper.TempDir(t)
tc.cmd.Args = []string{"git@localhost:test/test.git", localPath}
cmd := cloneCommand{
repository: repo,
command: tc.cmd,
server: cfg.SocketPath,
cfg: cfg,
}
err := cmd.execute(t)
defer func() { require.NoError(t, os.RemoveAll(localPath)) }()
require.NoError(t, err, "clone failed")
gittest.RequireObjectExists(t, cfg, localPath, blobLessThanLimit)
tc.repoTest(t, localPath)
})
}
}
func TestUploadPackCloneSuccessWithGitProtocol(t *testing.T) {
t.Parallel()
runTestWithAndWithoutConfigOptions(t, testUploadPackCloneSuccessWithGitProtocol, testcfg.WithPackObjectsCacheEnabled())
}
func testUploadPackCloneSuccessWithGitProtocol(t *testing.T, opts ...testcfg.Option) {
cfg := testcfg.Build(t, opts...)
ctx := testhelper.Context(t)
gitCmdFactory, readProto := gittest.NewProtocolDetectingCommandFactory(ctx, t, cfg)
cfg.SocketPath = runSSHServer(t, cfg, testserver.WithGitCommandFactory(gitCmdFactory))
repo, repoPath := gittest.CreateRepository(ctx, t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
testcfg.BuildGitalySSH(t, cfg)
testcfg.BuildGitalyHooks(t, cfg)
localRepoPath := testhelper.TempDir(t)
tests := []struct {
cmd git.Cmd
desc string
}{
{
cmd: git.SubCmd{
Name: "clone",
Args: []string{"git@localhost:test/test.git", localRepoPath},
},
desc: "full clone",
},
{
cmd: git.SubCmd{
Name: "clone",
Args: []string{"git@localhost:test/test.git", localRepoPath},
Flags: []git.Option{
git.ValueFlag{Name: "--depth", Value: "1"},
},
},
desc: "shallow clone",
},
}
for _, tc := range tests {
t.Run(tc.desc, func(t *testing.T) {
cmd := cloneCommand{
repository: repo,
command: tc.cmd,
server: cfg.SocketPath,
gitProtocol: git.ProtocolV2,
cfg: cfg,
}
lHead, rHead, _, _ := cmd.test(t, cfg, repoPath, localRepoPath)
require.Equal(t, lHead, rHead, "local and remote head not equal")
envData := readProto()
require.Contains(t, envData, fmt.Sprintf("GIT_PROTOCOL=%s\n", git.ProtocolV2))
})
}
}
func TestUploadPackCloneHideTags(t *testing.T) {
t.Parallel()
cfg := testcfg.Build(t)
testcfg.BuildGitalySSH(t, cfg)
testcfg.BuildGitalyHooks(t, cfg)
cfg.SocketPath = runSSHServer(t, cfg)
repo, repoPath := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
localRepoPath := testhelper.TempDir(t)
cloneCmd := cloneCommand{
repository: repo,
command: git.SubCmd{
Name: "clone",
Flags: []git.Option{
git.Flag{Name: "--mirror"},
},
Args: []string{"git@localhost:test/test.git", localRepoPath},
},
server: cfg.SocketPath,
gitConfig: "transfer.hideRefs=refs/tags",
cfg: cfg,
}
_, _, lTags, rTags := cloneCmd.test(t, cfg, repoPath, localRepoPath)
if lTags == rTags {
t.Fatalf("local and remote tags are equal. clone failed: %q != %q", lTags, rTags)
}
if tag := "v1.0.0"; !strings.Contains(rTags, tag) {
t.Fatalf("sanity check failed, tag %q not found in %q", tag, rTags)
}
}
func TestUploadPackCloneFailure(t *testing.T) {
t.Parallel()
cfg := testcfg.Build(t)
cfg.SocketPath = runSSHServer(t, cfg)
repo, _ := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
localRepoPath := testhelper.TempDir(t)
cmd := cloneCommand{
repository: &gitalypb.Repository{
StorageName: "foobar",
RelativePath: repo.GetRelativePath(),
},
command: git.SubCmd{
Name: "clone",
Args: []string{"git@localhost:test/test.git", localRepoPath},
},
server: cfg.SocketPath,
cfg: cfg,
}
err := cmd.execute(t)
require.Error(t, err, "clone didn't fail")
}
func TestUploadPackCloneGitFailure(t *testing.T) {
t.Parallel()
cfg := testcfg.Build(t)
cfg.SocketPath = runSSHServer(t, cfg)
repo, repoPath := gittest.CreateRepository(testhelper.Context(t), t, cfg, gittest.CreateRepositoryConfig{
Seed: gittest.SeedGitLabTest,
})
client, conn := newSSHClient(t, cfg.SocketPath)
defer conn.Close()
configPath := filepath.Join(repoPath, "config")
gitconfig, err := os.Create(configPath)
require.NoError(t, err)
// Writing an invalid config will allow repo to pass the `IsGitDirectory` check but still
// trigger an error when git tries to access the repo.
_, err = gitconfig.WriteString("Not a valid git config")
require.NoError(t, err)
require.NoError(t, gitconfig.Close())
ctx := testhelper.Context(t)
stream, err := client.SSHUploadPack(ctx)
if err != nil {
t.Fatal(err)
}
if err = stream.Send(&gitalypb.SSHUploadPackRequest{Repository: repo}); err != nil {
t.Fatal(err)
}
require.NoError(t, stream.CloseSend())
err = testPostUploadPackFailedResponse(t, stream)
testhelper.RequireGrpcCode(t, err, codes.Internal)
require.EqualError(t, err, "rpc error: code = Internal desc = cmd wait: exit status 128, stderr: \"fatal: bad config line 1 in file ./config\\n\"")
}
func testPostUploadPackFailedResponse(t *testing.T, stream gitalypb.SSHService_SSHUploadPackClient) error {
var err error
var res *gitalypb.SSHUploadPackResponse
for err == nil {
res, err = stream.Recv()
require.Nil(t, res.GetStdout())
}
return err
}

View file

@ -1,207 +0,0 @@
package env_test
import (
"errors"
"fmt"
"strconv"
"testing"
"time"
"github.com/stretchr/testify/assert"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/env"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
)
func TestGetBool(t *testing.T) {
for _, tc := range []struct {
value string
fallback bool
expected bool
expectedErrIs error
}{
{
value: "true",
expected: true,
},
{
value: "false",
expected: false,
},
{
value: "1",
expected: true,
},
{
value: "0",
expected: false,
},
{
value: "",
expected: false,
},
{
value: "",
fallback: true,
expected: true,
},
{
value: "bad",
expected: false,
expectedErrIs: strconv.ErrSyntax,
},
{
value: "bad",
fallback: true,
expected: true,
expectedErrIs: strconv.ErrSyntax,
},
} {
t.Run(fmt.Sprintf("value=%s,fallback=%t", tc.value, tc.fallback), func(t *testing.T) {
testhelper.ModifyEnvironment(t, "TEST_BOOL", tc.value)
result, err := env.GetBool("TEST_BOOL", tc.fallback)
if tc.expectedErrIs != nil {
assert.Error(t, err)
assert.True(t, errors.Is(err, tc.expectedErrIs), err)
} else {
assert.NoError(t, err)
}
assert.Equal(t, tc.expected, result)
})
}
}
func TestGetInt(t *testing.T) {
for _, tc := range []struct {
value string
fallback int
expected int
expectedErrIs error
}{
{
value: "3",
expected: 3,
},
{
value: "",
expected: 0,
},
{
value: "",
fallback: 3,
expected: 3,
},
{
value: "bad",
expected: 0,
expectedErrIs: strconv.ErrSyntax,
},
{
value: "bad",
fallback: 3,
expected: 3,
expectedErrIs: strconv.ErrSyntax,
},
} {
t.Run(fmt.Sprintf("value=%s,fallback=%d", tc.value, tc.fallback), func(t *testing.T) {
testhelper.ModifyEnvironment(t, "TEST_INT", tc.value)
result, err := env.GetInt("TEST_INT", tc.fallback)
if tc.expectedErrIs != nil {
assert.Error(t, err)
assert.True(t, errors.Is(err, tc.expectedErrIs), err)
} else {
assert.NoError(t, err)
}
assert.Equal(t, tc.expected, result)
})
}
}
func TestGetDuration(t *testing.T) {
for _, tc := range []struct {
value string
fallback time.Duration
expected time.Duration
expectedErr string
}{
{
value: "3m",
fallback: 0,
expected: 3 * time.Minute,
},
{
value: "",
expected: 0,
},
{
value: "",
fallback: 3,
expected: 3,
},
{
value: "bad",
expected: 0,
expectedErr: `get duration TEST_DURATION: time: invalid duration "bad"`,
},
{
value: "bad",
fallback: 3,
expected: 3,
expectedErr: `get duration TEST_DURATION: time: invalid duration "bad"`,
},
} {
t.Run(fmt.Sprintf("value=%s,fallback=%d", tc.value, tc.fallback), func(t *testing.T) {
testhelper.ModifyEnvironment(t, "TEST_DURATION", tc.value)
result, err := env.GetDuration("TEST_DURATION", tc.fallback)
if tc.expectedErr != "" {
assert.Error(t, err)
assert.EqualError(t, err, tc.expectedErr)
} else {
assert.NoError(t, err)
}
assert.Equal(t, tc.expected, result)
})
}
}
func TestGetString(t *testing.T) {
for _, tc := range []struct {
value string
fallback string
expected string
}{
{
value: "Hello",
expected: "Hello",
},
{
value: "hello ",
expected: "hello",
},
{
fallback: "fallback value",
expected: "fallback value",
},
{
value: " ",
fallback: "fallback value",
expected: "",
},
} {
t.Run(fmt.Sprintf("value=%s,fallback=%s", tc.value, tc.fallback), func(t *testing.T) {
testhelper.ModifyEnvironment(t, "TEST_STRING", tc.value)
result := env.GetString("TEST_STRING", tc.fallback)
assert.Equal(t, tc.expected, result)
})
}
}
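
The tests above encode a specific contract: an unset variable silently yields the fallback, while an unparsable value returns the fallback together with an error that wraps strconv.ErrSyntax. The following is a minimal sketch of a GetBool that would satisfy them, assuming the helper in internal/helper/env wraps strconv; the actual implementation may differ:

```go
package env

import (
	"fmt"
	"os"
	"strconv"
)

// GetBool fetches and parses a boolean typed environment variable. If the
// variable is unset, the fallback is returned. If parsing fails, the fallback
// is returned together with the wrapped parse error.
func GetBool(name string, fallback bool) (bool, error) {
	s := os.Getenv(name)
	if s == "" {
		return fallback, nil
	}

	v, err := strconv.ParseBool(s)
	if err != nil {
		return fallback, fmt.Errorf("get bool %s: %w", name, err)
	}
	return v, nil
}
```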

View file

@ -1,128 +0,0 @@
package featureflag
import (
"context"
"fmt"
"strconv"
"strings"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/env"
"google.golang.org/grpc/metadata"
)
var (
// featureFlagsOverride allows enabling all feature flags with a
// single environment variable. If the value of
// GITALY_TESTING_ENABLE_ALL_FEATURE_FLAGS is set to "true", then all
// feature flags will be enabled. This is only used for testing
// purposes such that we can run integration tests with all feature flags enabled.
featureFlagsOverride, _ = env.GetBool("GITALY_TESTING_ENABLE_ALL_FEATURE_FLAGS", false)
flagChecks = promauto.NewCounterVec(
prometheus.CounterOpts{
Name: "gitaly_feature_flag_checks_total",
Help: "Number of enabled/disabled checks for Gitaly server side feature flags",
},
[]string{"flag", "enabled"},
)
// All is the list of all registered feature flags.
All = []FeatureFlag{}
)
const explicitFeatureFlagKey = "require_explicit_feature_flag_checks"
func injectIntoIncomingAndOutgoingContext(ctx context.Context, key string, enabled bool) context.Context {
incomingMD, ok := metadata.FromIncomingContext(ctx)
if !ok {
incomingMD = metadata.New(map[string]string{})
}
incomingMD.Set(key, strconv.FormatBool(enabled))
ctx = metadata.NewIncomingContext(ctx, incomingMD)
return metadata.AppendToOutgoingContext(ctx, key, strconv.FormatBool(enabled))
}
// ContextWithExplicitFeatureFlags marks the context such that all feature flags which are checked
// must have been explicitly set in that context. If a feature flag wasn't set to an explicit value,
// then checking this feature flag will panic. This is not for use in production systems, but is
// intended for tests to verify that we test each feature flag properly.
func ContextWithExplicitFeatureFlags(ctx context.Context) context.Context {
return injectIntoIncomingAndOutgoingContext(ctx, explicitFeatureFlagKey, true)
}
// FeatureFlag gates the implementation of new or changed functionality.
type FeatureFlag struct {
// Name is the name of the feature flag.
Name string `json:"name"`
// OnByDefault is the default value if the feature flag is not explicitly set in
// the incoming context.
OnByDefault bool `json:"on_by_default"`
}
// NewFeatureFlag creates a new feature flag and adds it to the array of all existing feature flags.
func NewFeatureFlag(name string, onByDefault bool) FeatureFlag {
featureFlag := FeatureFlag{
Name: name,
OnByDefault: onByDefault,
}
All = append(All, featureFlag)
return featureFlag
}
// IsEnabled checks if the feature flag is enabled for the passed context.
// It returns true if the metadata for the feature flag is set to "true"; if the
// flag is not set in the metadata, OnByDefault is used as the fallback.
func (ff FeatureFlag) IsEnabled(ctx context.Context) bool {
if featureFlagsOverride {
return true
}
val, ok := ff.valueFromContext(ctx)
if !ok {
if md, ok := metadata.FromIncomingContext(ctx); ok {
if _, ok := md[explicitFeatureFlagKey]; ok {
panic(fmt.Sprintf("checking for feature %q without use of feature sets", ff.Name))
}
}
return ff.OnByDefault
}
enabled := val == "true"
flagChecks.WithLabelValues(ff.Name, strconv.FormatBool(enabled)).Inc()
return enabled
}
// IsDisabled determines whether the feature flag is disabled in the incoming context.
func (ff FeatureFlag) IsDisabled(ctx context.Context) bool {
return !ff.IsEnabled(ctx)
}
// MetadataKey returns the key of the feature flag as it is present in the metadata map.
func (ff FeatureFlag) MetadataKey() string {
return ffPrefix + strings.ReplaceAll(ff.Name, "_", "-")
}
func (ff FeatureFlag) valueFromContext(ctx context.Context) (string, bool) {
md, ok := metadata.FromIncomingContext(ctx)
if !ok {
return "", false
}
val, ok := md[ff.MetadataKey()]
if !ok {
return "", false
}
if len(val) == 0 {
return "", false
}
return val[0], true
}
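
As a usage sketch of the metadata mechanism above: a caller opts in to a flag for a single RPC by setting its metadata key on the outgoing context, and the server reads that same key from the incoming metadata in IsEnabled. The "gitaly-feature-" prefix mentioned in the comment below is inferred from the headers in the following test file; the canonical value is the package's ffPrefix constant:

```go
package example

import (
	"context"

	"gitlab.com/gitlab-org/gitaly/v14/internal/metadata/featureflag"
	"google.golang.org/grpc/metadata"
)

// contextWithFlagEnabled enables the given flag for RPCs issued with the
// returned context. MetadataKey maps e.g. "some_flag" to
// "gitaly-feature-some-flag" (prefix assumed, see note above).
func contextWithFlagEnabled(ctx context.Context, flag featureflag.FeatureFlag) context.Context {
	return metadata.AppendToOutgoingContext(ctx, flag.MetadataKey(), "true")
}
```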

View file

@ -1,83 +0,0 @@
package featureflag
import (
"testing"
"github.com/stretchr/testify/require"
"google.golang.org/grpc/metadata"
)
func TestFeatureFlag_enabled(t *testing.T) {
for _, tc := range []struct {
desc string
flag string
headers map[string]string
enabled bool
onByDefault bool
}{
{
desc: "empty name and no headers",
flag: "",
headers: nil,
enabled: false,
onByDefault: false,
},
{
desc: "no headers",
flag: "flag",
headers: nil,
enabled: false,
onByDefault: false,
},
{
desc: "no 'gitaly-feature' prefix in flag name",
flag: "flag",
headers: map[string]string{"flag": "true"},
enabled: false,
onByDefault: false,
},
{
desc: "not valid header value",
flag: "flag",
headers: map[string]string{"gitaly-feature-flag": "TRUE"},
enabled: false,
onByDefault: false,
},
{
desc: "flag name with underscores",
flag: "flag_under_score",
headers: map[string]string{"gitaly-feature-flag-under-score": "true"},
enabled: true,
onByDefault: false,
},
{
desc: "flag name with dashes",
flag: "flag-dash-ok",
headers: map[string]string{"gitaly-feature-flag-dash-ok": "true"},
enabled: true,
onByDefault: false,
},
{
desc: "flag explicitly disabled",
flag: "flag",
headers: map[string]string{"gitaly-feature-flag": "false"},
enabled: false,
onByDefault: true,
},
{
desc: "flag enabled by default but missing",
flag: "flag",
headers: map[string]string{},
enabled: true,
onByDefault: true,
},
} {
t.Run(tc.desc, func(t *testing.T) {
ctx := metadata.NewIncomingContext(createContext(), metadata.New(tc.headers))
ff := FeatureFlag{tc.flag, tc.onByDefault}
require.Equal(t, tc.enabled, ff.IsEnabled(ctx))
require.Equal(t, tc.enabled, !ff.IsDisabled(ctx))
})
}
}

View file

@ -1,4 +0,0 @@
package featureflag
// CommandStatsMetrics tracks additional prometheus metrics for each shelled out command
var CommandStatsMetrics = NewFeatureFlag("command_stats_metrics", false)

View file

@ -1,5 +0,0 @@
package featureflag
// ExactPaginationTokenMatch enables exact matching for provided pagination tokens and
// returns an error if the match is not found.
var ExactPaginationTokenMatch = NewFeatureFlag("exact_pagination_token_match", false)

View file

@ -1,4 +0,0 @@
package featureflag
// GoFindLicense enables the Go implementation of FindLicense
var GoFindLicense = NewFeatureFlag("go_find_license", false)

View file

@ -1,5 +0,0 @@
package featureflag
// ConcurrencyQueueMaxWait will enable the concurrency limiter to drop requests that are waiting in
// the concurrency queue for longer than the configured time.
var ConcurrencyQueueMaxWait = NewFeatureFlag("concurrency_queue_max_wait", false)

View file

@ -1,5 +0,0 @@
package featureflag
// ConcurrencyQueueEnforceMax enforces a maximum number of items that are waiting in a concurrency queue.
// When this flag is turned on, requests arriving once the queue is full are rejected with an error.
var ConcurrencyQueueEnforceMax = NewFeatureFlag("concurrency_queue_enforce_max", false)

View file

@ -1,5 +0,0 @@
package featureflag
// RateLimit will enable the rate limiter to reject requests beyond a configured
// rate.
var RateLimit = NewFeatureFlag("rate_limit", false)

View file

@ -1,5 +0,0 @@
package featureflag
// RevlistForRepoSize enables the RepositorySize RPC to use git rev-list to
// calculate the disk usage of the repository.
var RevlistForRepoSize = NewFeatureFlag("revlist_for_repo_size", false)

View file

@ -1,4 +0,0 @@
package featureflag
// RunCommandsInCGroup allows all commands to be run within a cgroup
var RunCommandsInCGroup = NewFeatureFlag("run_cmds_in_cgroup", true)

View file

@ -1,5 +0,0 @@
package featureflag
// TransactionalRestoreCustomHooks will use transactional voting in the
// RestoreCustomHooks RPC
var TransactionalRestoreCustomHooks = NewFeatureFlag("tx_restore_custom_hooks", false)

View file

@ -1,7 +0,0 @@
package featureflag
// UserRebaseConfirmableImprovedErrorHandling enables proper error handling in the UserRebaseConfirmable
// RPC. When this flag is disabled, many error cases return successfully with an error message
// embedded in the response. With this flag enabled, these cases instead return real gRPC errors
// carrying structured error details.
var UserRebaseConfirmableImprovedErrorHandling = NewFeatureFlag("user_rebase_confirmable_improved_error_handling", false)
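
Each of the files above only declares a flag. As a hedged sketch of the usual call-site shape (the actual gitaly call sites are not part of this diff), the snippet below guards a behaviour change behind ConcurrencyQueueEnforceMax; the import path, errQueueFull, and maybeRejectRequest are illustrative names, not gitaly internals.

package main

import (
	"context"
	"errors"
	"fmt"

	"gitlab.com/gitlab-org/gitaly/v14/internal/metadata/featureflag" // assumed import path
)

// errQueueFull is a hypothetical error value used only for this illustration.
var errQueueFull = errors.New("concurrency queue is full")

// maybeRejectRequest sketches the usual call-site shape: behaviour changes only
// when the flag is enabled for the incoming context.
func maybeRejectRequest(ctx context.Context, queueLen, maxLen int) error {
	if featureflag.ConcurrencyQueueEnforceMax.IsEnabled(ctx) && queueLen >= maxLen {
		return errQueueFull
	}
	return nil
}

func main() {
	// With no metadata in the context the flag falls back to its default (off),
	// so nothing is rejected.
	fmt.Println(maybeRejectRequest(context.Background(), 10, 5)) // <nil>
}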

View file

@ -1,498 +0,0 @@
// Copyright 2017 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
package proxy_test
import (
"context"
"errors"
"fmt"
"io"
"net"
"net/http"
"net/http/httptest"
"net/url"
"path/filepath"
"testing"
"github.com/getsentry/sentry-go"
grpcmw "github.com/grpc-ecosystem/go-grpc-middleware"
grpcmwtags "github.com/grpc-ecosystem/go-grpc-middleware/tags"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"gitlab.com/gitlab-org/gitaly/v14/client"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper/fieldextractors"
"gitlab.com/gitlab-org/gitaly/v14/internal/metadata"
"gitlab.com/gitlab-org/gitaly/v14/internal/middleware/sentryhandler"
"gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/proxy"
pb "gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/testdata"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
grpc_metadata "google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
)
const (
pingDefaultValue = "I like kittens."
clientMdKey = "test-client-header"
serverHeaderMdKey = "test-client-header"
serverTrailerMdKey = "test-client-trailer"
rejectingMdKey = "test-reject-rpc-if-in-context"
countListResponses = 20
)
func TestMain(m *testing.M) {
testhelper.Run(m)
}
// assertingService is the server-side implementation used by the test suite; it asserts properties of incoming requests while handling them.
type assertingService struct {
pb.UnimplementedTestServiceServer
t *testing.T
}
func (s *assertingService) PingEmpty(ctx context.Context, _ *pb.Empty) (*pb.PingResponse, error) {
// Check that this call has client's metadata.
md, ok := grpc_metadata.FromIncomingContext(ctx)
assert.True(s.t, ok, "PingEmpty call must have metadata in context")
_, ok = md[clientMdKey]
assert.True(s.t, ok, "PingEmpty call must have clients's custom headers in metadata")
return &pb.PingResponse{Value: pingDefaultValue, Counter: 42}, nil
}
func (s *assertingService) Ping(ctx context.Context, ping *pb.PingRequest) (*pb.PingResponse, error) {
// Send user trailers and headers.
require.NoError(s.t, grpc.SendHeader(ctx, grpc_metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
require.NoError(s.t, grpc.SetTrailer(ctx, grpc_metadata.Pairs(serverTrailerMdKey, "I like ending turtles.")))
return &pb.PingResponse{Value: ping.Value, Counter: 42}, nil
}
func (s *assertingService) PingError(ctx context.Context, ping *pb.PingRequest) (*pb.Empty, error) {
return nil, status.Errorf(codes.ResourceExhausted, "Userspace error.")
}
func (s *assertingService) PingList(ping *pb.PingRequest, stream pb.TestService_PingListServer) error {
// Send user trailers and headers.
require.NoError(s.t, stream.SendHeader(grpc_metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
for i := 0; i < countListResponses; i++ {
require.NoError(s.t, stream.Send(&pb.PingResponse{Value: ping.Value, Counter: int32(i)}))
}
stream.SetTrailer(grpc_metadata.Pairs(serverTrailerMdKey, "I like ending turtles."))
return nil
}
func (s *assertingService) PingStream(stream pb.TestService_PingStreamServer) error {
require.NoError(s.t, stream.SendHeader(grpc_metadata.Pairs(serverHeaderMdKey, "I like turtles.")))
counter := int32(0)
for {
ping, err := stream.Recv()
if err == io.EOF {
break
} else if err != nil {
require.NoError(s.t, err, "can't fail reading stream")
return err
}
pong := &pb.PingResponse{Value: ping.Value, Counter: counter}
if err := stream.Send(pong); err != nil {
require.NoError(s.t, err, "can't fail sending back a pong")
}
counter++
}
stream.SetTrailer(grpc_metadata.Pairs(serverTrailerMdKey, "I like ending turtles."))
return nil
}
// ProxyHappySuite tests the "happy" path of handling: that everything works in the absence of connection issues.
type ProxyHappySuite struct {
suite.Suite
ctx context.Context
cancel context.CancelFunc
server *grpc.Server
proxy *grpc.Server
connProxy2Server *grpc.ClientConn
client pb.TestServiceClient
connClient2Proxy *grpc.ClientConn
}
func (s *ProxyHappySuite) TestPingEmptyCarriesClientMetadata() {
ctx := grpc_metadata.NewOutgoingContext(s.ctx, grpc_metadata.Pairs(clientMdKey, "true"))
out, err := s.client.PingEmpty(ctx, &pb.Empty{})
require.NoError(s.T(), err, "PingEmpty should succeed without errors")
testhelper.ProtoEqual(s.T(), &pb.PingResponse{Value: pingDefaultValue, Counter: 42}, out)
}
func (s *ProxyHappySuite) TestPingEmpty_StressTest() {
for i := 0; i < 50; i++ {
s.TestPingEmptyCarriesClientMetadata()
}
}
func (s *ProxyHappySuite) TestPingCarriesServerHeadersAndTrailers() {
headerMd := make(grpc_metadata.MD)
trailerMd := make(grpc_metadata.MD)
// This is an awkward calling convention... but meh.
out, err := s.client.Ping(s.ctx, &pb.PingRequest{Value: "foo"}, grpc.Header(&headerMd), grpc.Trailer(&trailerMd))
require.NoError(s.T(), err, "Ping should succeed without errors")
testhelper.ProtoEqual(s.T(), &pb.PingResponse{Value: "foo", Counter: 42}, out)
assert.Contains(s.T(), headerMd, serverHeaderMdKey, "server response headers must contain server data")
assert.Len(s.T(), trailerMd, 1, "server response trailers must contain server data")
}
func (s *ProxyHappySuite) TestPingErrorPropagatesAppError() {
sentryTriggered := 0
sentrySrv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sentryTriggered++
}))
defer sentrySrv.Close()
// minimal required sentry client configuration
sentryURL, err := url.Parse(sentrySrv.URL)
require.NoError(s.T(), err)
sentryURL.User = url.UserPassword("stub", "stub")
sentryURL.Path = "/stub/1"
require.NoError(s.T(), sentry.Init(sentry.ClientOptions{
Dsn: sentryURL.String(),
Transport: sentry.NewHTTPSyncTransport(),
}))
sentry.CaptureEvent(sentry.NewEvent())
require.Equal(s.T(), 1, sentryTriggered, "sentry configured incorrectly")
_, err = s.client.PingError(s.ctx, &pb.PingRequest{Value: "foo"})
require.Error(s.T(), err, "PingError should never succeed")
assert.Equal(s.T(), codes.ResourceExhausted, status.Code(err))
assert.Equal(s.T(), "Userspace error.", status.Convert(err).Message())
require.Equal(s.T(), 1, sentryTriggered, "sentry must not be triggered because errors from remote must be just propagated")
}
func (s *ProxyHappySuite) TestDirectorErrorIsPropagated() {
// See SetupSuite where the StreamDirector has a special case.
ctx := grpc_metadata.NewOutgoingContext(s.ctx, grpc_metadata.Pairs(rejectingMdKey, "true"))
_, err := s.client.Ping(ctx, &pb.PingRequest{Value: "foo"})
require.Error(s.T(), err, "Director should reject this RPC")
assert.Equal(s.T(), codes.PermissionDenied, status.Code(err))
assert.Equal(s.T(), "testing rejection", status.Convert(err).Message())
}
func (s *ProxyHappySuite) TestPingStream_FullDuplexWorks() {
stream, err := s.client.PingStream(s.ctx)
require.NoError(s.T(), err, "PingStream request should be successful.")
for i := 0; i < countListResponses; i++ {
ping := &pb.PingRequest{Value: fmt.Sprintf("foo:%d", i)}
require.NoError(s.T(), stream.Send(ping), "sending to PingStream must not fail")
resp, err := stream.Recv()
if err == io.EOF {
break
}
if i == 0 {
// Check that the header arrives before all entries.
headerMd, err := stream.Header()
require.NoError(s.T(), err, "PingStream headers should not error.")
assert.Contains(s.T(), headerMd, serverHeaderMdKey, "PingStream response headers must contain metadata")
}
assert.EqualValues(s.T(), i, resp.Counter, "ping roundtrip must succeed with the correct id")
}
require.NoError(s.T(), stream.CloseSend(), "no error on close send")
_, err = stream.Recv()
require.Equal(s.T(), io.EOF, err, "stream should close with io.EOF, meaning OK")
// Check that the trailer headers are here.
trailerMd := stream.Trailer()
assert.Len(s.T(), trailerMd, 1, "PingStream trailer headers must contain metadata")
}
func (s *ProxyHappySuite) TestPingStream_StressTest() {
for i := 0; i < 50; i++ {
s.TestPingStream_FullDuplexWorks()
}
}
func (s *ProxyHappySuite) SetupSuite() {
s.ctx, s.cancel = context.WithCancel(testhelper.Context(s.T()))
listenerProxy, err := net.Listen("tcp", "127.0.0.1:0")
require.NoError(s.T(), err, "must be able to allocate a port for listenerProxy")
listenerServer, err := net.Listen("tcp", "127.0.0.1:0")
require.NoError(s.T(), err, "must be able to allocate a port for listenerServer")
// Setup of the proxy's Director.
s.connProxy2Server, err = grpc.Dial(listenerServer.Addr().String(), grpc.WithInsecure(), grpc.WithDefaultCallOptions(grpc.ForceCodec(proxy.NewCodec())))
require.NoError(s.T(), err, "must not error on deferred client Dial")
director := func(ctx context.Context, fullName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
payload, err := peeker.Peek()
if err != nil {
return nil, err
}
md, ok := grpc_metadata.FromIncomingContext(ctx)
if ok {
if _, exists := md[rejectingMdKey]; exists {
return proxy.NewStreamParameters(proxy.Destination{Ctx: metadata.IncomingToOutgoing(ctx), Msg: payload}, nil, nil, nil), status.Errorf(codes.PermissionDenied, "testing rejection")
}
}
// Explicitly copy the metadata, otherwise the tests will fail.
return proxy.NewStreamParameters(proxy.Destination{Ctx: metadata.IncomingToOutgoing(ctx), Conn: s.connProxy2Server, Msg: payload}, nil, nil, nil), nil
}
// Setup backend server for test suite
s.server = grpc.NewServer()
pb.RegisterTestServiceServer(s.server, &assertingService{t: s.T()})
go func() {
s.server.Serve(listenerServer)
}()
// Setup grpc-proxy server for test suite
s.proxy = grpc.NewServer(
grpc.ForceServerCodec(proxy.NewCodec()),
grpc.StreamInterceptor(
grpcmw.ChainStreamServer(
// context tags usage is required by sentryhandler.StreamLogHandler
grpcmwtags.StreamServerInterceptor(grpcmwtags.WithFieldExtractorForInitialReq(fieldextractors.FieldExtractor)),
// sentry middleware to capture errors
sentryhandler.StreamLogHandler,
),
),
grpc.UnknownServiceHandler(proxy.TransparentHandler(director)),
)
// The Ping RPC is registered explicitly rather than being served by the TransparentHandler.
proxy.RegisterService(s.proxy, director, "mwitkow.testproto.TestService", "Ping")
go func() {
s.proxy.Serve(listenerProxy)
}()
// Setup client for test suite
ctx := testhelper.Context(s.T())
s.connClient2Proxy, err = grpc.DialContext(ctx, listenerProxy.Addr().String(), grpc.WithInsecure())
require.NoError(s.T(), err, "must not error on deferred client Dial")
s.client = pb.NewTestServiceClient(s.connClient2Proxy)
}
func (s *ProxyHappySuite) TearDownSuite() {
if s.cancel != nil {
s.cancel()
}
if s.connClient2Proxy != nil {
s.connClient2Proxy.Close()
}
if s.connProxy2Server != nil {
s.connProxy2Server.Close()
}
if s.proxy != nil {
s.proxy.Stop()
}
if s.server != nil {
s.server.Stop()
}
}
func TestProxyHappySuite(t *testing.T) {
suite.Run(t, &ProxyHappySuite{})
}
func TestProxyErrorPropagation(t *testing.T) {
errBackend := status.Error(codes.InvalidArgument, "backend error")
errDirector := status.Error(codes.FailedPrecondition, "director error")
errRequestFinalizer := status.Error(codes.Internal, "request finalizer error")
for _, tc := range []struct {
desc string
backendError error
directorError error
requestFinalizerError error
returnedError error
errHandler func(error) error
}{
{
desc: "backend error is propagated",
backendError: errBackend,
returnedError: errBackend,
},
{
desc: "director error is propagated",
directorError: errDirector,
returnedError: errDirector,
},
{
desc: "request finalizer error is propagated",
requestFinalizerError: errRequestFinalizer,
returnedError: errRequestFinalizer,
},
{
desc: "director error cancels proxying",
backendError: errBackend,
requestFinalizerError: errRequestFinalizer,
directorError: errDirector,
returnedError: errDirector,
},
{
desc: "backend error prioritized over request finalizer error",
backendError: errBackend,
requestFinalizerError: errRequestFinalizer,
returnedError: errBackend,
},
{
desc: "err handler gets error",
backendError: errBackend,
requestFinalizerError: errRequestFinalizer,
returnedError: errBackend,
errHandler: func(err error) error {
testhelper.RequireGrpcError(t, errBackend, err)
return errBackend
},
},
{
desc: "err handler can swallow error",
backendError: errBackend,
returnedError: io.EOF,
errHandler: func(err error) error {
testhelper.RequireGrpcError(t, errBackend, err)
return nil
},
},
{
desc: "swallowed error surfaces request finalizer error",
backendError: errBackend,
requestFinalizerError: errRequestFinalizer,
returnedError: errRequestFinalizer,
errHandler: func(err error) error {
testhelper.RequireGrpcError(t, errBackend, err)
return nil
},
},
} {
t.Run(tc.desc, func(t *testing.T) {
tmpDir := testhelper.TempDir(t)
backendListener, err := net.Listen("unix", filepath.Join(tmpDir, "backend"))
require.NoError(t, err)
backendServer := grpc.NewServer(grpc.UnknownServiceHandler(func(interface{}, grpc.ServerStream) error {
return tc.backendError
}))
go func() { backendServer.Serve(backendListener) }()
defer backendServer.Stop()
ctx := testhelper.Context(t)
backendClientConn, err := grpc.DialContext(ctx, "unix://"+backendListener.Addr().String(),
grpc.WithInsecure(), grpc.WithDefaultCallOptions(grpc.ForceCodec(proxy.NewCodec())))
require.NoError(t, err)
defer func() {
require.NoError(t, backendClientConn.Close())
}()
proxyListener, err := net.Listen("unix", filepath.Join(tmpDir, "proxy"))
require.NoError(t, err)
proxyServer := grpc.NewServer(
grpc.ForceServerCodec(proxy.NewCodec()),
grpc.UnknownServiceHandler(proxy.TransparentHandler(func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
return proxy.NewStreamParameters(
proxy.Destination{
Ctx: ctx,
Conn: backendClientConn,
ErrHandler: tc.errHandler,
},
nil,
func() error { return tc.requestFinalizerError },
nil,
), tc.directorError
})),
)
go func() { proxyServer.Serve(proxyListener) }()
defer proxyServer.Stop()
proxyClientConn, err := grpc.DialContext(ctx, "unix://"+proxyListener.Addr().String(), grpc.WithInsecure())
require.NoError(t, err)
defer func() {
require.NoError(t, proxyClientConn.Close())
}()
resp, err := pb.NewTestServiceClient(proxyClientConn).Ping(ctx, &pb.PingRequest{})
testhelper.RequireGrpcError(t, tc.returnedError, err)
require.Nil(t, resp)
})
}
}
func TestRegisterStreamHandlers(t *testing.T) {
directorCalledError := errors.New("director was called")
server := grpc.NewServer(
grpc.ForceServerCodec(proxy.NewCodec()),
grpc.UnknownServiceHandler(proxy.TransparentHandler(func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
return nil, directorCalledError
})),
)
var pingStreamHandlerCalled, pingEmptyStreamHandlerCalled bool
pingValue := "hello"
pingStreamHandler := func(srv interface{}, stream grpc.ServerStream) error {
pingStreamHandlerCalled = true
var req pb.PingRequest
if err := stream.RecvMsg(&req); err != nil {
return err
}
require.Equal(t, pingValue, req.Value)
return stream.SendMsg(nil)
}
pingEmptyStreamHandler := func(srv interface{}, stream grpc.ServerStream) error {
pingEmptyStreamHandlerCalled = true
var req pb.Empty
if err := stream.RecvMsg(&req); err != nil {
return err
}
return stream.SendMsg(nil)
}
streamers := map[string]grpc.StreamHandler{
"Ping": pingStreamHandler,
"PingEmpty": pingEmptyStreamHandler,
}
proxy.RegisterStreamHandlers(server, "mwitkow.testproto.TestService", streamers)
serverSocketPath := testhelper.GetTemporaryGitalySocketFileName(t)
listener, err := net.Listen("unix", serverSocketPath)
if err != nil {
t.Fatal(err)
}
go server.Serve(listener)
defer server.Stop()
cc, err := client.Dial("unix://"+serverSocketPath, []grpc.DialOption{grpc.WithBlock()})
require.NoError(t, err)
defer cc.Close()
testServiceClient := pb.NewTestServiceClient(cc)
ctx := testhelper.Context(t)
_, err = testServiceClient.Ping(ctx, &pb.PingRequest{Value: pingValue})
require.NoError(t, err)
require.True(t, pingStreamHandlerCalled)
_, err = testServiceClient.PingEmpty(ctx, &pb.Empty{})
require.NoError(t, err)
require.True(t, pingEmptyStreamHandlerCalled)
// Since PingError was never registered with its own streamer, it should be sent to the UnknownServiceHandler.
_, err = testServiceClient.PingError(ctx, &pb.PingRequest{})
testhelper.RequireGrpcError(t, status.Error(codes.Unknown, directorCalledError.Error()), err)
}

View file

@ -1,117 +0,0 @@
package proxy_test
import (
"context"
"net"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/proxy"
testservice "gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/testdata"
"google.golang.org/grpc"
)
func newListener(tb testing.TB) net.Listener {
listener, err := net.Listen("tcp", "127.0.0.1:0")
require.NoError(tb, err, "must be able to allocate a port for listener")
return listener
}
func newBackendPinger(tb testing.TB, ctx context.Context) (*grpc.ClientConn, *interceptPinger, func()) {
ip := &interceptPinger{}
done := make(chan struct{})
srvr := grpc.NewServer()
listener := newListener(tb)
testservice.RegisterTestServiceServer(srvr, ip)
go func() {
defer close(done)
srvr.Serve(listener)
}()
cc, err := grpc.DialContext(
ctx,
listener.Addr().String(),
grpc.WithInsecure(),
grpc.WithBlock(),
grpc.WithDefaultCallOptions(
grpc.ForceCodec(proxy.NewCodec()),
),
)
require.NoError(tb, err)
cleanup := func() {
srvr.GracefulStop()
require.NoError(tb, cc.Close())
<-done
}
return cc, ip, cleanup
}
func newProxy(tb testing.TB, ctx context.Context, director proxy.StreamDirector, svc, method string) (*grpc.ClientConn, func()) {
proxySrvr := grpc.NewServer(
grpc.ForceServerCodec(proxy.NewCodec()),
grpc.UnknownServiceHandler(proxy.TransparentHandler(director)),
)
proxy.RegisterService(proxySrvr, director, svc, method)
done := make(chan struct{})
listener := newListener(tb)
go func() {
defer close(done)
proxySrvr.Serve(listener)
}()
proxyCC, err := grpc.DialContext(
ctx,
listener.Addr().String(),
grpc.WithInsecure(),
grpc.WithBlock(),
)
require.NoError(tb, err)
cleanup := func() {
proxySrvr.GracefulStop()
require.NoError(tb, proxyCC.Close())
<-done
}
return proxyCC, cleanup
}
// interceptPinger allows an RPC to be intercepted with a custom
// function defined in each unit test
type interceptPinger struct {
testservice.UnimplementedTestServiceServer
pingStream func(testservice.TestService_PingStreamServer) error
pingEmpty func(context.Context, *testservice.Empty) (*testservice.PingResponse, error)
ping func(context.Context, *testservice.PingRequest) (*testservice.PingResponse, error)
pingError func(context.Context, *testservice.PingRequest) (*testservice.Empty, error)
pingList func(*testservice.PingRequest, testservice.TestService_PingListServer) error
}
func (ip *interceptPinger) PingStream(stream testservice.TestService_PingStreamServer) error {
return ip.pingStream(stream)
}
func (ip *interceptPinger) PingEmpty(ctx context.Context, req *testservice.Empty) (*testservice.PingResponse, error) {
return ip.pingEmpty(ctx, req)
}
func (ip *interceptPinger) Ping(ctx context.Context, req *testservice.PingRequest) (*testservice.PingResponse, error) {
return ip.ping(ctx, req)
}
func (ip *interceptPinger) PingError(ctx context.Context, req *testservice.PingRequest) (*testservice.Empty, error) {
return ip.pingError(ctx, req)
}
func (ip *interceptPinger) PingList(req *testservice.PingRequest, stream testservice.TestService_PingListServer) error {
return ip.pingList(req, stream)
}

View file

@ -1,137 +0,0 @@
package proxy_test
import (
"context"
"io"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/metadata"
"gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/proxy"
testservice "gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/testdata"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"google.golang.org/protobuf/proto"
)
// TestStreamPeeking demonstrates that a director function is able to peek
// into a stream. Furthermore, it demonstrates that peeking into a stream
// will not disturb the stream sent from the proxy client to the backend.
func TestStreamPeeking(t *testing.T) {
ctx := testhelper.Context(t)
backendCC, backendSrvr, cleanupPinger := newBackendPinger(t, ctx)
defer cleanupPinger()
pingReqSent := &testservice.PingRequest{Value: "hi"}
// director will peek into stream before routing traffic
director := func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
peekedMsg, err := peeker.Peek()
require.NoError(t, err)
peekedRequest := &testservice.PingRequest{}
err = proto.Unmarshal(peekedMsg, peekedRequest)
require.NoError(t, err)
require.True(t, proto.Equal(pingReqSent, peekedRequest), "expected to be the same")
return proxy.NewStreamParameters(proxy.Destination{Ctx: metadata.IncomingToOutgoing(ctx), Conn: backendCC, Msg: peekedMsg}, nil, nil, nil), nil
}
pingResp := &testservice.PingResponse{
Counter: 1,
}
// we expect the backend server to receive the peeked message
backendSrvr.pingStream = func(stream testservice.TestService_PingStreamServer) error {
pingReqReceived, err := stream.Recv()
assert.NoError(t, err)
assert.True(t, proto.Equal(pingReqSent, pingReqReceived), "expected to be the same")
return stream.Send(pingResp)
}
proxyCC, cleanupProxy := newProxy(t, ctx, director, "mwitkow.testproto.TestService", "PingStream")
defer cleanupProxy()
proxyClient := testservice.NewTestServiceClient(proxyCC)
proxyClientPingStream, err := proxyClient.PingStream(ctx)
require.NoError(t, err)
defer func() {
require.NoError(t, proxyClientPingStream.CloseSend())
}()
require.NoError(t,
proxyClientPingStream.Send(pingReqSent),
)
resp, err := proxyClientPingStream.Recv()
require.NoError(t, err)
require.True(t, proto.Equal(resp, pingResp), "expected to be the same")
_, err = proxyClientPingStream.Recv()
require.Equal(t, io.EOF, err)
}
func TestStreamInjecting(t *testing.T) {
ctx := testhelper.Context(t)
backendCC, backendSrvr, cleanupPinger := newBackendPinger(t, ctx)
defer cleanupPinger()
pingReqSent := &testservice.PingRequest{Value: "hi"}
newValue := "bye"
// director will peek into stream and change some frames
director := func(ctx context.Context, fullMethodName string, peeker proxy.StreamPeeker) (*proxy.StreamParameters, error) {
peekedMsg, err := peeker.Peek()
require.NoError(t, err)
peekedRequest := &testservice.PingRequest{}
require.NoError(t, proto.Unmarshal(peekedMsg, peekedRequest))
require.Equal(t, "hi", peekedRequest.GetValue())
peekedRequest.Value = newValue
newPayload, err := proto.Marshal(peekedRequest)
require.NoError(t, err)
return proxy.NewStreamParameters(proxy.Destination{Ctx: metadata.IncomingToOutgoing(ctx), Conn: backendCC, Msg: newPayload}, nil, nil, nil), nil
}
pingResp := &testservice.PingResponse{
Counter: 1,
}
// we expect the backend server to receive the modified message
backendSrvr.pingStream = func(stream testservice.TestService_PingStreamServer) error {
pingReqReceived, err := stream.Recv()
assert.NoError(t, err)
assert.Equal(t, newValue, pingReqReceived.GetValue())
return stream.Send(pingResp)
}
proxyCC, cleanupProxy := newProxy(t, ctx, director, "mwitkow.testproto.TestService", "PingStream")
defer cleanupProxy()
proxyClient := testservice.NewTestServiceClient(proxyCC)
proxyClientPingStream, err := proxyClient.PingStream(ctx)
require.NoError(t, err)
defer func() {
require.NoError(t, proxyClientPingStream.CloseSend())
}()
require.NoError(t,
proxyClientPingStream.Send(pingReqSent),
)
resp, err := proxyClientPingStream.Recv()
require.NoError(t, err)
require.True(t, proto.Equal(resp, pingResp), "expected to be the same")
_, err = proxyClientPingStream.Recv()
require.Equal(t, io.EOF, err)
}

View file

@ -1,306 +0,0 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0
// protoc v3.17.3
// source: praefect/grpc-proxy/testdata/test.proto
package testdata
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type Empty struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *Empty) Reset() {
*x = Empty{}
if protoimpl.UnsafeEnabled {
mi := &file_praefect_grpc_proxy_testdata_test_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Empty) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Empty) ProtoMessage() {}
func (x *Empty) ProtoReflect() protoreflect.Message {
mi := &file_praefect_grpc_proxy_testdata_test_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Empty.ProtoReflect.Descriptor instead.
func (*Empty) Descriptor() ([]byte, []int) {
return file_praefect_grpc_proxy_testdata_test_proto_rawDescGZIP(), []int{0}
}
type PingRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *PingRequest) Reset() {
*x = PingRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_praefect_grpc_proxy_testdata_test_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *PingRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*PingRequest) ProtoMessage() {}
func (x *PingRequest) ProtoReflect() protoreflect.Message {
mi := &file_praefect_grpc_proxy_testdata_test_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use PingRequest.ProtoReflect.Descriptor instead.
func (*PingRequest) Descriptor() ([]byte, []int) {
return file_praefect_grpc_proxy_testdata_test_proto_rawDescGZIP(), []int{1}
}
func (x *PingRequest) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
type PingResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Value string `protobuf:"bytes,1,opt,name=Value,proto3" json:"Value,omitempty"`
Counter int32 `protobuf:"varint,2,opt,name=counter,proto3" json:"counter,omitempty"`
}
func (x *PingResponse) Reset() {
*x = PingResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_praefect_grpc_proxy_testdata_test_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *PingResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*PingResponse) ProtoMessage() {}
func (x *PingResponse) ProtoReflect() protoreflect.Message {
mi := &file_praefect_grpc_proxy_testdata_test_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use PingResponse.ProtoReflect.Descriptor instead.
func (*PingResponse) Descriptor() ([]byte, []int) {
return file_praefect_grpc_proxy_testdata_test_proto_rawDescGZIP(), []int{2}
}
func (x *PingResponse) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
func (x *PingResponse) GetCounter() int32 {
if x != nil {
return x.Counter
}
return 0
}
var File_praefect_grpc_proxy_testdata_test_proto protoreflect.FileDescriptor
var file_praefect_grpc_proxy_testdata_test_proto_rawDesc = []byte{
0x0a, 0x27, 0x70, 0x72, 0x61, 0x65, 0x66, 0x65, 0x63, 0x74, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x2d,
0x70, 0x72, 0x6f, 0x78, 0x79, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x74,
0x65, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x11, 0x6d, 0x77, 0x69, 0x74, 0x6b,
0x6f, 0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x07, 0x0a, 0x05,
0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x23, 0x0a, 0x0b, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x3e, 0x0a, 0x0c, 0x50, 0x69,
0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x56, 0x61,
0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x56, 0x61, 0x6c, 0x75, 0x65,
0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28,
0x05, 0x52, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x32, 0x91, 0x03, 0x0a, 0x0b, 0x54,
0x65, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x48, 0x0a, 0x09, 0x50, 0x69,
0x6e, 0x67, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x18, 0x2e, 0x6d, 0x77, 0x69, 0x74, 0x6b, 0x6f,
0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74,
0x79, 0x1a, 0x1f, 0x2e, 0x6d, 0x77, 0x69, 0x74, 0x6b, 0x6f, 0x77, 0x2e, 0x74, 0x65, 0x73, 0x74,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x22, 0x00, 0x12, 0x49, 0x0a, 0x04, 0x50, 0x69, 0x6e, 0x67, 0x12, 0x1e, 0x2e, 0x6d,
0x77, 0x69, 0x74, 0x6b, 0x6f, 0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x6d,
0x77, 0x69, 0x74, 0x6b, 0x6f, 0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12,
0x47, 0x0a, 0x09, 0x50, 0x69, 0x6e, 0x67, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x6d,
0x77, 0x69, 0x74, 0x6b, 0x6f, 0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x6d,
0x77, 0x69, 0x74, 0x6b, 0x6f, 0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x08, 0x50, 0x69, 0x6e, 0x67,
0x4c, 0x69, 0x73, 0x74, 0x12, 0x1e, 0x2e, 0x6d, 0x77, 0x69, 0x74, 0x6b, 0x6f, 0x77, 0x2e, 0x74,
0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x6d, 0x77, 0x69, 0x74, 0x6b, 0x6f, 0x77, 0x2e, 0x74,
0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x53, 0x0a, 0x0a, 0x50, 0x69, 0x6e,
0x67, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x1e, 0x2e, 0x6d, 0x77, 0x69, 0x74, 0x6b, 0x6f,
0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x69, 0x6e, 0x67,
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x6d, 0x77, 0x69, 0x74, 0x6b, 0x6f,
0x77, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x69, 0x6e, 0x67,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x28, 0x01, 0x30, 0x01, 0x42, 0x48,
0x5a, 0x46, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x69, 0x74,
0x6c, 0x61, 0x62, 0x2d, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x69, 0x74, 0x61, 0x6c, 0x79, 0x2f, 0x76,
0x31, 0x34, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x70, 0x72, 0x61, 0x65,
0x66, 0x65, 0x63, 0x74, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x2d, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2f,
0x74, 0x65, 0x73, 0x74, 0x64, 0x61, 0x74, 0x61, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_praefect_grpc_proxy_testdata_test_proto_rawDescOnce sync.Once
file_praefect_grpc_proxy_testdata_test_proto_rawDescData = file_praefect_grpc_proxy_testdata_test_proto_rawDesc
)
func file_praefect_grpc_proxy_testdata_test_proto_rawDescGZIP() []byte {
file_praefect_grpc_proxy_testdata_test_proto_rawDescOnce.Do(func() {
file_praefect_grpc_proxy_testdata_test_proto_rawDescData = protoimpl.X.CompressGZIP(file_praefect_grpc_proxy_testdata_test_proto_rawDescData)
})
return file_praefect_grpc_proxy_testdata_test_proto_rawDescData
}
var file_praefect_grpc_proxy_testdata_test_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
var file_praefect_grpc_proxy_testdata_test_proto_goTypes = []interface{}{
(*Empty)(nil), // 0: mwitkow.testproto.Empty
(*PingRequest)(nil), // 1: mwitkow.testproto.PingRequest
(*PingResponse)(nil), // 2: mwitkow.testproto.PingResponse
}
var file_praefect_grpc_proxy_testdata_test_proto_depIdxs = []int32{
0, // 0: mwitkow.testproto.TestService.PingEmpty:input_type -> mwitkow.testproto.Empty
1, // 1: mwitkow.testproto.TestService.Ping:input_type -> mwitkow.testproto.PingRequest
1, // 2: mwitkow.testproto.TestService.PingError:input_type -> mwitkow.testproto.PingRequest
1, // 3: mwitkow.testproto.TestService.PingList:input_type -> mwitkow.testproto.PingRequest
1, // 4: mwitkow.testproto.TestService.PingStream:input_type -> mwitkow.testproto.PingRequest
2, // 5: mwitkow.testproto.TestService.PingEmpty:output_type -> mwitkow.testproto.PingResponse
2, // 6: mwitkow.testproto.TestService.Ping:output_type -> mwitkow.testproto.PingResponse
0, // 7: mwitkow.testproto.TestService.PingError:output_type -> mwitkow.testproto.Empty
2, // 8: mwitkow.testproto.TestService.PingList:output_type -> mwitkow.testproto.PingResponse
2, // 9: mwitkow.testproto.TestService.PingStream:output_type -> mwitkow.testproto.PingResponse
5, // [5:10] is the sub-list for method output_type
0, // [0:5] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_praefect_grpc_proxy_testdata_test_proto_init() }
func file_praefect_grpc_proxy_testdata_test_proto_init() {
if File_praefect_grpc_proxy_testdata_test_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_praefect_grpc_proxy_testdata_test_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Empty); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_praefect_grpc_proxy_testdata_test_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PingRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_praefect_grpc_proxy_testdata_test_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PingResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_praefect_grpc_proxy_testdata_test_proto_rawDesc,
NumEnums: 0,
NumMessages: 3,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_praefect_grpc_proxy_testdata_test_proto_goTypes,
DependencyIndexes: file_praefect_grpc_proxy_testdata_test_proto_depIdxs,
MessageInfos: file_praefect_grpc_proxy_testdata_test_proto_msgTypes,
}.Build()
File_praefect_grpc_proxy_testdata_test_proto = out.File
file_praefect_grpc_proxy_testdata_test_proto_rawDesc = nil
file_praefect_grpc_proxy_testdata_test_proto_goTypes = nil
file_praefect_grpc_proxy_testdata_test_proto_depIdxs = nil
}

View file

@ -1,31 +0,0 @@
syntax = "proto3";
package mwitkow.testproto;
option go_package = "gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/testdata";
message Empty {
}
message PingRequest {
string value = 1;
}
message PingResponse {
string Value = 1;
int32 counter = 2;
}
service TestService {
rpc PingEmpty(Empty) returns (PingResponse) {}
rpc Ping(PingRequest) returns (PingResponse) {}
rpc PingError(PingRequest) returns (Empty) {}
rpc PingList(PingRequest) returns (stream PingResponse) {}
rpc PingStream(stream PingRequest) returns (stream PingResponse) {}
}
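
This proto file defines the TestService used throughout the proxy tests above. As a minimal, hedged sketch of consuming it through the generated Go client (mirroring how the tests call it), the dial address and error handling below are placeholders only.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"

	pb "gitlab.com/gitlab-org/gitaly/v14/internal/praefect/grpc-proxy/testdata"
)

func main() {
	// The address is a placeholder; any server that registered a
	// TestServiceServer implementation (as the tests above do) would work.
	conn, err := grpc.Dial("127.0.0.1:12345", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := pb.NewTestServiceClient(conn)
	resp, err := client.Ping(context.Background(), &pb.PingRequest{Value: "hello"})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Value, resp.Counter)
}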

Some files were not shown because too many files have changed in this diff.