bench-forgejo/routers/web/repo/view.go

// Copyright 2017 The Gitea Authors. All rights reserved.
// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package repo
import (
"bytes"
gocontext "context"
"encoding/base64"
"fmt"
gotemplate "html/template"
"io"
"net/http"
"net/url"
"path"
"strconv"
"strings"
"time"
"code.gitea.io/gitea/models"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
repo_model "code.gitea.io/gitea/models/repo"
unit_model "code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/cache"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/typesniffer"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/web/feed"
)
const (
tplRepoEMPTY base.TplName = "repo/empty"
tplRepoHome base.TplName = "repo/home"
tplRepoViewList base.TplName = "repo/view_list"
tplWatchers base.TplName = "repo/watchers"
tplForks base.TplName = "repo/forks"
tplMigrating base.TplName = "repo/migrate/migrating"
)
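// namedBlob holds a blob together with the name it was found under and whether that name was a symlink.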
type namedBlob struct {
name string
isSymlink bool
blob *git.Blob
}
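// linesBytesCount returns the number of lines in s; a final chunk without a trailing newline counts as a line (e.g. "a\nb" and "a\nb\n" both count as 2).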
func linesBytesCount(s []byte) int {
nl := []byte{'\n'}
n := bytes.Count(s, nl)
if len(s) > 0 && !bytes.HasSuffix(s, nl) {
n++
}
return n
}
// FIXME: There has to be a more efficient way of doing this
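// getReadmeFileFromPath returns the highest-priority README blob found directly under treePath (.md, then .txt, then no extension, then anything else markup recognizes as a README), following symlinks; it returns nil if the directory contains none.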
func getReadmeFileFromPath(commit *git.Commit, treePath string) (*namedBlob, error) {
tree, err := commit.SubTree(treePath)
if err != nil {
return nil, err
}
entries, err := tree.ListEntries()
if err != nil {
return nil, err
}
var readmeFiles [4]*namedBlob
exts := []string{".md", ".txt", ""} // sorted by priority
for _, entry := range entries {
if entry.IsDir() {
continue
}
for i, ext := range exts {
if markup.IsReadmeFile(entry.Name(), ext) {
if readmeFiles[i] == nil || base.NaturalSortLess(readmeFiles[i].name, entry.Blob().Name()) {
name := entry.Name()
isSymlink := entry.IsLink()
target := entry
if isSymlink {
target, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
return nil, err
}
}
if target != nil && (target.IsExecutable() || target.IsRegular()) {
readmeFiles[i] = &namedBlob{
name,
isSymlink,
target.Blob(),
}
}
}
}
}
if markup.IsReadmeFile(entry.Name()) {
if readmeFiles[3] == nil || base.NaturalSortLess(readmeFiles[3].name, entry.Blob().Name()) {
name := entry.Name()
isSymlink := entry.IsLink()
if isSymlink {
entry, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
return nil, err
}
}
if entry != nil && (entry.IsExecutable() || entry.IsRegular()) {
readmeFiles[3] = &namedBlob{
name,
isSymlink,
entry.Blob(),
}
}
}
}
}
var readmeFile *namedBlob
for _, f := range readmeFiles {
if f != nil {
readmeFile = f
break
}
}
return readmeFile, nil
}
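// renderDirectory renders the file listing for the current tree path, sets the add/upload permissions and, if a README is found, renders it below the listing.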
func renderDirectory(ctx *context.Context, treeLink string) {
entries := renderDirectoryFiles(ctx, 1*time.Second)
if ctx.Written() {
return
}
if ctx.Repo.TreePath != "" {
ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefName)
}
// Check permission to add or upload new file.
if ctx.Repo.CanWrite(unit_model.TypeCode) && ctx.Repo.IsViewBranch {
ctx.Data["CanAddFile"] = !ctx.Repo.Repository.IsArchived
ctx.Data["CanUploadFile"] = setting.Repository.Upload.Enabled && !ctx.Repo.Repository.IsArchived
}
readmeFile, readmeTreelink := findReadmeFile(ctx, entries, treeLink)
if ctx.Written() || readmeFile == nil {
return
}
renderReadmeFile(ctx, readmeFile, readmeTreelink)
}
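// findReadmeFile picks the best README among the given entries (by extension priority), following symlinks; at the repository root it also looks inside the docs, .gitea and .github directories. It returns the chosen blob and the tree link used to resolve relative links.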
func findReadmeFile(ctx *context.Context, entries git.Entries, treeLink string) (*namedBlob, string) {
// The first three slots are for the extensions in exts[] (in priority order);
// the last slot is for a README that doesn't strictly match an extension.
var readmeFiles [4]*namedBlob
var docsEntries [3]*git.TreeEntry
exts := []string{".md", ".txt", ""} // sorted by priority
for _, entry := range entries {
if entry.IsDir() {
lowerName := strings.ToLower(entry.Name())
switch lowerName {
case "docs":
if entry.Name() == "docs" || docsEntries[0] == nil {
docsEntries[0] = entry
}
case ".gitea":
if entry.Name() == ".gitea" || docsEntries[1] == nil {
docsEntries[1] = entry
}
case ".github":
if entry.Name() == ".github" || docsEntries[2] == nil {
docsEntries[2] = entry
}
}
continue
}
for i, ext := range exts {
if markup.IsReadmeFile(entry.Name(), ext) {
log.Debug("%s", entry.Name())
name := entry.Name()
isSymlink := entry.IsLink()
target := entry
if isSymlink {
var err error
target, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
ctx.ServerError("FollowLinks", err)
return nil, ""
}
}
log.Debug("%t", target == nil)
if target != nil && (target.IsExecutable() || target.IsRegular()) {
readmeFiles[i] = &namedBlob{
name,
isSymlink,
target.Blob(),
}
}
}
}
if markup.IsReadmeFile(entry.Name()) {
name := entry.Name()
isSymlink := entry.IsLink()
if isSymlink {
var err error
entry, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
ctx.ServerError("FollowLinks", err)
return nil, ""
}
}
if entry != nil && (entry.IsExecutable() || entry.IsRegular()) {
readmeFiles[3] = &namedBlob{
name,
isSymlink,
entry.Blob(),
}
}
}
}
var readmeFile *namedBlob
readmeTreelink := treeLink
for _, f := range readmeFiles {
if f != nil {
readmeFile = f
break
}
}
if ctx.Repo.TreePath == "" && readmeFile == nil {
for _, entry := range docsEntries {
if entry == nil {
continue
}
var err error
readmeFile, err = getReadmeFileFromPath(ctx.Repo.Commit, entry.GetSubJumpablePathName())
if err != nil {
ctx.ServerError("getReadmeFileFromPath", err)
return nil, ""
}
if readmeFile != nil {
readmeFile.name = entry.Name() + "/" + readmeFile.name
readmeTreelink = treeLink + "/" + util.PathEscapeSegments(entry.GetSubJumpablePathName())
break
}
}
}
return readmeFile, readmeTreelink
}
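// renderReadmeFile renders the given README below the directory listing: it resolves LFS pointers, skips non-text and oversized files, and stores the rendered markup or escaped plain text in the template context.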
func renderReadmeFile(ctx *context.Context, readmeFile *namedBlob, readmeTreelink string) {
ctx.Data["RawFileLink"] = ""
ctx.Data["ReadmeInList"] = true
ctx.Data["ReadmeExist"] = true
ctx.Data["FileIsSymlink"] = readmeFile.isSymlink
dataRc, err := readmeFile.blob.DataAsync()
if err != nil {
ctx.ServerError("Data", err)
return
}
defer dataRc.Close()
buf := make([]byte, 1024)
n, _ := util.ReadAtMost(dataRc, buf)
buf = buf[:n]
st := typesniffer.DetectContentType(buf)
isTextFile := st.IsText()
ctx.Data["FileIsText"] = isTextFile
ctx.Data["FileName"] = readmeFile.name
fileSize := int64(0)
isLFSFile := false
ctx.Data["IsLFSFile"] = false
// FIXME: what happens when README file is an image?
if isTextFile && setting.LFS.StartServer {
pointer, _ := lfs.ReadPointerFromBuffer(buf)
if pointer.IsValid() {
meta, err := git_model.GetLFSMetaObjectByOid(ctx.Repo.Repository.ID, pointer.Oid)
if err != nil && err != git_model.ErrLFSObjectNotExist {
ctx.ServerError("GetLFSMetaObject", err)
return
}
if meta != nil {
ctx.Data["IsLFSFile"] = true
isLFSFile = true
// OK read the lfs object
var err error
dataRc, err = lfs.ReadMetaObject(pointer)
if err != nil {
ctx.ServerError("ReadMetaObject", err)
return
}
defer dataRc.Close()
buf = make([]byte, 1024)
n, err = util.ReadAtMost(dataRc, buf)
if err != nil {
ctx.ServerError("Data", err)
return
}
buf = buf[:n]
st = typesniffer.DetectContentType(buf)
isTextFile = st.IsText()
ctx.Data["IsTextFile"] = isTextFile
fileSize = meta.Size
ctx.Data["FileSize"] = meta.Size
filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.name))
ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.HTMLURL(), url.PathEscape(meta.Oid), url.PathEscape(filenameBase64))
}
}
}
if !isTextFile {
return
}
if !isLFSFile {
fileSize = readmeFile.blob.Size()
}
if fileSize >= setting.UI.MaxDisplayFileSize {
// Pretend that this is a normal text file to display 'This file is too large to be shown'
ctx.Data["IsFileTooLarge"] = true
ctx.Data["IsTextFile"] = true
ctx.Data["FileSize"] = fileSize
return
}
rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc))
if markupType := markup.Type(readmeFile.name); markupType != "" {
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
var result strings.Builder
err := markup.Render(&markup.RenderContext{
Ctx: ctx,
RelativePath: path.Join(ctx.Repo.TreePath, readmeFile.name), // ctx.Repo.TreePath is the directory not the Readme so we must append the Readme filename (and path).
URLPrefix: readmeTreelink,
Metas: ctx.Repo.Repository.ComposeDocumentMetas(),
GitRepo: ctx.Repo.GitRepo,
}, rd, &result)
if err != nil {
log.Error("Render failed: %v then fallback", err)
buf := &bytes.Buffer{}
ctx.Data["EscapeStatus"], _ = charset.EscapeControlReader(rd, buf)
ctx.Data["FileContent"] = strings.ReplaceAll(
gotemplate.HTMLEscapeString(buf.String()), "\n", `<br>`,
)
} else {
ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlString(result.String())
}
} else {
ctx.Data["IsRenderedHTML"] = true
buf := &bytes.Buffer{}
ctx.Data["EscapeStatus"], err = charset.EscapeControlReader(rd, buf)
if err != nil {
log.Error("Read failed: %v", err)
}
ctx.Data["FileContent"] = strings.ReplaceAll(
gotemplate.HTMLEscapeString(buf.String()), "\n", `<br>`,
)
}
}
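// renderFile renders a single file view: it resolves LFS pointers and locks, detects the content type and dispatches to markup rendering, syntax highlighting or media (PDF, video, audio, image) display, setting edit/delete permissions along the way.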
func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink string) {
ctx.Data["IsViewFile"] = true
blob := entry.Blob()
dataRc, err := blob.DataAsync()
if err != nil {
ctx.ServerError("DataAsync", err)
return
}
defer dataRc.Close()
ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefName)
fileSize := blob.Size()
ctx.Data["FileIsSymlink"] = entry.IsLink()
ctx.Data["FileName"] = blob.Name()
ctx.Data["RawFileLink"] = rawLink + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
buf := make([]byte, 1024)
n, _ := util.ReadAtMost(dataRc, buf)
buf = buf[:n]
st := typesniffer.DetectContentType(buf)
isTextFile := st.IsText()
isLFSFile := false
isDisplayingSource := ctx.FormString("display") == "source"
isDisplayingRendered := !isDisplayingSource
// Check for LFS meta file
if isTextFile && setting.LFS.StartServer {
pointer, _ := lfs.ReadPointerFromBuffer(buf)
if pointer.IsValid() {
meta, err := git_model.GetLFSMetaObjectByOid(ctx.Repo.Repository.ID, pointer.Oid)
if err != nil && err != git_model.ErrLFSObjectNotExist {
ctx.ServerError("GetLFSMetaObject", err)
return
}
if meta != nil {
isLFSFile = true
// OK read the lfs object
var err error
dataRc, err = lfs.ReadMetaObject(pointer)
if err != nil {
ctx.ServerError("ReadMetaObject", err)
return
}
defer dataRc.Close()
buf = make([]byte, 1024)
n, err = util.ReadAtMost(dataRc, buf)
if err != nil {
ctx.ServerError("Data", err)
return
}
buf = buf[:n]
st = typesniffer.DetectContentType(buf)
isTextFile = st.IsText()
fileSize = meta.Size
ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/media/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
}
}
}
isRepresentableAsText := st.IsRepresentableAsText()
if !isRepresentableAsText {
// If we can't show plain text, always try to render.
isDisplayingSource = false
isDisplayingRendered = true
}
ctx.Data["IsLFSFile"] = isLFSFile
ctx.Data["FileSize"] = fileSize
ctx.Data["IsTextFile"] = isTextFile
ctx.Data["IsRepresentableAsText"] = isRepresentableAsText
ctx.Data["IsDisplayingSource"] = isDisplayingSource
ctx.Data["IsDisplayingRendered"] = isDisplayingRendered
ctx.Data["IsTextSource"] = isTextFile || isDisplayingSource
// Check LFS Lock
lfsLock, err := git_model.GetTreePathLock(ctx.Repo.Repository.ID, ctx.Repo.TreePath)
ctx.Data["LFSLock"] = lfsLock
if err != nil {
ctx.ServerError("GetTreePathLock", err)
return
}
if lfsLock != nil {
u, err := user_model.GetUserByID(lfsLock.OwnerID)
if err != nil {
ctx.ServerError("GetTreePathLock", err)
return
}
ctx.Data["LFSLockOwner"] = u.DisplayName()
ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
}
// Assume file is not editable first.
if isLFSFile {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
} else if !isRepresentableAsText {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
}
switch {
case isRepresentableAsText:
if st.IsSvgImage() {
ctx.Data["IsImageFile"] = true
ctx.Data["HasSourceRenderedToggle"] = true
}
if fileSize >= setting.UI.MaxDisplayFileSize {
ctx.Data["IsFileTooLarge"] = true
break
}
rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc))
shouldRenderSource := ctx.FormString("display") == "source"
readmeExist := markup.IsReadmeFile(blob.Name())
ctx.Data["ReadmeExist"] = readmeExist
markupType := markup.Type(blob.Name())
// If the markup type was detected by a custom markup renderer it should not be reset later on,
// so that it is passed down to the render context.
detected := false
if markupType == "" {
detected = true
markupType = markup.DetectRendererType(blob.Name(), bytes.NewReader(buf))
}
if markupType != "" {
ctx.Data["HasSourceRenderedToggle"] = true
}
if markupType != "" && !shouldRenderSource {
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
var result strings.Builder
if !detected {
markupType = ""
}
metas := ctx.Repo.Repository.ComposeDocumentMetas()
metas["BranchNameSubURL"] = ctx.Repo.BranchNameSubURL()
err := markup.Render(&markup.RenderContext{
Ctx: ctx,
Type: markupType,
RelativePath: ctx.Repo.TreePath,
URLPrefix: path.Dir(treeLink),
Metas: metas,
GitRepo: ctx.Repo.GitRepo,
}, rd, &result)
if err != nil {
ctx.ServerError("Render", err)
return
}
// Prevent the iframe from loading third-party URLs.
ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlString(result.String())
} else if readmeExist && !shouldRenderSource {
buf := &bytes.Buffer{}
ctx.Data["IsRenderedHTML"] = true
ctx.Data["EscapeStatus"], _ = charset.EscapeControlReader(rd, buf)
ctx.Data["FileContent"] = strings.ReplaceAll(
gotemplate.HTMLEscapeString(buf.String()), "\n", `<br>`,
)
} else {
buf, _ := io.ReadAll(rd)
lineNums := linesBytesCount(buf)
ctx.Data["NumLines"] = strconv.Itoa(lineNums)
ctx.Data["NumLinesSet"] = true
language := ""
indexFilename, worktree, deleteTemporaryFile, err := ctx.Repo.GitRepo.ReadTreeToTemporaryIndex(ctx.Repo.CommitID)
if err == nil {
defer deleteTemporaryFile()
filename2attribute2info, err := ctx.Repo.GitRepo.CheckAttribute(git.CheckAttributeOpts{
CachedOnly: true,
Attributes: []string{"linguist-language", "gitlab-language"},
Filenames: []string{ctx.Repo.TreePath},
IndexFile: indexFilename,
WorkTree: worktree,
})
if err != nil {
log.Error("Unable to load attributes for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
}
language = filename2attribute2info[ctx.Repo.TreePath]["linguist-language"]
if language == "" || language == "unspecified" {
language = filename2attribute2info[ctx.Repo.TreePath]["gitlab-language"]
}
if language == "unspecified" {
language = ""
}
}
fileContent := highlight.File(lineNums, blob.Name(), language, buf)
status, _ := charset.EscapeControlReader(bytes.NewReader(buf), io.Discard)
ctx.Data["EscapeStatus"] = status
statuses := make([]charset.EscapeStatus, len(fileContent))
for i, line := range fileContent {
statuses[i], fileContent[i] = charset.EscapeControlString(line)
}
ctx.Data["FileContent"] = fileContent
ctx.Data["LineEscapeStatus"] = statuses
}
if !isLFSFile {
if ctx.Repo.CanEnableEditor(ctx.Doer) {
if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
ctx.Data["CanEditFile"] = false
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
} else {
ctx.Data["CanEditFile"] = true
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.edit_this_file")
}
} else if !ctx.Repo.IsViewBranch {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
} else if !ctx.Repo.CanWriteToBranch(ctx.Doer, ctx.Repo.BranchName) {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit")
}
}
case st.IsPDF():
ctx.Data["IsPDFFile"] = true
case st.IsVideo():
ctx.Data["IsVideoFile"] = true
case st.IsAudio():
ctx.Data["IsAudioFile"] = true
case st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage()):
ctx.Data["IsImageFile"] = true
default:
if fileSize >= setting.UI.MaxDisplayFileSize {
ctx.Data["IsFileTooLarge"] = true
break
}
if markupType := markup.Type(blob.Name()); markupType != "" {
rd := io.MultiReader(bytes.NewReader(buf), dataRc)
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
var result strings.Builder
err := markup.Render(&markup.RenderContext{
Ctx: ctx,
RelativePath: ctx.Repo.TreePath,
URLPrefix: path.Dir(treeLink),
Metas: ctx.Repo.Repository.ComposeDocumentMetas(),
GitRepo: ctx.Repo.GitRepo,
}, rd, &result)
if err != nil {
ctx.ServerError("Render", err)
return
}
ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlString(result.String())
}
}
if ctx.Repo.CanEnableEditor(ctx.Doer) {
if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
ctx.Data["CanDeleteFile"] = false
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
} else {
ctx.Data["CanDeleteFile"] = true
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.delete_this_file")
}
} else if !ctx.Repo.IsViewBranch {
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
} else if !ctx.Repo.CanWriteToBranch(ctx.Doer, ctx.Repo.BranchName) {
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access")
}
}
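// safeURL strips any userinfo (credentials) from address; if the address cannot be parsed it is returned unchanged.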
func safeURL(address string) string {
u, err := url.Parse(address)
if err != nil {
return address
}
u.User = nil
return u.String()
}
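// checkHomeCodeViewable ensures the repository's code can be viewed: while a migration is running it renders the migrating page, and when the code unit is disabled it redirects to the first available unit or responds with 404.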
func checkHomeCodeViewable(ctx *context.Context) {
if len(ctx.Repo.Units) > 0 {
if ctx.Repo.Repository.IsBeingCreated() {
task, err := models.GetMigratingTask(ctx.Repo.Repository.ID)
if err != nil {
if models.IsErrTaskDoesNotExist(err) {
ctx.Data["Repo"] = ctx.Repo
ctx.Data["CloneAddr"] = ""
ctx.Data["Failed"] = true
ctx.HTML(http.StatusOK, tplMigrating)
return
}
ctx.ServerError("models.GetMigratingTask", err)
return
}
cfg, err := task.MigrateConfig()
if err != nil {
ctx.ServerError("task.MigrateConfig", err)
return
}
ctx.Data["Repo"] = ctx.Repo
ctx.Data["MigrateTask"] = task
ctx.Data["CloneAddr"] = safeURL(cfg.CloneAddr)
ctx.Data["Failed"] = task.Status == structs.TaskStatusFailed
ctx.HTML(http.StatusOK, tplMigrating)
return
}
if ctx.IsSigned {
// Set repo notification-status read if unread
if err := models.SetRepoReadBy(ctx, ctx.Repo.Repository.ID, ctx.Doer.ID); err != nil {
ctx.ServerError("ReadBy", err)
return
}
}
var firstUnit *unit_model.Unit
for _, repoUnit := range ctx.Repo.Units {
if repoUnit.Type == unit_model.TypeCode {
return
}
unit, ok := unit_model.Units[repoUnit.Type]
if ok && (firstUnit == nil || !firstUnit.IsLessThan(unit)) {
firstUnit = &unit
}
}
if firstUnit != nil {
ctx.Redirect(fmt.Sprintf("%s%s", ctx.Repo.Repository.Link(), firstUnit.URI))
return
}
}
ctx.NotFound("Home", fmt.Errorf(ctx.Tr("units.error.no_unit_allowed_repo")))
}
// Home renders the repository home page.
func Home(ctx *context.Context) {
isFeed, _, showFeedType := feed.GetFeedType(ctx.Params(":reponame"), ctx.Req)
if isFeed {
feed.ShowRepoFeed(ctx, ctx.Repo.Repository, showFeedType)
return
}
ctx.Data["FeedURL"] = ctx.Repo.Repository.HTMLURL()
checkHomeCodeViewable(ctx)
if ctx.Written() {
return
}
renderCode(ctx)
}
// LastCommit returns last-commit data for the branch/tag/commit and directory given in the URL and the filenames given in the request body.
func LastCommit(ctx *context.Context) {
checkHomeCodeViewable(ctx)
if ctx.Written() {
return
}
renderDirectoryFiles(ctx, 0)
if ctx.Written() {
return
}
var treeNames []string
paths := make([]string, 0, 5)
if len(ctx.Repo.TreePath) > 0 {
treeNames = strings.Split(ctx.Repo.TreePath, "/")
for i := range treeNames {
paths = append(paths, strings.Join(treeNames[:i+1], "/"))
}
ctx.Data["HasParentPath"] = true
if len(paths)-2 >= 0 {
ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
}
}
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
ctx.Data["BranchLink"] = branchLink
ctx.HTML(http.StatusOK, tplRepoViewList)
}
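// renderDirectoryFiles loads the entries of the current tree path, resolves their last-commit information (bounded by timeout when non-zero), verifies the latest commit's signature and commit status, and fills the template context; it returns all entries of the tree.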
func renderDirectoryFiles(ctx *context.Context, timeout time.Duration) git.Entries {
tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.SubTree", git.IsErrNotExist, err)
return nil
}
ctx.Data["LastCommitLoaderURL"] = ctx.Repo.RepoLink + "/lastcommit/" + url.PathEscape(ctx.Repo.CommitID) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
// Get the entry the user is currently looking at.
entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.GetTreeEntryByPath", git.IsErrNotExist, err)
return nil
}
if !entry.IsDir() {
ctx.NotFoundOrServerError("Repo.Commit.GetTreeEntryByPath", git.IsErrNotExist, err)
return nil
}
allEntries, err := tree.ListEntries()
if err != nil {
ctx.ServerError("ListEntries", err)
return nil
}
allEntries.CustomSort(base.NaturalSortLess)
commitInfoCtx := gocontext.Context(ctx)
if timeout > 0 {
var cancel gocontext.CancelFunc
commitInfoCtx, cancel = gocontext.WithTimeout(ctx, timeout)
defer cancel()
}
var c *git.LastCommitCache
if setting.CacheService.LastCommit.Enabled && ctx.Repo.CommitsCount >= setting.CacheService.LastCommit.CommitsCount {
c = git.NewLastCommitCache(ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, setting.LastCommitCacheTTLSeconds, cache.GetCache())
}
selected := map[string]bool{}
for _, pth := range ctx.FormStrings("f[]") {
selected[pth] = true
}
entries := allEntries
if len(selected) > 0 {
entries = make(git.Entries, 0, len(selected))
for _, entry := range allEntries {
if selected[entry.Name()] {
entries = append(entries, entry)
}
}
}
var latestCommit *git.Commit
ctx.Data["Files"], latestCommit, err = entries.GetCommitsInfo(commitInfoCtx, ctx.Repo.Commit, ctx.Repo.TreePath, c)
if err != nil {
ctx.ServerError("GetCommitsInfo", err)
return nil
}
// Show the latest commit info of the repository in the table header,
// or of the directory if not in the root directory.
ctx.Data["LatestCommit"] = latestCommit
if latestCommit != nil {
verification := asymkey_model.ParseCommitWithSignature(latestCommit)
if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
return repo_model.IsOwnerMemberCollaborator(ctx.Repo.Repository, user.ID)
}, nil); err != nil {
ctx.ServerError("CalculateTrustStatus", err)
return nil
}
ctx.Data["LatestCommitVerification"] = verification
ctx.Data["LatestCommitUser"] = user_model.ValidateCommitWithEmail(latestCommit)
}
statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, ctx.Repo.Commit.ID.String(), db.ListOptions{})
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
}
ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(statuses)
ctx.Data["LatestCommitStatuses"] = statuses
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
treeLink := branchLink
if len(ctx.Repo.TreePath) > 0 {
treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
}
ctx.Data["TreeLink"] = treeLink
ctx.Data["SSHDomain"] = setting.SSH.Domain
return allEntries
}
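// renderLanguageStats puts the repository's top five language statistics into the template context.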
func renderLanguageStats(ctx *context.Context) {
langs, err := repo_model.GetTopLanguageStats(ctx.Repo.Repository, 5)
if err != nil {
ctx.ServerError("Repo.GetTopLanguageStats", err)
return
}
ctx.Data["LanguageStats"] = langs
}
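// renderRepoTopics puts the repository's topics into the template context.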
func renderRepoTopics(ctx *context.Context) {
topics, _, err := repo_model.FindTopics(&repo_model.FindTopicOptions{
RepoID: ctx.Repo.Repository.ID,
})
if err != nil {
ctx.ServerError("models.FindTopics", err)
return
}
ctx.Data["Topics"] = topics
}
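// renderCode renders the repository code view: it handles repositories wrongly flagged as empty, loads topics and language stats, and renders either the directory listing or a single file for the current tree path.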
func renderCode(ctx *context.Context) {
ctx.Data["PageIsViewCode"] = true
if ctx.Repo.Repository.IsEmpty {
reallyEmpty, err := ctx.Repo.GitRepo.IsEmpty()
if err != nil {
ctx.ServerError("GitRepo.IsEmpty", err)
return
}
if reallyEmpty {
ctx.HTML(http.StatusOK, tplRepoEMPTY)
return
}
// The repo is not really empty, so we should update the model in the database.
// Such a problem may be caused by:
// 1) an error occurred during pushing/receiving; 2) the user replaced an empty git repo manually.
// Moreover, the IsEmpty flag is deeply broken and should be removed together with UI changes to cope with empty repos.
// It is possible for a repository to be non-empty by that flag but still return a 500,
// because there are no branches (only tags), or the default branch does not exist because it was zero-pushed.
ctx.Repo.Repository.IsEmpty = false
if err = repo_model.UpdateRepositoryCols(ctx, ctx.Repo.Repository, "is_empty"); err != nil {
ctx.ServerError("UpdateRepositoryCols", err)
return
}
if err = repo_module.UpdateRepoSize(ctx, ctx.Repo.Repository); err != nil {
ctx.ServerError("UpdateRepoSize", err)
return
}
}
title := ctx.Repo.Repository.Owner.Name + "/" + ctx.Repo.Repository.Name
if len(ctx.Repo.Repository.Description) > 0 {
title += ": " + ctx.Repo.Repository.Description
}
ctx.Data["Title"] = title
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
treeLink := branchLink
rawLink := ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL()
if len(ctx.Repo.TreePath) > 0 {
treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
}
// Get Topics of this repo
renderRepoTopics(ctx)
if ctx.Written() {
return
}
// Get the entry the user is currently looking at.
entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.GetTreeEntryByPath", git.IsErrNotExist, err)
return
}
renderLanguageStats(ctx)
if ctx.Written() {
return
}
if entry.IsDir() {
renderDirectory(ctx, treeLink)
} else {
renderFile(ctx, entry, treeLink, rawLink)
}
if ctx.Written() {
return
}
var treeNames []string
paths := make([]string, 0, 5)
if len(ctx.Repo.TreePath) > 0 {
treeNames = strings.Split(ctx.Repo.TreePath, "/")
for i := range treeNames {
paths = append(paths, strings.Join(treeNames[:i+1], "/"))
}
ctx.Data["HasParentPath"] = true
if len(paths)-2 >= 0 {
ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
}
}
ctx.Data["Paths"] = paths
ctx.Data["TreeLink"] = treeLink
ctx.Data["TreeNames"] = treeNames
ctx.Data["BranchLink"] = branchLink
ctx.HTML(http.StatusOK, tplRepoHome)
}
// RenderUserCards renders a page showing users according to the given template.
func RenderUserCards(ctx *context.Context, total int, getter func(opts db.ListOptions) ([]*user_model.User, error), tpl base.TplName) {
page := ctx.FormInt("page")
if page <= 0 {
page = 1
}
pager := context.NewPagination(total, setting.ItemsPerPage, page, 5)
ctx.Data["Page"] = pager
items, err := getter(db.ListOptions{
Page: pager.Paginater.Current(),
PageSize: setting.ItemsPerPage,
})
if err != nil {
ctx.ServerError("getter", err)
return
}
ctx.Data["Cards"] = items
ctx.HTML(http.StatusOK, tpl)
}
// Watchers renders the repository's watchers.
func Watchers(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.watchers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.watchers")
ctx.Data["PageIsWatchers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumWatches, func(opts db.ListOptions) ([]*user_model.User, error) {
return repo_model.GetRepoWatchers(ctx.Repo.Repository.ID, opts)
}, tplWatchers)
}
// Stars renders the users who starred the repository.
func Stars(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.stargazers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.stargazers")
ctx.Data["PageIsStargazers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumStars, func(opts db.ListOptions) ([]*user_model.User, error) {
return repo_model.GetStargazers(ctx.Repo.Repository, opts)
}, tplWatchers)
}
// Forks renders the repository's forks.
func Forks(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repos.forks")
page := ctx.FormInt("page")
if page <= 0 {
page = 1
}
pager := context.NewPagination(ctx.Repo.Repository.NumForks, setting.ItemsPerPage, page, 5)
ctx.Data["Page"] = pager
forks, err := repo_model.GetForks(ctx.Repo.Repository, db.ListOptions{
Page: pager.Paginater.Current(),
PageSize: setting.ItemsPerPage,
})
if err != nil {
ctx.ServerError("GetForks", err)
return
}
for _, fork := range forks {
if err = fork.GetOwner(ctx); err != nil {
ctx.ServerError("GetOwner", err)
return
}
}
ctx.Data["Forks"] = forks
ctx.HTML(http.StatusOK, tplForks)
}