forgejo/routers/web/repo/view.go

// Copyright 2017 The Gitea Authors. All rights reserved.
// Copyright 2014 The Gogs Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repo
import (
"bytes"
gocontext "context"
"encoding/base64"
"fmt"
"io"
"net/http"
"net/url"
"path"
"strings"
"time"
activities_model "code.gitea.io/gitea/models/activities"
admin_model "code.gitea.io/gitea/models/admin"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
repo_model "code.gitea.io/gitea/models/repo"
unit_model "code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/typesniffer"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/web/feed"
)
const (
tplRepoEMPTY base.TplName = "repo/empty"
tplRepoHome base.TplName = "repo/home"
tplRepoViewList base.TplName = "repo/view_list"
tplWatchers base.TplName = "repo/watchers"
tplForks base.TplName = "repo/forks"
tplMigrating base.TplName = "repo/migrate/migrating"
)
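// namedBlob pairs a git blob with the tree entry name it was resolved from and whether that entry is a symlink.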
type namedBlob struct {
name string
isSymlink bool
blob *git.Blob
}
// FIXME: There has to be a more efficient way of doing this
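// getReadmeFileFromPath returns the highest-priority README blob found directly under treePath, following symlinks, or nil if the directory contains none.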
func getReadmeFileFromPath(ctx *context.Context, commit *git.Commit, treePath string) (*namedBlob, error) {
tree, err := commit.SubTree(treePath)
if err != nil {
return nil, err
}
entries, err := tree.ListEntries()
if err != nil {
return nil, err
}
// Create a list of extensions in priority order
// 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
// 2. Txt files - e.g. README.txt
// 3. No extension - e.g. README
exts := append(localizedExtensions(".md", ctx.Language()), ".txt", "") // sorted by priority
extCount := len(exts)
readmeFiles := make([]*namedBlob, extCount+1)
for _, entry := range entries {
if entry.IsDir() {
continue
}
if i, ok := markup.IsReadmeFileExtension(entry.Name(), exts...); ok {
if readmeFiles[i] == nil || base.NaturalSortLess(readmeFiles[i].name, entry.Blob().Name()) {
name := entry.Name()
isSymlink := entry.IsLink()
target := entry
if isSymlink {
target, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
return nil, err
}
}
if target != nil && (target.IsExecutable() || target.IsRegular()) {
readmeFiles[i] = &namedBlob{
name,
isSymlink,
target.Blob(),
}
}
}
}
}
var readmeFile *namedBlob
for _, f := range readmeFiles {
if f != nil {
readmeFile = f
break
}
}
return readmeFile, nil
}
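// renderDirectory renders the file listing for the current tree path and, when one is found, the directory's README.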
func renderDirectory(ctx *context.Context, treeLink string) {
entries := renderDirectoryFiles(ctx, 1*time.Second)
if ctx.Written() {
return
}
if ctx.Repo.TreePath != "" {
ctx.Data["HideRepoInfo"] = true
ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefName)
}
// Check permission to add or upload new file.
if ctx.Repo.CanWrite(unit_model.TypeCode) && ctx.Repo.IsViewBranch {
ctx.Data["CanAddFile"] = !ctx.Repo.Repository.IsArchived
ctx.Data["CanUploadFile"] = setting.Repository.Upload.Enabled && !ctx.Repo.Repository.IsArchived
}
readmeFile, readmeTreelink := findReadmeFile(ctx, entries, treeLink)
if ctx.Written() || readmeFile == nil {
return
}
renderReadmeFile(ctx, readmeFile, readmeTreelink)
}
// localizedExtensions prepends the provided language code with and without a
// regional identifier to the provided extension.
// Note: the language code will always be lower-cased; if a region is present it must be separated with a `-`
// Note: ext should be prefixed with a `.`
func localizedExtensions(ext, languageCode string) (localizedExts []string) {
if len(languageCode) < 1 {
return []string{ext}
}
lowerLangCode := "." + strings.ToLower(languageCode)
if strings.Contains(lowerLangCode, "-") {
underscoreLangCode := strings.ReplaceAll(lowerLangCode, "-", "_")
indexOfDash := strings.Index(lowerLangCode, "-")
// e.g. [.zh-cn.md, .zh_cn.md, .zh.md, _zh.md, .md]
return []string{lowerLangCode + ext, underscoreLangCode + ext, lowerLangCode[:indexOfDash] + ext, "_" + lowerLangCode[1:indexOfDash] + ext, ext}
}
// e.g. [.en.md, .md]
return []string{lowerLangCode + ext, ext}
}
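// findReadmeFile scans the given entries for the highest-priority README; at the repository root it also falls back to the docs/, .gitea/ and .github/ directories. It returns the README blob and the tree link used to render it.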
func findReadmeFile(ctx *context.Context, entries git.Entries, treeLink string) (*namedBlob, string) {
// Create a list of extensions in priority order
// 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
// 2. Txt files - e.g. README.txt
// 3. No extension - e.g. README
exts := append(localizedExtensions(".md", ctx.Language()), ".txt", "") // sorted by priority
extCount := len(exts)
readmeFiles := make([]*namedBlob, extCount+1)
docsEntries := make([]*git.TreeEntry, 3) // (one of docs/, .gitea/ or .github/)
for _, entry := range entries {
if entry.IsDir() {
lowerName := strings.ToLower(entry.Name())
switch lowerName {
case "docs":
if entry.Name() == "docs" || docsEntries[0] == nil {
docsEntries[0] = entry
}
case ".gitea":
if entry.Name() == ".gitea" || docsEntries[1] == nil {
docsEntries[1] = entry
}
case ".github":
if entry.Name() == ".github" || docsEntries[2] == nil {
docsEntries[2] = entry
}
}
continue
}
if i, ok := markup.IsReadmeFileExtension(entry.Name(), exts...); ok {
log.Debug("Potential readme file: %s", entry.Name())
name := entry.Name()
isSymlink := entry.IsLink()
target := entry
if isSymlink {
var err error
target, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
ctx.ServerError("FollowLinks", err)
return nil, ""
}
}
if target != nil && (target.IsExecutable() || target.IsRegular()) {
readmeFiles[i] = &namedBlob{
name,
isSymlink,
target.Blob(),
}
}
}
}
var readmeFile *namedBlob
readmeTreelink := treeLink
for _, f := range readmeFiles {
if f != nil {
readmeFile = f
break
}
}
if ctx.Repo.TreePath == "" && readmeFile == nil {
for _, entry := range docsEntries {
if entry == nil {
continue
}
var err error
readmeFile, err = getReadmeFileFromPath(ctx, ctx.Repo.Commit, entry.GetSubJumpablePathName())
if err != nil {
ctx.ServerError("getReadmeFileFromPath", err)
return nil, ""
}
if readmeFile != nil {
readmeFile.name = entry.Name() + "/" + readmeFile.name
readmeTreelink = treeLink + "/" + util.PathEscapeSegments(entry.GetSubJumpablePathName())
break
}
}
}
return readmeFile, readmeTreelink
}
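// fileInfo describes a blob that is about to be rendered: whether it is text, whether it is stored in LFS, its size, its LFS pointer (if any) and its sniffed content type.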
type fileInfo struct {
isTextFile bool
isLFSFile bool
fileSize int64
lfsMeta *lfs.Pointer
st typesniffer.SniffedType
}
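// getFileReader opens the blob, sniffs its content type from the first bytes read and, when the blob is a valid LFS pointer with a known meta object, switches to reading the underlying LFS object. It returns the bytes already read, a reader for the remainder and the collected fileInfo.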
func getFileReader(repoID int64, blob *git.Blob) ([]byte, io.ReadCloser, *fileInfo, error) {
dataRc, err := blob.DataAsync()
if err != nil {
return nil, nil, nil, err
}
buf := make([]byte, 1024)
n, _ := util.ReadAtMost(dataRc, buf)
buf = buf[:n]
st := typesniffer.DetectContentType(buf)
isTextFile := st.IsText()
// FIXME: what happens when README file is an image?
if !isTextFile || !setting.LFS.StartServer {
return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
}
pointer, _ := lfs.ReadPointerFromBuffer(buf)
if !pointer.IsValid() { // fallback to plain file
return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
}
meta, err := git_model.GetLFSMetaObjectByOid(db.DefaultContext, repoID, pointer.Oid)
if err != nil && err != git_model.ErrLFSObjectNotExist { // fallback to plain file
return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
}
dataRc.Close()
if err != nil {
return nil, nil, nil, err
}
dataRc, err = lfs.ReadMetaObject(pointer)
if err != nil {
return nil, nil, nil, err
}
buf = make([]byte, 1024)
n, err = util.ReadAtMost(dataRc, buf)
if err != nil {
dataRc.Close()
return nil, nil, nil, err
}
buf = buf[:n]
st = typesniffer.DetectContentType(buf)
return buf, dataRc, &fileInfo{st.IsText(), true, meta.Size, &meta.Pointer, st}, nil
}
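// renderReadmeFile renders the given README below the directory listing, either as markup or as escaped plain text, and fills the related template data.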
func renderReadmeFile(ctx *context.Context, readmeFile *namedBlob, readmeTreelink string) {
ctx.Data["RawFileLink"] = ""
ctx.Data["ReadmeInList"] = true
ctx.Data["ReadmeExist"] = true
ctx.Data["FileIsSymlink"] = readmeFile.isSymlink
buf, dataRc, fInfo, err := getFileReader(ctx.Repo.Repository.ID, readmeFile.blob)
if err != nil {
ctx.ServerError("getFileReader", err)
return
}
defer dataRc.Close()
ctx.Data["FileIsText"] = fInfo.isTextFile
ctx.Data["FileName"] = readmeFile.name
ctx.Data["IsLFSFile"] = fInfo.isLFSFile
if fInfo.isLFSFile {
filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.name))
ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.HTMLURL(), url.PathEscape(fInfo.lfsMeta.Oid), url.PathEscape(filenameBase64))
}
if !fInfo.isTextFile {
return
}
if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
// Pretend that this is a normal text file to display 'This file is too large to be shown'
ctx.Data["IsFileTooLarge"] = true
ctx.Data["IsTextFile"] = true
ctx.Data["FileSize"] = fInfo.fileSize
return
}
rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc))
if markupType := markup.Type(readmeFile.name); markupType != "" {
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
Ctx: ctx,
RelativePath: path.Join(ctx.Repo.TreePath, readmeFile.name), // ctx.Repo.TreePath is the directory not the Readme so we must append the Readme filename (and path).
URLPrefix: readmeTreelink,
Metas: ctx.Repo.Repository.ComposeDocumentMetas(),
GitRepo: ctx.Repo.GitRepo,
}, rd)
if err != nil {
log.Error("Render failed for %s in %-v: %v Falling back to rendering source", readmeFile.name, ctx.Repo.Repository, err)
buf := &bytes.Buffer{}
ctx.Data["EscapeStatus"], _ = charset.EscapeControlStringReader(rd, buf, ctx.Locale)
ctx.Data["FileContent"] = buf.String()
}
} else {
ctx.Data["IsPlainText"] = true
buf := &bytes.Buffer{}
ctx.Data["EscapeStatus"], err = charset.EscapeControlStringReader(rd, buf, ctx.Locale)
if err != nil {
log.Error("Read failed: %v", err)
}
ctx.Data["FileContent"] = buf.String()
}
}
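// renderFile renders a single file view, handling text and markup files, images, audio, video, PDFs and LFS objects, and sets the edit/delete permissions for the current user.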
func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink string) {
ctx.Data["IsViewFile"] = true
ctx.Data["HideRepoInfo"] = true
blob := entry.Blob()
buf, dataRc, fInfo, err := getFileReader(ctx.Repo.Repository.ID, blob)
if err != nil {
ctx.ServerError("getFileReader", err)
return
}
defer dataRc.Close()
ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefName)
ctx.Data["FileIsSymlink"] = entry.IsLink()
ctx.Data["FileName"] = blob.Name()
ctx.Data["RawFileLink"] = rawLink + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
if ctx.Repo.TreePath == ".editorconfig" {
_, editorconfigErr := ctx.Repo.GetEditorconfig(ctx.Repo.Commit)
ctx.Data["FileError"] = editorconfigErr
}
isDisplayingSource := ctx.FormString("display") == "source"
isDisplayingRendered := !isDisplayingSource
if fInfo.isLFSFile {
ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/media/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
}
isRepresentableAsText := fInfo.st.IsRepresentableAsText()
if !isRepresentableAsText {
// If we can't show plain text, always try to render.
isDisplayingSource = false
isDisplayingRendered = true
}
ctx.Data["IsLFSFile"] = fInfo.isLFSFile
ctx.Data["FileSize"] = fInfo.fileSize
ctx.Data["IsTextFile"] = fInfo.isTextFile
ctx.Data["IsRepresentableAsText"] = isRepresentableAsText
ctx.Data["IsDisplayingSource"] = isDisplayingSource
ctx.Data["IsDisplayingRendered"] = isDisplayingRendered
isTextSource := fInfo.isTextFile || isDisplayingSource
ctx.Data["IsTextSource"] = isTextSource
if isTextSource {
ctx.Data["CanCopyContent"] = true
}
// Check LFS Lock
lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
ctx.Data["LFSLock"] = lfsLock
if err != nil {
ctx.ServerError("GetTreePathLock", err)
return
}
if lfsLock != nil {
u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
if err != nil {
ctx.ServerError("GetUserByID", err)
return
}
ctx.Data["LFSLockOwner"] = u.Name
ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
}
// Assume file is not editable first.
if fInfo.isLFSFile {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
} else if !isRepresentableAsText {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
}
switch {
case isRepresentableAsText:
if fInfo.st.IsSvgImage() {
ctx.Data["IsImageFile"] = true
ctx.Data["CanCopyContent"] = true
ctx.Data["HasSourceRenderedToggle"] = true
}
if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
ctx.Data["IsFileTooLarge"] = true
break
}
rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc))
shouldRenderSource := ctx.FormString("display") == "source"
readmeExist := markup.IsReadmeFile(blob.Name())
ctx.Data["ReadmeExist"] = readmeExist
markupType := markup.Type(blob.Name())
// If the markup is detected by custom markup renderer it should not be reset later on
// to not pass it down to the render context.
detected := false
if markupType == "" {
detected = true
markupType = markup.DetectRendererType(blob.Name(), bytes.NewReader(buf))
}
if markupType != "" {
ctx.Data["HasSourceRenderedToggle"] = true
}
if markupType != "" && !shouldRenderSource {
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
if !detected {
markupType = ""
}
metas := ctx.Repo.Repository.ComposeDocumentMetas()
metas["BranchNameSubURL"] = ctx.Repo.BranchNameSubURL()
ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
Ctx: ctx,
Type: markupType,
RelativePath: ctx.Repo.TreePath,
URLPrefix: path.Dir(treeLink),
Metas: metas,
GitRepo: ctx.Repo.GitRepo,
}, rd)
if err != nil {
ctx.ServerError("Render", err)
return
}
// prevent the iframe from loading third-party URLs
ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
} else {
buf, _ := io.ReadAll(rd)
// empty: 0 lines; "a": one line; "a\n": two lines; "a\nb": two lines;
// the NumLines is only used for the display on the UI: "xxx lines"
if len(buf) == 0 {
ctx.Data["NumLines"] = 0
} else {
ctx.Data["NumLines"] = bytes.Count(buf, []byte{'\n'}) + 1
}
ctx.Data["NumLinesSet"] = true
language := ""
indexFilename, worktree, deleteTemporaryFile, err := ctx.Repo.GitRepo.ReadTreeToTemporaryIndex(ctx.Repo.CommitID)
if err == nil {
defer deleteTemporaryFile()
filename2attribute2info, err := ctx.Repo.GitRepo.CheckAttribute(git.CheckAttributeOpts{
CachedOnly: true,
Attributes: []string{"linguist-language", "gitlab-language"},
Filenames: []string{ctx.Repo.TreePath},
IndexFile: indexFilename,
WorkTree: worktree,
})
if err != nil {
log.Error("Unable to load attributes for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
}
language = filename2attribute2info[ctx.Repo.TreePath]["linguist-language"]
if language == "" || language == "unspecified" {
language = filename2attribute2info[ctx.Repo.TreePath]["gitlab-language"]
}
if language == "unspecified" {
language = ""
}
}
fileContent, lexerName, err := highlight.File(blob.Name(), language, buf)
ctx.Data["LexerName"] = lexerName
if err != nil {
log.Error("highlight.File failed, fallback to plain text: %v", err)
fileContent = highlight.PlainText(buf)
}
status := &charset.EscapeStatus{}
statuses := make([]*charset.EscapeStatus, len(fileContent))
for i, line := range fileContent {
statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale)
status = status.Or(statuses[i])
}
ctx.Data["EscapeStatus"] = status
ctx.Data["FileContent"] = fileContent
ctx.Data["LineEscapeStatus"] = statuses
}
if !fInfo.isLFSFile {
if ctx.Repo.CanEnableEditor(ctx.Doer) {
if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
ctx.Data["CanEditFile"] = false
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
} else {
ctx.Data["CanEditFile"] = true
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.edit_this_file")
}
} else if !ctx.Repo.IsViewBranch {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
} else if !ctx.Repo.CanWriteToBranch(ctx.Doer, ctx.Repo.BranchName) {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit")
}
}
case fInfo.st.IsPDF():
ctx.Data["IsPDFFile"] = true
case fInfo.st.IsVideo():
ctx.Data["IsVideoFile"] = true
case fInfo.st.IsAudio():
ctx.Data["IsAudioFile"] = true
case fInfo.st.IsImage() && (setting.UI.SVG.Enabled || !fInfo.st.IsSvgImage()):
ctx.Data["IsImageFile"] = true
ctx.Data["CanCopyContent"] = true
default:
if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
ctx.Data["IsFileTooLarge"] = true
break
}
if markupType := markup.Type(blob.Name()); markupType != "" {
rd := io.MultiReader(bytes.NewReader(buf), dataRc)
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
Ctx: ctx,
RelativePath: ctx.Repo.TreePath,
URLPrefix: path.Dir(treeLink),
Metas: ctx.Repo.Repository.ComposeDocumentMetas(),
GitRepo: ctx.Repo.GitRepo,
}, rd)
if err != nil {
ctx.ServerError("Render", err)
return
}
}
}
if ctx.Repo.CanEnableEditor(ctx.Doer) {
if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
ctx.Data["CanDeleteFile"] = false
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
} else {
ctx.Data["CanDeleteFile"] = true
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.delete_this_file")
}
} else if !ctx.Repo.IsViewBranch {
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
} else if !ctx.Repo.CanWriteToBranch(ctx.Doer, ctx.Repo.BranchName) {
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access")
}
}
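// markupRender renders the given input with the markup renderer while escaping control characters in the output, returning the escape status and the rendered content.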
func markupRender(ctx *context.Context, renderCtx *markup.RenderContext, input io.Reader) (escaped *charset.EscapeStatus, output string, err error) {
markupRd, markupWr := io.Pipe()
defer markupWr.Close()
done := make(chan struct{})
go func() {
sb := &strings.Builder{}
// We allow NBSP here since this content is rendered
escaped, _ = charset.EscapeControlReader(markupRd, sb, ctx.Locale, charset.RuneNBSP)
output = sb.String()
close(done)
}()
err = markup.Render(renderCtx, input, markupWr)
_ = markupWr.CloseWithError(err)
<-done
return escaped, output, err
}
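// safeURL strips any user credentials from the given address so it can safely be displayed.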
func safeURL(address string) string {
u, err := url.Parse(address)
if err != nil {
return address
}
u.User = nil
return u.String()
}
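// checkHomeCodeViewable ensures the code view may be shown: it renders the migration page for repositories still being created and, when the code unit is disabled, redirects to the first unit the viewer may access or responds with not found.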
func checkHomeCodeViewable(ctx *context.Context) {
if len(ctx.Repo.Units) > 0 {
if ctx.Repo.Repository.IsBeingCreated() {
task, err := admin_model.GetMigratingTask(ctx.Repo.Repository.ID)
if err != nil {
if admin_model.IsErrTaskDoesNotExist(err) {
ctx.Data["Repo"] = ctx.Repo
ctx.Data["CloneAddr"] = ""
ctx.Data["Failed"] = true
ctx.HTML(http.StatusOK, tplMigrating)
return
}
ctx.ServerError("models.GetMigratingTask", err)
return
}
cfg, err := task.MigrateConfig()
if err != nil {
ctx.ServerError("task.MigrateConfig", err)
return
}
ctx.Data["Repo"] = ctx.Repo
ctx.Data["MigrateTask"] = task
ctx.Data["CloneAddr"] = safeURL(cfg.CloneAddr)
ctx.Data["Failed"] = task.Status == structs.TaskStatusFailed
ctx.HTML(http.StatusOK, tplMigrating)
return
}
if ctx.IsSigned {
// Set repo notification-status read if unread
if err := activities_model.SetRepoReadBy(ctx, ctx.Repo.Repository.ID, ctx.Doer.ID); err != nil {
ctx.ServerError("ReadBy", err)
return
}
}
var firstUnit *unit_model.Unit
for _, repoUnit := range ctx.Repo.Units {
if repoUnit.Type == unit_model.TypeCode {
return
}
unit, ok := unit_model.Units[repoUnit.Type]
if ok && (firstUnit == nil || !firstUnit.IsLessThan(unit)) {
firstUnit = &unit
}
}
if firstUnit != nil {
ctx.Redirect(fmt.Sprintf("%s%s", ctx.Repo.Repository.Link(), firstUnit.URI))
return
}
}
ctx.NotFound("Home", fmt.Errorf(ctx.Tr("units.error.no_unit_allowed_repo")))
}
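// checkCitationFile looks for a CITATION.cff or CITATION.bib file at the repository root and, if found, exposes its content to the page data.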
func checkCitationFile(ctx *context.Context, entry *git.TreeEntry) {
if entry.Name() != "" {
return
}
tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.SubTree", git.IsErrNotExist, err)
return
}
allEntries, err := tree.ListEntries()
if err != nil {
ctx.ServerError("ListEntries", err)
return
}
for _, entry := range allEntries {
if entry.Name() == "CITATION.cff" || entry.Name() == "CITATION.bib" {
ctx.Data["CitiationExist"] = true
// Read Citation file contents
blob := entry.Blob()
dataRc, err := blob.DataAsync()
if err != nil {
ctx.ServerError("DataAsync", err)
return
}
defer dataRc.Close()
buf := make([]byte, 1024)
n, err := util.ReadAtMost(dataRc, buf)
if err != nil {
ctx.ServerError("ReadAtMost", err)
return
}
buf = buf[:n]
ctx.PageData["citationFileContent"] = string(buf)
break
}
}
}
// Home renders the repository home page
func Home(ctx *context.Context) {
if setting.EnableFeed {
isFeed, _, showFeedType := feed.GetFeedType(ctx.Params(":reponame"), ctx.Req)
if isFeed {
feed.ShowRepoFeed(ctx, ctx.Repo.Repository, showFeedType)
return
}
ctx.Data["EnableFeed"] = true
ctx.Data["FeedURL"] = ctx.Repo.Repository.HTMLURL()
}
checkHomeCodeViewable(ctx)
if ctx.Written() {
return
}
renderCode(ctx)
}
// LastCommit returns the last-commit data for the branch/tag/commit and directory provided in the URL and the filenames provided in the request body
func LastCommit(ctx *context.Context) {
checkHomeCodeViewable(ctx)
if ctx.Written() {
return
}
renderDirectoryFiles(ctx, 0)
if ctx.Written() {
return
}
var treeNames []string
paths := make([]string, 0, 5)
if len(ctx.Repo.TreePath) > 0 {
treeNames = strings.Split(ctx.Repo.TreePath, "/")
for i := range treeNames {
paths = append(paths, strings.Join(treeNames[:i+1], "/"))
}
ctx.Data["HasParentPath"] = true
if len(paths)-2 >= 0 {
ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
}
}
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
ctx.Data["BranchLink"] = branchLink
ctx.HTML(http.StatusOK, tplRepoViewList)
}
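// renderDirectoryFiles loads the entries of the current tree path together with their latest commit information and fills the template data for the file list, returning the full list of entries.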
func renderDirectoryFiles(ctx *context.Context, timeout time.Duration) git.Entries {
tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.SubTree", git.IsErrNotExist, err)
return nil
}
ctx.Data["LastCommitLoaderURL"] = ctx.Repo.RepoLink + "/lastcommit/" + url.PathEscape(ctx.Repo.CommitID) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
// Get the entry the user is currently looking at.
entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.GetTreeEntryByPath", git.IsErrNotExist, err)
return nil
}
if !entry.IsDir() {
ctx.NotFoundOrServerError("Repo.Commit.GetTreeEntryByPath", git.IsErrNotExist, err)
return nil
}
allEntries, err := tree.ListEntries()
if err != nil {
ctx.ServerError("ListEntries", err)
return nil
}
allEntries.CustomSort(base.NaturalSortLess)
commitInfoCtx := gocontext.Context(ctx)
if timeout > 0 {
var cancel gocontext.CancelFunc
commitInfoCtx, cancel = gocontext.WithTimeout(ctx, timeout)
defer cancel()
}
selected := make(container.Set[string])
selected.AddMultiple(ctx.FormStrings("f[]")...)
entries := allEntries
if len(selected) > 0 {
entries = make(git.Entries, 0, len(selected))
for _, entry := range allEntries {
if selected.Contains(entry.Name()) {
entries = append(entries, entry)
}
}
}
var latestCommit *git.Commit
ctx.Data["Files"], latestCommit, err = entries.GetCommitsInfo(commitInfoCtx, ctx.Repo.Commit, ctx.Repo.TreePath)
if err != nil {
ctx.ServerError("GetCommitsInfo", err)
return nil
}
// Show latest commit info of repository in table header,
// or of directory if not in root directory.
ctx.Data["LatestCommit"] = latestCommit
if latestCommit != nil {
verification := asymkey_model.ParseCommitWithSignature(latestCommit)
if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
return repo_model.IsOwnerMemberCollaborator(ctx.Repo.Repository, user.ID)
}, nil); err != nil {
ctx.ServerError("CalculateTrustStatus", err)
return nil
}
ctx.Data["LatestCommitVerification"] = verification
ctx.Data["LatestCommitUser"] = user_model.ValidateCommitWithEmail(latestCommit)
statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, latestCommit.ID.String(), db.ListOptions{})
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
}
ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(statuses)
ctx.Data["LatestCommitStatuses"] = statuses
}
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
treeLink := branchLink
if len(ctx.Repo.TreePath) > 0 {
treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
}
ctx.Data["TreeLink"] = treeLink
ctx.Data["SSHDomain"] = setting.SSH.Domain
return allEntries
}
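// renderLanguageStats loads the repository's top language statistics into the template data.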
func renderLanguageStats(ctx *context.Context) {
langs, err := repo_model.GetTopLanguageStats(ctx.Repo.Repository, 5)
if err != nil {
ctx.ServerError("Repo.GetTopLanguageStats", err)
return
}
ctx.Data["LanguageStats"] = langs
}
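// renderRepoTopics loads the repository's topics into the template data.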
func renderRepoTopics(ctx *context.Context) {
topics, _, err := repo_model.FindTopics(&repo_model.FindTopicOptions{
RepoID: ctx.Repo.Repository.ID,
})
if err != nil {
ctx.ServerError("models.FindTopics", err)
return
}
ctx.Data["Topics"] = topics
}
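// renderCode renders the repository code view: the empty-repository page, a directory listing or a single file view, depending on the current tree path.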
func renderCode(ctx *context.Context) {
ctx.Data["PageIsViewCode"] = true
if ctx.Repo.Repository.IsEmpty {
reallyEmpty := true
var err error
if ctx.Repo.GitRepo != nil {
reallyEmpty, err = ctx.Repo.GitRepo.IsEmpty()
if err != nil {
ctx.ServerError("GitRepo.IsEmpty", err)
return
}
}
if reallyEmpty {
ctx.HTML(http.StatusOK, tplRepoEMPTY)
return
}
// the repo is not really empty, so we should update the model in the database
// such a problem may be caused by:
// 1) an error occurs during pushing/receiving; 2) the user replaces an empty git repo manually
// and even more: the IsEmpty flag is deeply broken and should be removed, with the UI changed to cope with empty repos.
// it's possible for a repository to be non-empty by that flag but still 500
// because there are no branches - only tags - or the default branch does not exist as it has been 0-pushed.
ctx.Repo.Repository.IsEmpty = false
if err = repo_model.UpdateRepositoryCols(ctx, ctx.Repo.Repository, "is_empty"); err != nil {
ctx.ServerError("UpdateRepositoryCols", err)
return
}
if err = repo_module.UpdateRepoSize(ctx, ctx.Repo.Repository); err != nil {
ctx.ServerError("UpdateRepoSize", err)
return
}
}
title := ctx.Repo.Repository.Owner.Name + "/" + ctx.Repo.Repository.Name
if len(ctx.Repo.Repository.Description) > 0 {
title += ": " + ctx.Repo.Repository.Description
}
ctx.Data["Title"] = title
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
treeLink := branchLink
rawLink := ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL()
if len(ctx.Repo.TreePath) > 0 {
treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
}
// Get Topics of this repo
renderRepoTopics(ctx)
if ctx.Written() {
return
}
// Get the entry the user is currently looking at.
entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.GetTreeEntryByPath", git.IsErrNotExist, err)
return
}
if !ctx.Repo.Repository.IsEmpty {
checkCitationFile(ctx, entry)
if ctx.Written() {
return
}
}
renderLanguageStats(ctx)
if ctx.Written() {
return
}
if entry.IsDir() {
renderDirectory(ctx, treeLink)
} else {
renderFile(ctx, entry, treeLink, rawLink)
}
if ctx.Written() {
return
}
var treeNames []string
paths := make([]string, 0, 5)
if len(ctx.Repo.TreePath) > 0 {
treeNames = strings.Split(ctx.Repo.TreePath, "/")
for i := range treeNames {
paths = append(paths, strings.Join(treeNames[:i+1], "/"))
}
ctx.Data["HasParentPath"] = true
if len(paths)-2 >= 0 {
ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
}
}
ctx.Data["Paths"] = paths
ctx.Data["TreeLink"] = treeLink
ctx.Data["TreeNames"] = treeNames
ctx.Data["BranchLink"] = branchLink
ctx.HTML(http.StatusOK, tplRepoHome)
}
// RenderUserCards renders a page that shows users according to the input template
func RenderUserCards(ctx *context.Context, total int, getter func(opts db.ListOptions) ([]*user_model.User, error), tpl base.TplName) {
page := ctx.FormInt("page")
if page <= 0 {
page = 1
}
pager := context.NewPagination(total, setting.ItemsPerPage, page, 5)
ctx.Data["Page"] = pager
items, err := getter(db.ListOptions{
Page: pager.Paginater.Current(),
PageSize: setting.ItemsPerPage,
})
if err != nil {
ctx.ServerError("getter", err)
return
}
ctx.Data["Cards"] = items
ctx.HTML(http.StatusOK, tpl)
}
// Watchers renders the repository's watchers
func Watchers(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.watchers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.watchers")
ctx.Data["PageIsWatchers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumWatches, func(opts db.ListOptions) ([]*user_model.User, error) {
return repo_model.GetRepoWatchers(ctx.Repo.Repository.ID, opts)
}, tplWatchers)
}
// Stars renders the repository's stargazers
func Stars(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.stargazers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.stargazers")
ctx.Data["PageIsStargazers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumStars, func(opts db.ListOptions) ([]*user_model.User, error) {
return repo_model.GetStargazers(ctx.Repo.Repository, opts)
}, tplWatchers)
}
// Forks renders the repositories that have forked this repository
func Forks(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repos.forks")
page := ctx.FormInt("page")
if page <= 0 {
page = 1
}
pager := context.NewPagination(ctx.Repo.Repository.NumForks, setting.ItemsPerPage, page, 5)
ctx.Data["Page"] = pager
forks, err := repo_model.GetForks(ctx.Repo.Repository, db.ListOptions{
Page: pager.Paginater.Current(),
PageSize: setting.ItemsPerPage,
})
if err != nil {
ctx.ServerError("GetForks", err)
return
}
for _, fork := range forks {
if err = fork.GetOwner(ctx); err != nil {
ctx.ServerError("GetOwner", err)
return
}
}
ctx.Data["Forks"] = forks
ctx.HTML(http.StatusOK, tplForks)
}