This commit is contained in:
Steffen Schröter 2021-06-27 12:24:06 +02:00
commit 551c4bb3c3
195 changed files with 4275 additions and 1392 deletions

View file

@ -27,10 +27,10 @@ func runConvert(ctx *cli.Context) error {
return err return err
} }
log.Trace("AppPath: %s", setting.AppPath) log.Info("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath) log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath) log.Info("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath) log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig() setting.InitDBConfig()
if !setting.Database.UseMySQL { if !setting.Database.UseMySQL {

View file

@ -80,10 +80,10 @@ func runDumpRepository(ctx *cli.Context) error {
return err return err
} }
log.Trace("AppPath: %s", setting.AppPath) log.Info("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath) log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath) log.Info("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath) log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig() setting.InitDBConfig()
var ( var (

View file

@ -179,7 +179,7 @@ Gitea or set your environment appropriately.`, "")
GitObjectDirectory: os.Getenv(private.GitObjectDirectory), GitObjectDirectory: os.Getenv(private.GitObjectDirectory),
GitQuarantinePath: os.Getenv(private.GitQuarantinePath), GitQuarantinePath: os.Getenv(private.GitQuarantinePath),
GitPushOptions: pushOptions(), GitPushOptions: pushOptions(),
ProtectedBranchID: prID, PullRequestID: prID,
IsDeployKey: isDeployKey, IsDeployKey: isDeployKey,
} }
@ -221,8 +221,8 @@ Gitea or set your environment appropriately.`, "")
total++ total++
lastline++ lastline++
// If the ref is a branch, check if it's protected // If the ref is a branch or tag, check if it's protected
if strings.HasPrefix(refFullName, git.BranchPrefix) { if strings.HasPrefix(refFullName, git.BranchPrefix) || strings.HasPrefix(refFullName, git.TagPrefix) {
oldCommitIDs[count] = oldCommitID oldCommitIDs[count] = oldCommitID
newCommitIDs[count] = newCommitID newCommitIDs[count] = newCommitID
refFullNames[count] = refFullName refFullNames[count] = refFullName
@ -230,7 +230,7 @@ Gitea or set your environment appropriately.`, "")
fmt.Fprintf(out, "*") fmt.Fprintf(out, "*")
if count >= hookBatchSize { if count >= hookBatchSize {
fmt.Fprintf(out, " Checking %d branches\n", count) fmt.Fprintf(out, " Checking %d references\n", count)
hookOptions.OldCommitIDs = oldCommitIDs hookOptions.OldCommitIDs = oldCommitIDs
hookOptions.NewCommitIDs = newCommitIDs hookOptions.NewCommitIDs = newCommitIDs
@ -261,7 +261,7 @@ Gitea or set your environment appropriately.`, "")
hookOptions.NewCommitIDs = newCommitIDs[:count] hookOptions.NewCommitIDs = newCommitIDs[:count]
hookOptions.RefFullNames = refFullNames[:count] hookOptions.RefFullNames = refFullNames[:count]
fmt.Fprintf(out, " Checking %d branches\n", count) fmt.Fprintf(out, " Checking %d references\n", count)
statusCode, msg := private.HookPreReceive(username, reponame, hookOptions) statusCode, msg := private.HookPreReceive(username, reponame, hookOptions)
switch statusCode { switch statusCode {

View file

@ -28,10 +28,10 @@ func runMigrate(ctx *cli.Context) error {
return err return err
} }
log.Trace("AppPath: %s", setting.AppPath) log.Info("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath) log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath) log.Info("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath) log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig() setting.InitDBConfig()
if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil { if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil {

View file

@ -110,10 +110,10 @@ func runMigrateStorage(ctx *cli.Context) error {
return err return err
} }
log.Trace("AppPath: %s", setting.AppPath) log.Info("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath) log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath) log.Info("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath) log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig() setting.InitDBConfig()
if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil { if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil {

View file

@ -47,6 +47,14 @@ and it takes care of all the other things for you`,
Value: setting.PIDFile, Value: setting.PIDFile,
Usage: "Custom pid file path", Usage: "Custom pid file path",
}, },
cli.BoolFlag{
Name: "quiet, q",
Usage: "Only display Fatal logging errors until logging is set-up",
},
cli.BoolFlag{
Name: "verbose",
Usage: "Set initial logging to TRACE level until logging is properly set-up",
},
}, },
} }
@ -71,6 +79,14 @@ func runHTTPRedirector() {
} }
func runWeb(ctx *cli.Context) error { func runWeb(ctx *cli.Context) error {
if ctx.Bool("verbose") {
_ = log.DelLogger("console")
log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "trace", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
} else if ctx.Bool("quiet") {
_ = log.DelLogger("console")
log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "fatal", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
}
managerCtx, cancel := context.WithCancel(context.Background()) managerCtx, cancel := context.WithCancel(context.Background())
graceful.InitManager(managerCtx) graceful.InitManager(managerCtx)
defer cancel() defer cancel()

View file

@ -26,6 +26,7 @@ import (
"time" "time"
"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
gitea_git "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/markup/external" "code.gitea.io/gitea/modules/markup/external"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
@ -79,7 +80,7 @@ func runPR() {
setting.RunUser = curUser.Username setting.RunUser = curUser.Username
log.Printf("[PR] Loading fixtures data ...\n") log.Printf("[PR] Loading fixtures data ...\n")
setting.CheckLFSVersion() gitea_git.CheckLFSVersion()
//models.LoadConfigs() //models.LoadConfigs()
/* /*
setting.Database.Type = "sqlite3" setting.Database.Type = "sqlite3"

View file

@ -651,9 +651,15 @@ PATH =
;DEFAULT_ALLOW_CREATE_ORGANIZATION = true ;DEFAULT_ALLOW_CREATE_ORGANIZATION = true
;; ;;
;; Either "public", "limited" or "private", default is "public" ;; Either "public", "limited" or "private", default is "public"
;; Limited is for signed user only ;; Limited is for users visible only to signed users
;; Private is only for member of the organization ;; Private is for users visible only to members of their organizations
;; Public is for everyone ;; Public is for users visible for everyone
;DEFAULT_USER_VISIBILITY = public
;;
;; Either "public", "limited" or "private", default is "public"
;; Limited is for organizations visible only to signed users
;; Private is for organizations visible only to members of the organization
;; Public is for organizations visible to everyone
;DEFAULT_ORG_VISIBILITY = public ;DEFAULT_ORG_VISIBILITY = public
;; ;;
;; Default value for DefaultOrgMemberVisible ;; Default value for DefaultOrgMemberVisible
@ -705,6 +711,8 @@ PATH =
;; ;;
;; Minimum amount of time a user must exist before comments are kept when the user is deleted. ;; Minimum amount of time a user must exist before comments are kept when the user is deleted.
;USER_DELETE_WITH_COMMENTS_MAX_TIME = 0 ;USER_DELETE_WITH_COMMENTS_MAX_TIME = 0
;; Valid site url schemes for user profiles
;VALID_SITE_URL_SCHEMES=http,https
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -2048,6 +2056,16 @@ PATH =
;; storage type ;; storage type
;STORAGE_TYPE = local ;STORAGE_TYPE = local
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; settings for repository archives, will override storage setting
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;[storage.repo-archive]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; storage type
;STORAGE_TYPE = local
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; lfs storage will override storage ;; lfs storage will override storage

View file

@ -512,6 +512,7 @@ relation to port exhaustion.
- `SHOW_MILESTONES_DASHBOARD_PAGE`: **true** Enable this to show the milestones dashboard page - a view of all the user's milestones - `SHOW_MILESTONES_DASHBOARD_PAGE`: **true** Enable this to show the milestones dashboard page - a view of all the user's milestones
- `AUTO_WATCH_NEW_REPOS`: **true**: Enable this to let all organisation users watch new repos when they are created - `AUTO_WATCH_NEW_REPOS`: **true**: Enable this to let all organisation users watch new repos when they are created
- `AUTO_WATCH_ON_CHANGES`: **false**: Enable this to make users watch a repository after their first commit to it - `AUTO_WATCH_ON_CHANGES`: **false**: Enable this to make users watch a repository after their first commit to it
- `DEFAULT_USER_VISIBILITY`: **public**: Set default visibility mode for users, either "public", "limited" or "private".
- `DEFAULT_ORG_VISIBILITY`: **public**: Set default visibility mode for organisations, either "public", "limited" or "private". - `DEFAULT_ORG_VISIBILITY`: **public**: Set default visibility mode for organisations, either "public", "limited" or "private".
- `DEFAULT_ORG_MEMBER_VISIBLE`: **false** True will make the membership of the users visible when added to the organisation. - `DEFAULT_ORG_MEMBER_VISIBLE`: **false** True will make the membership of the users visible when added to the organisation.
- `ALLOW_ONLY_INTERNAL_REGISTRATION`: **false** Set to true to force registration only via gitea. - `ALLOW_ONLY_INTERNAL_REGISTRATION`: **false** Set to true to force registration only via gitea.
@ -519,6 +520,7 @@ relation to port exhaustion.
- `NO_REPLY_ADDRESS`: **noreply.DOMAIN** Value for the domain part of the user's email address in the git log if user has set KeepEmailPrivate to true. DOMAIN resolves to the value in server.DOMAIN. - `NO_REPLY_ADDRESS`: **noreply.DOMAIN** Value for the domain part of the user's email address in the git log if user has set KeepEmailPrivate to true. DOMAIN resolves to the value in server.DOMAIN.
The user's email will be replaced with a concatenation of the user name in lower case, "@" and NO_REPLY_ADDRESS. The user's email will be replaced with a concatenation of the user name in lower case, "@" and NO_REPLY_ADDRESS.
- `USER_DELETE_WITH_COMMENTS_MAX_TIME`: **0** Minimum amount of time a user must exist before comments are kept when the user is deleted. - `USER_DELETE_WITH_COMMENTS_MAX_TIME`: **0** Minimum amount of time a user must exist before comments are kept when the user is deleted.
- `VALID_SITE_URL_SCHEMES`: **http, https**: Valid site url schemes for user profiles
### Service - Expore (`service.explore`) ### Service - Expore (`service.explore`)
@ -907,13 +909,17 @@ Gitea supports customizing the sanitization policy for rendered HTML. The exampl
ELEMENT = span ELEMENT = span
ALLOW_ATTR = class ALLOW_ATTR = class
REGEXP = ^\s*((math(\s+|$)|inline(\s+|$)|display(\s+|$)))+ REGEXP = ^\s*((math(\s+|$)|inline(\s+|$)|display(\s+|$)))+
ALLOW_DATA_URI_IMAGES = true
``` ```
- `ELEMENT`: The element this policy applies to. Must be non-empty. - `ELEMENT`: The element this policy applies to. Must be non-empty.
- `ALLOW_ATTR`: The attribute this policy allows. Must be non-empty. - `ALLOW_ATTR`: The attribute this policy allows. Must be non-empty.
- `REGEXP`: A regex to match the contents of the attribute against. Must be present but may be empty for unconditional whitelisting of this attribute. - `REGEXP`: A regex to match the contents of the attribute against. Must be present but may be empty for unconditional whitelisting of this attribute.
- `ALLOW_DATA_URI_IMAGES`: **false** Allow data uri images (`<img src="data:image/png;base64,..."/>`).
Multiple sanitisation rules can be defined by adding unique subsections, e.g. `[markup.sanitizer.TeX-2]`. Multiple sanitisation rules can be defined by adding unique subsections, e.g. `[markup.sanitizer.TeX-2]`.
To apply a sanitisation rules only for a specify external renderer they must use the renderer name, e.g. `[markup.sanitizer.asciidoc.rule-1]`.
If the rule is defined above the renderer ini section or the name does not match a renderer it is applied to every renderer.
## Time (`time`) ## Time (`time`)
@ -991,6 +997,23 @@ MINIO_USE_SSL = false
And used by `[attachment]`, `[lfs]` and etc. as `STORAGE_TYPE`. And used by `[attachment]`, `[lfs]` and etc. as `STORAGE_TYPE`.
## Repository Archive Storage (`storage.repo-archive`)
Configuration for repository archive storage. It will inherit from default `[storage]` or
`[storage.xxx]` when set `STORAGE_TYPE` to `xxx`. The default of `PATH`
is `data/repo-archive` and the default of `MINIO_BASE_PATH` is `repo-archive/`.
- `STORAGE_TYPE`: **local**: Storage type for repo archive, `local` for local disk or `minio` for s3 compatible object storage service or other name defined with `[storage.xxx]`
- `SERVE_DIRECT`: **false**: Allows the storage driver to redirect to authenticated URLs to serve files directly. Currently, only Minio/S3 is supported via signed URLs, local does nothing.
- `PATH`: **./data/repo-archive**: Where to store archive files, only available when `STORAGE_TYPE` is `local`.
- `MINIO_ENDPOINT`: **localhost:9000**: Minio endpoint to connect only available when `STORAGE_TYPE` is `minio`
- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID to connect only available when `STORAGE_TYPE` is `minio`
- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey to connect only available when `STORAGE_TYPE is` `minio`
- `MINIO_BUCKET`: **gitea**: Minio bucket to store the lfs only available when `STORAGE_TYPE` is `minio`
- `MINIO_LOCATION`: **us-east-1**: Minio location to create bucket only available when `STORAGE_TYPE` is `minio`
- `MINIO_BASE_PATH`: **repo-archive/**: Minio base path on the bucket only available when `STORAGE_TYPE` is `minio`
- `MINIO_USE_SSL`: **false**: Minio enabled ssl only available when `STORAGE_TYPE` is `minio`
## Other (`other`) ## Other (`other`)
- `SHOW_FOOTER_BRANDING`: **false**: Show Gitea branding in the footer. - `SHOW_FOOTER_BRANDING`: **false**: Show Gitea branding in the footer.

View file

@ -382,6 +382,21 @@ MINIO_USE_SSL = false
然后你在 `[attachment]`, `[lfs]` 等中可以把这个名字用作 `STORAGE_TYPE` 的值。 然后你在 `[attachment]`, `[lfs]` 等中可以把这个名字用作 `STORAGE_TYPE` 的值。
## Repository Archive Storage (`storage.repo-archive`)
Repository archive 的存储配置。 如果 `STORAGE_TYPE` 为空,则此配置将从 `[storage]` 继承。如果不为 `local` 或者 `minio` 而为 `xxx` 则从 `[storage.xxx]` 继承。当继承时, `PATH` 默认为 `data/repo-archive``MINIO_BASE_PATH` 默认为 `repo-archive/`
- `STORAGE_TYPE`: **local**: Repository archive 的存储类型,`local` 将存储到磁盘,`minio` 将存储到 s3 兼容的对象服务。
- `SERVE_DIRECT`: **false**: 允许直接重定向到存储系统。当前,仅 Minio/S3 是支持的。
- `PATH`: 存放 Repository archive 上传的文件的地方,默认是 `data/repo-archive`
- `MINIO_ENDPOINT`: **localhost:9000**: Minio 地址,仅当 `STORAGE_TYPE``minio` 时有效。
- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID仅当 `STORAGE_TYPE``minio` 时有效。
- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey仅当 `STORAGE_TYPE``minio` 时有效。
- `MINIO_BUCKET`: **gitea**: Minio bucket仅当 `STORAGE_TYPE``minio` 时有效。
- `MINIO_LOCATION`: **us-east-1**: Minio location ,仅当 `STORAGE_TYPE``minio` 时有效。
- `MINIO_BASE_PATH`: **repo-archive/**: Minio base path ,仅当 `STORAGE_TYPE``minio` 时有效。
- `MINIO_USE_SSL`: **false**: Minio 是否启用 ssl ,仅当 `STORAGE_TYPE``minio` 时有效。
## Other (`other`) ## Other (`other`)
- `SHOW_FOOTER_BRANDING`: 为真则在页面底部显示Gitea的字样。 - `SHOW_FOOTER_BRANDING`: 为真则在页面底部显示Gitea的字样。

View file

@ -64,8 +64,8 @@ IS_INPUT_FILE = false
[markup.jupyter] [markup.jupyter]
ENABLED = true ENABLED = true
FILE_EXTENSIONS = .ipynb FILE_EXTENSIONS = .ipynb
RENDER_COMMAND = "jupyter nbconvert --stdout --to html --template basic " RENDER_COMMAND = "jupyter nbconvert --stdin --stdout --to html --template basic"
IS_INPUT_FILE = true IS_INPUT_FILE = false
[markup.restructuredtext] [markup.restructuredtext]
ENABLED = true ENABLED = true
@ -90,15 +90,50 @@ FILE_EXTENSIONS = .md,.markdown
RENDER_COMMAND = pandoc -f markdown -t html --katex RENDER_COMMAND = pandoc -f markdown -t html --katex
``` ```
You must define `ELEMENT`, `ALLOW_ATTR`, and `REGEXP` in each section. You must define `ELEMENT` and `ALLOW_ATTR` in each section.
To define multiple entries, add a unique alphanumeric suffix (e.g., `[markup.sanitizer.1]` and `[markup.sanitizer.something]`). To define multiple entries, add a unique alphanumeric suffix (e.g., `[markup.sanitizer.1]` and `[markup.sanitizer.something]`).
To apply a sanitisation rules only for a specify external renderer they must use the renderer name, e.g. `[markup.sanitizer.asciidoc.rule-1]`, `[markup.sanitizer.<renderer>.rule-1]`.
**Note**: If the rule is defined above the renderer ini section or the name does not match a renderer it is applied to every renderer.
Once your configuration changes have been made, restart Gitea to have changes take effect. Once your configuration changes have been made, restart Gitea to have changes take effect.
**Note**: Prior to Gitea 1.12 there was a single `markup.sanitiser` section with keys that were redefined for multiple rules, however, **Note**: Prior to Gitea 1.12 there was a single `markup.sanitiser` section with keys that were redefined for multiple rules, however,
there were significant problems with this method of configuration necessitating configuration through multiple sections. there were significant problems with this method of configuration necessitating configuration through multiple sections.
### Example: Office DOCX
Display Office DOCX files with [`pandoc`](https://pandoc.org/):
```ini
[markup.docx]
ENABLED = true
FILE_EXTENSIONS = .docx
RENDER_COMMAND = "pandoc --from docx --to html --self-contained --template /path/to/basic.html"
[markup.sanitizer.docx.img]
ALLOW_DATA_URI_IMAGES = true
```
The template file has the following content:
```
$body$
```
### Example: Jupyter Notebook
Display Jupyter Notebook files with [`nbconvert`](https://github.com/jupyter/nbconvert):
```ini
[markup.jupyter]
ENABLED = true
FILE_EXTENSIONS = .ipynb
RENDER_COMMAND = "jupyter-nbconvert --stdin --stdout --to html --template basic"
[markup.sanitizer.jupyter.img]
ALLOW_DATA_URI_IMAGES = true
```
## Customizing CSS ## Customizing CSS
The external renderer is specified in the .ini in the format `[markup.XXXXX]` and the HTML supplied by your external renderer will be wrapped in a `<div>` with classes `markup` and `XXXXX`. The `markup` class provides out of the box styling (as does `markdown` if `XXXXX` is `markdown`). Otherwise you can use these classes to specifically target the contents of your rendered HTML. The external renderer is specified in the .ini in the format `[markup.XXXXX]` and the HTML supplied by your external renderer will be wrapped in a `<div>` with classes `markup` and `XXXXX`. The `markup` class provides out of the box styling (as does `markdown` if `XXXXX` is `markdown`). Otherwise you can use these classes to specifically target the contents of your rendered HTML.

View file

@ -0,0 +1,57 @@
---
date: "2021-05-14T00:00:00-00:00"
title: "Protected tags"
slug: "protected-tags"
weight: 45
toc: false
draft: false
menu:
sidebar:
parent: "advanced"
name: "Protected tags"
weight: 45
identifier: "protected-tags"
---
# Protected tags
Protected tags allow control over who has permission to create or update git tags. Each rule allows you to match either an individual tag name, or use an appropriate pattern to control multiple tags at once.
**Table of Contents**
{{< toc >}}
## Setting up protected tags
To protect a tag, you need to follow these steps:
1. Go to the repositorys **Settings** > **Tags** page.
1. Type a pattern to match a name. You can use a single name, a [glob pattern](https://pkg.go.dev/github.com/gobwas/glob#Compile) or a regular expression.
1. Choose the allowed users and/or teams. If you leave these fields empty noone is allowed to create or modify this tag.
1. Select **Save** to save the configuration.
## Pattern protected tags
The pattern uses [glob](https://pkg.go.dev/github.com/gobwas/glob#Compile) or regular expressions to match a tag name. For regular expressions you need to enclose the pattern in slashes.
Examples:
| Type | Pattern Protected Tag | Possible Matching Tags |
| ----- | ------------------------ | --------------------------------------- |
| Glob | `v*` | `v`, `v-1`, `version2` |
| Glob | `v[0-9]` | `v0`, `v1` up to `v9` |
| Glob | `*-release` | `2.1-release`, `final-release` |
| Glob | `gitea` | only `gitea` |
| Glob | `*gitea*` | `gitea`, `2.1-gitea`, `1_gitea-release` |
| Glob | `{v,rel}-*` | `v-`, `v-1`, `v-final`, `rel-`, `rel-x` |
| Glob | `*` | matches all possible tag names |
| Regex | `/\Av/` | `v`, `v-1`, `version2` |
| Regex | `/\Av[0-9]\z/` | `v0`, `v1` up to `v9` |
| Regex | `/\Av\d+\.\d+\.\d+\z/` | `v1.0.17`, `v2.1.0` |
| Regex | `/\Av\d+(\.\d+){0,2}\z/` | `v1`, `v2.1`, `v1.2.34` |
| Regex | `/-release\z/` | `2.1-release`, `final-release` |
| Regex | `/gitea/` | `gitea`, `2.1-gitea`, `1_gitea-release` |
| Regex | `/\Agitea\z/` | only `gitea` |
| Regex | `/^gitea$/` | only `gitea` |
| Regex | `/\A(v\|rel)-/` | `v-`, `v-1`, `v-final`, `rel-`, `rel-x` |
| Regex | `/.+/` | matches all possible tag names |

View file

@ -73,6 +73,8 @@ One of these three distributions of Make will run on Windows:
- The binary is called `mingw32-make.exe` instead of `make.exe`. Add the `bin` folder to `PATH`. - The binary is called `mingw32-make.exe` instead of `make.exe`. Add the `bin` folder to `PATH`.
- [Chocolatey package](https://chocolatey.org/packages/make). Run `choco install make` - [Chocolatey package](https://chocolatey.org/packages/make). Run `choco install make`
**Note**: If you are attempting to build using make with Windows Command Prompt, you may run into issues. The above prompts (git bash, or mingw) are recommended, however if you only have command prompt (or potentially powershell) you can set environment variables using the [set](https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/set_1) command, e.g. `set TAGS=bindata`.
## Downloading and cloning the Gitea source code ## Downloading and cloning the Gitea source code
The recommended method of obtaining the source code is by using `git clone`. The recommended method of obtaining the source code is by using `git clone`.

View file

@ -46,6 +46,8 @@ Starts the server:
- `--port number`, `-p number`: Port number. Optional. (default: 3000). Overrides configuration file. - `--port number`, `-p number`: Port number. Optional. (default: 3000). Overrides configuration file.
- `--install-port number`: Port number to run the install page on. Optional. (default: 3000). Overrides configuration file. - `--install-port number`: Port number to run the install page on. Optional. (default: 3000). Overrides configuration file.
- `--pid path`, `-P path`: Pidfile path. Optional. - `--pid path`, `-P path`: Pidfile path. Optional.
- `--quiet`, `-q`: Only emit Fatal logs on the console for logs emitted before logging set up.
- `--verbose`: Emit tracing logs on the console for logs emitted before logging is set-up.
- Examples: - Examples:
- `gitea web` - `gitea web`
- `gitea web --port 80` - `gitea web --port 80`

View file

@ -221,6 +221,9 @@ If you wish to run Gitea with IIS. You will need to setup IIS with URL Rewrite a
```xml ```xml
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration> <configuration>
<system.web>
<httpRuntime requestPathInvalidCharacters="" />
</system.web>
<system.webServer> <system.webServer>
<security> <security>
<requestFiltering> <requestFiltering>

View file

@ -11,6 +11,7 @@ import (
"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs" api "code.gitea.io/gitea/modules/structs"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -139,6 +140,59 @@ func TestAPIPullReview(t *testing.T) {
req = NewRequestf(t, http.MethodDelete, "/api/v1/repos/%s/%s/pulls/%d/reviews/%d?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, review.ID, token) req = NewRequestf(t, http.MethodDelete, "/api/v1/repos/%s/%s/pulls/%d/reviews/%d?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, review.ID, token)
resp = session.MakeRequest(t, req, http.StatusNoContent) resp = session.MakeRequest(t, req, http.StatusNoContent)
// test CreatePullReview Comment without body but with comments
req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{
// Body: "",
Event: "COMMENT",
Comments: []api.CreatePullReviewComment{{
Path: "README.md",
Body: "first new line",
OldLineNum: 0,
NewLineNum: 1,
}, {
Path: "README.md",
Body: "first old line",
OldLineNum: 1,
NewLineNum: 0,
},
},
})
var commentReview api.PullReview
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &commentReview)
assert.EqualValues(t, "COMMENT", commentReview.State)
assert.EqualValues(t, 2, commentReview.CodeCommentsCount)
assert.EqualValues(t, "", commentReview.Body)
assert.EqualValues(t, false, commentReview.Dismissed)
// test CreatePullReview Comment with body but without comments
commentBody := "This is a body of the comment."
req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{
Body: commentBody,
Event: "COMMENT",
Comments: []api.CreatePullReviewComment{},
})
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &commentReview)
assert.EqualValues(t, "COMMENT", commentReview.State)
assert.EqualValues(t, 0, commentReview.CodeCommentsCount)
assert.EqualValues(t, commentBody, commentReview.Body)
assert.EqualValues(t, false, commentReview.Dismissed)
// test CreatePullReview Comment without body and no comments
req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{
Body: "",
Event: "COMMENT",
Comments: []api.CreatePullReviewComment{},
})
resp = session.MakeRequest(t, req, http.StatusUnprocessableEntity)
errMap := make(map[string]interface{})
json := jsoniter.ConfigCompatibleWithStandardLibrary
json.Unmarshal(resp.Body.Bytes(), &errMap)
assert.EqualValues(t, "review event COMMENT requires a body or a comment", errMap["message"].(string))
// test get review requests // test get review requests
// to make it simple, use same api with get review // to make it simple, use same api with get review
pullIssue12 := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 12}).(*models.Issue) pullIssue12 := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 12}).(*models.Issue)

View file

@ -39,7 +39,7 @@ func TestAPIRepoTags(t *testing.T) {
assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.zip", tags[0].ZipballURL) assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.zip", tags[0].ZipballURL)
assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.tar.gz", tags[0].TarballURL) assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.tar.gz", tags[0].TarballURL)
newTag := createNewTagUsingAPI(t, session, token, user.Name, repoName, "awesome-tag", "", "nice!\nand some text") newTag := createNewTagUsingAPI(t, session, token, user.Name, repoName, "gitea/22", "", "nice!\nand some text")
resp = session.MakeRequest(t, req, http.StatusOK) resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &tags) DecodeJSON(t, resp, &tags)
assert.Len(t, tags, 2) assert.Len(t, tags, 2)
@ -51,6 +51,20 @@ func TestAPIRepoTags(t *testing.T) {
assert.EqualValues(t, newTag.Commit.SHA, tag.Commit.SHA) assert.EqualValues(t, newTag.Commit.SHA, tag.Commit.SHA)
} }
} }
// get created tag
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/tags/%s?token=%s", user.Name, repoName, newTag.Name, token)
resp = session.MakeRequest(t, req, http.StatusOK)
var tag *api.Tag
DecodeJSON(t, resp, &tag)
assert.EqualValues(t, newTag, tag)
// delete tag
delReq := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/tags/%s?token=%s", user.Name, repoName, newTag.Name, token)
resp = session.MakeRequest(t, delReq, http.StatusNoContent)
// check if it's gone
resp = session.MakeRequest(t, req, http.StatusNotFound)
} }
func createNewTagUsingAPI(t *testing.T, session *TestSession, token string, ownerName, repoName, name, target, msg string) *api.Tag { func createNewTagUsingAPI(t *testing.T, session *TestSession, token string, ownerName, repoName, name, target, msg string) *api.Tag {

View file

@ -26,7 +26,7 @@ func TestUserHeatmap(t *testing.T) {
var heatmap []*models.UserHeatmapData var heatmap []*models.UserHeatmapData
DecodeJSON(t, resp, &heatmap) DecodeJSON(t, resp, &heatmap)
var dummyheatmap []*models.UserHeatmapData var dummyheatmap []*models.UserHeatmapData
dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1603152000, Contributions: 1}) dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1603227600, Contributions: 1})
assert.Equal(t, dummyheatmap, heatmap) assert.Equal(t, dummyheatmap, heatmap)
} }

View file

@ -59,3 +59,34 @@ func TestAPIUserSearchNotLoggedIn(t *testing.T) {
} }
} }
} }
func TestAPIUserSearchAdminLoggedInUserHidden(t *testing.T) {
defer prepareTestEnv(t)()
adminUsername := "user1"
session := loginUser(t, adminUsername)
token := getTokenForLoggedInUser(t, session)
query := "user31"
req := NewRequestf(t, "GET", "/api/v1/users/search?token=%s&q=%s", token, query)
req.SetBasicAuth(token, "x-oauth-basic")
resp := session.MakeRequest(t, req, http.StatusOK)
var results SearchResults
DecodeJSON(t, resp, &results)
assert.NotEmpty(t, results.Data)
for _, user := range results.Data {
assert.Contains(t, user.UserName, query)
assert.NotEmpty(t, user.Email)
assert.EqualValues(t, "private", user.Visibility)
}
}
func TestAPIUserSearchNotLoggedInUserHidden(t *testing.T) {
defer prepareTestEnv(t)()
query := "user31"
req := NewRequestf(t, "GET", "/api/v1/users/search?q=%s", query)
resp := MakeRequest(t, req, http.StatusOK)
var results SearchResults
DecodeJSON(t, resp, &results)
assert.Empty(t, results.Data)
}

View file

@ -143,7 +143,7 @@ func standardCommitAndPushTest(t *testing.T, dstPath string) (little, big string
func lfsCommitAndPushTest(t *testing.T, dstPath string) (littleLFS, bigLFS string) { func lfsCommitAndPushTest(t *testing.T, dstPath string) (littleLFS, bigLFS string) {
t.Run("LFS", func(t *testing.T) { t.Run("LFS", func(t *testing.T) {
defer PrintCurrentTest(t)() defer PrintCurrentTest(t)()
setting.CheckLFSVersion() git.CheckLFSVersion()
if !setting.LFS.StartServer { if !setting.LFS.StartServer {
t.Skip() t.Skip()
return return
@ -213,7 +213,7 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK) resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
assert.Equal(t, littleSize, resp.Length) assert.Equal(t, littleSize, resp.Length)
setting.CheckLFSVersion() git.CheckLFSVersion()
if setting.LFS.StartServer { if setting.LFS.StartServer {
req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", littleLFS)) req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", littleLFS))
resp := session.MakeRequest(t, req, http.StatusOK) resp := session.MakeRequest(t, req, http.StatusOK)
@ -255,7 +255,7 @@ func mediaTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS
resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK) resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
assert.Equal(t, littleSize, resp.Length) assert.Equal(t, littleSize, resp.Length)
setting.CheckLFSVersion() git.CheckLFSVersion()
if setting.LFS.StartServer { if setting.LFS.StartServer {
req = NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", littleLFS)) req = NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", littleLFS))
resp = session.MakeRequestNilResponseRecorder(t, req, http.StatusOK) resp = session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)

View file

@ -0,0 +1 @@
aacbdfe9e1c4b47f60abe81849045fa4e96f1d75

View file

@ -26,6 +26,7 @@ import (
"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/queue"
@ -162,7 +163,7 @@ func initIntegrationTest() {
setting.SetCustomPathAndConf("", "", "") setting.SetCustomPathAndConf("", "", "")
setting.NewContext() setting.NewContext()
util.RemoveAll(models.LocalCopyPath()) util.RemoveAll(models.LocalCopyPath())
setting.CheckLFSVersion() git.CheckLFSVersion()
setting.InitDBConfig() setting.InitDBConfig()
if err := storage.Init(); err != nil { if err := storage.Init(); err != nil {
fmt.Printf("Init storage failed: %v", err) fmt.Printf("Init storage failed: %v", err)

View file

@ -13,6 +13,7 @@ import (
"testing" "testing"
"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/routers/web" "code.gitea.io/gitea/routers/web"
@ -81,7 +82,7 @@ func checkResponseTestContentEncoding(t *testing.T, content *[]byte, resp *httpt
func TestGetLFSSmall(t *testing.T) { func TestGetLFSSmall(t *testing.T) {
defer prepareTestEnv(t)() defer prepareTestEnv(t)()
setting.CheckLFSVersion() git.CheckLFSVersion()
if !setting.LFS.StartServer { if !setting.LFS.StartServer {
t.Skip() t.Skip()
return return
@ -94,7 +95,7 @@ func TestGetLFSSmall(t *testing.T) {
func TestGetLFSLarge(t *testing.T) { func TestGetLFSLarge(t *testing.T) {
defer prepareTestEnv(t)() defer prepareTestEnv(t)()
setting.CheckLFSVersion() git.CheckLFSVersion()
if !setting.LFS.StartServer { if !setting.LFS.StartServer {
t.Skip() t.Skip()
return return
@ -110,7 +111,7 @@ func TestGetLFSLarge(t *testing.T) {
func TestGetLFSGzip(t *testing.T) { func TestGetLFSGzip(t *testing.T) {
defer prepareTestEnv(t)() defer prepareTestEnv(t)()
setting.CheckLFSVersion() git.CheckLFSVersion()
if !setting.LFS.StartServer { if !setting.LFS.StartServer {
t.Skip() t.Skip()
return return
@ -131,7 +132,7 @@ func TestGetLFSGzip(t *testing.T) {
func TestGetLFSZip(t *testing.T) { func TestGetLFSZip(t *testing.T) {
defer prepareTestEnv(t)() defer prepareTestEnv(t)()
setting.CheckLFSVersion() git.CheckLFSVersion()
if !setting.LFS.StartServer { if !setting.LFS.StartServer {
t.Skip() t.Skip()
return return
@ -154,7 +155,7 @@ func TestGetLFSZip(t *testing.T) {
func TestGetLFSRangeNo(t *testing.T) { func TestGetLFSRangeNo(t *testing.T) {
defer prepareTestEnv(t)() defer prepareTestEnv(t)()
setting.CheckLFSVersion() git.CheckLFSVersion()
if !setting.LFS.StartServer { if !setting.LFS.StartServer {
t.Skip() t.Skip()
return return
@ -167,7 +168,7 @@ func TestGetLFSRangeNo(t *testing.T) {
func TestGetLFSRange(t *testing.T) { func TestGetLFSRange(t *testing.T) {
defer prepareTestEnv(t)() defer prepareTestEnv(t)()
setting.CheckLFSVersion() git.CheckLFSVersion()
if !setting.LFS.StartServer { if !setting.LFS.StartServer {
t.Skip() t.Skip()
return return

View file

@ -23,6 +23,7 @@ import (
"code.gitea.io/gitea/models/migrations" "code.gitea.io/gitea/models/migrations"
"code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/util"
@ -61,7 +62,7 @@ func initMigrationTest(t *testing.T) func() {
assert.NoError(t, util.RemoveAll(setting.RepoRootPath)) assert.NoError(t, util.RemoveAll(setting.RepoRootPath))
assert.NoError(t, util.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) assert.NoError(t, util.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath))
setting.CheckLFSVersion() git.CheckLFSVersion()
setting.InitDBConfig() setting.InitDBConfig()
setting.NewLogServices(true) setting.NewLogServices(true)
return deferFn return deferFn

View file

@ -59,7 +59,9 @@ func TestMirrorPull(t *testing.T) {
assert.NoError(t, release_service.CreateRelease(gitRepo, &models.Release{ assert.NoError(t, release_service.CreateRelease(gitRepo, &models.Release{
RepoID: repo.ID, RepoID: repo.ID,
Repo: repo,
PublisherID: user.ID, PublisherID: user.ID,
Publisher: user,
TagName: "v0.2", TagName: "v0.2",
Target: "master", Target: "master",
Title: "v0.2 is released", Title: "v0.2 is released",

View file

@ -0,0 +1,74 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package integrations
import (
"io/ioutil"
"net/url"
"testing"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/release"
"github.com/stretchr/testify/assert"
)
func TestCreateNewTagProtected(t *testing.T) {
defer prepareTestEnv(t)()
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
t.Run("API", func(t *testing.T) {
defer PrintCurrentTest(t)()
err := release.CreateNewTag(owner, repo, "master", "v-1", "first tag")
assert.NoError(t, err)
err = models.InsertProtectedTag(&models.ProtectedTag{
RepoID: repo.ID,
NamePattern: "v-*",
})
assert.NoError(t, err)
err = models.InsertProtectedTag(&models.ProtectedTag{
RepoID: repo.ID,
NamePattern: "v-1.1",
AllowlistUserIDs: []int64{repo.OwnerID},
})
assert.NoError(t, err)
err = release.CreateNewTag(owner, repo, "master", "v-2", "second tag")
assert.Error(t, err)
assert.True(t, models.IsErrProtectedTagName(err))
err = release.CreateNewTag(owner, repo, "master", "v-1.1", "third tag")
assert.NoError(t, err)
})
t.Run("Git", func(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
username := "user2"
httpContext := NewAPITestContext(t, username, "repo1")
dstPath, err := ioutil.TempDir("", httpContext.Reponame)
assert.NoError(t, err)
defer util.RemoveAll(dstPath)
u.Path = httpContext.GitPath()
u.User = url.UserPassword(username, userPassword)
doGitClone(dstPath, u)(t)
_, err = git.NewCommand("tag", "v-2").RunInDir(dstPath)
assert.NoError(t, err)
_, err = git.NewCommand("push", "--tags").RunInDir(dstPath)
assert.Error(t, err)
assert.Contains(t, err.Error(), "Tag v-2 is protected")
})
})
}

View file

@ -362,11 +362,7 @@ func (repo *Repository) GetBranchProtection(branchName string) (*ProtectedBranch
} }
// IsProtectedBranch checks if branch is protected // IsProtectedBranch checks if branch is protected
func (repo *Repository) IsProtectedBranch(branchName string, doer *User) (bool, error) { func (repo *Repository) IsProtectedBranch(branchName string) (bool, error) {
if doer == nil {
return true, nil
}
protectedBranch := &ProtectedBranch{ protectedBranch := &ProtectedBranch{
RepoID: repo.ID, RepoID: repo.ID,
BranchName: branchName, BranchName: branchName,
@ -379,27 +375,6 @@ func (repo *Repository) IsProtectedBranch(branchName string, doer *User) (bool,
return has, nil return has, nil
} }
// IsProtectedBranchForPush checks if branch is protected for push
func (repo *Repository) IsProtectedBranchForPush(branchName string, doer *User) (bool, error) {
if doer == nil {
return true, nil
}
protectedBranch := &ProtectedBranch{
RepoID: repo.ID,
BranchName: branchName,
}
has, err := x.Get(protectedBranch)
if err != nil {
return true, err
} else if has {
return !protectedBranch.CanUserPush(doer.ID), nil
}
return false, nil
}
// updateApprovalWhitelist checks whether the user whitelist changed and returns a whitelist with // updateApprovalWhitelist checks whether the user whitelist changed and returns a whitelist with
// the users from newWhitelist which have explicit read or write access to the repo. // the users from newWhitelist which have explicit read or write access to the repo.
func updateApprovalWhitelist(repo *Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { func updateApprovalWhitelist(repo *Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {

View file

@ -985,6 +985,21 @@ func (err ErrInvalidTagName) Error() string {
return fmt.Sprintf("release tag name is not valid [tag_name: %s]", err.TagName) return fmt.Sprintf("release tag name is not valid [tag_name: %s]", err.TagName)
} }
// ErrProtectedTagName represents a "ProtectedTagName" kind of error.
type ErrProtectedTagName struct {
TagName string
}
// IsErrProtectedTagName checks if an error is a ErrProtectedTagName.
func IsErrProtectedTagName(err error) bool {
_, ok := err.(ErrProtectedTagName)
return ok
}
func (err ErrProtectedTagName) Error() string {
return fmt.Sprintf("release tag name is protected [tag_name: %s]", err.TagName)
}
// ErrRepoFileAlreadyExists represents a "RepoFileAlreadyExist" kind of error. // ErrRepoFileAlreadyExists represents a "RepoFileAlreadyExist" kind of error.
type ErrRepoFileAlreadyExists struct { type ErrRepoFileAlreadyExists struct {
Path string Path string

View file

@ -32,3 +32,27 @@
repo_id: 22 repo_id: 22
is_private: true is_private: true
created_unix: 1603267920 created_unix: 1603267920
- id: 5
user_id: 10
op_type: 1 # create repo
act_user_id: 10
repo_id: 6
is_private: true
created_unix: 1603010100
- id: 6
user_id: 10
op_type: 1 # create repo
act_user_id: 10
repo_id: 7
is_private: true
created_unix: 1603011300
- id: 7
user_id: 10
op_type: 1 # create repo
act_user_id: 10
repo_id: 8
is_private: false
created_unix: 1603011540 # grouped with id:7

View file

@ -0,0 +1 @@
[] # empty

View file

@ -508,7 +508,6 @@
num_repos: 0 num_repos: 0
is_active: true is_active: true
- -
id: 30 id: 30
lower_name: user30 lower_name: user30
@ -525,3 +524,20 @@
avatar_email: user30@example.com avatar_email: user30@example.com
num_repos: 2 num_repos: 2
is_active: true is_active: true
-
id: 31
lower_name: user31
name: user31
full_name: "user31"
email: user31@example.com
passwd_hash_algo: argon2
passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b # password
type: 0 # individual
salt: ZogKvWdyEx
is_admin: false
visibility: 2
avatar: avatar31
avatar_email: user31@example.com
num_repos: 0
is_active: true

View file

@ -856,7 +856,11 @@ func UserSignIn(username, password string) (*User, error) {
return authUser, nil return authUser, nil
} }
log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err) if IsErrUserNotExist(err) {
log.Debug("Failed to login '%s' via '%s': %v", username, source.Name, err)
} else {
log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err)
}
} }
return nil, ErrUserNotExist{user.ID, user.Name, 0} return nil, ErrUserNotExist{user.ID, user.Name, 0}

View file

@ -319,6 +319,10 @@ var migrations = []Migration{
NewMigration("Create PushMirror table", createPushMirrorTable), NewMigration("Create PushMirror table", createPushMirrorTable),
// v184 -> v185 // v184 -> v185
NewMigration("Rename Task errors to message", renameTaskErrorsToMessage), NewMigration("Rename Task errors to message", renameTaskErrorsToMessage),
// v185 -> v186
NewMigration("Add new table repo_archiver", addRepoArchiver),
// v186 -> v187
NewMigration("Create protected tag table", createProtectedTagTable),
} }
// GetCurrentDBVersion returns the current db version // GetCurrentDBVersion returns the current db version

View file

@ -16,6 +16,7 @@ import (
"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/util"
@ -55,7 +56,7 @@ func TestMain(m *testing.M) {
setting.SetCustomPathAndConf("", "", "") setting.SetCustomPathAndConf("", "", "")
setting.NewContext() setting.NewContext()
setting.CheckLFSVersion() git.CheckLFSVersion()
setting.InitDBConfig() setting.InitDBConfig()
setting.NewLogServices(true) setting.NewLogServices(true)

View file

@ -1,3 +1,4 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.

22
models/migrations/v185.go Normal file
View file

@ -0,0 +1,22 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package migrations
import (
"xorm.io/xorm"
)
func addRepoArchiver(x *xorm.Engine) error {
// RepoArchiver represents all archivers
type RepoArchiver struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"index unique(s)"`
Type int `xorm:"unique(s)"`
Status int
CommitID string `xorm:"VARCHAR(40) unique(s)"`
CreatedUnix int64 `xorm:"INDEX NOT NULL created"`
}
return x.Sync2(new(RepoArchiver))
}

26
models/migrations/v186.go Normal file
View file

@ -0,0 +1,26 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package migrations
import (
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/xorm"
)
func createProtectedTagTable(x *xorm.Engine) error {
type ProtectedTag struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64
NamePattern string
AllowlistUserIDs []int64 `xorm:"JSON TEXT"`
AllowlistTeamIDs []int64 `xorm:"JSON TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
}
return x.Sync2(new(ProtectedTag))
}

View file

@ -136,6 +136,8 @@ func init() {
new(RepoTransfer), new(RepoTransfer),
new(IssueIndex), new(IssueIndex),
new(PushMirror), new(PushMirror),
new(RepoArchiver),
new(ProtectedTag),
) )
gonicNames := []string{"SSL", "UID"} gonicNames := []string{"SSL", "UID"}

View file

@ -455,22 +455,22 @@ func getOwnedOrgsByUserID(sess *xorm.Session, userID int64) ([]*User, error) {
Find(&orgs) Find(&orgs)
} }
// HasOrgVisible tells if the given user can see the given org // HasOrgOrUserVisible tells if the given user can see the given org or user
func HasOrgVisible(org, user *User) bool { func HasOrgOrUserVisible(org, user *User) bool {
return hasOrgVisible(x, org, user) return hasOrgOrUserVisible(x, org, user)
} }
func hasOrgVisible(e Engine, org, user *User) bool { func hasOrgOrUserVisible(e Engine, orgOrUser, user *User) bool {
// Not SignedUser // Not SignedUser
if user == nil { if user == nil {
return org.Visibility == structs.VisibleTypePublic return orgOrUser.Visibility == structs.VisibleTypePublic
} }
if user.IsAdmin { if user.IsAdmin || orgOrUser.ID == user.ID {
return true return true
} }
if (org.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !org.hasMemberWithUserID(e, user.ID) { if (orgOrUser.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !orgOrUser.hasMemberWithUserID(e, user.ID) {
return false return false
} }
return true return true
@ -483,7 +483,7 @@ func HasOrgsVisible(orgs []*User, user *User) bool {
} }
for _, org := range orgs { for _, org := range orgs {
if HasOrgVisible(org, user) { if HasOrgOrUserVisible(org, user) {
return true return true
} }
} }

View file

@ -586,9 +586,9 @@ func TestHasOrgVisibleTypePublic(t *testing.T) {
assert.NoError(t, CreateOrganization(org, owner)) assert.NoError(t, CreateOrganization(org, owner))
org = AssertExistsAndLoadBean(t, org = AssertExistsAndLoadBean(t,
&User{Name: org.Name, Type: UserTypeOrganization}).(*User) &User{Name: org.Name, Type: UserTypeOrganization}).(*User)
test1 := HasOrgVisible(org, owner) test1 := HasOrgOrUserVisible(org, owner)
test2 := HasOrgVisible(org, user3) test2 := HasOrgOrUserVisible(org, user3)
test3 := HasOrgVisible(org, nil) test3 := HasOrgOrUserVisible(org, nil)
assert.True(t, test1) // owner of org assert.True(t, test1) // owner of org
assert.True(t, test2) // user not a part of org assert.True(t, test2) // user not a part of org
assert.True(t, test3) // logged out user assert.True(t, test3) // logged out user
@ -609,9 +609,9 @@ func TestHasOrgVisibleTypeLimited(t *testing.T) {
assert.NoError(t, CreateOrganization(org, owner)) assert.NoError(t, CreateOrganization(org, owner))
org = AssertExistsAndLoadBean(t, org = AssertExistsAndLoadBean(t,
&User{Name: org.Name, Type: UserTypeOrganization}).(*User) &User{Name: org.Name, Type: UserTypeOrganization}).(*User)
test1 := HasOrgVisible(org, owner) test1 := HasOrgOrUserVisible(org, owner)
test2 := HasOrgVisible(org, user3) test2 := HasOrgOrUserVisible(org, user3)
test3 := HasOrgVisible(org, nil) test3 := HasOrgOrUserVisible(org, nil)
assert.True(t, test1) // owner of org assert.True(t, test1) // owner of org
assert.True(t, test2) // user not a part of org assert.True(t, test2) // user not a part of org
assert.False(t, test3) // logged out user assert.False(t, test3) // logged out user
@ -632,9 +632,9 @@ func TestHasOrgVisibleTypePrivate(t *testing.T) {
assert.NoError(t, CreateOrganization(org, owner)) assert.NoError(t, CreateOrganization(org, owner))
org = AssertExistsAndLoadBean(t, org = AssertExistsAndLoadBean(t,
&User{Name: org.Name, Type: UserTypeOrganization}).(*User) &User{Name: org.Name, Type: UserTypeOrganization}).(*User)
test1 := HasOrgVisible(org, owner) test1 := HasOrgOrUserVisible(org, owner)
test2 := HasOrgVisible(org, user3) test2 := HasOrgOrUserVisible(org, user3)
test3 := HasOrgVisible(org, nil) test3 := HasOrgOrUserVisible(org, nil)
assert.True(t, test1) // owner of org assert.True(t, test1) // owner of org
assert.False(t, test2) // user not a part of org assert.False(t, test2) // user not a part of org
assert.False(t, test3) // logged out user assert.False(t, test3) // logged out user

131
models/protected_tag.go Normal file
View file

@ -0,0 +1,131 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"regexp"
"strings"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/timeutil"
"github.com/gobwas/glob"
)
// ProtectedTag struct
type ProtectedTag struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64
NamePattern string
RegexPattern *regexp.Regexp `xorm:"-"`
GlobPattern glob.Glob `xorm:"-"`
AllowlistUserIDs []int64 `xorm:"JSON TEXT"`
AllowlistTeamIDs []int64 `xorm:"JSON TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
}
// InsertProtectedTag inserts a protected tag to database
func InsertProtectedTag(pt *ProtectedTag) error {
_, err := x.Insert(pt)
return err
}
// UpdateProtectedTag updates the protected tag
func UpdateProtectedTag(pt *ProtectedTag) error {
_, err := x.ID(pt.ID).AllCols().Update(pt)
return err
}
// DeleteProtectedTag deletes a protected tag by ID
func DeleteProtectedTag(pt *ProtectedTag) error {
_, err := x.ID(pt.ID).Delete(&ProtectedTag{})
return err
}
// EnsureCompiledPattern ensures the glob pattern is compiled
func (pt *ProtectedTag) EnsureCompiledPattern() error {
if pt.RegexPattern != nil || pt.GlobPattern != nil {
return nil
}
var err error
if len(pt.NamePattern) >= 2 && strings.HasPrefix(pt.NamePattern, "/") && strings.HasSuffix(pt.NamePattern, "/") {
pt.RegexPattern, err = regexp.Compile(pt.NamePattern[1 : len(pt.NamePattern)-1])
} else {
pt.GlobPattern, err = glob.Compile(pt.NamePattern)
}
return err
}
// IsUserAllowed returns true if the user is allowed to modify the tag
func (pt *ProtectedTag) IsUserAllowed(userID int64) (bool, error) {
if base.Int64sContains(pt.AllowlistUserIDs, userID) {
return true, nil
}
if len(pt.AllowlistTeamIDs) == 0 {
return false, nil
}
in, err := IsUserInTeams(userID, pt.AllowlistTeamIDs)
if err != nil {
return false, err
}
return in, nil
}
// GetProtectedTags gets all protected tags of the repository
func (repo *Repository) GetProtectedTags() ([]*ProtectedTag, error) {
tags := make([]*ProtectedTag, 0)
return tags, x.Find(&tags, &ProtectedTag{RepoID: repo.ID})
}
// GetProtectedTagByID gets the protected tag with the specific id
func GetProtectedTagByID(id int64) (*ProtectedTag, error) {
tag := new(ProtectedTag)
has, err := x.ID(id).Get(tag)
if err != nil {
return nil, err
}
if !has {
return nil, nil
}
return tag, nil
}
// IsUserAllowedToControlTag checks if a user can control the specific tag.
// It returns true if the tag name is not protected or the user is allowed to control it.
func IsUserAllowedToControlTag(tags []*ProtectedTag, tagName string, userID int64) (bool, error) {
isAllowed := true
for _, tag := range tags {
err := tag.EnsureCompiledPattern()
if err != nil {
return false, err
}
if !tag.matchString(tagName) {
continue
}
isAllowed, err = tag.IsUserAllowed(userID)
if err != nil {
return false, err
}
if isAllowed {
break
}
}
return isAllowed, nil
}
func (pt *ProtectedTag) matchString(name string) bool {
if pt.RegexPattern != nil {
return pt.RegexPattern.MatchString(name)
}
return pt.GlobPattern.Match(name)
}

View file

@ -0,0 +1,162 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestIsUserAllowed(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
pt := &ProtectedTag{}
allowed, err := pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.False(t, allowed)
pt = &ProtectedTag{
AllowlistUserIDs: []int64{1},
}
allowed, err = pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.True(t, allowed)
allowed, err = pt.IsUserAllowed(2)
assert.NoError(t, err)
assert.False(t, allowed)
pt = &ProtectedTag{
AllowlistTeamIDs: []int64{1},
}
allowed, err = pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.False(t, allowed)
allowed, err = pt.IsUserAllowed(2)
assert.NoError(t, err)
assert.True(t, allowed)
pt = &ProtectedTag{
AllowlistUserIDs: []int64{1},
AllowlistTeamIDs: []int64{1},
}
allowed, err = pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.True(t, allowed)
allowed, err = pt.IsUserAllowed(2)
assert.NoError(t, err)
assert.True(t, allowed)
}
func TestIsUserAllowedToControlTag(t *testing.T) {
cases := []struct {
name string
userid int64
allowed bool
}{
{
name: "test",
userid: 1,
allowed: true,
},
{
name: "test",
userid: 3,
allowed: true,
},
{
name: "gitea",
userid: 1,
allowed: true,
},
{
name: "gitea",
userid: 3,
allowed: false,
},
{
name: "test-gitea",
userid: 1,
allowed: true,
},
{
name: "test-gitea",
userid: 3,
allowed: false,
},
{
name: "gitea-test",
userid: 1,
allowed: true,
},
{
name: "gitea-test",
userid: 3,
allowed: true,
},
{
name: "v-1",
userid: 1,
allowed: false,
},
{
name: "v-1",
userid: 2,
allowed: true,
},
{
name: "release",
userid: 1,
allowed: false,
},
}
t.Run("Glob", func(t *testing.T) {
protectedTags := []*ProtectedTag{
{
NamePattern: `*gitea`,
AllowlistUserIDs: []int64{1},
},
{
NamePattern: `v-*`,
AllowlistUserIDs: []int64{2},
},
{
NamePattern: "release",
},
}
for n, c := range cases {
isAllowed, err := IsUserAllowedToControlTag(protectedTags, c.name, c.userid)
assert.NoError(t, err)
assert.Equal(t, c.allowed, isAllowed, "case %d: error should match", n)
}
})
t.Run("Regex", func(t *testing.T) {
protectedTags := []*ProtectedTag{
{
NamePattern: `/gitea\z/`,
AllowlistUserIDs: []int64{1},
},
{
NamePattern: `/\Av-/`,
AllowlistUserIDs: []int64{2},
},
{
NamePattern: "/release/",
},
}
for n, c := range cases {
isAllowed, err := IsUserAllowedToControlTag(protectedTags, c.name, c.userid)
assert.NoError(t, err)
assert.Equal(t, c.allowed, isAllowed, "case %d: error should match", n)
}
})
}

View file

@ -585,8 +585,7 @@ func (repo *Repository) getReviewers(e Engine, doerID, posterID int64) ([]*User,
var users []*User var users []*User
if repo.IsPrivate || if repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate {
(repo.Owner.IsOrganization() && repo.Owner.Visibility == api.VisibleTypePrivate) {
// This a private repository: // This a private repository:
// Anyone who can read the repository is a requestable reviewer // Anyone who can read the repository is a requestable reviewer
if err := e. if err := e.
@ -1498,6 +1497,7 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
&Mirror{RepoID: repoID}, &Mirror{RepoID: repoID},
&Notification{RepoID: repoID}, &Notification{RepoID: repoID},
&ProtectedBranch{RepoID: repoID}, &ProtectedBranch{RepoID: repoID},
&ProtectedTag{RepoID: repoID},
&PullRequest{BaseRepoID: repoID}, &PullRequest{BaseRepoID: repoID},
&PushMirror{RepoID: repoID}, &PushMirror{RepoID: repoID},
&Release{RepoID: repoID}, &Release{RepoID: repoID},
@ -1587,6 +1587,22 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
return err return err
} }
// Remove archives
var archives []*RepoArchiver
if err = sess.Where("repo_id=?", repoID).Find(&archives); err != nil {
return err
}
for _, v := range archives {
v.Repo = repo
p, _ := v.RelativePath()
removeStorageWithNotice(sess, storage.RepoArchives, "Delete repo archive file", p)
}
if _, err := sess.Delete(&RepoArchiver{RepoID: repoID}); err != nil {
return err
}
if repo.NumForks > 0 { if repo.NumForks > 0 {
if _, err = sess.Exec("UPDATE `repository` SET fork_id=0,is_fork=? WHERE fork_id=?", false, repo.ID); err != nil { if _, err = sess.Exec("UPDATE `repository` SET fork_id=0,is_fork=? WHERE fork_id=?", false, repo.ID); err != nil {
log.Error("reset 'fork_id' and 'is_fork': %v", err) log.Error("reset 'fork_id' and 'is_fork': %v", err)
@ -1768,64 +1784,45 @@ func DeleteRepositoryArchives(ctx context.Context) error {
func DeleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration) error { func DeleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration) error {
log.Trace("Doing: ArchiveCleanup") log.Trace("Doing: ArchiveCleanup")
if err := x.Where("id > 0").Iterate(new(Repository), func(idx int, bean interface{}) error { for {
return deleteOldRepositoryArchives(ctx, olderThan, idx, bean) var archivers []RepoArchiver
}); err != nil { err := x.Where("created_unix < ?", time.Now().Add(-olderThan).Unix()).
log.Trace("Error: ArchiveClean: %v", err) Asc("created_unix").
return err Limit(100).
Find(&archivers)
if err != nil {
log.Trace("Error: ArchiveClean: %v", err)
return err
}
for _, archiver := range archivers {
if err := deleteOldRepoArchiver(ctx, &archiver); err != nil {
return err
}
}
if len(archivers) < 100 {
break
}
} }
log.Trace("Finished: ArchiveCleanup") log.Trace("Finished: ArchiveCleanup")
return nil return nil
} }
func deleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration, idx int, bean interface{}) error { var delRepoArchiver = new(RepoArchiver)
repo := bean.(*Repository)
basePath := filepath.Join(repo.RepoPath(), "archives")
for _, ty := range []string{"zip", "targz"} { func deleteOldRepoArchiver(ctx context.Context, archiver *RepoArchiver) error {
select { p, err := archiver.RelativePath()
case <-ctx.Done(): if err != nil {
return ErrCancelledf("before deleting old repository archives with filetype %s for %s", ty, repo.FullName()) return err
default: }
} _, err = x.ID(archiver.ID).Delete(delRepoArchiver)
if err != nil {
path := filepath.Join(basePath, ty) return err
file, err := os.Open(path) }
if err != nil { if err := storage.RepoArchives.Delete(p); err != nil {
if !os.IsNotExist(err) { log.Error("delete repo archive file failed: %v", err)
log.Warn("Unable to open directory %s: %v", path, err)
return err
}
// If the directory doesn't exist, that's okay.
continue
}
files, err := file.Readdir(0)
file.Close()
if err != nil {
log.Warn("Unable to read directory %s: %v", path, err)
return err
}
minimumOldestTime := time.Now().Add(-olderThan)
for _, info := range files {
if info.ModTime().Before(minimumOldestTime) && !info.IsDir() {
select {
case <-ctx.Done():
return ErrCancelledf("before deleting old repository archive file %s with filetype %s for %s", info.Name(), ty, repo.FullName())
default:
}
toDelete := filepath.Join(path, info.Name())
// This is a best-effort purge, so we do not check error codes to confirm removal.
if err = util.Remove(toDelete); err != nil {
log.Trace("Unable to delete %s, but proceeding: %v", toDelete, err)
}
}
}
} }
return nil return nil
} }

86
models/repo_archiver.go Normal file
View file

@ -0,0 +1,86 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"fmt"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/timeutil"
)
// RepoArchiverStatus represents repo archive status
type RepoArchiverStatus int
// enumerate all repo archive statuses
const (
RepoArchiverGenerating = iota // the archiver is generating
RepoArchiverReady // it's ready
)
// RepoArchiver represents all archivers
type RepoArchiver struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"index unique(s)"`
Repo *Repository `xorm:"-"`
Type git.ArchiveType `xorm:"unique(s)"`
Status RepoArchiverStatus
CommitID string `xorm:"VARCHAR(40) unique(s)"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX NOT NULL created"`
}
// LoadRepo loads repository
func (archiver *RepoArchiver) LoadRepo() (*Repository, error) {
if archiver.Repo != nil {
return archiver.Repo, nil
}
var repo Repository
has, err := x.ID(archiver.RepoID).Get(&repo)
if err != nil {
return nil, err
}
if !has {
return nil, ErrRepoNotExist{
ID: archiver.RepoID,
}
}
return &repo, nil
}
// RelativePath returns relative path
func (archiver *RepoArchiver) RelativePath() (string, error) {
repo, err := archiver.LoadRepo()
if err != nil {
return "", err
}
return fmt.Sprintf("%s/%s/%s.%s", repo.FullName(), archiver.CommitID[:2], archiver.CommitID, archiver.Type.String()), nil
}
// GetRepoArchiver get an archiver
func GetRepoArchiver(ctx DBContext, repoID int64, tp git.ArchiveType, commitID string) (*RepoArchiver, error) {
var archiver RepoArchiver
has, err := ctx.e.Where("repo_id=?", repoID).And("`type`=?", tp).And("commit_id=?", commitID).Get(&archiver)
if err != nil {
return nil, err
}
if has {
return &archiver, nil
}
return nil, nil
}
// AddRepoArchiver adds an archiver
func AddRepoArchiver(ctx DBContext, archiver *RepoArchiver) error {
_, err := ctx.e.Insert(archiver)
return err
}
// UpdateRepoArchiverStatus updates archiver's status
func UpdateRepoArchiverStatus(ctx DBContext, archiver *RepoArchiver) error {
_, err := ctx.e.ID(archiver.ID).Cols("status").Update(archiver)
return err
}

View file

@ -176,9 +176,9 @@ func getUserRepoPermission(e Engine, repo *Repository, user *User) (perm Permiss
return return
} }
// Prevent strangers from checking out public repo of private orginization // Prevent strangers from checking out public repo of private orginization/users
// Allow user if they are collaborator of a repo within a private orginization but not a member of the orginization itself // Allow user if they are collaborator of a repo within a private user or a private organization but not a member of the organization itself
if repo.Owner.IsOrganization() && !hasOrgVisible(e, repo.Owner, user) && !isCollaborator { if !hasOrgOrUserVisible(e, repo.Owner, user) && !isCollaborator {
perm.AccessMode = AccessModeNone perm.AccessMode = AccessModeNone
return return
} }

View file

@ -74,6 +74,8 @@ func MainTest(m *testing.M, pathToGiteaRoot string) {
setting.RepoAvatar.Storage.Path = filepath.Join(setting.AppDataPath, "repo-avatars") setting.RepoAvatar.Storage.Path = filepath.Join(setting.AppDataPath, "repo-avatars")
setting.RepoArchive.Storage.Path = filepath.Join(setting.AppDataPath, "repo-archive")
if err = storage.Init(); err != nil { if err = storage.Init(); err != nil {
fatalTestError("storage.Init: %v\n", err) fatalTestError("storage.Init: %v\n", err)
} }

View file

@ -432,6 +432,62 @@ func (u *User) IsPasswordSet() bool {
return len(u.Passwd) != 0 return len(u.Passwd) != 0
} }
// IsVisibleToUser reports whether viewer is able to see u's profile.
// A nil viewer represents an anonymous (not signed-in) visitor.
func (u *User) IsVisibleToUser(viewer *User) bool {
	return u.isVisibleToUser(x, viewer)
}
// isVisibleToUser reports whether viewer may see u's profile, running all
// queries through the supplied engine/session. A nil viewer represents an
// anonymous visitor.
func (u *User) isVisibleToUser(e Engine, viewer *User) bool {
	// Admins see everybody.
	if viewer != nil && viewer.IsAdmin {
		return true
	}

	switch u.Visibility {
	case structs.VisibleTypePublic:
		return true
	case structs.VisibleTypeLimited:
		// Limited profiles are hidden from anonymous and restricted viewers.
		if viewer == nil || viewer.IsRestricted {
			return false
		}
		return true
	case structs.VisibleTypePrivate:
		if viewer == nil || viewer.IsRestricted {
			return false
		}

		// If they follow, they can see each other.
		follower := IsFollowing(u.ID, viewer.ID)
		if follower {
			return true
		}

		// Otherwise check whether they share an organization: either u is
		// an organization the viewer is a team member of, or both belong to
		// teams of a common organization.
		// Consistency fix: use the passed-in engine `e` (the original used
		// the global `x`, bypassing any session this call runs in).
		count, err := e.Table("team_user").
			Where(
				builder.And(
					builder.Eq{"uid": viewer.ID},
					builder.Or(
						builder.Eq{"org_id": u.ID},
						builder.In("org_id",
							builder.Select("org_id").
								From("team_user", "t2").
								Where(builder.Eq{"uid": u.ID}))))).
			Count(new(TeamUser))
		if err != nil {
			return false
		}
		// Bug fix: the original tested `count < 0`, which is never true
		// (Count cannot return a negative), so every signed-in,
		// non-restricted user could see private profiles. `count < 1`
		// correctly hides users sharing no organization.
		if count < 1 {
			// No common organization.
			return false
		}
		// They are in at least one organization together.
		return true
	}
	return false
}
// IsOrganization returns true if user is actually a organization. // IsOrganization returns true if user is actually a organization.
func (u *User) IsOrganization() bool { func (u *User) IsOrganization() bool {
return u.Type == UserTypeOrganization return u.Type == UserTypeOrganization
@ -796,8 +852,13 @@ func IsUsableUsername(name string) error {
return isUsableName(reservedUsernames, reservedUserPatterns, name) return isUsableName(reservedUsernames, reservedUserPatterns, name)
} }
// CreateUserOverwriteOptions are an optional options who overwrite system defaults on user creation
type CreateUserOverwriteOptions struct {
Visibility structs.VisibleType
}
// CreateUser creates record of a new user. // CreateUser creates record of a new user.
func CreateUser(u *User) (err error) { func CreateUser(u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err error) {
if err = IsUsableUsername(u.Name); err != nil { if err = IsUsableUsername(u.Name); err != nil {
return err return err
} }
@ -831,8 +892,6 @@ func CreateUser(u *User) (err error) {
return ErrEmailAlreadyUsed{u.Email} return ErrEmailAlreadyUsed{u.Email}
} }
u.KeepEmailPrivate = setting.Service.DefaultKeepEmailPrivate
u.LowerName = strings.ToLower(u.Name) u.LowerName = strings.ToLower(u.Name)
u.AvatarEmail = u.Email u.AvatarEmail = u.Email
if u.Rands, err = GetUserSalt(); err != nil { if u.Rands, err = GetUserSalt(); err != nil {
@ -841,10 +900,18 @@ func CreateUser(u *User) (err error) {
if err = u.SetPassword(u.Passwd); err != nil { if err = u.SetPassword(u.Passwd); err != nil {
return err return err
} }
// set system defaults
u.KeepEmailPrivate = setting.Service.DefaultKeepEmailPrivate
u.Visibility = setting.Service.DefaultUserVisibilityMode
u.AllowCreateOrganization = setting.Service.DefaultAllowCreateOrganization && !setting.Admin.DisableRegularOrgCreation u.AllowCreateOrganization = setting.Service.DefaultAllowCreateOrganization && !setting.Admin.DisableRegularOrgCreation
u.EmailNotificationsPreference = setting.Admin.DefaultEmailNotification u.EmailNotificationsPreference = setting.Admin.DefaultEmailNotification
u.MaxRepoCreation = -1 u.MaxRepoCreation = -1
u.Theme = setting.UI.DefaultTheme u.Theme = setting.UI.DefaultTheme
// overwrite defaults if set
if len(overwriteDefault) != 0 && overwriteDefault[0] != nil {
u.Visibility = overwriteDefault[0].Visibility
}
if _, err = sess.Insert(u); err != nil { if _, err = sess.Insert(u); err != nil {
return err return err
@ -1527,10 +1594,9 @@ func (opts *SearchUserOptions) toConds() builder.Cond {
cond = cond.And(keywordCond) cond = cond.And(keywordCond)
} }
// If visibility filtered
if len(opts.Visible) > 0 { if len(opts.Visible) > 0 {
cond = cond.And(builder.In("visibility", opts.Visible)) cond = cond.And(builder.In("visibility", opts.Visible))
} else {
cond = cond.And(builder.In("visibility", structs.VisibleTypePublic))
} }
if opts.Actor != nil { if opts.Actor != nil {
@ -1543,16 +1609,27 @@ func (opts *SearchUserOptions) toConds() builder.Cond {
exprCond = builder.Expr("org_user.org_id = \"user\".id") exprCond = builder.Expr("org_user.org_id = \"user\".id")
} }
var accessCond builder.Cond // If Admin - they see all users!
if !opts.Actor.IsRestricted { if !opts.Actor.IsAdmin {
accessCond = builder.Or( // Force visiblity for privacy
builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}, builder.Eq{"visibility": structs.VisibleTypePrivate}))), var accessCond builder.Cond
builder.In("visibility", structs.VisibleTypePublic, structs.VisibleTypeLimited)) if !opts.Actor.IsRestricted {
} else { accessCond = builder.Or(
// restricted users only see orgs they are a member of builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}, builder.Eq{"visibility": structs.VisibleTypePrivate}))),
accessCond = builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}))) builder.In("visibility", structs.VisibleTypePublic, structs.VisibleTypeLimited))
} else {
// restricted users only see orgs they are a member of
accessCond = builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID})))
}
// Don't forget about self
accessCond = accessCond.Or(builder.Eq{"id": opts.Actor.ID})
cond = cond.And(accessCond)
} }
cond = cond.And(accessCond)
} else {
// Force visiblity for privacy
// Not logged in - only public users
cond = cond.And(builder.In("visibility", structs.VisibleTypePublic))
} }
if opts.UID > 0 { if opts.UID > 0 {

View file

@ -32,17 +32,14 @@ func getUserHeatmapData(user *User, team *Team, doer *User) ([]*UserHeatmapData,
return hdata, nil return hdata, nil
} }
var groupBy string // Group by 15 minute intervals which will allow the client to accurately shift the timestamp to their timezone.
// The interval is based on the fact that there are timezones such as UTC +5:30 and UTC +12:45.
groupBy := "created_unix / 900 * 900"
groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias
switch { switch {
case setting.Database.UseSQLite3:
groupBy = "strftime('%s', strftime('%Y-%m-%d', created_unix, 'unixepoch'))"
case setting.Database.UseMySQL: case setting.Database.UseMySQL:
groupBy = "UNIX_TIMESTAMP(DATE(FROM_UNIXTIME(created_unix)))" groupBy = "created_unix DIV 900 * 900"
case setting.Database.UsePostgreSQL:
groupBy = "extract(epoch from date_trunc('day', to_timestamp(created_unix)))"
case setting.Database.UseMSSQL: case setting.Database.UseMSSQL:
groupBy = "datediff(SECOND, '19700101', dateadd(DAY, 0, datediff(day, 0, dateadd(s, created_unix, '19700101'))))"
groupByName = groupBy groupByName = groupBy
} }

View file

@ -19,12 +19,20 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
CountResult int CountResult int
JSONResult string JSONResult string
}{ }{
{2, 2, 1, `[{"timestamp":1603152000,"contributions":1}]`}, // self looks at action in private repo // self looks at action in private repo
{2, 1, 1, `[{"timestamp":1603152000,"contributions":1}]`}, // admin looks at action in private repo {2, 2, 1, `[{"timestamp":1603227600,"contributions":1}]`},
{2, 3, 0, `[]`}, // other user looks at action in private repo // admin looks at action in private repo
{2, 0, 0, `[]`}, // nobody looks at action in private repo {2, 1, 1, `[{"timestamp":1603227600,"contributions":1}]`},
{16, 15, 1, `[{"timestamp":1603238400,"contributions":1}]`}, // collaborator looks at action in private repo // other user looks at action in private repo
{3, 3, 0, `[]`}, // no action action not performed by target user {2, 3, 0, `[]`},
// nobody looks at action in private repo
{2, 0, 0, `[]`},
// collaborator looks at action in private repo
{16, 15, 1, `[{"timestamp":1603267200,"contributions":1}]`},
// no action action not performed by target user
{3, 3, 0, `[]`},
// multiple actions performed with two grouped together
{10, 10, 3, `[{"timestamp":1603009800,"contributions":1},{"timestamp":1603010700,"contributions":2}]`},
} }
// Prepare // Prepare
assert.NoError(t, PrepareTestDatabase()) assert.NoError(t, PrepareTestDatabase())
@ -51,9 +59,13 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
// Get the heatmap and compare // Get the heatmap and compare
heatmap, err := GetUserHeatmapDataByUser(user, doer) heatmap, err := GetUserHeatmapDataByUser(user, doer)
var contributions int
for _, hm := range heatmap {
contributions += int(hm.Contributions)
}
assert.NoError(t, err) assert.NoError(t, err)
assert.Len(t, heatmap, len(actions), "invalid action count: did the test data became too old?") assert.Len(t, actions, contributions, "invalid action count: did the test data became too old?")
assert.Len(t, heatmap, tc.CountResult, fmt.Sprintf("testcase %d", i)) assert.Equal(t, tc.CountResult, contributions, fmt.Sprintf("testcase %d", i))
// Test JSON rendering // Test JSON rendering
json := jsoniter.ConfigCompatibleWithStandardLibrary json := jsoniter.ConfigCompatibleWithStandardLibrary

View file

@ -380,6 +380,21 @@ func (ctx *Context) ServeFile(file string, names ...string) {
http.ServeFile(ctx.Resp, ctx.Req, file) http.ServeFile(ctx.Resp, ctx.Req, file)
} }
// ServeStream streams rd to the client as a forced file download named
// `name`, setting the usual attachment/no-cache headers before copying.
// Any error during the copy is reported via ServerError — note that by
// then the headers (and possibly partial content) have already been sent.
//
// NOTE(review): `name` is inserted into Content-Disposition without
// quoting or escaping — confirm callers never pass user-controlled names
// containing quotes, semicolons or CR/LF.
func (ctx *Context) ServeStream(rd io.Reader, name string) {
	ctx.Resp.Header().Set("Content-Description", "File Transfer")
	ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
	ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+name)
	ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary")
	ctx.Resp.Header().Set("Expires", "0")
	ctx.Resp.Header().Set("Cache-Control", "must-revalidate")
	ctx.Resp.Header().Set("Pragma", "public")
	_, err := io.Copy(ctx.Resp, rd)
	if err != nil {
		ctx.ServerError("Download file failed", err)
	}
}
// Error returned an error to web browser // Error returned an error to web browser
func (ctx *Context) Error(status int, contents ...string) { func (ctx *Context) Error(status int, contents ...string) {
var v = http.StatusText(status) var v = http.StatusText(status)

View file

@ -62,10 +62,14 @@ func toUser(user *models.User, signed, authed bool) *api.User {
Following: user.NumFollowing, Following: user.NumFollowing,
StarredRepos: user.NumStars, StarredRepos: user.NumStars,
} }
result.Visibility = user.Visibility.String()
// hide primary email if API caller is anonymous or user keep email private // hide primary email if API caller is anonymous or user keep email private
if signed && (!user.KeepEmailPrivate || authed) { if signed && (!user.KeepEmailPrivate || authed) {
result.Email = user.Email result.Email = user.Email
} }
// only site admin will get these information and possibly user himself // only site admin will get these information and possibly user himself
if authed { if authed {
result.IsAdmin = user.IsAdmin result.IsAdmin = user.IsAdmin
@ -76,3 +80,18 @@ func toUser(user *models.User, signed, authed bool) *api.User {
} }
return result return result
} }
// User2UserSettings builds the API settings representation for the given
// model user, mapping the privacy flags onto their API field names.
func User2UserSettings(user *models.User) api.UserSettings {
	settings := api.UserSettings{
		FullName:    user.FullName,
		Website:     user.Website,
		Location:    user.Location,
		Language:    user.Language,
		Description: user.Description,
		Theme:       user.Theme,
		// KeepEmailPrivate / KeepActivityPrivate map onto the API's
		// Hide* naming.
		HideEmail:     user.KeepEmailPrivate,
		HideActivity:  user.KeepActivityPrivate,
		DiffViewStyle: user.DiffViewStyle,
	}
	return settings
}

View file

@ -8,6 +8,7 @@ import (
"testing" "testing"
"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -27,4 +28,11 @@ func TestUser_ToUser(t *testing.T) {
apiUser = toUser(user1, false, false) apiUser = toUser(user1, false, false)
assert.False(t, apiUser.IsAdmin) assert.False(t, apiUser.IsAdmin)
assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility)
user31 := models.AssertExistsAndLoadBean(t, &models.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate}).(*models.User)
apiUser = toUser(user31, true, true)
assert.False(t, apiUser.IsAdmin)
assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility)
} }

View file

@ -0,0 +1,59 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package doctor
import (
"os"
"path/filepath"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
)
// checkOldArchives scans every non-empty repository for a leftover
// "archives" directory inside its git directory (the on-disk location used
// before archives moved to the storage backend). With autofix enabled the
// directories are deleted; otherwise they are only counted and reported.
func checkOldArchives(logger log.Logger, autofix bool) error {
	numRepos := 0
	numReposUpdated := 0
	err := iterateRepositories(func(repo *models.Repository) error {
		if repo.IsEmpty {
			return nil
		}

		p := filepath.Join(repo.RepoPath(), "archives")
		isDir, err := util.IsDir(p)
		if err != nil {
			// Consistency fix: report through the doctor's logger — the
			// original used the global log.Warn, so these warnings never
			// reached the doctor output.
			logger.Warn("check if %s is directory failed: %v", p, err)
		}
		if isDir {
			numRepos++
			if autofix {
				if err := os.RemoveAll(p); err == nil {
					numReposUpdated++
				} else {
					logger.Warn("remove %s failed: %v", p, err)
				}
			}
		}
		return nil
	})

	if autofix {
		logger.Info("%d / %d old archives in repository deleted", numReposUpdated, numRepos)
	} else {
		logger.Info("%d old archives in repository need to be deleted", numRepos)
	}

	return err
}
// init registers the "check-old-archives" doctor check. It is excluded
// from the default check set and runs at priority 7.
func init() {
	Register(&Check{
		Title:     "Check old archives",
		Name:      "check-old-archives",
		IsDefault: false,
		Run:       checkOldArchives,
		Priority:  7,
	})
}

View file

@ -12,6 +12,8 @@ import (
"strconv" "strconv"
"strings" "strings"
"code.gitea.io/gitea/modules/log"
"github.com/djherbis/buffer" "github.com/djherbis/buffer"
"github.com/djherbis/nio/v3" "github.com/djherbis/nio/v3"
) )
@ -99,7 +101,7 @@ func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err er
} }
idx := strings.IndexByte(typ, ' ') idx := strings.IndexByte(typ, ' ')
if idx < 0 { if idx < 0 {
log("missing space typ: %s", typ) log.Debug("missing space typ: %s", typ)
err = ErrNotExist{ID: string(sha)} err = ErrNotExist{ID: string(sha)}
return return
} }
@ -230,7 +232,7 @@ func ParseTreeLine(rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fn
} }
idx := bytes.IndexByte(readBytes, ' ') idx := bytes.IndexByte(readBytes, ' ')
if idx < 0 { if idx < 0 {
log("missing space in readBytes ParseTreeLine: %s", readBytes) log.Debug("missing space in readBytes ParseTreeLine: %s", readBytes)
err = &ErrNotExist{} err = &ErrNotExist{}
return return

View file

@ -34,7 +34,7 @@ func (b *Blob) GetBlobContent() (string, error) {
return string(buf), nil return string(buf), nil
} }
// GetBlobLineCount gets line count of lob as raw text // GetBlobLineCount gets line count of the blob
func (b *Blob) GetBlobLineCount() (int, error) { func (b *Blob) GetBlobLineCount() (int, error) {
reader, err := b.DataAsync() reader, err := b.DataAsync()
if err != nil { if err != nil {
@ -42,10 +42,14 @@ func (b *Blob) GetBlobLineCount() (int, error) {
} }
defer reader.Close() defer reader.Close()
buf := make([]byte, 32*1024) buf := make([]byte, 32*1024)
count := 0 count := 1
lineSep := []byte{'\n'} lineSep := []byte{'\n'}
c, err := reader.Read(buf)
if c == 0 && err == io.EOF {
return 0, nil
}
for { for {
c, err := reader.Read(buf)
count += bytes.Count(buf[:c], lineSep) count += bytes.Count(buf[:c], lineSep)
switch { switch {
case err == io.EOF: case err == io.EOF:
@ -53,6 +57,7 @@ func (b *Blob) GetBlobLineCount() (int, error) {
case err != nil: case err != nil:
return count, err return count, err
} }
c, err = reader.Read(buf)
} }
} }

View file

@ -12,6 +12,8 @@ import (
"io" "io"
"io/ioutil" "io/ioutil"
"math" "math"
"code.gitea.io/gitea/modules/log"
) )
// Blob represents a Git object. // Blob represents a Git object.
@ -69,12 +71,12 @@ func (b *Blob) Size() int64 {
defer cancel() defer cancel()
_, err := wr.Write([]byte(b.ID.String() + "\n")) _, err := wr.Write([]byte(b.ID.String() + "\n"))
if err != nil { if err != nil {
log("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err) log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
return 0 return 0
} }
_, _, b.size, err = ReadBatchLine(rd) _, _, b.size, err = ReadBatchLine(rd)
if err != nil { if err != nil {
log("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err) log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
return 0 return 0
} }

View file

@ -15,6 +15,7 @@ import (
"strings" "strings"
"time" "time"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/process"
) )
@ -22,8 +23,8 @@ var (
// GlobalCommandArgs global command args for external package setting // GlobalCommandArgs global command args for external package setting
GlobalCommandArgs []string GlobalCommandArgs []string
// DefaultCommandExecutionTimeout default command execution timeout duration // defaultCommandExecutionTimeout default command execution timeout duration
DefaultCommandExecutionTimeout = 360 * time.Second defaultCommandExecutionTimeout = 360 * time.Second
) )
// DefaultLocale is the default LC_ALL to run git commands in. // DefaultLocale is the default LC_ALL to run git commands in.
@ -110,13 +111,13 @@ func (c *Command) RunInDirTimeoutEnvFullPipeline(env []string, timeout time.Dura
// it pipes stdout and stderr to given io.Writer and passes in an io.Reader as stdin. Between cmd.Start and cmd.Wait the passed in function is run. // it pipes stdout and stderr to given io.Writer and passes in an io.Reader as stdin. Between cmd.Start and cmd.Wait the passed in function is run.
func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time.Duration, dir string, stdout, stderr io.Writer, stdin io.Reader, fn func(context.Context, context.CancelFunc) error) error { func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time.Duration, dir string, stdout, stderr io.Writer, stdin io.Reader, fn func(context.Context, context.CancelFunc) error) error {
if timeout == -1 { if timeout == -1 {
timeout = DefaultCommandExecutionTimeout timeout = defaultCommandExecutionTimeout
} }
if len(dir) == 0 { if len(dir) == 0 {
log(c.String()) log.Debug("%s", c)
} else { } else {
log("%s: %v", dir, c) log.Debug("%s: %v", dir, c)
} }
ctx, cancel := context.WithTimeout(c.parentContext, timeout) ctx, cancel := context.WithTimeout(c.parentContext, timeout)
@ -197,9 +198,12 @@ func (c *Command) RunInDirTimeoutEnv(env []string, timeout time.Duration, dir st
if err := c.RunInDirTimeoutEnvPipeline(env, timeout, dir, stdout, stderr); err != nil { if err := c.RunInDirTimeoutEnvPipeline(env, timeout, dir, stdout, stderr); err != nil {
return nil, ConcatenateError(err, stderr.String()) return nil, ConcatenateError(err, stderr.String())
} }
if stdout.Len() > 0 && log.IsTrace() {
if stdout.Len() > 0 { tracelen := stdout.Len()
log("stdout:\n%s", stdout.Bytes()[:1024]) if tracelen > 1024 {
tracelen = 1024
}
log.Trace("Stdout:\n %s", stdout.Bytes()[:tracelen])
} }
return stdout.Bytes(), nil return stdout.Bytes(), nil
} }

View file

@ -12,6 +12,8 @@ import (
"io" "io"
"path" "path"
"sort" "sort"
"code.gitea.io/gitea/modules/log"
) )
// GetCommitsInfo gets information of all commits that are corresponding to these entries // GetCommitsInfo gets information of all commits that are corresponding to these entries
@ -78,7 +80,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
commitsInfo[i].SubModuleFile = subModuleFile commitsInfo[i].SubModuleFile = subModuleFile
} }
} else { } else {
log("missing commit for %s", entry.Name()) log.Debug("missing commit for %s", entry.Name())
} }
} }

View file

@ -15,6 +15,7 @@ import (
"strconv" "strconv"
"strings" "strings"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/process"
) )
@ -113,7 +114,7 @@ func ParseDiffHunkString(diffhunk string) (leftLine, leftHunk, rightLine, righHu
righHunk, _ = strconv.Atoi(rightRange[1]) righHunk, _ = strconv.Atoi(rightRange[1])
} }
} else { } else {
log("Parse line number failed: %v", diffhunk) log.Debug("Parse line number failed: %v", diffhunk)
rightLine = leftLine rightLine = leftLine
righHunk = leftHunk righHunk = leftHunk
} }

View file

@ -159,3 +159,20 @@ func (err *ErrPushRejected) GenerateMessage() {
} }
err.Message = strings.TrimSpace(messageBuilder.String()) err.Message = strings.TrimSpace(messageBuilder.String())
} }
// ErrMoreThanOne represents an error if pull request fails when there are more than one sources (branch, tag) with the same name
type ErrMoreThanOne struct {
StdOut string
StdErr string
Err error
}
// IsErrMoreThanOne checks if an error is a ErrMoreThanOne
func IsErrMoreThanOne(err error) bool {
_, ok := err.(*ErrMoreThanOne)
return ok
}
func (err *ErrMoreThanOne) Error() string {
return fmt.Sprintf("ErrMoreThanOne Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
}

View file

@ -14,14 +14,12 @@ import (
"time" "time"
"code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"
"github.com/hashicorp/go-version" "github.com/hashicorp/go-version"
) )
var ( var (
// Debug enables verbose logging on everything.
// This should be false in case Gogs starts in SSH mode.
Debug = false
// Prefix the log prefix // Prefix the log prefix
Prefix = "[git-module] " Prefix = "[git-module] "
// GitVersionRequired is the minimum Git version required // GitVersionRequired is the minimum Git version required
@ -41,19 +39,6 @@ var (
goVersionLessThan115 = true goVersionLessThan115 = true
) )
func log(format string, args ...interface{}) {
if !Debug {
return
}
fmt.Print(Prefix)
if len(args) == 0 {
fmt.Println(format)
} else {
fmt.Printf(format+"\n", args...)
}
}
// LocalVersion returns current Git version from shell. // LocalVersion returns current Git version from shell.
func LocalVersion() (*version.Version, error) { func LocalVersion() (*version.Version, error) {
if err := LoadGitVersion(); err != nil { if err := LoadGitVersion(); err != nil {
@ -122,10 +107,42 @@ func SetExecutablePath(path string) error {
return nil return nil
} }
// VersionInfo returns a human-readable description of the detected git
// version, noting when wire protocol v2 has been enabled.
func VersionInfo() string {
	format := "Git Version: %s"
	args := []interface{}{gitVersion.Original()}
	// Wire protocol v2 is available since git v2.18.
	if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
		format += ", Wire Protocol %s Enabled"
		args = append(args, "Version 2") // for focus color
	}
	return fmt.Sprintf(format, args...)
}
// Init initializes git module // Init initializes git module
func Init(ctx context.Context) error { func Init(ctx context.Context) error {
DefaultContext = ctx DefaultContext = ctx
defaultCommandExecutionTimeout = time.Duration(setting.Git.Timeout.Default) * time.Second
if err := SetExecutablePath(setting.Git.Path); err != nil {
return err
}
// force cleanup args
GlobalCommandArgs = []string{}
if CheckGitVersionAtLeast("2.9") == nil {
// Explicitly disable credential helper, otherwise Git credentials might leak
GlobalCommandArgs = append(GlobalCommandArgs, "-c", "credential.helper=")
}
// Since git wire protocol has been released from git v2.18
if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
GlobalCommandArgs = append(GlobalCommandArgs, "-c", "protocol.version=2")
}
// Save current git version on init to gitVersion otherwise it would require an RWMutex // Save current git version on init to gitVersion otherwise it would require an RWMutex
if err := LoadGitVersion(); err != nil { if err := LoadGitVersion(); err != nil {
return err return err

View file

@ -9,6 +9,8 @@ import (
"fmt" "fmt"
"os" "os"
"testing" "testing"
"code.gitea.io/gitea/modules/log"
) )
func fatalTestError(fmtStr string, args ...interface{}) { func fatalTestError(fmtStr string, args ...interface{}) {
@ -17,6 +19,8 @@ func fatalTestError(fmtStr string, args ...interface{}) {
} }
func TestMain(m *testing.M) { func TestMain(m *testing.M) {
_ = log.NewLogger(1000, "console", "console", `{"level":"trace","stacktracelevel":"NONE","stderr":true}`)
if err := Init(context.Background()); err != nil { if err := Init(context.Background()); err != nil {
fatalTestError("Init failed: %v", err) fatalTestError("Init failed: %v", err)
} }

View file

@ -1,4 +1,5 @@
// Copyright 2015 The Gogs Authors. All rights reserved. // Copyright 2015 The Gogs Authors. All rights reserved.
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
@ -12,6 +13,7 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/util"
) )
@ -126,11 +128,11 @@ const (
// SetUpdateHook writes given content to update hook of the repository. // SetUpdateHook writes given content to update hook of the repository.
func SetUpdateHook(repoPath, content string) (err error) { func SetUpdateHook(repoPath, content string) (err error) {
log("Setting update hook: %s", repoPath) log.Debug("Setting update hook: %s", repoPath)
hookPath := path.Join(repoPath, HookPathUpdate) hookPath := path.Join(repoPath, HookPathUpdate)
isExist, err := util.IsExist(hookPath) isExist, err := util.IsExist(hookPath)
if err != nil { if err != nil {
log("Unable to check if %s exists. Error: %v", hookPath, err) log.Debug("Unable to check if %s exists. Error: %v", hookPath, err)
return err return err
} }
if isExist { if isExist {

View file

@ -7,6 +7,8 @@ package git
import ( import (
"crypto/sha256" "crypto/sha256"
"fmt" "fmt"
"code.gitea.io/gitea/modules/log"
) )
// Cache represents a caching interface // Cache represents a caching interface
@ -24,6 +26,6 @@ func (c *LastCommitCache) getCacheKey(repoPath, ref, entryPath string) string {
// Put put the last commit id with commit and entry path // Put put the last commit id with commit and entry path
func (c *LastCommitCache) Put(ref, entryPath, commitID string) error { func (c *LastCommitCache) Put(ref, entryPath, commitID string) error {
log("LastCommitCache save: [%s:%s:%s]", ref, entryPath, commitID) log.Debug("LastCommitCache save: [%s:%s:%s]", ref, entryPath, commitID)
return c.cache.Put(c.getCacheKey(c.repoPath, ref, entryPath), commitID, c.ttl()) return c.cache.Put(c.getCacheKey(c.repoPath, ref, entryPath), commitID, c.ttl())
} }

View file

@ -10,6 +10,8 @@ import (
"context" "context"
"path" "path"
"code.gitea.io/gitea/modules/log"
"github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/plumbing/object"
cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph" cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph"
) )
@ -41,9 +43,9 @@ func NewLastCommitCache(repoPath string, gitRepo *Repository, ttl func() int64,
func (c *LastCommitCache) Get(ref, entryPath string) (interface{}, error) { func (c *LastCommitCache) Get(ref, entryPath string) (interface{}, error) {
v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath)) v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath))
if vs, ok := v.(string); ok { if vs, ok := v.(string); ok {
log("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs) log.Debug("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
if commit, ok := c.commitCache[vs]; ok { if commit, ok := c.commitCache[vs]; ok {
log("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs) log.Debug("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs)
return commit, nil return commit, nil
} }
id, err := c.repo.ConvertToSHA1(vs) id, err := c.repo.ConvertToSHA1(vs)

View file

@ -10,6 +10,8 @@ import (
"bufio" "bufio"
"context" "context"
"path" "path"
"code.gitea.io/gitea/modules/log"
) )
// LastCommitCache represents a cache to store last commit // LastCommitCache represents a cache to store last commit
@ -39,9 +41,9 @@ func NewLastCommitCache(repoPath string, gitRepo *Repository, ttl func() int64,
func (c *LastCommitCache) Get(ref, entryPath string, wr WriteCloserError, rd *bufio.Reader) (interface{}, error) { func (c *LastCommitCache) Get(ref, entryPath string, wr WriteCloserError, rd *bufio.Reader) (interface{}, error) {
v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath)) v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath))
if vs, ok := v.(string); ok { if vs, ok := v.(string); ok {
log("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs) log.Debug("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
if commit, ok := c.commitCache[vs]; ok { if commit, ok := c.commitCache[vs]; ok {
log("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs) log.Debug("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs)
return commit, nil return commit, nil
} }
id, err := c.repo.ConvertToSHA1(vs) id, err := c.repo.ConvertToSHA1(vs)

37
modules/git/lfs.go Normal file
View file

@ -0,0 +1,37 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package git
import (
"sync"
logger "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
// once guards the one-time registration of the LFS filter overrides in
// GlobalCommandArgs.
var once sync.Once
// CheckLFSVersion verifies that the installed git is recent enough for LFS
// server support (>= v2.1.2) and disables the LFS server otherwise. When
// the check passes it appends, exactly once, the arguments that disable any
// locally installed LFS client hooks to GlobalCommandArgs.
func CheckLFSVersion() {
	if !setting.LFS.StartServer {
		return
	}

	// Disable LFS client hooks if installed for the current OS user.
	// Needs at least git v2.1.2.
	if err := LoadGitVersion(); err != nil {
		logger.Fatal("Error retrieving git version: %v", err)
	}

	if CheckGitVersionAtLeast("2.1.2") != nil {
		setting.LFS.StartServer = false
		logger.Error("LFS server support needs at least Git v2.1.2")
		return
	}

	once.Do(func() {
		GlobalCommandArgs = append(GlobalCommandArgs,
			"-c", "filter.lfs.required=",
			"-c", "filter.lfs.smudge=",
			"-c", "filter.lfs.clean=")
	})
}

View file

@ -13,6 +13,8 @@ import (
"io" "io"
"strconv" "strconv"
"strings" "strings"
"code.gitea.io/gitea/modules/log"
) )
// ParseTreeEntries parses the output of a `git ls-tree -l` command. // ParseTreeEntries parses the output of a `git ls-tree -l` command.
@ -120,7 +122,7 @@ loop:
case "40000": case "40000":
entry.entryMode = EntryModeTree entry.entryMode = EntryModeTree
default: default:
log("Unknown mode: %v", string(mode)) log.Debug("Unknown mode: %v", string(mode))
return nil, fmt.Errorf("unknown mode: %v", string(mode)) return nil, fmt.Errorf("unknown mode: %v", string(mode))
} }

View file

@ -225,6 +225,13 @@ func Push(repoPath string, opts PushOptions) error {
} }
err.GenerateMessage() err.GenerateMessage()
return err return err
} else if strings.Contains(errbuf.String(), "matches more than one") {
err := &ErrMoreThanOne{
StdOut: outbuf.String(),
StdErr: errbuf.String(),
Err: err,
}
return err
} }
} }

View file

@ -8,6 +8,7 @@ package git
import ( import (
"context" "context"
"fmt" "fmt"
"io"
"path/filepath" "path/filepath"
"strings" "strings"
) )
@ -33,32 +34,28 @@ func (a ArchiveType) String() string {
return "unknown" return "unknown"
} }
// CreateArchiveOpts represents options for creating an archive
type CreateArchiveOpts struct {
Format ArchiveType
Prefix bool
}
// CreateArchive create archive content to the target path // CreateArchive create archive content to the target path
func (c *Commit) CreateArchive(ctx context.Context, target string, opts CreateArchiveOpts) error { func (repo *Repository) CreateArchive(ctx context.Context, format ArchiveType, target io.Writer, usePrefix bool, commitID string) error {
if opts.Format.String() == "unknown" { if format.String() == "unknown" {
return fmt.Errorf("unknown format: %v", opts.Format) return fmt.Errorf("unknown format: %v", format)
} }
args := []string{ args := []string{
"archive", "archive",
} }
if opts.Prefix { if usePrefix {
args = append(args, "--prefix="+filepath.Base(strings.TrimSuffix(c.repo.Path, ".git"))+"/") args = append(args, "--prefix="+filepath.Base(strings.TrimSuffix(repo.Path, ".git"))+"/")
} }
args = append(args, args = append(args,
"--format="+opts.Format.String(), "--format="+format.String(),
"-o", commitID,
target,
c.ID.String(),
) )
_, err := NewCommandContext(ctx, args...).RunInDir(c.repo.Path) var stderr strings.Builder
return err err := NewCommandContext(ctx, args...).RunInDirPipeline(repo.Path, target, &stderr)
if err != nil {
return ConcatenateError(err, stderr.String())
}
return nil
} }

View file

@ -12,6 +12,8 @@ import (
"context" "context"
"errors" "errors"
"path/filepath" "path/filepath"
"code.gitea.io/gitea/modules/log"
) )
// Repository represents a Git repository. // Repository represents a Git repository.
@ -54,7 +56,7 @@ func OpenRepository(repoPath string) (*Repository, error) {
// CatFileBatch obtains a CatFileBatch for this repository // CatFileBatch obtains a CatFileBatch for this repository
func (repo *Repository) CatFileBatch() (WriteCloserError, *bufio.Reader, func()) { func (repo *Repository) CatFileBatch() (WriteCloserError, *bufio.Reader, func()) {
if repo.batchCancel == nil || repo.batchReader.Buffered() > 0 { if repo.batchCancel == nil || repo.batchReader.Buffered() > 0 {
log("Opening temporary cat file batch for: %s", repo.Path) log.Debug("Opening temporary cat file batch for: %s", repo.Path)
return CatFileBatch(repo.Path) return CatFileBatch(repo.Path)
} }
return repo.batchWriter, repo.batchReader, func() {} return repo.batchWriter, repo.batchReader, func() {}
@ -63,7 +65,7 @@ func (repo *Repository) CatFileBatch() (WriteCloserError, *bufio.Reader, func())
// CatFileBatchCheck obtains a CatFileBatchCheck for this repository // CatFileBatchCheck obtains a CatFileBatchCheck for this repository
func (repo *Repository) CatFileBatchCheck() (WriteCloserError, *bufio.Reader, func()) { func (repo *Repository) CatFileBatchCheck() (WriteCloserError, *bufio.Reader, func()) {
if repo.checkCancel == nil || repo.checkReader.Buffered() > 0 { if repo.checkCancel == nil || repo.checkReader.Buffered() > 0 {
log("Opening temporary cat file batch-check: %s", repo.Path) log.Debug("Opening temporary cat file batch-check: %s", repo.Path)
return CatFileBatchCheck(repo.Path) return CatFileBatchCheck(repo.Path)
} }
return repo.checkWriter, repo.checkReader, func() {} return repo.checkWriter, repo.checkReader, func() {}

View file

@ -12,6 +12,8 @@ import (
"bytes" "bytes"
"io" "io"
"strings" "strings"
"code.gitea.io/gitea/modules/log"
) )
// IsObjectExist returns true if given reference exists in the repository. // IsObjectExist returns true if given reference exists in the repository.
@ -24,7 +26,7 @@ func (repo *Repository) IsObjectExist(name string) bool {
defer cancel() defer cancel()
_, err := wr.Write([]byte(name + "\n")) _, err := wr.Write([]byte(name + "\n"))
if err != nil { if err != nil {
log("Error writing to CatFileBatchCheck %v", err) log.Debug("Error writing to CatFileBatchCheck %v", err)
return false return false
} }
sha, _, _, err := ReadBatchLine(rd) sha, _, _, err := ReadBatchLine(rd)
@ -41,7 +43,7 @@ func (repo *Repository) IsReferenceExist(name string) bool {
defer cancel() defer cancel()
_, err := wr.Write([]byte(name + "\n")) _, err := wr.Write([]byte(name + "\n"))
if err != nil { if err != nil {
log("Error writing to CatFileBatchCheck %v", err) log.Debug("Error writing to CatFileBatchCheck %v", err)
return false return false
} }
_, _, _, err = ReadBatchLine(rd) _, _, _, err = ReadBatchLine(rd)

View file

@ -12,6 +12,8 @@ import (
"io/ioutil" "io/ioutil"
"strconv" "strconv"
"strings" "strings"
"code.gitea.io/gitea/modules/setting"
) )
// GetBranchCommitID returns last commit ID string of given branch. // GetBranchCommitID returns last commit ID string of given branch.
@ -85,12 +87,6 @@ func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) {
return commits.Front().Value.(*Commit), nil return commits.Front().Value.(*Commit), nil
} }
// CommitsRangeSize the default commits range size
var CommitsRangeSize = 50
// BranchesRangeSize the default branches range size
var BranchesRangeSize = 20
func (repo *Repository) commitsByRange(id SHA1, page, pageSize int) (*list.List, error) { func (repo *Repository) commitsByRange(id SHA1, page, pageSize int) (*list.List, error) {
stdout, err := NewCommand("log", id.String(), "--skip="+strconv.Itoa((page-1)*pageSize), stdout, err := NewCommand("log", id.String(), "--skip="+strconv.Itoa((page-1)*pageSize),
"--max-count="+strconv.Itoa(pageSize), prettyLogFormat).RunInDirBytes(repo.Path) "--max-count="+strconv.Itoa(pageSize), prettyLogFormat).RunInDirBytes(repo.Path)
@ -206,7 +202,7 @@ func (repo *Repository) FileCommitsCount(revision, file string) (int64, error) {
// CommitsByFileAndRange return the commits according revison file and the page // CommitsByFileAndRange return the commits according revison file and the page
func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (*list.List, error) { func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (*list.List, error) {
skip := (page - 1) * CommitsRangeSize skip := (page - 1) * setting.Git.CommitsRangeSize
stdoutReader, stdoutWriter := io.Pipe() stdoutReader, stdoutWriter := io.Pipe()
defer func() { defer func() {
@ -216,7 +212,7 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (
go func() { go func() {
stderr := strings.Builder{} stderr := strings.Builder{}
err := NewCommand("log", revision, "--follow", err := NewCommand("log", revision, "--follow",
"--max-count="+strconv.Itoa(CommitsRangeSize*page), "--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize*page),
prettyLogFormat, "--", file). prettyLogFormat, "--", file).
RunInDirPipeline(repo.Path, stdoutWriter, &stderr) RunInDirPipeline(repo.Path, stdoutWriter, &stderr)
if err != nil { if err != nil {
@ -247,7 +243,7 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (
// CommitsByFileAndRangeNoFollow return the commits according revison file and the page // CommitsByFileAndRangeNoFollow return the commits according revison file and the page
func (repo *Repository) CommitsByFileAndRangeNoFollow(revision, file string, page int) (*list.List, error) { func (repo *Repository) CommitsByFileAndRangeNoFollow(revision, file string, page int) (*list.List, error) {
stdout, err := NewCommand("log", revision, "--skip="+strconv.Itoa((page-1)*50), stdout, err := NewCommand("log", revision, "--skip="+strconv.Itoa((page-1)*50),
"--max-count="+strconv.Itoa(CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path) "--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View file

@ -12,6 +12,8 @@ import (
"io" "io"
"io/ioutil" "io/ioutil"
"strings" "strings"
"code.gitea.io/gitea/modules/log"
) )
// ResolveReference resolves a name to a reference // ResolveReference resolves a name to a reference
@ -110,7 +112,7 @@ func (repo *Repository) getCommitFromBatchReader(rd *bufio.Reader, id SHA1) (*Co
return commit, nil return commit, nil
default: default:
log("Unknown typ: %s", typ) log.Debug("Unknown typ: %s", typ)
_, err = rd.Discard(int(size) + 1) _, err = rd.Discard(int(size) + 1)
if err != nil { if err != nil {
return nil, err return nil, err

View file

@ -13,6 +13,7 @@ import (
"math" "math"
"code.gitea.io/gitea/modules/analyze" "code.gitea.io/gitea/modules/analyze"
"code.gitea.io/gitea/modules/log"
"github.com/go-enry/go-enry/v2" "github.com/go-enry/go-enry/v2"
) )
@ -34,19 +35,19 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
} }
shaBytes, typ, size, err := ReadBatchLine(batchReader) shaBytes, typ, size, err := ReadBatchLine(batchReader)
if typ != "commit" { if typ != "commit" {
log("Unable to get commit for: %s. Err: %v", commitID, err) log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
return nil, ErrNotExist{commitID, ""} return nil, ErrNotExist{commitID, ""}
} }
sha, err := NewIDFromString(string(shaBytes)) sha, err := NewIDFromString(string(shaBytes))
if err != nil { if err != nil {
log("Unable to get commit for: %s. Err: %v", commitID, err) log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
return nil, ErrNotExist{commitID, ""} return nil, ErrNotExist{commitID, ""}
} }
commit, err := CommitFromReader(repo, sha, io.LimitReader(batchReader, size)) commit, err := CommitFromReader(repo, sha, io.LimitReader(batchReader, size))
if err != nil { if err != nil {
log("Unable to get commit for: %s. Err: %v", commitID, err) log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
return nil, err return nil, err
} }
if _, err = batchReader.Discard(1); err != nil { if _, err = batchReader.Discard(1); err != nil {
@ -79,7 +80,7 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
} }
_, _, size, err := ReadBatchLine(batchReader) _, _, size, err := ReadBatchLine(batchReader)
if err != nil { if err != nil {
log("Error reading blob: %s Err: %v", f.ID.String(), err) log.Debug("Error reading blob: %s Err: %v", f.ID.String(), err)
return nil, err return nil, err
} }

View file

@ -8,6 +8,8 @@ package git
import ( import (
"fmt" "fmt"
"strings" "strings"
"code.gitea.io/gitea/modules/log"
) )
// TagPrefix tags prefix path on the repository // TagPrefix tags prefix path on the repository
@ -33,7 +35,7 @@ func (repo *Repository) CreateAnnotatedTag(name, message, revision string) error
func (repo *Repository) getTag(tagID SHA1, name string) (*Tag, error) { func (repo *Repository) getTag(tagID SHA1, name string) (*Tag, error) {
t, ok := repo.tagCache.Get(tagID.String()) t, ok := repo.tagCache.Get(tagID.String())
if ok { if ok {
log("Hit cache: %s", tagID) log.Debug("Hit cache: %s", tagID)
tagClone := *t.(*Tag) tagClone := *t.(*Tag)
tagClone.Name = name // This is necessary because lightweight tags may have same id tagClone.Name = name // This is necessary because lightweight tags may have same id
return &tagClone, nil return &tagClone, nil

View file

@ -33,7 +33,7 @@ var (
once sync.Once once sync.Once
cache *lru.ARCCache cache *lru.TwoQueueCache
) )
// NewContext loads custom highlight map from local config // NewContext loads custom highlight map from local config
@ -45,7 +45,7 @@ func NewContext() {
} }
// The size 512 is simply a conservative rule of thumb // The size 512 is simply a conservative rule of thumb
c, err := lru.NewARC(512) c, err := lru.New2Q(512)
if err != nil { if err != nil {
panic(fmt.Sprintf("failed to initialize LRU cache for highlighter: %s", err)) panic(fmt.Sprintf("failed to initialize LRU cache for highlighter: %s", err))
} }

View file

@ -10,6 +10,7 @@ import (
"html" "html"
"io" "io"
"io/ioutil" "io/ioutil"
"regexp"
"strconv" "strconv"
"code.gitea.io/gitea/modules/csv" "code.gitea.io/gitea/modules/csv"
@ -38,6 +39,15 @@ func (Renderer) Extensions() []string {
return []string{".csv", ".tsv"} return []string{".csv", ".tsv"}
} }
// SanitizerRules implements markup.Renderer
func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
return []setting.MarkupSanitizerRule{
{Element: "table", AllowAttr: "class", Regexp: regexp.MustCompile(`data-table`)},
{Element: "th", AllowAttr: "class", Regexp: regexp.MustCompile(`line-num`)},
{Element: "td", AllowAttr: "class", Regexp: regexp.MustCompile(`line-num`)},
}
}
func writeField(w io.Writer, element, class, field string) error { func writeField(w io.Writer, element, class, field string) error {
if _, err := io.WriteString(w, "<"); err != nil { if _, err := io.WriteString(w, "<"); err != nil {
return err return err

View file

@ -30,7 +30,7 @@ func RegisterRenderers() {
// Renderer implements markup.Renderer for external tools // Renderer implements markup.Renderer for external tools
type Renderer struct { type Renderer struct {
setting.MarkupRenderer *setting.MarkupRenderer
} }
// Name returns the external tool name // Name returns the external tool name
@ -48,6 +48,11 @@ func (p *Renderer) Extensions() []string {
return p.FileExtensions return p.FileExtensions
} }
// SanitizerRules implements markup.Renderer
func (p *Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
return p.MarkupSanitizerRules
}
func envMark(envName string) string { func envMark(envName string) string {
if runtime.GOOS == "windows" { if runtime.GOOS == "windows" {
return "%" + envName + "%" return "%" + envName + "%"

View file

@ -112,7 +112,7 @@ func TestRender_links(t *testing.T) {
defaultCustom := setting.Markdown.CustomURLSchemes defaultCustom := setting.Markdown.CustomURLSchemes
setting.Markdown.CustomURLSchemes = []string{"ftp", "magnet"} setting.Markdown.CustomURLSchemes = []string{"ftp", "magnet"}
ReplaceSanitizer() InitializeSanitizer()
CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes) CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes)
test( test(
@ -192,7 +192,7 @@ func TestRender_links(t *testing.T) {
// Restore previous settings // Restore previous settings
setting.Markdown.CustomURLSchemes = defaultCustom setting.Markdown.CustomURLSchemes = defaultCustom
ReplaceSanitizer() InitializeSanitizer()
CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes) CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes)
} }

View file

@ -199,7 +199,7 @@ func actualRender(ctx *markup.RenderContext, input io.Reader, output io.Writer)
} }
_ = lw.Close() _ = lw.Close()
}() }()
buf := markup.SanitizeReader(rd) buf := markup.SanitizeReader(rd, "")
_, err := io.Copy(output, buf) _, err := io.Copy(output, buf)
return err return err
} }
@ -215,7 +215,7 @@ func render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
if log.IsDebug() { if log.IsDebug() {
log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2))) log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2)))
} }
ret := markup.SanitizeReader(input) ret := markup.SanitizeReader(input, "")
_, err = io.Copy(output, ret) _, err = io.Copy(output, ret)
if err != nil { if err != nil {
log.Error("SanitizeReader failed: %v", err) log.Error("SanitizeReader failed: %v", err)
@ -249,6 +249,11 @@ func (Renderer) Extensions() []string {
return setting.Markdown.FileExtensions return setting.Markdown.FileExtensions
} }
// SanitizerRules implements markup.Renderer
func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
return []setting.MarkupSanitizerRule{}
}
// Render implements markup.Renderer // Render implements markup.Renderer
func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
return render(ctx, input, output) return render(ctx, input, output)

View file

@ -11,9 +11,13 @@ import (
"io" "io"
"strings" "strings"
"code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/util"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/lexers"
"github.com/niklasfasching/go-org/org" "github.com/niklasfasching/go-org/org"
) )
@ -38,9 +42,55 @@ func (Renderer) Extensions() []string {
return []string{".org"} return []string{".org"}
} }
// SanitizerRules implements markup.Renderer
func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
return []setting.MarkupSanitizerRule{}
}
// Render renders orgmode rawbytes to HTML // Render renders orgmode rawbytes to HTML
func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
htmlWriter := org.NewHTMLWriter() htmlWriter := org.NewHTMLWriter()
htmlWriter.HighlightCodeBlock = func(source, lang string, inline bool) string {
var w strings.Builder
if _, err := w.WriteString(`<pre>`); err != nil {
return ""
}
lexer := lexers.Get(lang)
if lexer == nil && lang == "" {
lexer = lexers.Analyse(source)
if lexer == nil {
lexer = lexers.Fallback
}
lang = strings.ToLower(lexer.Config().Name)
}
if lexer == nil {
// include language-x class as part of commonmark spec
if _, err := w.WriteString(`<code class="chroma language-` + string(lang) + `">`); err != nil {
return ""
}
if _, err := w.WriteString(html.EscapeString(source)); err != nil {
return ""
}
} else {
// include language-x class as part of commonmark spec
if _, err := w.WriteString(`<code class="chroma language-` + string(lang) + `">`); err != nil {
return ""
}
lexer = chroma.Coalesce(lexer)
if _, err := w.WriteString(highlight.Code(lexer.Config().Filenames[0], source)); err != nil {
return ""
}
}
if _, err := w.WriteString("</code></pre>"); err != nil {
return ""
}
return w.String()
}
w := &Writer{ w := &Writer{
HTMLWriter: htmlWriter, HTMLWriter: htmlWriter,

View file

@ -81,6 +81,7 @@ type Renderer interface {
Name() string // markup format name Name() string // markup format name
Extensions() []string Extensions() []string
NeedPostProcess() bool NeedPostProcess() bool
SanitizerRules() []setting.MarkupSanitizerRule
Render(ctx *RenderContext, input io.Reader, output io.Writer) error Render(ctx *RenderContext, input io.Reader, output io.Writer) error
} }
@ -136,37 +137,32 @@ func render(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Wr
_ = pw.Close() _ = pw.Close()
}() }()
if renderer.NeedPostProcess() { pr2, pw2 := io.Pipe()
pr2, pw2 := io.Pipe() defer func() {
defer func() { _ = pr2.Close()
_ = pr2.Close() _ = pw2.Close()
_ = pw2.Close() }()
}()
wg.Add(1) wg.Add(1)
go func() { go func() {
buf := SanitizeReader(pr2) buf := SanitizeReader(pr2, renderer.Name())
_, err = io.Copy(output, buf) _, err = io.Copy(output, buf)
_ = pr2.Close() _ = pr2.Close()
wg.Done() wg.Done()
}() }()
wg.Add(1) wg.Add(1)
go func() { go func() {
if renderer.NeedPostProcess() {
err = PostProcess(ctx, pr, pw2) err = PostProcess(ctx, pr, pw2)
_ = pr.Close() } else {
_ = pw2.Close() _, err = io.Copy(pw2, pr)
wg.Done() }
}() _ = pr.Close()
} else { _ = pw2.Close()
wg.Add(1) wg.Done()
go func() { }()
buf := SanitizeReader(pr)
_, err = io.Copy(output, buf)
_ = pr.Close()
wg.Done()
}()
}
if err1 := renderer.Render(ctx, input, pw); err1 != nil { if err1 := renderer.Render(ctx, input, pw); err1 != nil {
return err1 return err1
} }

View file

@ -19,8 +19,9 @@ import (
// Sanitizer is a protection wrapper of *bluemonday.Policy which does not allow // Sanitizer is a protection wrapper of *bluemonday.Policy which does not allow
// any modification to the underlying policies once it's been created. // any modification to the underlying policies once it's been created.
type Sanitizer struct { type Sanitizer struct {
policy *bluemonday.Policy defaultPolicy *bluemonday.Policy
init sync.Once rendererPolicies map[string]*bluemonday.Policy
init sync.Once
} }
var sanitizer = &Sanitizer{} var sanitizer = &Sanitizer{}
@ -30,47 +31,57 @@ var sanitizer = &Sanitizer{}
// entire application lifecycle. // entire application lifecycle.
func NewSanitizer() { func NewSanitizer() {
sanitizer.init.Do(func() { sanitizer.init.Do(func() {
ReplaceSanitizer() InitializeSanitizer()
}) })
} }
// ReplaceSanitizer replaces the current sanitizer to account for changes in settings // InitializeSanitizer (re)initializes the current sanitizer to account for changes in settings
func ReplaceSanitizer() { func InitializeSanitizer() {
sanitizer.policy = bluemonday.UGCPolicy() sanitizer.rendererPolicies = map[string]*bluemonday.Policy{}
sanitizer.defaultPolicy = createDefaultPolicy()
for name, renderer := range renderers {
sanitizerRules := renderer.SanitizerRules()
if len(sanitizerRules) > 0 {
policy := createDefaultPolicy()
addSanitizerRules(policy, sanitizerRules)
sanitizer.rendererPolicies[name] = policy
}
}
}
func createDefaultPolicy() *bluemonday.Policy {
policy := bluemonday.UGCPolicy()
// For Chroma markdown plugin // For Chroma markdown plugin
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^is-loading$`)).OnElements("pre") policy.AllowAttrs("class").Matching(regexp.MustCompile(`^is-loading$`)).OnElements("pre")
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^(chroma )?language-[\w-]+$`)).OnElements("code") policy.AllowAttrs("class").Matching(regexp.MustCompile(`^(chroma )?language-[\w-]+$`)).OnElements("code")
// Checkboxes // Checkboxes
sanitizer.policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input") policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input")
sanitizer.policy.AllowAttrs("checked", "disabled", "data-source-position").OnElements("input") policy.AllowAttrs("checked", "disabled", "data-source-position").OnElements("input")
// Custom URL-Schemes // Custom URL-Schemes
if len(setting.Markdown.CustomURLSchemes) > 0 { if len(setting.Markdown.CustomURLSchemes) > 0 {
sanitizer.policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...) policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...)
} }
// Allow classes for anchors // Allow classes for anchors
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`ref-issue`)).OnElements("a") policy.AllowAttrs("class").Matching(regexp.MustCompile(`ref-issue`)).OnElements("a")
// Allow classes for task lists // Allow classes for task lists
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`task-list-item`)).OnElements("li") policy.AllowAttrs("class").Matching(regexp.MustCompile(`task-list-item`)).OnElements("li")
// Allow icons // Allow icons
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^icon(\s+[\p{L}\p{N}_-]+)+$`)).OnElements("i") policy.AllowAttrs("class").Matching(regexp.MustCompile(`^icon(\s+[\p{L}\p{N}_-]+)+$`)).OnElements("i")
// Allow unlabelled labels // Allow unlabelled labels
sanitizer.policy.AllowNoAttrs().OnElements("label") policy.AllowNoAttrs().OnElements("label")
// Allow classes for emojis // Allow classes for emojis
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`emoji`)).OnElements("img") policy.AllowAttrs("class").Matching(regexp.MustCompile(`emoji`)).OnElements("img")
// Allow icons, emojis, chroma syntax and keyword markup on span // Allow icons, emojis, chroma syntax and keyword markup on span
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^((icon(\s+[\p{L}\p{N}_-]+)+)|(emoji))$|^([a-z][a-z0-9]{0,2})$|^` + keywordClass + `$`)).OnElements("span") policy.AllowAttrs("class").Matching(regexp.MustCompile(`^((icon(\s+[\p{L}\p{N}_-]+)+)|(emoji))$|^([a-z][a-z0-9]{0,2})$|^` + keywordClass + `$`)).OnElements("span")
// Allow data tables
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`data-table`)).OnElements("table")
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`line-num`)).OnElements("th", "td")
// Allow generally safe attributes // Allow generally safe attributes
generalSafeAttrs := []string{"abbr", "accept", "accept-charset", generalSafeAttrs := []string{"abbr", "accept", "accept-charset",
@ -101,18 +112,29 @@ func ReplaceSanitizer() {
"abbr", "bdo", "cite", "dfn", "mark", "small", "span", "time", "wbr", "abbr", "bdo", "cite", "dfn", "mark", "small", "span", "time", "wbr",
} }
sanitizer.policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...) policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...)
sanitizer.policy.AllowAttrs("itemscope", "itemtype").OnElements("div") policy.AllowAttrs("itemscope", "itemtype").OnElements("div")
// FIXME: Need to handle longdesc in img but there is no easy way to do it // FIXME: Need to handle longdesc in img but there is no easy way to do it
// Custom keyword markup // Custom keyword markup
for _, rule := range setting.ExternalSanitizerRules { addSanitizerRules(policy, setting.ExternalSanitizerRules)
if rule.Regexp != nil {
sanitizer.policy.AllowAttrs(rule.AllowAttr).Matching(rule.Regexp).OnElements(rule.Element) return policy
} else { }
sanitizer.policy.AllowAttrs(rule.AllowAttr).OnElements(rule.Element)
func addSanitizerRules(policy *bluemonday.Policy, rules []setting.MarkupSanitizerRule) {
for _, rule := range rules {
if rule.AllowDataURIImages {
policy.AllowDataURIImages()
}
if rule.Element != "" {
if rule.Regexp != nil {
policy.AllowAttrs(rule.AllowAttr).Matching(rule.Regexp).OnElements(rule.Element)
} else {
policy.AllowAttrs(rule.AllowAttr).OnElements(rule.Element)
}
} }
} }
} }
@ -120,11 +142,15 @@ func ReplaceSanitizer() {
// Sanitize takes a string that contains a HTML fragment or document and applies policy whitelist. // Sanitize takes a string that contains a HTML fragment or document and applies policy whitelist.
func Sanitize(s string) string { func Sanitize(s string) string {
NewSanitizer() NewSanitizer()
return sanitizer.policy.Sanitize(s) return sanitizer.defaultPolicy.Sanitize(s)
} }
// SanitizeReader sanitizes a Reader // SanitizeReader sanitizes a Reader
func SanitizeReader(r io.Reader) *bytes.Buffer { func SanitizeReader(r io.Reader, renderer string) *bytes.Buffer {
NewSanitizer() NewSanitizer()
return sanitizer.policy.SanitizeReader(r) policy, exist := sanitizer.rendererPolicies[renderer]
if !exist {
policy = sanitizer.defaultPolicy
}
return policy.SanitizeReader(r)
} }

View file

@ -54,7 +54,7 @@ type HookOptions struct {
GitAlternativeObjectDirectories string GitAlternativeObjectDirectories string
GitQuarantinePath string GitQuarantinePath string
GitPushOptions GitPushOptions GitPushOptions GitPushOptions
ProtectedBranchID int64 PullRequestID int64
IsDeployKey bool IsDeployKey bool
} }

View file

@ -58,7 +58,6 @@ type ServCommandResults struct {
// ErrServCommand is an error returned from ServCommmand. // ErrServCommand is an error returned from ServCommmand.
type ErrServCommand struct { type ErrServCommand struct {
Results ServCommandResults Results ServCommandResults
Type string
Err string Err string
StatusCode int StatusCode int
} }

View file

@ -5,6 +5,7 @@
package references package references
import ( import (
"bytes"
"net/url" "net/url"
"regexp" "regexp"
"strconv" "strconv"
@ -14,6 +15,8 @@ import (
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup/mdstripper" "code.gitea.io/gitea/modules/markup/mdstripper"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"github.com/yuin/goldmark/util"
) )
var ( var (
@ -321,7 +324,7 @@ func FindRenderizableReferenceNumeric(content string, prOnly bool) (bool, *Rende
return false, nil return false, nil
} }
} }
r := getCrossReference([]byte(content), match[2], match[3], false, prOnly) r := getCrossReference(util.StringToReadOnlyBytes(content), match[2], match[3], false, prOnly)
if r == nil { if r == nil {
return false, nil return false, nil
} }
@ -465,18 +468,17 @@ func findAllIssueReferencesBytes(content []byte, links []string) []*rawReference
} }
func getCrossReference(content []byte, start, end int, fromLink bool, prOnly bool) *rawReference { func getCrossReference(content []byte, start, end int, fromLink bool, prOnly bool) *rawReference {
refid := string(content[start:end]) sep := bytes.IndexAny(content[start:end], "#!")
sep := strings.IndexAny(refid, "#!")
if sep < 0 { if sep < 0 {
return nil return nil
} }
isPull := refid[sep] == '!' isPull := content[start+sep] == '!'
if prOnly && !isPull { if prOnly && !isPull {
return nil return nil
} }
repo := refid[:sep] repo := string(content[start : start+sep])
issue := refid[sep+1:] issue := string(content[start+sep+1 : end])
index, err := strconv.ParseInt(issue, 10, 64) index, err := strconv.ParseInt(string(issue), 10, 64)
if err != nil { if err != nil {
return nil return nil
} }

View file

@ -7,7 +7,6 @@ package setting
import ( import (
"time" "time"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
) )
@ -19,8 +18,8 @@ var (
MaxGitDiffLines int MaxGitDiffLines int
MaxGitDiffLineCharacters int MaxGitDiffLineCharacters int
MaxGitDiffFiles int MaxGitDiffFiles int
CommitsRangeSize int CommitsRangeSize int // CommitsRangeSize the default commits range size
BranchesRangeSize int BranchesRangeSize int // BranchesRangeSize the default branches range size
VerbosePush bool VerbosePush bool
VerbosePushDelay time.Duration VerbosePushDelay time.Duration
GCArgs []string `ini:"GC_ARGS" delim:" "` GCArgs []string `ini:"GC_ARGS" delim:" "`
@ -54,7 +53,7 @@ var (
Pull int Pull int
GC int `ini:"GC"` GC int `ini:"GC"`
}{ }{
Default: int(git.DefaultCommandExecutionTimeout / time.Second), Default: 360,
Migrate: 600, Migrate: 600,
Mirror: 300, Mirror: 300,
Clone: 300, Clone: 300,
@ -68,35 +67,4 @@ func newGit() {
if err := Cfg.Section("git").MapTo(&Git); err != nil { if err := Cfg.Section("git").MapTo(&Git); err != nil {
log.Fatal("Failed to map Git settings: %v", err) log.Fatal("Failed to map Git settings: %v", err)
} }
if err := git.SetExecutablePath(Git.Path); err != nil {
log.Fatal("Failed to initialize Git settings: %v", err)
}
git.DefaultCommandExecutionTimeout = time.Duration(Git.Timeout.Default) * time.Second
version, err := git.LocalVersion()
if err != nil {
log.Fatal("Error retrieving git version: %v", err)
}
// force cleanup args
git.GlobalCommandArgs = []string{}
if git.CheckGitVersionAtLeast("2.9") == nil {
// Explicitly disable credential helper, otherwise Git credentials might leak
git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "credential.helper=")
}
var format = "Git Version: %s"
var args = []interface{}{version.Original()}
// Since git wire protocol has been released from git v2.18
if Git.EnableAutoGitWireProtocol && git.CheckGitVersionAtLeast("2.18") == nil {
git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "protocol.version=2")
format += ", Wire Protocol %s Enabled"
args = append(args, "Version 2") // for focus color
}
git.CommitsRangeSize = Git.CommitsRangeSize
git.BranchesRangeSize = Git.BranchesRangeSize
log.Info(format, args...)
} }

View file

@ -9,7 +9,6 @@ import (
"time" "time"
"code.gitea.io/gitea/modules/generate" "code.gitea.io/gitea/modules/generate"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
ini "gopkg.in/ini.v1" ini "gopkg.in/ini.v1"
@ -67,24 +66,3 @@ func newLFSService() {
} }
} }
} }
// CheckLFSVersion will check lfs version, if not satisfied, then disable it.
func CheckLFSVersion() {
if LFS.StartServer {
//Disable LFS client hooks if installed for the current OS user
//Needs at least git v2.1.2
err := git.LoadGitVersion()
if err != nil {
log.Fatal("Error retrieving git version: %v", err)
}
if git.CheckGitVersionAtLeast("2.1.2") != nil {
LFS.StartServer = false
log.Error("LFS server support needs at least Git v2.1.2")
} else {
git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "filter.lfs.required=",
"-c", "filter.lfs.smudge=", "-c", "filter.lfs.clean=")
}
}
}

View file

@ -15,31 +15,34 @@ import (
// ExternalMarkupRenderers represents the external markup renderers // ExternalMarkupRenderers represents the external markup renderers
var ( var (
ExternalMarkupRenderers []MarkupRenderer ExternalMarkupRenderers []*MarkupRenderer
ExternalSanitizerRules []MarkupSanitizerRule ExternalSanitizerRules []MarkupSanitizerRule
) )
// MarkupRenderer defines the external parser configured in ini // MarkupRenderer defines the external parser configured in ini
type MarkupRenderer struct { type MarkupRenderer struct {
Enabled bool Enabled bool
MarkupName string MarkupName string
Command string Command string
FileExtensions []string FileExtensions []string
IsInputFile bool IsInputFile bool
NeedPostProcess bool NeedPostProcess bool
MarkupSanitizerRules []MarkupSanitizerRule
} }
// MarkupSanitizerRule defines the policy for whitelisting attributes on // MarkupSanitizerRule defines the policy for whitelisting attributes on
// certain elements. // certain elements.
type MarkupSanitizerRule struct { type MarkupSanitizerRule struct {
Element string Element string
AllowAttr string AllowAttr string
Regexp *regexp.Regexp Regexp *regexp.Regexp
AllowDataURIImages bool
} }
func newMarkup() { func newMarkup() {
ExternalMarkupRenderers = make([]MarkupRenderer, 0, 10) ExternalMarkupRenderers = make([]*MarkupRenderer, 0, 10)
ExternalSanitizerRules = make([]MarkupSanitizerRule, 0, 10) ExternalSanitizerRules = make([]MarkupSanitizerRule, 0, 10)
for _, sec := range Cfg.Section("markup").ChildSections() { for _, sec := range Cfg.Section("markup").ChildSections() {
name := strings.TrimPrefix(sec.Name(), "markup.") name := strings.TrimPrefix(sec.Name(), "markup.")
if name == "" { if name == "" {
@ -56,50 +59,62 @@ func newMarkup() {
} }
func newMarkupSanitizer(name string, sec *ini.Section) { func newMarkupSanitizer(name string, sec *ini.Section) {
haveElement := sec.HasKey("ELEMENT") rule, ok := createMarkupSanitizerRule(name, sec)
haveAttr := sec.HasKey("ALLOW_ATTR") if ok {
haveRegexp := sec.HasKey("REGEXP") if strings.HasPrefix(name, "sanitizer.") {
names := strings.SplitN(strings.TrimPrefix(name, "sanitizer."), ".", 2)
name = names[0]
}
for _, renderer := range ExternalMarkupRenderers {
if name == renderer.MarkupName {
renderer.MarkupSanitizerRules = append(renderer.MarkupSanitizerRules, rule)
return
}
}
ExternalSanitizerRules = append(ExternalSanitizerRules, rule)
}
}
if !haveElement && !haveAttr && !haveRegexp { func createMarkupSanitizerRule(name string, sec *ini.Section) (MarkupSanitizerRule, bool) {
log.Warn("Skipping empty section: markup.%s.", name) var rule MarkupSanitizerRule
return
ok := false
if sec.HasKey("ALLOW_DATA_URI_IMAGES") {
rule.AllowDataURIImages = sec.Key("ALLOW_DATA_URI_IMAGES").MustBool(false)
ok = true
} }
if !haveElement || !haveAttr || !haveRegexp { if sec.HasKey("ELEMENT") || sec.HasKey("ALLOW_ATTR") {
log.Error("Missing required keys from markup.%s. Must have all three of ELEMENT, ALLOW_ATTR, and REGEXP defined!", name) rule.Element = sec.Key("ELEMENT").Value()
return rule.AllowAttr = sec.Key("ALLOW_ATTR").Value()
}
elements := sec.Key("ELEMENT").Value() if rule.Element == "" || rule.AllowAttr == "" {
allowAttrs := sec.Key("ALLOW_ATTR").Value() log.Error("Missing required values from markup.%s. Must have ELEMENT and ALLOW_ATTR defined!", name)
regexpStr := sec.Key("REGEXP").Value() return rule, false
if regexpStr == "" {
rule := MarkupSanitizerRule{
Element: elements,
AllowAttr: allowAttrs,
Regexp: nil,
} }
ExternalSanitizerRules = append(ExternalSanitizerRules, rule) regexpStr := sec.Key("REGEXP").Value()
return if regexpStr != "" {
// Validate when parsing the config that this is a valid regular
// expression. Then we can use regexp.MustCompile(...) later.
compiled, err := regexp.Compile(regexpStr)
if err != nil {
log.Error("In markup.%s: REGEXP (%s) failed to compile: %v", name, regexpStr, err)
return rule, false
}
rule.Regexp = compiled
}
ok = true
} }
// Validate when parsing the config that this is a valid regular if !ok {
// expression. Then we can use regexp.MustCompile(...) later. log.Error("Missing required keys from markup.%s. Must have ELEMENT and ALLOW_ATTR or ALLOW_DATA_URI_IMAGES defined!", name)
compiled, err := regexp.Compile(regexpStr) return rule, false
if err != nil {
log.Error("In module.%s: REGEXP (%s) at definition %d failed to compile: %v", regexpStr, name, err)
return
} }
rule := MarkupSanitizerRule{ return rule, true
Element: elements,
AllowAttr: allowAttrs,
Regexp: compiled,
}
ExternalSanitizerRules = append(ExternalSanitizerRules, rule)
} }
func newMarkupRenderer(name string, sec *ini.Section) { func newMarkupRenderer(name string, sec *ini.Section) {
@ -126,7 +141,7 @@ func newMarkupRenderer(name string, sec *ini.Section) {
return return
} }
ExternalMarkupRenderers = append(ExternalMarkupRenderers, MarkupRenderer{ ExternalMarkupRenderers = append(ExternalMarkupRenderers, &MarkupRenderer{
Enabled: sec.Key("ENABLED").MustBool(false), Enabled: sec.Key("ENABLED").MustBool(false),
MarkupName: name, MarkupName: name,
FileExtensions: exts, FileExtensions: exts,

View file

@ -251,6 +251,10 @@ var (
} }
RepoRootPath string RepoRootPath string
ScriptType = "bash" ScriptType = "bash"
RepoArchive = struct {
Storage
}{}
) )
func newRepository() { func newRepository() {
@ -328,4 +332,6 @@ func newRepository() {
if !filepath.IsAbs(Repository.Upload.TempPath) { if !filepath.IsAbs(Repository.Upload.TempPath) {
Repository.Upload.TempPath = path.Join(AppWorkPath, Repository.Upload.TempPath) Repository.Upload.TempPath = path.Join(AppWorkPath, Repository.Upload.TempPath)
} }
RepoArchive.Storage = getStorage("repo-archive", "", nil)
} }

View file

@ -6,6 +6,7 @@ package setting
import ( import (
"regexp" "regexp"
"strings"
"time" "time"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
@ -14,6 +15,8 @@ import (
// Service settings // Service settings
var Service struct { var Service struct {
DefaultUserVisibility string
DefaultUserVisibilityMode structs.VisibleType
DefaultOrgVisibility string DefaultOrgVisibility string
DefaultOrgVisibilityMode structs.VisibleType DefaultOrgVisibilityMode structs.VisibleType
ActiveCodeLives int ActiveCodeLives int
@ -55,6 +58,7 @@ var Service struct {
AutoWatchOnChanges bool AutoWatchOnChanges bool
DefaultOrgMemberVisible bool DefaultOrgMemberVisible bool
UserDeleteWithCommentsMaxTime time.Duration UserDeleteWithCommentsMaxTime time.Duration
ValidSiteURLSchemes []string
// OpenID settings // OpenID settings
EnableOpenIDSignIn bool EnableOpenIDSignIn bool
@ -116,10 +120,22 @@ func newService() {
Service.EnableUserHeatmap = sec.Key("ENABLE_USER_HEATMAP").MustBool(true) Service.EnableUserHeatmap = sec.Key("ENABLE_USER_HEATMAP").MustBool(true)
Service.AutoWatchNewRepos = sec.Key("AUTO_WATCH_NEW_REPOS").MustBool(true) Service.AutoWatchNewRepos = sec.Key("AUTO_WATCH_NEW_REPOS").MustBool(true)
Service.AutoWatchOnChanges = sec.Key("AUTO_WATCH_ON_CHANGES").MustBool(false) Service.AutoWatchOnChanges = sec.Key("AUTO_WATCH_ON_CHANGES").MustBool(false)
Service.DefaultUserVisibility = sec.Key("DEFAULT_USER_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes))
Service.DefaultUserVisibilityMode = structs.VisibilityModes[Service.DefaultUserVisibility]
Service.DefaultOrgVisibility = sec.Key("DEFAULT_ORG_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes)) Service.DefaultOrgVisibility = sec.Key("DEFAULT_ORG_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes))
Service.DefaultOrgVisibilityMode = structs.VisibilityModes[Service.DefaultOrgVisibility] Service.DefaultOrgVisibilityMode = structs.VisibilityModes[Service.DefaultOrgVisibility]
Service.DefaultOrgMemberVisible = sec.Key("DEFAULT_ORG_MEMBER_VISIBLE").MustBool() Service.DefaultOrgMemberVisible = sec.Key("DEFAULT_ORG_MEMBER_VISIBLE").MustBool()
Service.UserDeleteWithCommentsMaxTime = sec.Key("USER_DELETE_WITH_COMMENTS_MAX_TIME").MustDuration(0) Service.UserDeleteWithCommentsMaxTime = sec.Key("USER_DELETE_WITH_COMMENTS_MAX_TIME").MustDuration(0)
sec.Key("VALID_SITE_URL_SCHEMES").MustString("http,https")
Service.ValidSiteURLSchemes = sec.Key("VALID_SITE_URL_SCHEMES").Strings(",")
schemes := make([]string, len(Service.ValidSiteURLSchemes))
for _, scheme := range Service.ValidSiteURLSchemes {
scheme = strings.ToLower(strings.TrimSpace(scheme))
if scheme != "" {
schemes = append(schemes, scheme)
}
}
Service.ValidSiteURLSchemes = schemes
if err := Cfg.Section("service.explore").MapTo(&Service.Explore); err != nil { if err := Cfg.Section("service.explore").MapTo(&Service.Explore); err != nil {
log.Fatal("Failed to map service.explore settings: %v", err) log.Fatal("Failed to map service.explore settings: %v", err)

View file

@ -469,7 +469,8 @@ func getWorkPath(appPath string) string {
func init() { func init() {
IsWindows = runtime.GOOS == "windows" IsWindows = runtime.GOOS == "windows"
// We can rely on log.CanColorStdout being set properly because modules/log/console_windows.go comes before modules/setting/setting.go lexicographically // We can rely on log.CanColorStdout being set properly because modules/log/console_windows.go comes before modules/setting/setting.go lexicographically
log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "trace", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout)) // By default set this logger at Info - we'll change it later but we need to start with something.
log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "info", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
var err error var err error
if AppPath, err = getAppPath(); err != nil { if AppPath, err = getAppPath(); err != nil {
@ -1158,6 +1159,19 @@ func CreateOrAppendToCustomConf(callback func(cfg *ini.File)) {
if err := cfg.SaveTo(CustomConf); err != nil { if err := cfg.SaveTo(CustomConf); err != nil {
log.Fatal("error saving to custom config: %v", err) log.Fatal("error saving to custom config: %v", err)
} }
// Change permissions to be more restrictive
fi, err := os.Stat(CustomConf)
if err != nil {
log.Error("Failed to determine current conf file permissions: %v", err)
return
}
if fi.Mode().Perm() > 0o600 {
if err = os.Chmod(CustomConf, 0o600); err != nil {
log.Warn("Failed changing conf file permissions to -rw-------. Consider changing them manually.")
}
}
} }
// NewServices initializes the services // NewServices initializes the services

View file

@ -43,6 +43,10 @@ func getStorage(name, typ string, targetSec *ini.Section) Storage {
sec.Key("MINIO_LOCATION").MustString("us-east-1") sec.Key("MINIO_LOCATION").MustString("us-east-1")
sec.Key("MINIO_USE_SSL").MustBool(false) sec.Key("MINIO_USE_SSL").MustBool(false)
if targetSec == nil {
targetSec, _ = Cfg.NewSection(name)
}
var storage Storage var storage Storage
storage.Section = targetSec storage.Section = targetSec
storage.Type = typ storage.Type = typ

View file

@ -114,6 +114,9 @@ var (
Avatars ObjectStorage Avatars ObjectStorage
// RepoAvatars represents repository avatars storage // RepoAvatars represents repository avatars storage
RepoAvatars ObjectStorage RepoAvatars ObjectStorage
// RepoArchives represents repository archives storage
RepoArchives ObjectStorage
) )
// Init init the stoarge // Init init the stoarge
@ -130,7 +133,11 @@ func Init() error {
return err return err
} }
return initLFS() if err := initLFS(); err != nil {
return err
}
return initRepoArchives()
} }
// NewStorage takes a storage type and some config and returns an ObjectStorage or an error // NewStorage takes a storage type and some config and returns an ObjectStorage or an error
@ -169,3 +176,9 @@ func initRepoAvatars() (err error) {
RepoAvatars, err = NewStorage(setting.RepoAvatar.Storage.Type, &setting.RepoAvatar.Storage) RepoAvatars, err = NewStorage(setting.RepoAvatar.Storage.Type, &setting.RepoAvatar.Storage)
return return
} }
func initRepoArchives() (err error) {
log.Info("Initialising Repository Archive storage with type: %s", setting.RepoArchive.Storage.Type)
RepoArchives, err = NewStorage(setting.RepoArchive.Storage.Type, &setting.RepoArchive.Storage)
return
}

View file

@ -19,6 +19,7 @@ type CreateUserOption struct {
Password string `json:"password" binding:"Required;MaxSize(255)"` Password string `json:"password" binding:"Required;MaxSize(255)"`
MustChangePassword *bool `json:"must_change_password"` MustChangePassword *bool `json:"must_change_password"`
SendNotify bool `json:"send_notify"` SendNotify bool `json:"send_notify"`
Visibility string `json:"visibility" binding:"In(,public,limited,private)"`
} }
// EditUserOption edit user options // EditUserOption edit user options
@ -43,4 +44,5 @@ type EditUserOption struct {
ProhibitLogin *bool `json:"prohibit_login"` ProhibitLogin *bool `json:"prohibit_login"`
AllowCreateOrganization *bool `json:"allow_create_organization"` AllowCreateOrganization *bool `json:"allow_create_organization"`
Restricted *bool `json:"restricted"` Restricted *bool `json:"restricted"`
Visibility string `json:"visibility" binding:"In(,public,limited,private)"`
} }

View file

@ -43,6 +43,8 @@ type User struct {
Website string `json:"website"` Website string `json:"website"`
// the user's description // the user's description
Description string `json:"description"` Description string `json:"description"`
// User visibility level option: public, limited, private
Visibility string `json:"visibility"`
// user counts // user counts
Followers int `json:"followers_count"` Followers int `json:"followers_count"`
@ -60,3 +62,33 @@ func (u User) MarshalJSON() ([]byte, error) {
CompatUserName string `json:"username"` CompatUserName string `json:"username"`
}{shadow(u), u.UserName}) }{shadow(u), u.UserName})
} }
// UserSettings represents user settings
// swagger:model
type UserSettings struct {
FullName string `json:"full_name"`
Website string `json:"website"`
Description string `json:"description"`
Location string `json:"location"`
Language string `json:"language"`
Theme string `json:"theme"`
DiffViewStyle string `json:"diff_view_style"`
// Privacy
HideEmail bool `json:"hide_email"`
HideActivity bool `json:"hide_activity"`
}
// UserSettingsOptions represents options to change user settings
// swagger:model
type UserSettingsOptions struct {
FullName *string `json:"full_name" binding:"MaxSize(100)"`
Website *string `json:"website" binding:"OmitEmpty;ValidUrl;MaxSize(255)"`
Description *string `json:"description" binding:"MaxSize(255)"`
Location *string `json:"location" binding:"MaxSize(50)"`
Language *string `json:"language"`
Theme *string `json:"theme"`
DiffViewStyle *string `json:"diff_view_style"`
// Privacy
HideEmail *bool `json:"hide_email"`
HideActivity *bool `json:"hide_activity"`
}

View file

@ -19,6 +19,9 @@ const (
// ErrGlobPattern is returned when glob pattern is invalid // ErrGlobPattern is returned when glob pattern is invalid
ErrGlobPattern = "GlobPattern" ErrGlobPattern = "GlobPattern"
// ErrRegexPattern is returned when a regex pattern is invalid
ErrRegexPattern = "RegexPattern"
) )
var ( var (
@ -52,7 +55,10 @@ func CheckGitRefAdditionalRulesValid(name string) bool {
func AddBindingRules() { func AddBindingRules() {
addGitRefNameBindingRule() addGitRefNameBindingRule()
addValidURLBindingRule() addValidURLBindingRule()
addValidSiteURLBindingRule()
addGlobPatternRule() addGlobPatternRule()
addRegexPatternRule()
addGlobOrRegexPatternRule()
} }
func addGitRefNameBindingRule() { func addGitRefNameBindingRule() {
@ -97,22 +103,78 @@ func addValidURLBindingRule() {
}) })
} }
func addValidSiteURLBindingRule() {
// URL validation rule
binding.AddRule(&binding.Rule{
IsMatch: func(rule string) bool {
return strings.HasPrefix(rule, "ValidSiteUrl")
},
IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
str := fmt.Sprintf("%v", val)
if len(str) != 0 && !IsValidSiteURL(str) {
errs.Add([]string{name}, binding.ERR_URL, "Url")
return false, errs
}
return true, errs
},
})
}
func addGlobPatternRule() { func addGlobPatternRule() {
binding.AddRule(&binding.Rule{ binding.AddRule(&binding.Rule{
IsMatch: func(rule string) bool { IsMatch: func(rule string) bool {
return rule == "GlobPattern" return rule == "GlobPattern"
}, },
IsValid: globPatternValidator,
})
}
func globPatternValidator(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
str := fmt.Sprintf("%v", val)
if len(str) != 0 {
if _, err := glob.Compile(str); err != nil {
errs.Add([]string{name}, ErrGlobPattern, err.Error())
return false, errs
}
}
return true, errs
}
func addRegexPatternRule() {
binding.AddRule(&binding.Rule{
IsMatch: func(rule string) bool {
return rule == "RegexPattern"
},
IsValid: regexPatternValidator,
})
}
func regexPatternValidator(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
str := fmt.Sprintf("%v", val)
if _, err := regexp.Compile(str); err != nil {
errs.Add([]string{name}, ErrRegexPattern, err.Error())
return false, errs
}
return true, errs
}
func addGlobOrRegexPatternRule() {
binding.AddRule(&binding.Rule{
IsMatch: func(rule string) bool {
return rule == "GlobOrRegexPattern"
},
IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) { IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
str := fmt.Sprintf("%v", val) str := strings.TrimSpace(fmt.Sprintf("%v", val))
if len(str) != 0 { if len(str) >= 2 && strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") {
if _, err := glob.Compile(str); err != nil { return regexPatternValidator(errs, name, str[1:len(str)-1])
errs.Add([]string{name}, ErrGlobPattern, err.Error())
return false, errs
}
} }
return globPatternValidator(errs, name, val)
return true, errs
}, },
}) })
} }

View file

@ -26,9 +26,10 @@ type (
} }
TestForm struct { TestForm struct {
BranchName string `form:"BranchName" binding:"GitRefName"` BranchName string `form:"BranchName" binding:"GitRefName"`
URL string `form:"ValidUrl" binding:"ValidUrl"` URL string `form:"ValidUrl" binding:"ValidUrl"`
GlobPattern string `form:"GlobPattern" binding:"GlobPattern"` GlobPattern string `form:"GlobPattern" binding:"GlobPattern"`
RegexPattern string `form:"RegexPattern" binding:"RegexPattern"`
} }
) )

View file

@ -52,6 +52,25 @@ func IsValidURL(uri string) bool {
return true return true
} }
// IsValidSiteURL checks if URL is valid
func IsValidSiteURL(uri string) bool {
u, err := url.ParseRequestURI(uri)
if err != nil {
return false
}
if !validPort(portOnly(u.Host)) {
return false
}
for _, scheme := range setting.Service.ValidSiteURLSchemes {
if scheme == u.Scheme {
return true
}
}
return false
}
// IsAPIURL checks if URL is current Gitea instance API URL // IsAPIURL checks if URL is current Gitea instance API URL
func IsAPIURL(uri string) bool { func IsAPIURL(uri string) bool {
return strings.HasPrefix(strings.ToLower(uri), strings.ToLower(setting.AppURL+"api")) return strings.HasPrefix(strings.ToLower(uri), strings.ToLower(setting.AppURL+"api"))

Some files were not shown because too many files have changed in this diff Show more