Merge branch 'main' of https://github.com/go-gitea/gitea
commit 551c4bb3c3
195 changed files with 4275 additions and 1392 deletions
@@ -27,10 +27,10 @@ func runConvert(ctx *cli.Context) error {
return err
}
log.Trace("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath)
log.Info("AppPath: %s", setting.AppPath)
log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Info("Custom path: %s", setting.CustomPath)
log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig()
if !setting.Database.UseMySQL {
@@ -69,7 +69,7 @@ var CmdDumpRepository = cli.Command{
cli.StringFlag{
Name: "units",
Value: "",
Usage: `Which items will be migrated, one or more units should be separated as comma.
Usage: `Which items will be migrated, one or more units should be separated as comma.
wiki, issues, labels, releases, release_assets, milestones, pull_requests, comments are allowed. Empty means all units.`,
},
},

@@ -80,10 +80,10 @@ func runDumpRepository(ctx *cli.Context) error {
return err
}
log.Trace("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath)
log.Info("AppPath: %s", setting.AppPath)
log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Info("Custom path: %s", setting.CustomPath)
log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig()
var (
10 cmd/hook.go
@@ -179,7 +179,7 @@ Gitea or set your environment appropriately.`, "")
GitObjectDirectory: os.Getenv(private.GitObjectDirectory),
GitQuarantinePath: os.Getenv(private.GitQuarantinePath),
GitPushOptions: pushOptions(),
ProtectedBranchID: prID,
PullRequestID: prID,
IsDeployKey: isDeployKey,
}

@@ -221,8 +221,8 @@ Gitea or set your environment appropriately.`, "")
total++
lastline++
// If the ref is a branch, check if it's protected
if strings.HasPrefix(refFullName, git.BranchPrefix) {
// If the ref is a branch or tag, check if it's protected
if strings.HasPrefix(refFullName, git.BranchPrefix) || strings.HasPrefix(refFullName, git.TagPrefix) {
oldCommitIDs[count] = oldCommitID
newCommitIDs[count] = newCommitID
refFullNames[count] = refFullName

@@ -230,7 +230,7 @@ Gitea or set your environment appropriately.`, "")
fmt.Fprintf(out, "*")
if count >= hookBatchSize {
fmt.Fprintf(out, " Checking %d branches\n", count)
fmt.Fprintf(out, " Checking %d references\n", count)
hookOptions.OldCommitIDs = oldCommitIDs
hookOptions.NewCommitIDs = newCommitIDs

@@ -261,7 +261,7 @@ Gitea or set your environment appropriately.`, "")
hookOptions.NewCommitIDs = newCommitIDs[:count]
hookOptions.RefFullNames = refFullNames[:count]
fmt.Fprintf(out, " Checking %d branches\n", count)
fmt.Fprintf(out, " Checking %d references\n", count)
statusCode, msg := private.HookPreReceive(username, reponame, hookOptions)
switch statusCode {
@@ -28,10 +28,10 @@ func runMigrate(ctx *cli.Context) error {
return err
}
log.Trace("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath)
log.Info("AppPath: %s", setting.AppPath)
log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Info("Custom path: %s", setting.CustomPath)
log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig()
if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil {
@@ -110,10 +110,10 @@ func runMigrateStorage(ctx *cli.Context) error {
return err
}
log.Trace("AppPath: %s", setting.AppPath)
log.Trace("AppWorkPath: %s", setting.AppWorkPath)
log.Trace("Custom path: %s", setting.CustomPath)
log.Trace("Log path: %s", setting.LogRootPath)
log.Info("AppPath: %s", setting.AppPath)
log.Info("AppWorkPath: %s", setting.AppWorkPath)
log.Info("Custom path: %s", setting.CustomPath)
log.Info("Log path: %s", setting.LogRootPath)
setting.InitDBConfig()
if err := models.NewEngine(context.Background(), migrations.Migrate); err != nil {
16 cmd/web.go
@@ -47,6 +47,14 @@ and it takes care of all the other things for you`,
Value: setting.PIDFile,
Usage: "Custom pid file path",
},
cli.BoolFlag{
Name: "quiet, q",
Usage: "Only display Fatal logging errors until logging is set-up",
},
cli.BoolFlag{
Name: "verbose",
Usage: "Set initial logging to TRACE level until logging is properly set-up",
},
},
}

@@ -71,6 +79,14 @@ func runHTTPRedirector() {
}
func runWeb(ctx *cli.Context) error {
if ctx.Bool("verbose") {
_ = log.DelLogger("console")
log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "trace", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
} else if ctx.Bool("quiet") {
_ = log.DelLogger("console")
log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "fatal", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
}
managerCtx, cancel := context.WithCancel(context.Background())
graceful.InitManager(managerCtx)
defer cancel()
@@ -26,6 +26,7 @@ import (
"time"
"code.gitea.io/gitea/models"
gitea_git "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/markup/external"
"code.gitea.io/gitea/modules/setting"

@@ -79,7 +80,7 @@ func runPR() {
setting.RunUser = curUser.Username
log.Printf("[PR] Loading fixtures data ...\n")
setting.CheckLFSVersion()
gitea_git.CheckLFSVersion()
//models.LoadConfigs()
/*
setting.Database.Type = "sqlite3"
@@ -651,9 +651,15 @@ PATH =
;DEFAULT_ALLOW_CREATE_ORGANIZATION = true
;;
;; Either "public", "limited" or "private", default is "public"
;; Limited is for signed user only
;; Private is only for member of the organization
;; Public is for everyone
;; Limited is for users visible only to signed users
;; Private is for users visible only to members of their organizations
;; Public is for users visible for everyone
;DEFAULT_USER_VISIBILITY = public
;;
;; Either "public", "limited" or "private", default is "public"
;; Limited is for organizations visible only to signed users
;; Private is for organizations visible only to members of the organization
;; Public is for organizations visible to everyone
;DEFAULT_ORG_VISIBILITY = public
;;
;; Default value for DefaultOrgMemberVisible

@@ -705,6 +711,8 @@ PATH =
;;
;; Minimum amount of time a user must exist before comments are kept when the user is deleted.
;USER_DELETE_WITH_COMMENTS_MAX_TIME = 0
;; Valid site url schemes for user profiles
;VALID_SITE_URL_SCHEMES=http,https

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

@@ -2048,6 +2056,16 @@ PATH =
;; storage type
;STORAGE_TYPE = local

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; settings for repository archives, will override storage setting
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;[storage.repo-archive]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; storage type
;STORAGE_TYPE = local

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; lfs storage will override storage
@@ -512,6 +512,7 @@ relation to port exhaustion.
- `SHOW_MILESTONES_DASHBOARD_PAGE`: **true** Enable this to show the milestones dashboard page - a view of all the user's milestones
- `AUTO_WATCH_NEW_REPOS`: **true**: Enable this to let all organisation users watch new repos when they are created
- `AUTO_WATCH_ON_CHANGES`: **false**: Enable this to make users watch a repository after their first commit to it
- `DEFAULT_USER_VISIBILITY`: **public**: Set default visibility mode for users, either "public", "limited" or "private".
- `DEFAULT_ORG_VISIBILITY`: **public**: Set default visibility mode for organisations, either "public", "limited" or "private".
- `DEFAULT_ORG_MEMBER_VISIBLE`: **false** True will make the membership of the users visible when added to the organisation.
- `ALLOW_ONLY_INTERNAL_REGISTRATION`: **false** Set to true to force registration only via Gitea.

@@ -519,6 +520,7 @@ relation to port exhaustion.
- `NO_REPLY_ADDRESS`: **noreply.DOMAIN** Value for the domain part of the user's email address in the git log if user has set KeepEmailPrivate to true. DOMAIN resolves to the value in server.DOMAIN.
  The user's email will be replaced with a concatenation of the user name in lower case, "@" and NO_REPLY_ADDRESS.
- `USER_DELETE_WITH_COMMENTS_MAX_TIME`: **0** Minimum amount of time a user must exist before comments are kept when the user is deleted.
- `VALID_SITE_URL_SCHEMES`: **http, https**: Valid site url schemes for user profiles

### Service - Explore (`service.explore`)

@@ -907,13 +909,17 @@ Gitea supports customizing the sanitization policy for rendered HTML. The exampl
ELEMENT = span
ALLOW_ATTR = class
REGEXP = ^\s*((math(\s+|$)|inline(\s+|$)|display(\s+|$)))+
ALLOW_DATA_URI_IMAGES = true
```

- `ELEMENT`: The element this policy applies to. Must be non-empty.
- `ALLOW_ATTR`: The attribute this policy allows. Must be non-empty.
- `REGEXP`: A regex to match the contents of the attribute against. Must be present but may be empty for unconditional whitelisting of this attribute.
- `ALLOW_DATA_URI_IMAGES`: **false** Allow data uri images (`<img src="data:image/png;base64,..."/>`).

Multiple sanitisation rules can be defined by adding unique subsections, e.g. `[markup.sanitizer.TeX-2]`.
To apply a sanitisation rule only to a specific external renderer, it must use the renderer name, e.g. `[markup.sanitizer.asciidoc.rule-1]`.
If the rule is defined above the renderer ini section or the name does not match a renderer it is applied to every renderer.

## Time (`time`)

@@ -991,6 +997,23 @@ MINIO_USE_SSL = false

And used by `[attachment]`, `[lfs]` etc. as `STORAGE_TYPE`.

## Repository Archive Storage (`storage.repo-archive`)

Configuration for repository archive storage. It will inherit from the default `[storage]`, or
from `[storage.xxx]` when `STORAGE_TYPE` is set to `xxx`. The default of `PATH`
is `data/repo-archive` and the default of `MINIO_BASE_PATH` is `repo-archive/`.

- `STORAGE_TYPE`: **local**: Storage type for repo archive, `local` for local disk or `minio` for s3 compatible object storage service or other name defined with `[storage.xxx]`
- `SERVE_DIRECT`: **false**: Allows the storage driver to redirect to authenticated URLs to serve files directly. Currently, only Minio/S3 is supported via signed URLs, local does nothing.
- `PATH`: **./data/repo-archive**: Where to store archive files, only available when `STORAGE_TYPE` is `local`.
- `MINIO_ENDPOINT`: **localhost:9000**: Minio endpoint to connect, only available when `STORAGE_TYPE` is `minio`
- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID to connect, only available when `STORAGE_TYPE` is `minio`
- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey to connect, only available when `STORAGE_TYPE` is `minio`
- `MINIO_BUCKET`: **gitea**: Minio bucket to store the archives, only available when `STORAGE_TYPE` is `minio`
- `MINIO_LOCATION`: **us-east-1**: Minio location to create bucket, only available when `STORAGE_TYPE` is `minio`
- `MINIO_BASE_PATH`: **repo-archive/**: Minio base path on the bucket, only available when `STORAGE_TYPE` is `minio`
- `MINIO_USE_SSL`: **false**: Minio enabled ssl, only available when `STORAGE_TYPE` is `minio`

## Other (`other`)

- `SHOW_FOOTER_BRANDING`: **false**: Show Gitea branding in the footer.
@@ -382,6 +382,21 @@ MINIO_USE_SSL = false

You can then use this name as the value of `STORAGE_TYPE` in `[attachment]`, `[lfs]`, and so on.

## Repository Archive Storage (`storage.repo-archive`)

Storage configuration for repository archives. If `STORAGE_TYPE` is empty, this configuration inherits from `[storage]`. If it is set to some `xxx` other than `local` or `minio`, it inherits from `[storage.xxx]`. When inheriting, `PATH` defaults to `data/repo-archive` and `MINIO_BASE_PATH` defaults to `repo-archive/`.

- `STORAGE_TYPE`: **local**: Storage type for repository archives; `local` stores them on disk, `minio` stores them in an S3-compatible object storage service.
- `SERVE_DIRECT`: **false**: Allow redirecting directly to the storage system. Currently only Minio/S3 is supported.
- `PATH`: Where repository archive files are stored; defaults to `data/repo-archive`.
- `MINIO_ENDPOINT`: **localhost:9000**: Minio endpoint, only used when `STORAGE_TYPE` is `minio`.
- `MINIO_ACCESS_KEY_ID`: Minio accessKeyID, only used when `STORAGE_TYPE` is `minio`.
- `MINIO_SECRET_ACCESS_KEY`: Minio secretAccessKey, only used when `STORAGE_TYPE` is `minio`.
- `MINIO_BUCKET`: **gitea**: Minio bucket, only used when `STORAGE_TYPE` is `minio`.
- `MINIO_LOCATION`: **us-east-1**: Minio location, only used when `STORAGE_TYPE` is `minio`.
- `MINIO_BASE_PATH`: **repo-archive/**: Minio base path, only used when `STORAGE_TYPE` is `minio`.
- `MINIO_USE_SSL`: **false**: Whether Minio uses SSL, only used when `STORAGE_TYPE` is `minio`.

## Other (`other`)

- `SHOW_FOOTER_BRANDING`: If true, show the Gitea branding in the page footer.
@@ -64,8 +64,8 @@ IS_INPUT_FILE = false
[markup.jupyter]
ENABLED = true
FILE_EXTENSIONS = .ipynb
RENDER_COMMAND = "jupyter nbconvert --stdout --to html --template basic "
IS_INPUT_FILE = true
RENDER_COMMAND = "jupyter nbconvert --stdin --stdout --to html --template basic"
IS_INPUT_FILE = false

[markup.restructuredtext]
ENABLED = true
@@ -90,15 +90,50 @@ FILE_EXTENSIONS = .md,.markdown
RENDER_COMMAND = pandoc -f markdown -t html --katex
```

You must define `ELEMENT`, `ALLOW_ATTR`, and `REGEXP` in each section.
You must define `ELEMENT` and `ALLOW_ATTR` in each section.

To define multiple entries, add a unique alphanumeric suffix (e.g., `[markup.sanitizer.1]` and `[markup.sanitizer.something]`).

To apply a sanitisation rule only to a specific external renderer, it must use the renderer name, e.g. `[markup.sanitizer.asciidoc.rule-1]`, `[markup.sanitizer.<renderer>.rule-1]`.

**Note**: If the rule is defined above the renderer ini section or the name does not match a renderer it is applied to every renderer.

Once your configuration changes have been made, restart Gitea to have changes take effect.

**Note**: Prior to Gitea 1.12 there was a single `markup.sanitiser` section with keys that were redefined for multiple rules; however,
there were significant problems with this method of configuration, necessitating configuration through multiple sections.

### Example: Office DOCX

Display Office DOCX files with [`pandoc`](https://pandoc.org/):
```ini
[markup.docx]
ENABLED = true
FILE_EXTENSIONS = .docx
RENDER_COMMAND = "pandoc --from docx --to html --self-contained --template /path/to/basic.html"

[markup.sanitizer.docx.img]
ALLOW_DATA_URI_IMAGES = true
```

The template file has the following content:
```
$body$
```

### Example: Jupyter Notebook

Display Jupyter Notebook files with [`nbconvert`](https://github.com/jupyter/nbconvert):
```ini
[markup.jupyter]
ENABLED = true
FILE_EXTENSIONS = .ipynb
RENDER_COMMAND = "jupyter-nbconvert --stdin --stdout --to html --template basic"

[markup.sanitizer.jupyter.img]
ALLOW_DATA_URI_IMAGES = true
```

## Customizing CSS

The external renderer is specified in the .ini in the format `[markup.XXXXX]` and the HTML supplied by your external renderer will be wrapped in a `<div>` with classes `markup` and `XXXXX`. The `markup` class provides out of the box styling (as does `markdown` if `XXXXX` is `markdown`). Otherwise you can use these classes to specifically target the contents of your rendered HTML.
57 docs/content/doc/advanced/protected-tags.en-us.md (new file)
@@ -0,0 +1,57 @@
---
date: "2021-05-14T00:00:00-00:00"
title: "Protected tags"
slug: "protected-tags"
weight: 45
toc: false
draft: false
menu:
  sidebar:
    parent: "advanced"
    name: "Protected tags"
    weight: 45
    identifier: "protected-tags"
---

# Protected tags

Protected tags allow control over who has permission to create or update git tags. Each rule allows you to match either an individual tag name, or use an appropriate pattern to control multiple tags at once.

**Table of Contents**

{{< toc >}}

## Setting up protected tags

To protect a tag, you need to follow these steps:

1. Go to the repository’s **Settings** > **Tags** page.
1. Type a pattern to match a name. You can use a single name, a [glob pattern](https://pkg.go.dev/github.com/gobwas/glob#Compile) or a regular expression.
1. Choose the allowed users and/or teams. If you leave these fields empty, no one is allowed to create or modify this tag.
1. Select **Save** to save the configuration.

## Pattern protected tags

The pattern uses [glob](https://pkg.go.dev/github.com/gobwas/glob#Compile) or regular expressions to match a tag name. For regular expressions you need to enclose the pattern in slashes.

Examples:

| Type  | Pattern Protected Tag    | Possible Matching Tags                  |
| ----- | ------------------------ | --------------------------------------- |
| Glob  | `v*`                     | `v`, `v-1`, `version2`                  |
| Glob  | `v[0-9]`                 | `v0`, `v1` up to `v9`                   |
| Glob  | `*-release`              | `2.1-release`, `final-release`          |
| Glob  | `gitea`                  | only `gitea`                            |
| Glob  | `*gitea*`                | `gitea`, `2.1-gitea`, `1_gitea-release` |
| Glob  | `{v,rel}-*`              | `v-`, `v-1`, `v-final`, `rel-`, `rel-x` |
| Glob  | `*`                      | matches all possible tag names          |
| Regex | `/\Av/`                  | `v`, `v-1`, `version2`                  |
| Regex | `/\Av[0-9]\z/`           | `v0`, `v1` up to `v9`                   |
| Regex | `/\Av\d+\.\d+\.\d+\z/`   | `v1.0.17`, `v2.1.0`                     |
| Regex | `/\Av\d+(\.\d+){0,2}\z/` | `v1`, `v2.1`, `v1.2.34`                 |
| Regex | `/-release\z/`           | `2.1-release`, `final-release`          |
| Regex | `/gitea/`                | `gitea`, `2.1-gitea`, `1_gitea-release` |
| Regex | `/\Agitea\z/`            | only `gitea`                            |
| Regex | `/^gitea$/`              | only `gitea`                            |
| Regex | `/\A(v\|rel)-/`          | `v-`, `v-1`, `v-final`, `rel-`, `rel-x` |
| Regex | `/.+/`                   | matches all possible tag names          |
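As an aside, the glob-versus-regex dispatch described on this page mirrors `EnsureCompiledPattern` in `models/protected_tag.go`, added later in this commit: a pattern wrapped in slashes is compiled as a regular expression, anything else as a glob. Below is a minimal standalone sketch of that matching logic; the function name and sample values are illustrative only and are not part of the commit.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"

	"github.com/gobwas/glob"
)

// matchTag reports whether tagName matches a protected-tag pattern.
// A pattern wrapped in slashes (e.g. `/\Av-/`) is compiled as a regular
// expression; anything else (e.g. `v-*`) is compiled as a glob.
func matchTag(pattern, tagName string) (bool, error) {
	if len(pattern) >= 2 && strings.HasPrefix(pattern, "/") && strings.HasSuffix(pattern, "/") {
		re, err := regexp.Compile(pattern[1 : len(pattern)-1])
		if err != nil {
			return false, err
		}
		return re.MatchString(tagName), nil
	}
	g, err := glob.Compile(pattern)
	if err != nil {
		return false, err
	}
	return g.Match(tagName), nil
}

func main() {
	examples := []struct{ pattern, tag string }{
		{`v[0-9]`, "v1"},                    // glob: matches
		{`/\Av\d+\.\d+\.\d+\z/`, "v1.0.17"}, // regex: matches
		{`gitea`, "2.1-gitea"},              // glob matches the exact name only, so no match
	}
	for _, e := range examples {
		ok, err := matchTag(e.pattern, e.tag)
		fmt.Println(e.pattern, e.tag, ok, err)
	}
}
```

Compiling the pattern once and caching it, as the real model does on the `ProtectedTag` struct, avoids recompiling it for every tag that is checked.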
@@ -73,6 +73,8 @@ One of these three distributions of Make will run on Windows:
- The binary is called `mingw32-make.exe` instead of `make.exe`. Add the `bin` folder to `PATH`.
- [Chocolatey package](https://chocolatey.org/packages/make). Run `choco install make`

**Note**: If you are attempting to build using make with Windows Command Prompt, you may run into issues. The above prompts (git bash, or mingw) are recommended; however, if you only have Command Prompt (or potentially PowerShell) you can set environment variables using the [set](https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/set_1) command, e.g. `set TAGS=bindata`.

## Downloading and cloning the Gitea source code

The recommended method of obtaining the source code is by using `git clone`.
@@ -46,6 +46,8 @@ Starts the server:
- `--port number`, `-p number`: Port number. Optional. (default: 3000). Overrides configuration file.
- `--install-port number`: Port number to run the install page on. Optional. (default: 3000). Overrides configuration file.
- `--pid path`, `-P path`: Pidfile path. Optional.
- `--quiet`, `-q`: Only emit Fatal logs on the console for logs emitted before logging is set up.
- `--verbose`: Emit tracing logs on the console for logs emitted before logging is set up.
- Examples:
  - `gitea web`
  - `gitea web --port 80`
@@ -221,6 +221,9 @@ If you wish to run Gitea with IIS. You will need to setup IIS with URL Rewrite a
```xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<system.web>
<httpRuntime requestPathInvalidCharacters="" />
</system.web>
<system.webServer>
<security>
<requestFiltering>
@@ -11,6 +11,7 @@ import (
"code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/assert"
)

@@ -139,6 +140,59 @@ func TestAPIPullReview(t *testing.T) {
req = NewRequestf(t, http.MethodDelete, "/api/v1/repos/%s/%s/pulls/%d/reviews/%d?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, review.ID, token)
resp = session.MakeRequest(t, req, http.StatusNoContent)
// test CreatePullReview Comment without body but with comments
req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{
// Body: "",
Event: "COMMENT",
Comments: []api.CreatePullReviewComment{{
Path: "README.md",
Body: "first new line",
OldLineNum: 0,
NewLineNum: 1,
}, {
Path: "README.md",
Body: "first old line",
OldLineNum: 1,
NewLineNum: 0,
},
},
})
var commentReview api.PullReview
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &commentReview)
assert.EqualValues(t, "COMMENT", commentReview.State)
assert.EqualValues(t, 2, commentReview.CodeCommentsCount)
assert.EqualValues(t, "", commentReview.Body)
assert.EqualValues(t, false, commentReview.Dismissed)
// test CreatePullReview Comment with body but without comments
commentBody := "This is a body of the comment."
req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{
Body: commentBody,
Event: "COMMENT",
Comments: []api.CreatePullReviewComment{},
})
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &commentReview)
assert.EqualValues(t, "COMMENT", commentReview.State)
assert.EqualValues(t, 0, commentReview.CodeCommentsCount)
assert.EqualValues(t, commentBody, commentReview.Body)
assert.EqualValues(t, false, commentReview.Dismissed)
// test CreatePullReview Comment without body and no comments
req = NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/reviews?token=%s", repo.OwnerName, repo.Name, pullIssue.Index, token), &api.CreatePullReviewOptions{
Body: "",
Event: "COMMENT",
Comments: []api.CreatePullReviewComment{},
})
resp = session.MakeRequest(t, req, http.StatusUnprocessableEntity)
errMap := make(map[string]interface{})
json := jsoniter.ConfigCompatibleWithStandardLibrary
json.Unmarshal(resp.Body.Bytes(), &errMap)
assert.EqualValues(t, "review event COMMENT requires a body or a comment", errMap["message"].(string))
// test get review requests
// to make it simple, use same api with get review
pullIssue12 := models.AssertExistsAndLoadBean(t, &models.Issue{ID: 12}).(*models.Issue)
@@ -39,7 +39,7 @@ func TestAPIRepoTags(t *testing.T) {
assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.zip", tags[0].ZipballURL)
assert.Equal(t, setting.AppURL+"user2/repo1/archive/v1.1.tar.gz", tags[0].TarballURL)
newTag := createNewTagUsingAPI(t, session, token, user.Name, repoName, "awesome-tag", "", "nice!\nand some text")
newTag := createNewTagUsingAPI(t, session, token, user.Name, repoName, "gitea/22", "", "nice!\nand some text")
resp = session.MakeRequest(t, req, http.StatusOK)
DecodeJSON(t, resp, &tags)
assert.Len(t, tags, 2)

@@ -51,6 +51,20 @@ func TestAPIRepoTags(t *testing.T) {
assert.EqualValues(t, newTag.Commit.SHA, tag.Commit.SHA)
}
}
// get created tag
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/tags/%s?token=%s", user.Name, repoName, newTag.Name, token)
resp = session.MakeRequest(t, req, http.StatusOK)
var tag *api.Tag
DecodeJSON(t, resp, &tag)
assert.EqualValues(t, newTag, tag)
// delete tag
delReq := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/tags/%s?token=%s", user.Name, repoName, newTag.Name, token)
resp = session.MakeRequest(t, delReq, http.StatusNoContent)
// check if it's gone
resp = session.MakeRequest(t, req, http.StatusNotFound)
}
func createNewTagUsingAPI(t *testing.T, session *TestSession, token string, ownerName, repoName, name, target, msg string) *api.Tag {
@@ -26,7 +26,7 @@ func TestUserHeatmap(t *testing.T) {
var heatmap []*models.UserHeatmapData
DecodeJSON(t, resp, &heatmap)
var dummyheatmap []*models.UserHeatmapData
dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1603152000, Contributions: 1})
dummyheatmap = append(dummyheatmap, &models.UserHeatmapData{Timestamp: 1603227600, Contributions: 1})
assert.Equal(t, dummyheatmap, heatmap)
}
@@ -59,3 +59,34 @@ func TestAPIUserSearchNotLoggedIn(t *testing.T) {
}
}
}
func TestAPIUserSearchAdminLoggedInUserHidden(t *testing.T) {
defer prepareTestEnv(t)()
adminUsername := "user1"
session := loginUser(t, adminUsername)
token := getTokenForLoggedInUser(t, session)
query := "user31"
req := NewRequestf(t, "GET", "/api/v1/users/search?token=%s&q=%s", token, query)
req.SetBasicAuth(token, "x-oauth-basic")
resp := session.MakeRequest(t, req, http.StatusOK)
var results SearchResults
DecodeJSON(t, resp, &results)
assert.NotEmpty(t, results.Data)
for _, user := range results.Data {
assert.Contains(t, user.UserName, query)
assert.NotEmpty(t, user.Email)
assert.EqualValues(t, "private", user.Visibility)
}
}
func TestAPIUserSearchNotLoggedInUserHidden(t *testing.T) {
defer prepareTestEnv(t)()
query := "user31"
req := NewRequestf(t, "GET", "/api/v1/users/search?q=%s", query)
resp := MakeRequest(t, req, http.StatusOK)
var results SearchResults
DecodeJSON(t, resp, &results)
assert.Empty(t, results.Data)
}
@@ -143,7 +143,7 @@ func standardCommitAndPushTest(t *testing.T, dstPath string) (little, big string
func lfsCommitAndPushTest(t *testing.T, dstPath string) (littleLFS, bigLFS string) {
t.Run("LFS", func(t *testing.T) {
defer PrintCurrentTest(t)()
setting.CheckLFSVersion()
git.CheckLFSVersion()
if !setting.LFS.StartServer {
t.Skip()
return

@@ -213,7 +213,7 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
assert.Equal(t, littleSize, resp.Length)
setting.CheckLFSVersion()
git.CheckLFSVersion()
if setting.LFS.StartServer {
req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", littleLFS))
resp := session.MakeRequest(t, req, http.StatusOK)

@@ -255,7 +255,7 @@ func mediaTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS
resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
assert.Equal(t, littleSize, resp.Length)
setting.CheckLFSVersion()
git.CheckLFSVersion()
if setting.LFS.StartServer {
req = NewRequest(t, "GET", path.Join("/", username, reponame, "/media/branch/master/", littleLFS))
resp = session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
@@ -0,0 +1 @@
aacbdfe9e1c4b47f60abe81849045fa4e96f1d75
@@ -26,6 +26,7 @@ import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/queue"

@@ -162,7 +163,7 @@ func initIntegrationTest() {
setting.SetCustomPathAndConf("", "", "")
setting.NewContext()
util.RemoveAll(models.LocalCopyPath())
setting.CheckLFSVersion()
git.CheckLFSVersion()
setting.InitDBConfig()
if err := storage.Init(); err != nil {
fmt.Printf("Init storage failed: %v", err)
@@ -13,6 +13,7 @@ import (
"testing"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/routers/web"

@@ -81,7 +82,7 @@ func checkResponseTestContentEncoding(t *testing.T, content *[]byte, resp *httpt
func TestGetLFSSmall(t *testing.T) {
defer prepareTestEnv(t)()
setting.CheckLFSVersion()
git.CheckLFSVersion()
if !setting.LFS.StartServer {
t.Skip()
return

@@ -94,7 +95,7 @@ func TestGetLFSSmall(t *testing.T) {
func TestGetLFSLarge(t *testing.T) {
defer prepareTestEnv(t)()
setting.CheckLFSVersion()
git.CheckLFSVersion()
if !setting.LFS.StartServer {
t.Skip()
return

@@ -110,7 +111,7 @@ func TestGetLFSLarge(t *testing.T) {
func TestGetLFSGzip(t *testing.T) {
defer prepareTestEnv(t)()
setting.CheckLFSVersion()
git.CheckLFSVersion()
if !setting.LFS.StartServer {
t.Skip()
return

@@ -131,7 +132,7 @@ func TestGetLFSGzip(t *testing.T) {
func TestGetLFSZip(t *testing.T) {
defer prepareTestEnv(t)()
setting.CheckLFSVersion()
git.CheckLFSVersion()
if !setting.LFS.StartServer {
t.Skip()
return

@@ -154,7 +155,7 @@ func TestGetLFSZip(t *testing.T) {
func TestGetLFSRangeNo(t *testing.T) {
defer prepareTestEnv(t)()
setting.CheckLFSVersion()
git.CheckLFSVersion()
if !setting.LFS.StartServer {
t.Skip()
return

@@ -167,7 +168,7 @@ func TestGetLFSRangeNo(t *testing.T) {
func TestGetLFSRange(t *testing.T) {
defer prepareTestEnv(t)()
setting.CheckLFSVersion()
git.CheckLFSVersion()
if !setting.LFS.StartServer {
t.Skip()
return
@@ -23,6 +23,7 @@ import (
"code.gitea.io/gitea/models/migrations"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"

@@ -61,7 +62,7 @@ func initMigrationTest(t *testing.T) func() {
assert.NoError(t, util.RemoveAll(setting.RepoRootPath))
assert.NoError(t, util.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath))
setting.CheckLFSVersion()
git.CheckLFSVersion()
setting.InitDBConfig()
setting.NewLogServices(true)
return deferFn
@@ -59,7 +59,9 @@ func TestMirrorPull(t *testing.T) {
assert.NoError(t, release_service.CreateRelease(gitRepo, &models.Release{
RepoID: repo.ID,
Repo: repo,
PublisherID: user.ID,
Publisher: user,
TagName: "v0.2",
Target: "master",
Title: "v0.2 is released",
74 integrations/repo_tag_test.go (new file)
@@ -0,0 +1,74 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package integrations
import (
"io/ioutil"
"net/url"
"testing"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/release"
"github.com/stretchr/testify/assert"
)
func TestCreateNewTagProtected(t *testing.T) {
defer prepareTestEnv(t)()
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 1}).(*models.Repository)
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
t.Run("API", func(t *testing.T) {
defer PrintCurrentTest(t)()
err := release.CreateNewTag(owner, repo, "master", "v-1", "first tag")
assert.NoError(t, err)
err = models.InsertProtectedTag(&models.ProtectedTag{
RepoID: repo.ID,
NamePattern: "v-*",
})
assert.NoError(t, err)
err = models.InsertProtectedTag(&models.ProtectedTag{
RepoID: repo.ID,
NamePattern: "v-1.1",
AllowlistUserIDs: []int64{repo.OwnerID},
})
assert.NoError(t, err)
err = release.CreateNewTag(owner, repo, "master", "v-2", "second tag")
assert.Error(t, err)
assert.True(t, models.IsErrProtectedTagName(err))
err = release.CreateNewTag(owner, repo, "master", "v-1.1", "third tag")
assert.NoError(t, err)
})
t.Run("Git", func(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
username := "user2"
httpContext := NewAPITestContext(t, username, "repo1")
dstPath, err := ioutil.TempDir("", httpContext.Reponame)
assert.NoError(t, err)
defer util.RemoveAll(dstPath)
u.Path = httpContext.GitPath()
u.User = url.UserPassword(username, userPassword)
doGitClone(dstPath, u)(t)
_, err = git.NewCommand("tag", "v-2").RunInDir(dstPath)
assert.NoError(t, err)
_, err = git.NewCommand("push", "--tags").RunInDir(dstPath)
assert.Error(t, err)
assert.Contains(t, err.Error(), "Tag v-2 is protected")
})
})
}
@@ -362,11 +362,7 @@ func (repo *Repository) GetBranchProtection(branchName string) (*ProtectedBranch
}
// IsProtectedBranch checks if branch is protected
func (repo *Repository) IsProtectedBranch(branchName string, doer *User) (bool, error) {
if doer == nil {
return true, nil
}
func (repo *Repository) IsProtectedBranch(branchName string) (bool, error) {
protectedBranch := &ProtectedBranch{
RepoID: repo.ID,
BranchName: branchName,

@@ -379,27 +375,6 @@ func (repo *Repository) IsProtectedBranch(branchName string, doer *User) (bool,
return has, nil
}
// IsProtectedBranchForPush checks if branch is protected for push
func (repo *Repository) IsProtectedBranchForPush(branchName string, doer *User) (bool, error) {
if doer == nil {
return true, nil
}
protectedBranch := &ProtectedBranch{
RepoID: repo.ID,
BranchName: branchName,
}
has, err := x.Get(protectedBranch)
if err != nil {
return true, err
} else if has {
return !protectedBranch.CanUserPush(doer.ID), nil
}
return false, nil
}
// updateApprovalWhitelist checks whether the user whitelist changed and returns a whitelist with
// the users from newWhitelist which have explicit read or write access to the repo.
func updateApprovalWhitelist(repo *Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) {
@@ -985,6 +985,21 @@ func (err ErrInvalidTagName) Error() string {
return fmt.Sprintf("release tag name is not valid [tag_name: %s]", err.TagName)
}
// ErrProtectedTagName represents a "ProtectedTagName" kind of error.
type ErrProtectedTagName struct {
TagName string
}
// IsErrProtectedTagName checks if an error is a ErrProtectedTagName.
func IsErrProtectedTagName(err error) bool {
_, ok := err.(ErrProtectedTagName)
return ok
}
func (err ErrProtectedTagName) Error() string {
return fmt.Sprintf("release tag name is protected [tag_name: %s]", err.TagName)
}
// ErrRepoFileAlreadyExists represents a "RepoFileAlreadyExist" kind of error.
type ErrRepoFileAlreadyExists struct {
Path string
@@ -32,3 +32,27 @@
repo_id: 22
is_private: true
created_unix: 1603267920
- id: 5
user_id: 10
op_type: 1 # create repo
act_user_id: 10
repo_id: 6
is_private: true
created_unix: 1603010100
- id: 6
user_id: 10
op_type: 1 # create repo
act_user_id: 10
repo_id: 7
is_private: true
created_unix: 1603011300
- id: 7
user_id: 10
op_type: 1 # create repo
act_user_id: 10
repo_id: 8
is_private: false
created_unix: 1603011540 # grouped with id:7
1 models/fixtures/repo_archiver.yml (new file)
@@ -0,0 +1 @@
[] # empty
@@ -508,7 +508,6 @@
num_repos: 0
is_active: true
-
id: 30
lower_name: user30

@@ -525,3 +524,20 @@
avatar_email: user30@example.com
num_repos: 2
is_active: true
-
id: 31
lower_name: user31
name: user31
full_name: "user31"
email: user31@example.com
passwd_hash_algo: argon2
passwd: a3d5fcd92bae586c2e3dbe72daea7a0d27833a8d0227aa1704f4bbd775c1f3b03535b76dd93b0d4d8d22a519dca47df1547b # password
type: 0 # individual
salt: ZogKvWdyEx
is_admin: false
visibility: 2
avatar: avatar31
avatar_email: user31@example.com
num_repos: 0
is_active: true
@@ -856,7 +856,11 @@ func UserSignIn(username, password string) (*User, error) {
return authUser, nil
}
log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err)
if IsErrUserNotExist(err) {
log.Debug("Failed to login '%s' via '%s': %v", username, source.Name, err)
} else {
log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err)
}
}
return nil, ErrUserNotExist{user.ID, user.Name, 0}
@@ -319,6 +319,10 @@ var migrations = []Migration{
NewMigration("Create PushMirror table", createPushMirrorTable),
// v184 -> v185
NewMigration("Rename Task errors to message", renameTaskErrorsToMessage),
// v185 -> v186
NewMigration("Add new table repo_archiver", addRepoArchiver),
// v186 -> v187
NewMigration("Create protected tag table", createProtectedTagTable),
}
// GetCurrentDBVersion returns the current db version
@@ -16,6 +16,7 @@ import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"

@@ -55,7 +56,7 @@ func TestMain(m *testing.M) {
setting.SetCustomPathAndConf("", "", "")
setting.NewContext()
setting.CheckLFSVersion()
git.CheckLFSVersion()
setting.InitDBConfig()
setting.NewLogServices(true)
@@ -1,3 +1,4 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
22 models/migrations/v185.go (new file)
@@ -0,0 +1,22 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package migrations
import (
"xorm.io/xorm"
)
func addRepoArchiver(x *xorm.Engine) error {
// RepoArchiver represents all archivers
type RepoArchiver struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"index unique(s)"`
Type int `xorm:"unique(s)"`
Status int
CommitID string `xorm:"VARCHAR(40) unique(s)"`
CreatedUnix int64 `xorm:"INDEX NOT NULL created"`
}
return x.Sync2(new(RepoArchiver))
}
26 models/migrations/v186.go (new file)
@@ -0,0 +1,26 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package migrations
import (
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/xorm"
)
func createProtectedTagTable(x *xorm.Engine) error {
type ProtectedTag struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64
NamePattern string
AllowlistUserIDs []int64 `xorm:"JSON TEXT"`
AllowlistTeamIDs []int64 `xorm:"JSON TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
}
return x.Sync2(new(ProtectedTag))
}
@@ -136,6 +136,8 @@ func init() {
new(RepoTransfer),
new(IssueIndex),
new(PushMirror),
new(RepoArchiver),
new(ProtectedTag),
)
gonicNames := []string{"SSL", "UID"}
@@ -455,22 +455,22 @@ func getOwnedOrgsByUserID(sess *xorm.Session, userID int64) ([]*User, error) {
Find(&orgs)
}
// HasOrgVisible tells if the given user can see the given org
func HasOrgVisible(org, user *User) bool {
return hasOrgVisible(x, org, user)
// HasOrgOrUserVisible tells if the given user can see the given org or user
func HasOrgOrUserVisible(org, user *User) bool {
return hasOrgOrUserVisible(x, org, user)
}
func hasOrgVisible(e Engine, org, user *User) bool {
func hasOrgOrUserVisible(e Engine, orgOrUser, user *User) bool {
// Not SignedUser
if user == nil {
return org.Visibility == structs.VisibleTypePublic
return orgOrUser.Visibility == structs.VisibleTypePublic
}
if user.IsAdmin {
if user.IsAdmin || orgOrUser.ID == user.ID {
return true
}
if (org.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !org.hasMemberWithUserID(e, user.ID) {
if (orgOrUser.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !orgOrUser.hasMemberWithUserID(e, user.ID) {
return false
}
return true

@@ -483,7 +483,7 @@ func HasOrgsVisible(orgs []*User, user *User) bool {
}
for _, org := range orgs {
if HasOrgVisible(org, user) {
if HasOrgOrUserVisible(org, user) {
return true
}
}
@@ -586,9 +586,9 @@ func TestHasOrgVisibleTypePublic(t *testing.T) {
assert.NoError(t, CreateOrganization(org, owner))
org = AssertExistsAndLoadBean(t,
&User{Name: org.Name, Type: UserTypeOrganization}).(*User)
test1 := HasOrgVisible(org, owner)
test2 := HasOrgVisible(org, user3)
test3 := HasOrgVisible(org, nil)
test1 := HasOrgOrUserVisible(org, owner)
test2 := HasOrgOrUserVisible(org, user3)
test3 := HasOrgOrUserVisible(org, nil)
assert.True(t, test1) // owner of org
assert.True(t, test2) // user not a part of org
assert.True(t, test3) // logged out user

@@ -609,9 +609,9 @@ func TestHasOrgVisibleTypeLimited(t *testing.T) {
assert.NoError(t, CreateOrganization(org, owner))
org = AssertExistsAndLoadBean(t,
&User{Name: org.Name, Type: UserTypeOrganization}).(*User)
test1 := HasOrgVisible(org, owner)
test2 := HasOrgVisible(org, user3)
test3 := HasOrgVisible(org, nil)
test1 := HasOrgOrUserVisible(org, owner)
test2 := HasOrgOrUserVisible(org, user3)
test3 := HasOrgOrUserVisible(org, nil)
assert.True(t, test1) // owner of org
assert.True(t, test2) // user not a part of org
assert.False(t, test3) // logged out user

@@ -632,9 +632,9 @@ func TestHasOrgVisibleTypePrivate(t *testing.T) {
assert.NoError(t, CreateOrganization(org, owner))
org = AssertExistsAndLoadBean(t,
&User{Name: org.Name, Type: UserTypeOrganization}).(*User)
test1 := HasOrgVisible(org, owner)
test2 := HasOrgVisible(org, user3)
test3 := HasOrgVisible(org, nil)
test1 := HasOrgOrUserVisible(org, owner)
test2 := HasOrgOrUserVisible(org, user3)
test3 := HasOrgOrUserVisible(org, nil)
assert.True(t, test1) // owner of org
assert.False(t, test2) // user not a part of org
assert.False(t, test3) // logged out user
131 models/protected_tag.go (new file)
@@ -0,0 +1,131 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"regexp"
"strings"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/timeutil"
"github.com/gobwas/glob"
)
// ProtectedTag struct
type ProtectedTag struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64
NamePattern string
RegexPattern *regexp.Regexp `xorm:"-"`
GlobPattern glob.Glob `xorm:"-"`
AllowlistUserIDs []int64 `xorm:"JSON TEXT"`
AllowlistTeamIDs []int64 `xorm:"JSON TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
}
// InsertProtectedTag inserts a protected tag to database
func InsertProtectedTag(pt *ProtectedTag) error {
_, err := x.Insert(pt)
return err
}
// UpdateProtectedTag updates the protected tag
func UpdateProtectedTag(pt *ProtectedTag) error {
_, err := x.ID(pt.ID).AllCols().Update(pt)
return err
}
// DeleteProtectedTag deletes a protected tag by ID
func DeleteProtectedTag(pt *ProtectedTag) error {
_, err := x.ID(pt.ID).Delete(&ProtectedTag{})
return err
}
// EnsureCompiledPattern ensures the glob pattern is compiled
func (pt *ProtectedTag) EnsureCompiledPattern() error {
if pt.RegexPattern != nil || pt.GlobPattern != nil {
return nil
}
var err error
if len(pt.NamePattern) >= 2 && strings.HasPrefix(pt.NamePattern, "/") && strings.HasSuffix(pt.NamePattern, "/") {
pt.RegexPattern, err = regexp.Compile(pt.NamePattern[1 : len(pt.NamePattern)-1])
} else {
pt.GlobPattern, err = glob.Compile(pt.NamePattern)
}
return err
}
// IsUserAllowed returns true if the user is allowed to modify the tag
func (pt *ProtectedTag) IsUserAllowed(userID int64) (bool, error) {
if base.Int64sContains(pt.AllowlistUserIDs, userID) {
return true, nil
}
if len(pt.AllowlistTeamIDs) == 0 {
return false, nil
}
in, err := IsUserInTeams(userID, pt.AllowlistTeamIDs)
if err != nil {
return false, err
}
return in, nil
}
// GetProtectedTags gets all protected tags of the repository
func (repo *Repository) GetProtectedTags() ([]*ProtectedTag, error) {
tags := make([]*ProtectedTag, 0)
return tags, x.Find(&tags, &ProtectedTag{RepoID: repo.ID})
}
// GetProtectedTagByID gets the protected tag with the specific id
func GetProtectedTagByID(id int64) (*ProtectedTag, error) {
tag := new(ProtectedTag)
has, err := x.ID(id).Get(tag)
if err != nil {
return nil, err
}
if !has {
return nil, nil
}
return tag, nil
}
// IsUserAllowedToControlTag checks if a user can control the specific tag.
// It returns true if the tag name is not protected or the user is allowed to control it.
func IsUserAllowedToControlTag(tags []*ProtectedTag, tagName string, userID int64) (bool, error) {
isAllowed := true
for _, tag := range tags {
err := tag.EnsureCompiledPattern()
if err != nil {
return false, err
}
if !tag.matchString(tagName) {
continue
}
isAllowed, err = tag.IsUserAllowed(userID)
if err != nil {
return false, err
}
if isAllowed {
break
}
}
return isAllowed, nil
}
func (pt *ProtectedTag) matchString(name string) bool {
if pt.RegexPattern != nil {
return pt.RegexPattern.MatchString(name)
}
return pt.GlobPattern.Match(name)
}
162 models/protected_tag_test.go (new file)
@@ -0,0 +1,162 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package models
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestIsUserAllowed(t *testing.T) {
assert.NoError(t, PrepareTestDatabase())
pt := &ProtectedTag{}
allowed, err := pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.False(t, allowed)
pt = &ProtectedTag{
AllowlistUserIDs: []int64{1},
}
allowed, err = pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.True(t, allowed)
allowed, err = pt.IsUserAllowed(2)
assert.NoError(t, err)
assert.False(t, allowed)
pt = &ProtectedTag{
AllowlistTeamIDs: []int64{1},
}
allowed, err = pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.False(t, allowed)
allowed, err = pt.IsUserAllowed(2)
assert.NoError(t, err)
assert.True(t, allowed)
pt = &ProtectedTag{
AllowlistUserIDs: []int64{1},
AllowlistTeamIDs: []int64{1},
}
allowed, err = pt.IsUserAllowed(1)
assert.NoError(t, err)
assert.True(t, allowed)
allowed, err = pt.IsUserAllowed(2)
assert.NoError(t, err)
assert.True(t, allowed)
}
func TestIsUserAllowedToControlTag(t *testing.T) {
cases := []struct {
name string
userid int64
allowed bool
}{
{
name: "test",
userid: 1,
allowed: true,
},
{
name: "test",
userid: 3,
allowed: true,
},
{
name: "gitea",
userid: 1,
allowed: true,
},
{
name: "gitea",
userid: 3,
allowed: false,
},
{
name: "test-gitea",
userid: 1,
allowed: true,
},
{
name: "test-gitea",
userid: 3,
allowed: false,
},
{
name: "gitea-test",
userid: 1,
allowed: true,
},
{
name: "gitea-test",
userid: 3,
allowed: true,
},
{
name: "v-1",
userid: 1,
allowed: false,
},
{
name: "v-1",
userid: 2,
allowed: true,
},
{
name: "release",
userid: 1,
allowed: false,
},
}
t.Run("Glob", func(t *testing.T) {
protectedTags := []*ProtectedTag{
{
NamePattern: `*gitea`,
AllowlistUserIDs: []int64{1},
},
{
NamePattern: `v-*`,
AllowlistUserIDs: []int64{2},
},
{
NamePattern: "release",
},
}
for n, c := range cases {
isAllowed, err := IsUserAllowedToControlTag(protectedTags, c.name, c.userid)
assert.NoError(t, err)
assert.Equal(t, c.allowed, isAllowed, "case %d: error should match", n)
}
})
t.Run("Regex", func(t *testing.T) {
protectedTags := []*ProtectedTag{
{
NamePattern: `/gitea\z/`,
AllowlistUserIDs: []int64{1},
},
{
NamePattern: `/\Av-/`,
AllowlistUserIDs: []int64{2},
},
{
NamePattern: "/release/",
},
}
for n, c := range cases {
isAllowed, err := IsUserAllowedToControlTag(protectedTags, c.name, c.userid)
assert.NoError(t, err)
assert.Equal(t, c.allowed, isAllowed, "case %d: error should match", n)
}
})
}
101 models/repo.go
@@ -585,8 +585,7 @@ func (repo *Repository) getReviewers(e Engine, doerID, posterID int64) ([]*User,
var users []*User
if repo.IsPrivate ||
(repo.Owner.IsOrganization() && repo.Owner.Visibility == api.VisibleTypePrivate) {
if repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate {
// This a private repository:
// Anyone who can read the repository is a requestable reviewer
if err := e.

@@ -1498,6 +1497,7 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
&Mirror{RepoID: repoID},
&Notification{RepoID: repoID},
&ProtectedBranch{RepoID: repoID},
&ProtectedTag{RepoID: repoID},
&PullRequest{BaseRepoID: repoID},
&PushMirror{RepoID: repoID},
&Release{RepoID: repoID},

@@ -1587,6 +1587,22 @@ func DeleteRepository(doer *User, uid, repoID int64) error {
return err
}
// Remove archives
var archives []*RepoArchiver
if err = sess.Where("repo_id=?", repoID).Find(&archives); err != nil {
return err
}
for _, v := range archives {
v.Repo = repo
p, _ := v.RelativePath()
removeStorageWithNotice(sess, storage.RepoArchives, "Delete repo archive file", p)
}
if _, err := sess.Delete(&RepoArchiver{RepoID: repoID}); err != nil {
return err
}
if repo.NumForks > 0 {
if _, err = sess.Exec("UPDATE `repository` SET fork_id=0,is_fork=? WHERE fork_id=?", false, repo.ID); err != nil {
log.Error("reset 'fork_id' and 'is_fork': %v", err)

@@ -1768,64 +1784,45 @@ func DeleteRepositoryArchives(ctx context.Context) error {
func DeleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration) error {
log.Trace("Doing: ArchiveCleanup")
if err := x.Where("id > 0").Iterate(new(Repository), func(idx int, bean interface{}) error {
return deleteOldRepositoryArchives(ctx, olderThan, idx, bean)
}); err != nil {
log.Trace("Error: ArchiveClean: %v", err)
return err
for {
var archivers []RepoArchiver
err := x.Where("created_unix < ?", time.Now().Add(-olderThan).Unix()).
Asc("created_unix").
Limit(100).
Find(&archivers)
if err != nil {
log.Trace("Error: ArchiveClean: %v", err)
return err
}
for _, archiver := range archivers {
if err := deleteOldRepoArchiver(ctx, &archiver); err != nil {
return err
}
}
if len(archivers) < 100 {
break
}
}
log.Trace("Finished: ArchiveCleanup")
return nil
}
func deleteOldRepositoryArchives(ctx context.Context, olderThan time.Duration, idx int, bean interface{}) error {
repo := bean.(*Repository)
basePath := filepath.Join(repo.RepoPath(), "archives")
var delRepoArchiver = new(RepoArchiver)
for _, ty := range []string{"zip", "targz"} {
select {
case <-ctx.Done():
return ErrCancelledf("before deleting old repository archives with filetype %s for %s", ty, repo.FullName())
default:
}
path := filepath.Join(basePath, ty)
file, err := os.Open(path)
if err != nil {
if !os.IsNotExist(err) {
log.Warn("Unable to open directory %s: %v", path, err)
return err
}
// If the directory doesn't exist, that's okay.
continue
}
files, err := file.Readdir(0)
file.Close()
if err != nil {
log.Warn("Unable to read directory %s: %v", path, err)
return err
}
minimumOldestTime := time.Now().Add(-olderThan)
for _, info := range files {
if info.ModTime().Before(minimumOldestTime) && !info.IsDir() {
select {
case <-ctx.Done():
return ErrCancelledf("before deleting old repository archive file %s with filetype %s for %s", info.Name(), ty, repo.FullName())
default:
}
toDelete := filepath.Join(path, info.Name())
// This is a best-effort purge, so we do not check error codes to confirm removal.
if err = util.Remove(toDelete); err != nil {
log.Trace("Unable to delete %s, but proceeding: %v", toDelete, err)
}
}
}
}
func deleteOldRepoArchiver(ctx context.Context, archiver *RepoArchiver) error {
p, err := archiver.RelativePath()
if err != nil {
return err
}
_, err = x.ID(archiver.ID).Delete(delRepoArchiver)
if err != nil {
return err
}
if err := storage.RepoArchives.Delete(p); err != nil {
log.Error("delete repo archive file failed: %v", err)
}
return nil
}
|
||||
|
|
86
models/repo_archiver.go
Normal file
|
@ -0,0 +1,86 @@
|
|||
// Copyright 2021 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/timeutil"
|
||||
)
|
||||
|
||||
// RepoArchiverStatus represents repo archive status
|
||||
type RepoArchiverStatus int
|
||||
|
||||
// enumerate all repo archive statuses
|
||||
const (
|
||||
RepoArchiverGenerating = iota // the archiver is generating
|
||||
RepoArchiverReady // it's ready
|
||||
)
|
||||
|
||||
// RepoArchiver represents an archive request for a repository at a given commit
|
||||
type RepoArchiver struct {
|
||||
ID int64 `xorm:"pk autoincr"`
|
||||
RepoID int64 `xorm:"index unique(s)"`
|
||||
Repo *Repository `xorm:"-"`
|
||||
Type git.ArchiveType `xorm:"unique(s)"`
|
||||
Status RepoArchiverStatus
|
||||
CommitID string `xorm:"VARCHAR(40) unique(s)"`
|
||||
CreatedUnix timeutil.TimeStamp `xorm:"INDEX NOT NULL created"`
|
||||
}
|
||||
|
||||
// LoadRepo loads repository
|
||||
func (archiver *RepoArchiver) LoadRepo() (*Repository, error) {
|
||||
if archiver.Repo != nil {
|
||||
return archiver.Repo, nil
|
||||
}
|
||||
|
||||
var repo Repository
|
||||
has, err := x.ID(archiver.RepoID).Get(&repo)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !has {
|
||||
return nil, ErrRepoNotExist{
|
||||
ID: archiver.RepoID,
|
||||
}
|
||||
}
|
||||
return &repo, nil
|
||||
}
|
||||
|
||||
// RelativePath returns relative path
|
||||
func (archiver *RepoArchiver) RelativePath() (string, error) {
|
||||
repo, err := archiver.LoadRepo()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s/%s/%s.%s", repo.FullName(), archiver.CommitID[:2], archiver.CommitID, archiver.Type.String()), nil
|
||||
}
|
||||
|
||||
// GetRepoArchiver gets the archiver for the given repo, type and commit, or nil if none exists
|
||||
func GetRepoArchiver(ctx DBContext, repoID int64, tp git.ArchiveType, commitID string) (*RepoArchiver, error) {
|
||||
var archiver RepoArchiver
|
||||
has, err := ctx.e.Where("repo_id=?", repoID).And("`type`=?", tp).And("commit_id=?", commitID).Get(&archiver)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if has {
|
||||
return &archiver, nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// AddRepoArchiver adds an archiver
|
||||
func AddRepoArchiver(ctx DBContext, archiver *RepoArchiver) error {
|
||||
_, err := ctx.e.Insert(archiver)
|
||||
return err
|
||||
}
|
||||
|
||||
// UpdateRepoArchiverStatus updates archiver's status
|
||||
func UpdateRepoArchiverStatus(ctx DBContext, archiver *RepoArchiver) error {
|
||||
_, err := ctx.e.ID(archiver.ID).Cols("status").Update(archiver)
|
||||
return err
|
||||
}
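
Taken together, these helpers outline the lifecycle of an archive record: look it up, insert it in the generating state, and mark it ready once the file has been written to storage.RepoArchives. The following is only a hedged sketch of that flow, not code from this commit; ensureArchive, ctx, repoID and commitID are hypothetical placeholders.

// ensureArchive is a hypothetical helper illustrating the intended flow.
func ensureArchive(ctx DBContext, repoID int64, commitID string) (*RepoArchiver, error) {
	archiver, err := GetRepoArchiver(ctx, repoID, git.ZIP, commitID)
	if err != nil {
		return nil, err
	}
	if archiver == nil {
		archiver = &RepoArchiver{
			RepoID:   repoID,
			Type:     git.ZIP,
			Status:   RepoArchiverGenerating,
			CommitID: commitID,
		}
		if err := AddRepoArchiver(ctx, archiver); err != nil {
			return nil, err
		}
	}
	// ... the caller streams the archive into storage.RepoArchives,
	// then flips the status:
	archiver.Status = RepoArchiverReady
	if err := UpdateRepoArchiverStatus(ctx, archiver); err != nil {
		return nil, err
	}
	return archiver, nil
}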
|
|
@ -176,9 +176,9 @@ func getUserRepoPermission(e Engine, repo *Repository, user *User) (perm Permiss
|
|||
return
|
||||
}
|
||||
|
||||
// Prevent strangers from checking out public repo of private organization
|
||||
// Allow user if they are a collaborator of a repo within a private organization but not a member of the organization itself
|
||||
if repo.Owner.IsOrganization() && !hasOrgVisible(e, repo.Owner, user) && !isCollaborator {
|
||||
// Prevent strangers from checking out public repo of private organization/users
|
||||
// Allow user if they are a collaborator of a repo owned by a private user or a private organization but not a member of the organization itself
|
||||
if !hasOrgOrUserVisible(e, repo.Owner, user) && !isCollaborator {
|
||||
perm.AccessMode = AccessModeNone
|
||||
return
|
||||
}
|
||||
|
|
|
@ -74,6 +74,8 @@ func MainTest(m *testing.M, pathToGiteaRoot string) {
|
|||
|
||||
setting.RepoAvatar.Storage.Path = filepath.Join(setting.AppDataPath, "repo-avatars")
|
||||
|
||||
setting.RepoArchive.Storage.Path = filepath.Join(setting.AppDataPath, "repo-archive")
|
||||
|
||||
if err = storage.Init(); err != nil {
|
||||
fatalTestError("storage.Init: %v\n", err)
|
||||
}
|
||||
|
|
105
models/user.go
|
@ -432,6 +432,62 @@ func (u *User) IsPasswordSet() bool {
|
|||
return len(u.Passwd) != 0
|
||||
}
|
||||
|
||||
// IsVisibleToUser check if viewer is able to see user profile
|
||||
func (u *User) IsVisibleToUser(viewer *User) bool {
|
||||
return u.isVisibleToUser(x, viewer)
|
||||
}
|
||||
|
||||
func (u *User) isVisibleToUser(e Engine, viewer *User) bool {
|
||||
if viewer != nil && viewer.IsAdmin {
|
||||
return true
|
||||
}
|
||||
|
||||
switch u.Visibility {
|
||||
case structs.VisibleTypePublic:
|
||||
return true
|
||||
case structs.VisibleTypeLimited:
|
||||
if viewer == nil || viewer.IsRestricted {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
case structs.VisibleTypePrivate:
|
||||
if viewer == nil || viewer.IsRestricted {
|
||||
return false
|
||||
}
|
||||
|
||||
// If they follow - they see each other
|
||||
follower := IsFollowing(u.ID, viewer.ID)
|
||||
if follower {
|
||||
return true
|
||||
}
|
||||
|
||||
// Now we need to check if they are in some organization together
|
||||
count, err := x.Table("team_user").
|
||||
Where(
|
||||
builder.And(
|
||||
builder.Eq{"uid": viewer.ID},
|
||||
builder.Or(
|
||||
builder.Eq{"org_id": u.ID},
|
||||
builder.In("org_id",
|
||||
builder.Select("org_id").
|
||||
From("team_user", "t2").
|
||||
Where(builder.Eq{"uid": u.ID}))))).
|
||||
Count(new(TeamUser))
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if count == 0 {
|
||||
// No common organization
|
||||
return false
|
||||
}
|
||||
|
||||
// they are in an organization together
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
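
A minimal usage sketch of the new visibility check, assuming a profile handler that has loaded the target user and an optional signed-in viewer (nil for anonymous visitors); illustration only, not part of this commit.

// canShowProfile is a hypothetical guard before rendering a profile page.
func canShowProfile(target, viewer *User) bool {
	// Admins always pass; limited and private users are filtered as above.
	return target.IsVisibleToUser(viewer)
}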
|
||||
|
||||
// IsOrganization returns true if user is actually a organization.
|
||||
func (u *User) IsOrganization() bool {
|
||||
return u.Type == UserTypeOrganization
|
||||
|
@ -796,8 +852,13 @@ func IsUsableUsername(name string) error {
|
|||
return isUsableName(reservedUsernames, reservedUserPatterns, name)
|
||||
}
|
||||
|
||||
// CreateUserOverwriteOptions holds optional fields that override the system defaults on user creation
|
||||
type CreateUserOverwriteOptions struct {
|
||||
Visibility structs.VisibleType
|
||||
}
|
||||
|
||||
// CreateUser creates record of a new user.
|
||||
func CreateUser(u *User) (err error) {
|
||||
func CreateUser(u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err error) {
|
||||
if err = IsUsableUsername(u.Name); err != nil {
|
||||
return err
|
||||
}
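
Because the new parameter is variadic, existing CreateUser(u) call sites keep compiling, while a caller that needs to pin the visibility regardless of the instance default can pass the options struct. A hedged sketch; the helper name and user field values are placeholders.

// createPrivateUser is a hypothetical helper; the field values are placeholders.
func createPrivateUser() error {
	u := &User{
		Name:   "example-user",
		Email:  "user@example.com",
		Passwd: "a-sufficiently-long-password",
	}
	// Override only the visibility; the remaining system defaults still apply.
	return CreateUser(u, &CreateUserOverwriteOptions{
		Visibility: structs.VisibleTypePrivate,
	})
}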
|
||||
|
@ -831,8 +892,6 @@ func CreateUser(u *User) (err error) {
|
|||
return ErrEmailAlreadyUsed{u.Email}
|
||||
}
|
||||
|
||||
u.KeepEmailPrivate = setting.Service.DefaultKeepEmailPrivate
|
||||
|
||||
u.LowerName = strings.ToLower(u.Name)
|
||||
u.AvatarEmail = u.Email
|
||||
if u.Rands, err = GetUserSalt(); err != nil {
|
||||
|
@ -841,10 +900,18 @@ func CreateUser(u *User) (err error) {
|
|||
if err = u.SetPassword(u.Passwd); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// set system defaults
|
||||
u.KeepEmailPrivate = setting.Service.DefaultKeepEmailPrivate
|
||||
u.Visibility = setting.Service.DefaultUserVisibilityMode
|
||||
u.AllowCreateOrganization = setting.Service.DefaultAllowCreateOrganization && !setting.Admin.DisableRegularOrgCreation
|
||||
u.EmailNotificationsPreference = setting.Admin.DefaultEmailNotification
|
||||
u.MaxRepoCreation = -1
|
||||
u.Theme = setting.UI.DefaultTheme
|
||||
// overwrite defaults if set
|
||||
if len(overwriteDefault) != 0 && overwriteDefault[0] != nil {
|
||||
u.Visibility = overwriteDefault[0].Visibility
|
||||
}
|
||||
|
||||
if _, err = sess.Insert(u); err != nil {
|
||||
return err
|
||||
|
@ -1527,10 +1594,9 @@ func (opts *SearchUserOptions) toConds() builder.Cond {
|
|||
cond = cond.And(keywordCond)
|
||||
}
|
||||
|
||||
// If a visibility filter is set, apply it
|
||||
if len(opts.Visible) > 0 {
|
||||
cond = cond.And(builder.In("visibility", opts.Visible))
|
||||
} else {
|
||||
cond = cond.And(builder.In("visibility", structs.VisibleTypePublic))
|
||||
}
|
||||
|
||||
if opts.Actor != nil {
|
||||
|
@ -1543,16 +1609,27 @@ func (opts *SearchUserOptions) toConds() builder.Cond {
|
|||
exprCond = builder.Expr("org_user.org_id = \"user\".id")
|
||||
}
|
||||
|
||||
var accessCond builder.Cond
|
||||
if !opts.Actor.IsRestricted {
|
||||
accessCond = builder.Or(
|
||||
builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}, builder.Eq{"visibility": structs.VisibleTypePrivate}))),
|
||||
builder.In("visibility", structs.VisibleTypePublic, structs.VisibleTypeLimited))
|
||||
} else {
|
||||
// restricted users only see orgs they are a member of
|
||||
accessCond = builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID})))
|
||||
// If Admin - they see all users!
|
||||
if !opts.Actor.IsAdmin {
|
||||
// Force visibility for privacy
|
||||
var accessCond builder.Cond
|
||||
if !opts.Actor.IsRestricted {
|
||||
accessCond = builder.Or(
|
||||
builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID}, builder.Eq{"visibility": structs.VisibleTypePrivate}))),
|
||||
builder.In("visibility", structs.VisibleTypePublic, structs.VisibleTypeLimited))
|
||||
} else {
|
||||
// restricted users only see orgs they are a member of
|
||||
accessCond = builder.In("id", builder.Select("org_id").From("org_user").LeftJoin("`user`", exprCond).Where(builder.And(builder.Eq{"uid": opts.Actor.ID})))
|
||||
}
|
||||
// Don't forget about self
|
||||
accessCond = accessCond.Or(builder.Eq{"id": opts.Actor.ID})
|
||||
cond = cond.And(accessCond)
|
||||
}
|
||||
cond = cond.And(accessCond)
|
||||
|
||||
} else {
|
||||
// Force visibility for privacy
|
||||
// Not logged in - only public users
|
||||
cond = cond.And(builder.In("visibility", structs.VisibleTypePublic))
|
||||
}
|
||||
|
||||
if opts.UID > 0 {
|
||||
|
|
|
@ -32,17 +32,14 @@ func getUserHeatmapData(user *User, team *Team, doer *User) ([]*UserHeatmapData,
|
|||
return hdata, nil
|
||||
}
|
||||
|
||||
var groupBy string
|
||||
// Group by 15 minute intervals which will allow the client to accurately shift the timestamp to their timezone.
|
||||
// The interval is based on the fact that there are timezones such as UTC +5:30 and UTC +12:45.
|
||||
groupBy := "created_unix / 900 * 900"
|
||||
groupByName := "timestamp" // We need this extra case because mssql doesn't allow grouping by alias
|
||||
switch {
|
||||
case setting.Database.UseSQLite3:
|
||||
groupBy = "strftime('%s', strftime('%Y-%m-%d', created_unix, 'unixepoch'))"
|
||||
case setting.Database.UseMySQL:
|
||||
groupBy = "UNIX_TIMESTAMP(DATE(FROM_UNIXTIME(created_unix)))"
|
||||
case setting.Database.UsePostgreSQL:
|
||||
groupBy = "extract(epoch from date_trunc('day', to_timestamp(created_unix)))"
|
||||
groupBy = "created_unix DIV 900 * 900"
|
||||
case setting.Database.UseMSSQL:
|
||||
groupBy = "datediff(SECOND, '19700101', dateadd(DAY, 0, datediff(day, 0, dateadd(s, created_unix, '19700101'))))"
|
||||
groupByName = groupBy
|
||||
}
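
All real-world UTC offsets are multiples of 15 minutes (for example +5:30 and +12:45), so grouping by 900-second buckets lets the client shift every bucket boundary into its own timezone without splitting a bucket. A small sketch of the arithmetic behind the created_unix / 900 * 900 (or DIV on MySQL) grouping expression; the helper name is a placeholder.

// bucketStart returns the start of the 15-minute interval containing ts,
// mirroring the SQL grouping expression.
func bucketStart(ts int64) int64 {
	return ts / 900 * 900 // integer division truncates to the bucket boundary
}

// e.g. bucketStart(1603010923) == 1603010700, the value seen in the test data below.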
|
||||
|
||||
|
|
|
@ -19,12 +19,20 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
|
|||
CountResult int
|
||||
JSONResult string
|
||||
}{
|
||||
{2, 2, 1, `[{"timestamp":1603152000,"contributions":1}]`}, // self looks at action in private repo
|
||||
{2, 1, 1, `[{"timestamp":1603152000,"contributions":1}]`}, // admin looks at action in private repo
|
||||
{2, 3, 0, `[]`}, // other user looks at action in private repo
|
||||
{2, 0, 0, `[]`}, // nobody looks at action in private repo
|
||||
{16, 15, 1, `[{"timestamp":1603238400,"contributions":1}]`}, // collaborator looks at action in private repo
|
||||
{3, 3, 0, `[]`}, // no action performed by target user
|
||||
// self looks at action in private repo
|
||||
{2, 2, 1, `[{"timestamp":1603227600,"contributions":1}]`},
|
||||
// admin looks at action in private repo
|
||||
{2, 1, 1, `[{"timestamp":1603227600,"contributions":1}]`},
|
||||
// other user looks at action in private repo
|
||||
{2, 3, 0, `[]`},
|
||||
// nobody looks at action in private repo
|
||||
{2, 0, 0, `[]`},
|
||||
// collaborator looks at action in private repo
|
||||
{16, 15, 1, `[{"timestamp":1603267200,"contributions":1}]`},
|
||||
// no action performed by target user
|
||||
{3, 3, 0, `[]`},
|
||||
// multiple actions performed with two grouped together
|
||||
{10, 10, 3, `[{"timestamp":1603009800,"contributions":1},{"timestamp":1603010700,"contributions":2}]`},
|
||||
}
|
||||
// Prepare
|
||||
assert.NoError(t, PrepareTestDatabase())
|
||||
|
@ -51,9 +59,13 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
|
|||
|
||||
// Get the heatmap and compare
|
||||
heatmap, err := GetUserHeatmapDataByUser(user, doer)
|
||||
var contributions int
|
||||
for _, hm := range heatmap {
|
||||
contributions += int(hm.Contributions)
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, heatmap, len(actions), "invalid action count: did the test data become too old?")
|
||||
assert.Len(t, heatmap, tc.CountResult, fmt.Sprintf("testcase %d", i))
|
||||
assert.Len(t, actions, contributions, "invalid action count: did the test data become too old?")
|
||||
assert.Equal(t, tc.CountResult, contributions, fmt.Sprintf("testcase %d", i))
|
||||
|
||||
// Test JSON rendering
|
||||
json := jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
|
|
|
@ -380,6 +380,21 @@ func (ctx *Context) ServeFile(file string, names ...string) {
|
|||
http.ServeFile(ctx.Resp, ctx.Req, file)
|
||||
}
|
||||
|
||||
// ServeStream serves a file via an io stream
|
||||
func (ctx *Context) ServeStream(rd io.Reader, name string) {
|
||||
ctx.Resp.Header().Set("Content-Description", "File Transfer")
|
||||
ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
|
||||
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+name)
|
||||
ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary")
|
||||
ctx.Resp.Header().Set("Expires", "0")
|
||||
ctx.Resp.Header().Set("Cache-Control", "must-revalidate")
|
||||
ctx.Resp.Header().Set("Pragma", "public")
|
||||
_, err := io.Copy(ctx.Resp, rd)
|
||||
if err != nil {
|
||||
ctx.ServerError("Download file failed", err)
|
||||
}
|
||||
}
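
A hedged sketch of how a handler might use the new helper to stream generated content straight to the client; the handler name and payload are placeholders, and the bytes import is assumed.

// downloadReport is a hypothetical handler: serve an in-memory payload as a download.
func downloadReport(ctx *Context) {
	payload := bytes.NewReader([]byte("id,count\n1,42\n"))
	ctx.ServeStream(payload, "report.csv")
}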
|
||||
|
||||
// Error returns an error to the web browser
|
||||
func (ctx *Context) Error(status int, contents ...string) {
|
||||
var v = http.StatusText(status)
|
||||
|
|
|
@ -62,10 +62,14 @@ func toUser(user *models.User, signed, authed bool) *api.User {
|
|||
Following: user.NumFollowing,
|
||||
StarredRepos: user.NumStars,
|
||||
}
|
||||
|
||||
result.Visibility = user.Visibility.String()
|
||||
|
||||
// hide primary email if API caller is anonymous or user keeps email private
|
||||
if signed && (!user.KeepEmailPrivate || authed) {
|
||||
result.Email = user.Email
|
||||
}
|
||||
|
||||
// only the site admin and possibly the user themselves will get this information
|
||||
if authed {
|
||||
result.IsAdmin = user.IsAdmin
|
||||
|
@ -76,3 +80,18 @@ func toUser(user *models.User, signed, authed bool) *api.User {
|
|||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// User2UserSettings returns the UserSettings for a given user
|
||||
func User2UserSettings(user *models.User) api.UserSettings {
|
||||
return api.UserSettings{
|
||||
FullName: user.FullName,
|
||||
Website: user.Website,
|
||||
Location: user.Location,
|
||||
Language: user.Language,
|
||||
Description: user.Description,
|
||||
Theme: user.Theme,
|
||||
HideEmail: user.KeepEmailPrivate,
|
||||
HideActivity: user.KeepActivityPrivate,
|
||||
DiffViewStyle: user.DiffViewStyle,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
@ -27,4 +28,11 @@ func TestUser_ToUser(t *testing.T) {
|
|||
|
||||
apiUser = toUser(user1, false, false)
|
||||
assert.False(t, apiUser.IsAdmin)
|
||||
assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility)
|
||||
|
||||
user31 := models.AssertExistsAndLoadBean(t, &models.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate}).(*models.User)
|
||||
|
||||
apiUser = toUser(user31, true, true)
|
||||
assert.False(t, apiUser.IsAdmin)
|
||||
assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility)
|
||||
}
|
||||
|
|
59
modules/doctor/checkOldArchives.go
Normal file
|
@ -0,0 +1,59 @@
|
|||
// Copyright 2021 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package doctor
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"code.gitea.io/gitea/models"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
func checkOldArchives(logger log.Logger, autofix bool) error {
|
||||
numRepos := 0
|
||||
numReposUpdated := 0
|
||||
err := iterateRepositories(func(repo *models.Repository) error {
|
||||
if repo.IsEmpty {
|
||||
return nil
|
||||
}
|
||||
|
||||
p := filepath.Join(repo.RepoPath(), "archives")
|
||||
isDir, err := util.IsDir(p)
|
||||
if err != nil {
|
||||
log.Warn("check if %s is directory failed: %v", p, err)
|
||||
}
|
||||
if isDir {
|
||||
numRepos++
|
||||
if autofix {
|
||||
if err := os.RemoveAll(p); err == nil {
|
||||
numReposUpdated++
|
||||
} else {
|
||||
log.Warn("remove %s failed: %v", p, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
if autofix {
|
||||
logger.Info("%d / %d old archives in repository deleted", numReposUpdated, numRepos)
|
||||
} else {
|
||||
logger.Info("%d old archives in repository need to be deleted", numRepos)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func init() {
|
||||
Register(&Check{
|
||||
Title: "Check old archives",
|
||||
Name: "check-old-archives",
|
||||
IsDefault: false,
|
||||
Run: checkOldArchives,
|
||||
Priority: 7,
|
||||
})
|
||||
}
|
|
@ -12,6 +12,8 @@ import (
|
|||
"strconv"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
|
||||
"github.com/djherbis/buffer"
|
||||
"github.com/djherbis/nio/v3"
|
||||
)
|
||||
|
@ -99,7 +101,7 @@ func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err er
|
|||
}
|
||||
idx := strings.IndexByte(typ, ' ')
|
||||
if idx < 0 {
|
||||
log("missing space typ: %s", typ)
|
||||
log.Debug("missing space typ: %s", typ)
|
||||
err = ErrNotExist{ID: string(sha)}
|
||||
return
|
||||
}
|
||||
|
@ -230,7 +232,7 @@ func ParseTreeLine(rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fn
|
|||
}
|
||||
idx := bytes.IndexByte(readBytes, ' ')
|
||||
if idx < 0 {
|
||||
log("missing space in readBytes ParseTreeLine: %s", readBytes)
|
||||
log.Debug("missing space in readBytes ParseTreeLine: %s", readBytes)
|
||||
|
||||
err = &ErrNotExist{}
|
||||
return
|
||||
|
|
|
@ -34,7 +34,7 @@ func (b *Blob) GetBlobContent() (string, error) {
|
|||
return string(buf), nil
|
||||
}
|
||||
|
||||
// GetBlobLineCount gets line count of lob as raw text
|
||||
// GetBlobLineCount gets line count of the blob
|
||||
func (b *Blob) GetBlobLineCount() (int, error) {
|
||||
reader, err := b.DataAsync()
|
||||
if err != nil {
|
||||
|
@ -42,10 +42,14 @@ func (b *Blob) GetBlobLineCount() (int, error) {
|
|||
}
|
||||
defer reader.Close()
|
||||
buf := make([]byte, 32*1024)
|
||||
count := 0
|
||||
count := 1
|
||||
lineSep := []byte{'\n'}
|
||||
|
||||
c, err := reader.Read(buf)
|
||||
if c == 0 && err == io.EOF {
|
||||
return 0, nil
|
||||
}
|
||||
for {
|
||||
c, err := reader.Read(buf)
|
||||
count += bytes.Count(buf[:c], lineSep)
|
||||
switch {
|
||||
case err == io.EOF:
|
||||
|
@ -53,6 +57,7 @@ func (b *Blob) GetBlobLineCount() (int, error) {
|
|||
case err != nil:
|
||||
return count, err
|
||||
}
|
||||
c, err = reader.Read(buf)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,6 +12,8 @@ import (
|
|||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// Blob represents a Git object.
|
||||
|
@ -69,12 +71,12 @@ func (b *Blob) Size() int64 {
|
|||
defer cancel()
|
||||
_, err := wr.Write([]byte(b.ID.String() + "\n"))
|
||||
if err != nil {
|
||||
log("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
|
||||
log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
|
||||
return 0
|
||||
}
|
||||
_, _, b.size, err = ReadBatchLine(rd)
|
||||
if err != nil {
|
||||
log("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
|
||||
log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
|
||||
return 0
|
||||
}
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/process"
|
||||
)
|
||||
|
||||
|
@ -22,8 +23,8 @@ var (
|
|||
// GlobalCommandArgs global command args for external package setting
|
||||
GlobalCommandArgs []string
|
||||
|
||||
// DefaultCommandExecutionTimeout default command execution timeout duration
|
||||
DefaultCommandExecutionTimeout = 360 * time.Second
|
||||
// defaultCommandExecutionTimeout default command execution timeout duration
|
||||
defaultCommandExecutionTimeout = 360 * time.Second
|
||||
)
|
||||
|
||||
// DefaultLocale is the default LC_ALL to run git commands in.
|
||||
|
@ -110,13 +111,13 @@ func (c *Command) RunInDirTimeoutEnvFullPipeline(env []string, timeout time.Dura
|
|||
// it pipes stdout and stderr to given io.Writer and passes in an io.Reader as stdin. Between cmd.Start and cmd.Wait the passed in function is run.
|
||||
func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time.Duration, dir string, stdout, stderr io.Writer, stdin io.Reader, fn func(context.Context, context.CancelFunc) error) error {
|
||||
if timeout == -1 {
|
||||
timeout = DefaultCommandExecutionTimeout
|
||||
timeout = defaultCommandExecutionTimeout
|
||||
}
|
||||
|
||||
if len(dir) == 0 {
|
||||
log(c.String())
|
||||
log.Debug("%s", c)
|
||||
} else {
|
||||
log("%s: %v", dir, c)
|
||||
log.Debug("%s: %v", dir, c)
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(c.parentContext, timeout)
|
||||
|
@ -197,9 +198,12 @@ func (c *Command) RunInDirTimeoutEnv(env []string, timeout time.Duration, dir st
|
|||
if err := c.RunInDirTimeoutEnvPipeline(env, timeout, dir, stdout, stderr); err != nil {
|
||||
return nil, ConcatenateError(err, stderr.String())
|
||||
}
|
||||
|
||||
if stdout.Len() > 0 {
|
||||
log("stdout:\n%s", stdout.Bytes()[:1024])
|
||||
if stdout.Len() > 0 && log.IsTrace() {
|
||||
tracelen := stdout.Len()
|
||||
if tracelen > 1024 {
|
||||
tracelen = 1024
|
||||
}
|
||||
log.Trace("Stdout:\n %s", stdout.Bytes()[:tracelen])
|
||||
}
|
||||
return stdout.Bytes(), nil
|
||||
}
|
||||
|
|
|
@ -12,6 +12,8 @@ import (
|
|||
"io"
|
||||
"path"
|
||||
"sort"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// GetCommitsInfo gets information of all commits that are corresponding to these entries
|
||||
|
@ -78,7 +80,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
|
|||
commitsInfo[i].SubModuleFile = subModuleFile
|
||||
}
|
||||
} else {
|
||||
log("missing commit for %s", entry.Name())
|
||||
log.Debug("missing commit for %s", entry.Name())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ import (
|
|||
"strconv"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/process"
|
||||
)
|
||||
|
||||
|
@ -113,7 +114,7 @@ func ParseDiffHunkString(diffhunk string) (leftLine, leftHunk, rightLine, righHu
|
|||
righHunk, _ = strconv.Atoi(rightRange[1])
|
||||
}
|
||||
} else {
|
||||
log("Parse line number failed: %v", diffhunk)
|
||||
log.Debug("Parse line number failed: %v", diffhunk)
|
||||
rightLine = leftLine
|
||||
righHunk = leftHunk
|
||||
}
|
||||
|
|
|
@ -159,3 +159,20 @@ func (err *ErrPushRejected) GenerateMessage() {
|
|||
}
|
||||
err.Message = strings.TrimSpace(messageBuilder.String())
|
||||
}
|
||||
|
||||
// ErrMoreThanOne represents an error that occurs if a pull request fails because there is more than one source (branch, tag) with the same name
|
||||
type ErrMoreThanOne struct {
|
||||
StdOut string
|
||||
StdErr string
|
||||
Err error
|
||||
}
|
||||
|
||||
// IsErrMoreThanOne checks if an error is a ErrMoreThanOne
|
||||
func IsErrMoreThanOne(err error) bool {
|
||||
_, ok := err.(*ErrMoreThanOne)
|
||||
return ok
|
||||
}
|
||||
|
||||
func (err *ErrMoreThanOne) Error() string {
|
||||
return fmt.Sprintf("ErrMoreThanOne Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
|
||||
}
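
Callers of Push are expected to handle this new failure mode explicitly, for example to surface a clearer message when a branch and a tag share a name. A hedged usage sketch; the wrapper name, repoPath and opts are placeholders.

// pushOrExplain is a hypothetical wrapper around git.Push.
func pushOrExplain(repoPath string, opts git.PushOptions) error {
	if err := git.Push(repoPath, opts); err != nil {
		if git.IsErrMoreThanOne(err) {
			// A branch and a tag with the same name matched the ref being pushed.
			return fmt.Errorf("ambiguous ref name, push rejected: %v", err)
		}
		return err
	}
	return nil
}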
|
||||
|
|
|
@ -14,14 +14,12 @@ import (
|
|||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/process"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
|
||||
"github.com/hashicorp/go-version"
|
||||
)
|
||||
|
||||
var (
|
||||
// Debug enables verbose logging on everything.
|
||||
// This should be false in case Gogs starts in SSH mode.
|
||||
Debug = false
|
||||
// Prefix the log prefix
|
||||
Prefix = "[git-module] "
|
||||
// GitVersionRequired is the minimum Git version required
|
||||
|
@ -41,19 +39,6 @@ var (
|
|||
goVersionLessThan115 = true
|
||||
)
|
||||
|
||||
func log(format string, args ...interface{}) {
|
||||
if !Debug {
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Print(Prefix)
|
||||
if len(args) == 0 {
|
||||
fmt.Println(format)
|
||||
} else {
|
||||
fmt.Printf(format+"\n", args...)
|
||||
}
|
||||
}
|
||||
|
||||
// LocalVersion returns current Git version from shell.
|
||||
func LocalVersion() (*version.Version, error) {
|
||||
if err := LoadGitVersion(); err != nil {
|
||||
|
@ -122,10 +107,42 @@ func SetExecutablePath(path string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// VersionInfo returns git version information
|
||||
func VersionInfo() string {
|
||||
var format = "Git Version: %s"
|
||||
var args = []interface{}{gitVersion.Original()}
|
||||
// The Git wire protocol version 2 has been available since Git 2.18
|
||||
if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
|
||||
format += ", Wire Protocol %s Enabled"
|
||||
args = append(args, "Version 2") // for focus color
|
||||
}
|
||||
|
||||
return fmt.Sprintf(format, args...)
|
||||
}
|
||||
|
||||
// Init initializes git module
|
||||
func Init(ctx context.Context) error {
|
||||
DefaultContext = ctx
|
||||
|
||||
defaultCommandExecutionTimeout = time.Duration(setting.Git.Timeout.Default) * time.Second
|
||||
|
||||
if err := SetExecutablePath(setting.Git.Path); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// force cleanup args
|
||||
GlobalCommandArgs = []string{}
|
||||
|
||||
if CheckGitVersionAtLeast("2.9") == nil {
|
||||
// Explicitly disable credential helper, otherwise Git credentials might leak
|
||||
GlobalCommandArgs = append(GlobalCommandArgs, "-c", "credential.helper=")
|
||||
}
|
||||
|
||||
// The Git wire protocol version 2 has been available since Git 2.18
|
||||
if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
|
||||
GlobalCommandArgs = append(GlobalCommandArgs, "-c", "protocol.version=2")
|
||||
}
|
||||
|
||||
// Save current git version on init to gitVersion otherwise it would require an RWMutex
|
||||
if err := LoadGitVersion(); err != nil {
|
||||
return err
|
||||
|
|
|
@ -9,6 +9,8 @@ import (
|
|||
"fmt"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
func fatalTestError(fmtStr string, args ...interface{}) {
|
||||
|
@ -17,6 +19,8 @@ func fatalTestError(fmtStr string, args ...interface{}) {
|
|||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
_ = log.NewLogger(1000, "console", "console", `{"level":"trace","stacktracelevel":"NONE","stderr":true}`)
|
||||
|
||||
if err := Init(context.Background()); err != nil {
|
||||
fatalTestError("Init failed: %v", err)
|
||||
}
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
// Copyright 2015 The Gogs Authors. All rights reserved.
|
||||
// Copyright 2021 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
|
@ -12,6 +13,7 @@ import (
|
|||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
|
@ -126,11 +128,11 @@ const (
|
|||
|
||||
// SetUpdateHook writes given content to update hook of the repository.
|
||||
func SetUpdateHook(repoPath, content string) (err error) {
|
||||
log("Setting update hook: %s", repoPath)
|
||||
log.Debug("Setting update hook: %s", repoPath)
|
||||
hookPath := path.Join(repoPath, HookPathUpdate)
|
||||
isExist, err := util.IsExist(hookPath)
|
||||
if err != nil {
|
||||
log("Unable to check if %s exists. Error: %v", hookPath, err)
|
||||
log.Debug("Unable to check if %s exists. Error: %v", hookPath, err)
|
||||
return err
|
||||
}
|
||||
if isExist {
|
||||
|
|
|
@ -7,6 +7,8 @@ package git
|
|||
import (
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// Cache represents a caching interface
|
||||
|
@ -24,6 +26,6 @@ func (c *LastCommitCache) getCacheKey(repoPath, ref, entryPath string) string {
|
|||
|
||||
// Put puts the last commit ID for the given ref and entry path
|
||||
func (c *LastCommitCache) Put(ref, entryPath, commitID string) error {
|
||||
log("LastCommitCache save: [%s:%s:%s]", ref, entryPath, commitID)
|
||||
log.Debug("LastCommitCache save: [%s:%s:%s]", ref, entryPath, commitID)
|
||||
return c.cache.Put(c.getCacheKey(c.repoPath, ref, entryPath), commitID, c.ttl())
|
||||
}
|
||||
|
|
|
@ -10,6 +10,8 @@ import (
|
|||
"context"
|
||||
"path"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
cgobject "github.com/go-git/go-git/v5/plumbing/object/commitgraph"
|
||||
)
|
||||
|
@ -41,9 +43,9 @@ func NewLastCommitCache(repoPath string, gitRepo *Repository, ttl func() int64,
|
|||
func (c *LastCommitCache) Get(ref, entryPath string) (interface{}, error) {
|
||||
v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath))
|
||||
if vs, ok := v.(string); ok {
|
||||
log("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
|
||||
log.Debug("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
|
||||
if commit, ok := c.commitCache[vs]; ok {
|
||||
log("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs)
|
||||
log.Debug("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs)
|
||||
return commit, nil
|
||||
}
|
||||
id, err := c.repo.ConvertToSHA1(vs)
|
||||
|
|
|
@ -10,6 +10,8 @@ import (
|
|||
"bufio"
|
||||
"context"
|
||||
"path"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// LastCommitCache represents a cache to store last commit
|
||||
|
@ -39,9 +41,9 @@ func NewLastCommitCache(repoPath string, gitRepo *Repository, ttl func() int64,
|
|||
func (c *LastCommitCache) Get(ref, entryPath string, wr WriteCloserError, rd *bufio.Reader) (interface{}, error) {
|
||||
v := c.cache.Get(c.getCacheKey(c.repoPath, ref, entryPath))
|
||||
if vs, ok := v.(string); ok {
|
||||
log("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
|
||||
log.Debug("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, vs)
|
||||
if commit, ok := c.commitCache[vs]; ok {
|
||||
log("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs)
|
||||
log.Debug("LastCommitCache hit level 2: [%s:%s:%s]", ref, entryPath, vs)
|
||||
return commit, nil
|
||||
}
|
||||
id, err := c.repo.ConvertToSHA1(vs)
|
||||
|
|
37
modules/git/lfs.go
Normal file
|
@ -0,0 +1,37 @@
|
|||
// Copyright 2021 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package git
|
||||
|
||||
import (
|
||||
"sync"
|
||||
|
||||
logger "code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
)
|
||||
|
||||
var once sync.Once
|
||||
|
||||
// CheckLFSVersion checks whether the installed Git version supports LFS and disables the LFS server if it does not.
|
||||
func CheckLFSVersion() {
|
||||
if setting.LFS.StartServer {
|
||||
//Disable LFS client hooks if installed for the current OS user
|
||||
//Needs at least git v2.1.2
|
||||
|
||||
err := LoadGitVersion()
|
||||
if err != nil {
|
||||
logger.Fatal("Error retrieving git version: %v", err)
|
||||
}
|
||||
|
||||
if CheckGitVersionAtLeast("2.1.2") != nil {
|
||||
setting.LFS.StartServer = false
|
||||
logger.Error("LFS server support needs at least Git v2.1.2")
|
||||
} else {
|
||||
once.Do(func() {
|
||||
GlobalCommandArgs = append(GlobalCommandArgs, "-c", "filter.lfs.required=",
|
||||
"-c", "filter.lfs.smudge=", "-c", "filter.lfs.clean=")
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
|
@ -13,6 +13,8 @@ import (
|
|||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// ParseTreeEntries parses the output of a `git ls-tree -l` command.
|
||||
|
@ -120,7 +122,7 @@ loop:
|
|||
case "40000":
|
||||
entry.entryMode = EntryModeTree
|
||||
default:
|
||||
log("Unknown mode: %v", string(mode))
|
||||
log.Debug("Unknown mode: %v", string(mode))
|
||||
return nil, fmt.Errorf("unknown mode: %v", string(mode))
|
||||
}
|
||||
|
||||
|
|
|
@ -225,6 +225,13 @@ func Push(repoPath string, opts PushOptions) error {
|
|||
}
|
||||
err.GenerateMessage()
|
||||
return err
|
||||
} else if strings.Contains(errbuf.String(), "matches more than one") {
|
||||
err := &ErrMoreThanOne{
|
||||
StdOut: outbuf.String(),
|
||||
StdErr: errbuf.String(),
|
||||
Err: err,
|
||||
}
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ package git
|
|||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
@ -33,32 +34,28 @@ func (a ArchiveType) String() string {
|
|||
return "unknown"
|
||||
}
|
||||
|
||||
// CreateArchiveOpts represents options for creating an archive
|
||||
type CreateArchiveOpts struct {
|
||||
Format ArchiveType
|
||||
Prefix bool
|
||||
}
|
||||
|
||||
// CreateArchive creates archive content for the given commit and writes it to the target
|
||||
func (c *Commit) CreateArchive(ctx context.Context, target string, opts CreateArchiveOpts) error {
|
||||
if opts.Format.String() == "unknown" {
|
||||
return fmt.Errorf("unknown format: %v", opts.Format)
|
||||
func (repo *Repository) CreateArchive(ctx context.Context, format ArchiveType, target io.Writer, usePrefix bool, commitID string) error {
|
||||
if format.String() == "unknown" {
|
||||
return fmt.Errorf("unknown format: %v", format)
|
||||
}
|
||||
|
||||
args := []string{
|
||||
"archive",
|
||||
}
|
||||
if opts.Prefix {
|
||||
args = append(args, "--prefix="+filepath.Base(strings.TrimSuffix(c.repo.Path, ".git"))+"/")
|
||||
if usePrefix {
|
||||
args = append(args, "--prefix="+filepath.Base(strings.TrimSuffix(repo.Path, ".git"))+"/")
|
||||
}
|
||||
|
||||
args = append(args,
|
||||
"--format="+opts.Format.String(),
|
||||
"-o",
|
||||
target,
|
||||
c.ID.String(),
|
||||
"--format="+format.String(),
|
||||
commitID,
|
||||
)
|
||||
|
||||
_, err := NewCommandContext(ctx, args...).RunInDir(c.repo.Path)
|
||||
return err
|
||||
var stderr strings.Builder
|
||||
err := NewCommandContext(ctx, args...).RunInDirPipeline(repo.Path, target, &stderr)
|
||||
if err != nil {
|
||||
return ConcatenateError(err, stderr.String())
|
||||
}
|
||||
return nil
|
||||
}
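
With the receiver moved to *Repository and the target changed from a file path to an io.Writer, an archive can now be piped directly into storage or an HTTP response instead of a temporary file. A hedged sketch of the new call shape; the function name, writer, repository and commit ID are placeholders.

// writeZip is a hypothetical caller: stream a zip archive of one commit into any io.Writer.
func writeZip(ctx context.Context, gitRepo *git.Repository, commitID string, w io.Writer) error {
	return gitRepo.CreateArchive(ctx, git.ZIP, w, true, commitID)
}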
|
|
@ -12,6 +12,8 @@ import (
|
|||
"context"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// Repository represents a Git repository.
|
||||
|
@ -54,7 +56,7 @@ func OpenRepository(repoPath string) (*Repository, error) {
|
|||
// CatFileBatch obtains a CatFileBatch for this repository
|
||||
func (repo *Repository) CatFileBatch() (WriteCloserError, *bufio.Reader, func()) {
|
||||
if repo.batchCancel == nil || repo.batchReader.Buffered() > 0 {
|
||||
log("Opening temporary cat file batch for: %s", repo.Path)
|
||||
log.Debug("Opening temporary cat file batch for: %s", repo.Path)
|
||||
return CatFileBatch(repo.Path)
|
||||
}
|
||||
return repo.batchWriter, repo.batchReader, func() {}
|
||||
|
@ -63,7 +65,7 @@ func (repo *Repository) CatFileBatch() (WriteCloserError, *bufio.Reader, func())
|
|||
// CatFileBatchCheck obtains a CatFileBatchCheck for this repository
|
||||
func (repo *Repository) CatFileBatchCheck() (WriteCloserError, *bufio.Reader, func()) {
|
||||
if repo.checkCancel == nil || repo.checkReader.Buffered() > 0 {
|
||||
log("Opening temporary cat file batch-check: %s", repo.Path)
|
||||
log.Debug("Opening temporary cat file batch-check: %s", repo.Path)
|
||||
return CatFileBatchCheck(repo.Path)
|
||||
}
|
||||
return repo.checkWriter, repo.checkReader, func() {}
|
||||
|
|
|
@ -12,6 +12,8 @@ import (
|
|||
"bytes"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// IsObjectExist returns true if given reference exists in the repository.
|
||||
|
@ -24,7 +26,7 @@ func (repo *Repository) IsObjectExist(name string) bool {
|
|||
defer cancel()
|
||||
_, err := wr.Write([]byte(name + "\n"))
|
||||
if err != nil {
|
||||
log("Error writing to CatFileBatchCheck %v", err)
|
||||
log.Debug("Error writing to CatFileBatchCheck %v", err)
|
||||
return false
|
||||
}
|
||||
sha, _, _, err := ReadBatchLine(rd)
|
||||
|
@ -41,7 +43,7 @@ func (repo *Repository) IsReferenceExist(name string) bool {
|
|||
defer cancel()
|
||||
_, err := wr.Write([]byte(name + "\n"))
|
||||
if err != nil {
|
||||
log("Error writing to CatFileBatchCheck %v", err)
|
||||
log.Debug("Error writing to CatFileBatchCheck %v", err)
|
||||
return false
|
||||
}
|
||||
_, _, _, err = ReadBatchLine(rd)
|
||||
|
|
|
@ -12,6 +12,8 @@ import (
|
|||
"io/ioutil"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
)
|
||||
|
||||
// GetBranchCommitID returns last commit ID string of given branch.
|
||||
|
@ -85,12 +87,6 @@ func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) {
|
|||
return commits.Front().Value.(*Commit), nil
|
||||
}
|
||||
|
||||
// CommitsRangeSize the default commits range size
|
||||
var CommitsRangeSize = 50
|
||||
|
||||
// BranchesRangeSize the default branches range size
|
||||
var BranchesRangeSize = 20
|
||||
|
||||
func (repo *Repository) commitsByRange(id SHA1, page, pageSize int) (*list.List, error) {
|
||||
stdout, err := NewCommand("log", id.String(), "--skip="+strconv.Itoa((page-1)*pageSize),
|
||||
"--max-count="+strconv.Itoa(pageSize), prettyLogFormat).RunInDirBytes(repo.Path)
|
||||
|
@ -206,7 +202,7 @@ func (repo *Repository) FileCommitsCount(revision, file string) (int64, error) {
|
|||
|
||||
// CommitsByFileAndRange returns the commits for the given revision and file, limited to the given page
|
||||
func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (*list.List, error) {
|
||||
skip := (page - 1) * CommitsRangeSize
|
||||
skip := (page - 1) * setting.Git.CommitsRangeSize
|
||||
|
||||
stdoutReader, stdoutWriter := io.Pipe()
|
||||
defer func() {
|
||||
|
@ -216,7 +212,7 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (
|
|||
go func() {
|
||||
stderr := strings.Builder{}
|
||||
err := NewCommand("log", revision, "--follow",
|
||||
"--max-count="+strconv.Itoa(CommitsRangeSize*page),
|
||||
"--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize*page),
|
||||
prettyLogFormat, "--", file).
|
||||
RunInDirPipeline(repo.Path, stdoutWriter, &stderr)
|
||||
if err != nil {
|
||||
|
@ -247,7 +243,7 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) (
|
|||
// CommitsByFileAndRangeNoFollow returns the commits for the given revision and file, limited to the given page
|
||||
func (repo *Repository) CommitsByFileAndRangeNoFollow(revision, file string, page int) (*list.List, error) {
|
||||
stdout, err := NewCommand("log", revision, "--skip="+strconv.Itoa((page-1)*50),
|
||||
"--max-count="+strconv.Itoa(CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path)
|
||||
"--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
@ -12,6 +12,8 @@ import (
|
|||
"io"
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// ResolveReference resolves a name to a reference
|
||||
|
@ -110,7 +112,7 @@ func (repo *Repository) getCommitFromBatchReader(rd *bufio.Reader, id SHA1) (*Co
|
|||
|
||||
return commit, nil
|
||||
default:
|
||||
log("Unknown typ: %s", typ)
|
||||
log.Debug("Unknown typ: %s", typ)
|
||||
_, err = rd.Discard(int(size) + 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
|
@ -13,6 +13,7 @@ import (
|
|||
"math"
|
||||
|
||||
"code.gitea.io/gitea/modules/analyze"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
|
||||
"github.com/go-enry/go-enry/v2"
|
||||
)
|
||||
|
@ -34,19 +35,19 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
}
|
||||
shaBytes, typ, size, err := ReadBatchLine(batchReader)
|
||||
if typ != "commit" {
|
||||
log("Unable to get commit for: %s. Err: %v", commitID, err)
|
||||
log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
|
||||
return nil, ErrNotExist{commitID, ""}
|
||||
}
|
||||
|
||||
sha, err := NewIDFromString(string(shaBytes))
|
||||
if err != nil {
|
||||
log("Unable to get commit for: %s. Err: %v", commitID, err)
|
||||
log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
|
||||
return nil, ErrNotExist{commitID, ""}
|
||||
}
|
||||
|
||||
commit, err := CommitFromReader(repo, sha, io.LimitReader(batchReader, size))
|
||||
if err != nil {
|
||||
log("Unable to get commit for: %s. Err: %v", commitID, err)
|
||||
log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
|
||||
return nil, err
|
||||
}
|
||||
if _, err = batchReader.Discard(1); err != nil {
|
||||
|
@ -79,7 +80,7 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
}
|
||||
_, _, size, err := ReadBatchLine(batchReader)
|
||||
if err != nil {
|
||||
log("Error reading blob: %s Err: %v", f.ID.String(), err)
|
||||
log.Debug("Error reading blob: %s Err: %v", f.ID.String(), err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
|
|
@ -8,6 +8,8 @@ package git
|
|||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// TagPrefix tags prefix path on the repository
|
||||
|
@ -33,7 +35,7 @@ func (repo *Repository) CreateAnnotatedTag(name, message, revision string) error
|
|||
func (repo *Repository) getTag(tagID SHA1, name string) (*Tag, error) {
|
||||
t, ok := repo.tagCache.Get(tagID.String())
|
||||
if ok {
|
||||
log("Hit cache: %s", tagID)
|
||||
log.Debug("Hit cache: %s", tagID)
|
||||
tagClone := *t.(*Tag)
|
||||
tagClone.Name = name // This is necessary because lightweight tags may have same id
|
||||
return &tagClone, nil
|
||||
|
|
|
@ -33,7 +33,7 @@ var (
|
|||
|
||||
once sync.Once
|
||||
|
||||
cache *lru.ARCCache
|
||||
cache *lru.TwoQueueCache
|
||||
)
|
||||
|
||||
// NewContext loads custom highlight map from local config
|
||||
|
@ -45,7 +45,7 @@ func NewContext() {
|
|||
}
|
||||
|
||||
// The size 512 is simply a conservative rule of thumb
|
||||
c, err := lru.NewARC(512)
|
||||
c, err := lru.New2Q(512)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("failed to initialize LRU cache for highlighter: %s", err))
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@ import (
|
|||
"html"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"regexp"
|
||||
"strconv"
|
||||
|
||||
"code.gitea.io/gitea/modules/csv"
|
||||
|
@ -38,6 +39,15 @@ func (Renderer) Extensions() []string {
|
|||
return []string{".csv", ".tsv"}
|
||||
}
|
||||
|
||||
// SanitizerRules implements markup.Renderer
|
||||
func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
|
||||
return []setting.MarkupSanitizerRule{
|
||||
{Element: "table", AllowAttr: "class", Regexp: regexp.MustCompile(`data-table`)},
|
||||
{Element: "th", AllowAttr: "class", Regexp: regexp.MustCompile(`line-num`)},
|
||||
{Element: "td", AllowAttr: "class", Regexp: regexp.MustCompile(`line-num`)},
|
||||
}
|
||||
}
|
||||
|
||||
func writeField(w io.Writer, element, class, field string) error {
|
||||
if _, err := io.WriteString(w, "<"); err != nil {
|
||||
return err
|
||||
|
|
7
modules/markup/external/external.go
vendored
|
@ -30,7 +30,7 @@ func RegisterRenderers() {
|
|||
|
||||
// Renderer implements markup.Renderer for external tools
|
||||
type Renderer struct {
|
||||
setting.MarkupRenderer
|
||||
*setting.MarkupRenderer
|
||||
}
|
||||
|
||||
// Name returns the external tool name
|
||||
|
@ -48,6 +48,11 @@ func (p *Renderer) Extensions() []string {
|
|||
return p.FileExtensions
|
||||
}
|
||||
|
||||
// SanitizerRules implements markup.Renderer
|
||||
func (p *Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
|
||||
return p.MarkupSanitizerRules
|
||||
}
|
||||
|
||||
func envMark(envName string) string {
|
||||
if runtime.GOOS == "windows" {
|
||||
return "%" + envName + "%"
|
||||
|
|
|
@ -112,7 +112,7 @@ func TestRender_links(t *testing.T) {
|
|||
|
||||
defaultCustom := setting.Markdown.CustomURLSchemes
|
||||
setting.Markdown.CustomURLSchemes = []string{"ftp", "magnet"}
|
||||
ReplaceSanitizer()
|
||||
InitializeSanitizer()
|
||||
CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes)
|
||||
|
||||
test(
|
||||
|
@ -192,7 +192,7 @@ func TestRender_links(t *testing.T) {
|
|||
|
||||
// Restore previous settings
|
||||
setting.Markdown.CustomURLSchemes = defaultCustom
|
||||
ReplaceSanitizer()
|
||||
InitializeSanitizer()
|
||||
CustomLinkURLSchemes(setting.Markdown.CustomURLSchemes)
|
||||
}
|
||||
|
||||
|
|
|
@ -199,7 +199,7 @@ func actualRender(ctx *markup.RenderContext, input io.Reader, output io.Writer)
|
|||
}
|
||||
_ = lw.Close()
|
||||
}()
|
||||
buf := markup.SanitizeReader(rd)
|
||||
buf := markup.SanitizeReader(rd, "")
|
||||
_, err := io.Copy(output, buf)
|
||||
return err
|
||||
}
|
||||
|
@ -215,7 +215,7 @@ func render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
|
|||
if log.IsDebug() {
|
||||
log.Debug("Panic in markdown: %v\n%s", err, string(log.Stack(2)))
|
||||
}
|
||||
ret := markup.SanitizeReader(input)
|
||||
ret := markup.SanitizeReader(input, "")
|
||||
_, err = io.Copy(output, ret)
|
||||
if err != nil {
|
||||
log.Error("SanitizeReader failed: %v", err)
|
||||
|
@ -249,6 +249,11 @@ func (Renderer) Extensions() []string {
|
|||
return setting.Markdown.FileExtensions
|
||||
}
|
||||
|
||||
// SanitizerRules implements markup.Renderer
|
||||
func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
|
||||
return []setting.MarkupSanitizerRule{}
|
||||
}
|
||||
|
||||
// Render implements markup.Renderer
|
||||
func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
|
||||
return render(ctx, input, output)
|
||||
|
|
|
@ -11,9 +11,13 @@ import (
|
|||
"io"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/highlight"
|
||||
"code.gitea.io/gitea/modules/markup"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
|
||||
"github.com/alecthomas/chroma"
|
||||
"github.com/alecthomas/chroma/lexers"
|
||||
"github.com/niklasfasching/go-org/org"
|
||||
)
|
||||
|
||||
|
@ -38,9 +42,55 @@ func (Renderer) Extensions() []string {
|
|||
return []string{".org"}
|
||||
}
|
||||
|
||||
// SanitizerRules implements markup.Renderer
|
||||
func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
|
||||
return []setting.MarkupSanitizerRule{}
|
||||
}
|
||||
|
||||
// Render renders orgmode rawbytes to HTML
|
||||
func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error {
|
||||
htmlWriter := org.NewHTMLWriter()
|
||||
htmlWriter.HighlightCodeBlock = func(source, lang string, inline bool) string {
|
||||
var w strings.Builder
|
||||
if _, err := w.WriteString(`<pre>`); err != nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
lexer := lexers.Get(lang)
|
||||
if lexer == nil && lang == "" {
|
||||
lexer = lexers.Analyse(source)
|
||||
if lexer == nil {
|
||||
lexer = lexers.Fallback
|
||||
}
|
||||
lang = strings.ToLower(lexer.Config().Name)
|
||||
}
|
||||
|
||||
if lexer == nil {
|
||||
// include language-x class as part of commonmark spec
|
||||
if _, err := w.WriteString(`<code class="chroma language-` + string(lang) + `">`); err != nil {
|
||||
return ""
|
||||
}
|
||||
if _, err := w.WriteString(html.EscapeString(source)); err != nil {
|
||||
return ""
|
||||
}
|
||||
} else {
|
||||
// include language-x class as part of commonmark spec
|
||||
if _, err := w.WriteString(`<code class="chroma language-` + string(lang) + `">`); err != nil {
|
||||
return ""
|
||||
}
|
||||
lexer = chroma.Coalesce(lexer)
|
||||
|
||||
if _, err := w.WriteString(highlight.Code(lexer.Config().Filenames[0], source)); err != nil {
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := w.WriteString("</code></pre>"); err != nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
return w.String()
|
||||
}
|
||||
|
||||
w := &Writer{
|
||||
HTMLWriter: htmlWriter,
|
||||
|
|
|
@ -81,6 +81,7 @@ type Renderer interface {
|
|||
Name() string // markup format name
|
||||
Extensions() []string
|
||||
NeedPostProcess() bool
|
||||
SanitizerRules() []setting.MarkupSanitizerRule
|
||||
Render(ctx *RenderContext, input io.Reader, output io.Writer) error
|
||||
}
|
||||
|
||||
|
@ -136,37 +137,32 @@ func render(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Wr
|
|||
_ = pw.Close()
|
||||
}()
|
||||
|
||||
if renderer.NeedPostProcess() {
|
||||
pr2, pw2 := io.Pipe()
|
||||
defer func() {
|
||||
_ = pr2.Close()
|
||||
_ = pw2.Close()
|
||||
}()
|
||||
pr2, pw2 := io.Pipe()
|
||||
defer func() {
|
||||
_ = pr2.Close()
|
||||
_ = pw2.Close()
|
||||
}()
|
||||
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
buf := SanitizeReader(pr2)
|
||||
_, err = io.Copy(output, buf)
|
||||
_ = pr2.Close()
|
||||
wg.Done()
|
||||
}()
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
buf := SanitizeReader(pr2, renderer.Name())
|
||||
_, err = io.Copy(output, buf)
|
||||
_ = pr2.Close()
|
||||
wg.Done()
|
||||
}()
|
||||
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
if renderer.NeedPostProcess() {
|
||||
err = PostProcess(ctx, pr, pw2)
|
||||
_ = pr.Close()
|
||||
_ = pw2.Close()
|
||||
wg.Done()
|
||||
}()
|
||||
} else {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
buf := SanitizeReader(pr)
|
||||
_, err = io.Copy(output, buf)
|
||||
_ = pr.Close()
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
} else {
|
||||
_, err = io.Copy(pw2, pr)
|
||||
}
|
||||
_ = pr.Close()
|
||||
_ = pw2.Close()
|
||||
wg.Done()
|
||||
}()
|
||||
|
||||
if err1 := renderer.Render(ctx, input, pw); err1 != nil {
|
||||
return err1
|
||||
}
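
Stripped of the io.Pipe plumbing and goroutines, the restructured pipeline always ends in the sanitizer (now selected per renderer name) and only inserts the post-processing stage when the renderer asks for it. The sketch below is conceptual only; renderToBuffer and postProcessToBuffer are hypothetical helpers used purely to show the ordering.

// renderSimplified restates the ordering; the real code streams between stages.
func renderSimplified(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Writer) error {
	rendered := renderToBuffer(ctx, renderer, input) // renderer.Render
	if renderer.NeedPostProcess() {
		rendered = postProcessToBuffer(ctx, rendered) // PostProcess
	}
	_, err := io.Copy(output, SanitizeReader(rendered, renderer.Name()))
	return err
}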
|
||||
|
|
|
@ -19,8 +19,9 @@ import (
|
|||
// Sanitizer is a protection wrapper of *bluemonday.Policy which does not allow
|
||||
// any modification to the underlying policies once it's been created.
|
||||
type Sanitizer struct {
|
||||
policy *bluemonday.Policy
|
||||
init sync.Once
|
||||
defaultPolicy *bluemonday.Policy
|
||||
rendererPolicies map[string]*bluemonday.Policy
|
||||
init sync.Once
|
||||
}
|
||||
|
||||
var sanitizer = &Sanitizer{}
|
||||
|
@ -30,47 +31,57 @@ var sanitizer = &Sanitizer{}
|
|||
// entire application lifecycle.
|
||||
func NewSanitizer() {
|
||||
sanitizer.init.Do(func() {
|
||||
ReplaceSanitizer()
|
||||
InitializeSanitizer()
|
||||
})
|
||||
}
|
||||
|
||||
// ReplaceSanitizer replaces the current sanitizer to account for changes in settings
|
||||
func ReplaceSanitizer() {
|
||||
sanitizer.policy = bluemonday.UGCPolicy()
|
||||
// InitializeSanitizer (re)initializes the current sanitizer to account for changes in settings
|
||||
func InitializeSanitizer() {
|
||||
sanitizer.rendererPolicies = map[string]*bluemonday.Policy{}
|
||||
sanitizer.defaultPolicy = createDefaultPolicy()
|
||||
|
||||
for name, renderer := range renderers {
|
||||
sanitizerRules := renderer.SanitizerRules()
|
||||
if len(sanitizerRules) > 0 {
|
||||
policy := createDefaultPolicy()
|
||||
addSanitizerRules(policy, sanitizerRules)
|
||||
sanitizer.rendererPolicies[name] = policy
|
||||
}
|
||||
}
|
||||
}
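
addSanitizerRules is called here but defined outside this hunk; a minimal sketch of what such a helper presumably does, assuming MarkupSanitizerRule exposes the Element, AllowAttr and Regexp fields used by the renderers above (the actual implementation may differ).

// addSanitizerRules is a hypothetical shape of the helper referenced above.
func addSanitizerRules(policy *bluemonday.Policy, rules []setting.MarkupSanitizerRule) {
	for _, rule := range rules {
		if rule.Regexp != nil {
			policy.AllowAttrs(rule.AllowAttr).Matching(rule.Regexp).OnElements(rule.Element)
		} else {
			policy.AllowAttrs(rule.AllowAttr).OnElements(rule.Element)
		}
	}
}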
|
||||
|
||||
func createDefaultPolicy() *bluemonday.Policy {
|
||||
policy := bluemonday.UGCPolicy()
|
||||
// For Chroma markdown plugin
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^is-loading$`)).OnElements("pre")
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^(chroma )?language-[\w-]+$`)).OnElements("code")
|
||||
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^is-loading$`)).OnElements("pre")
|
||||
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^(chroma )?language-[\w-]+$`)).OnElements("code")
|
||||
|
||||
// Checkboxes
|
||||
sanitizer.policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input")
|
||||
sanitizer.policy.AllowAttrs("checked", "disabled", "data-source-position").OnElements("input")
|
||||
policy.AllowAttrs("type").Matching(regexp.MustCompile(`^checkbox$`)).OnElements("input")
|
||||
policy.AllowAttrs("checked", "disabled", "data-source-position").OnElements("input")
|
||||
|
||||
// Custom URL-Schemes
|
||||
if len(setting.Markdown.CustomURLSchemes) > 0 {
|
||||
sanitizer.policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...)
|
||||
policy.AllowURLSchemes(setting.Markdown.CustomURLSchemes...)
|
||||
}
|
||||
|
||||
// Allow classes for anchors
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`ref-issue`)).OnElements("a")
|
||||
policy.AllowAttrs("class").Matching(regexp.MustCompile(`ref-issue`)).OnElements("a")
|
||||
|
||||
// Allow classes for task lists
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`task-list-item`)).OnElements("li")
|
||||
policy.AllowAttrs("class").Matching(regexp.MustCompile(`task-list-item`)).OnElements("li")
|
||||
|
||||
// Allow icons
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^icon(\s+[\p{L}\p{N}_-]+)+$`)).OnElements("i")
|
||||
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^icon(\s+[\p{L}\p{N}_-]+)+$`)).OnElements("i")
|
||||
|
||||
// Allow unlabelled labels
|
||||
sanitizer.policy.AllowNoAttrs().OnElements("label")
|
||||
policy.AllowNoAttrs().OnElements("label")
|
||||
|
||||
// Allow classes for emojis
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`emoji`)).OnElements("img")
|
||||
policy.AllowAttrs("class").Matching(regexp.MustCompile(`emoji`)).OnElements("img")
|
||||
|
||||
// Allow icons, emojis, chroma syntax and keyword markup on span
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`^((icon(\s+[\p{L}\p{N}_-]+)+)|(emoji))$|^([a-z][a-z0-9]{0,2})$|^` + keywordClass + `$`)).OnElements("span")
|
||||
|
||||
// Allow data tables
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`data-table`)).OnElements("table")
|
||||
sanitizer.policy.AllowAttrs("class").Matching(regexp.MustCompile(`line-num`)).OnElements("th", "td")
|
||||
policy.AllowAttrs("class").Matching(regexp.MustCompile(`^((icon(\s+[\p{L}\p{N}_-]+)+)|(emoji))$|^([a-z][a-z0-9]{0,2})$|^` + keywordClass + `$`)).OnElements("span")
|
||||
|
||||
// Allow generally safe attributes
|
||||
generalSafeAttrs := []string{"abbr", "accept", "accept-charset",
|
||||
|
@ -101,18 +112,29 @@ func ReplaceSanitizer() {
|
|||
"abbr", "bdo", "cite", "dfn", "mark", "small", "span", "time", "wbr",
|
||||
}
|
||||
|
||||
sanitizer.policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...)
|
||||
policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...)
|
||||
|
||||
sanitizer.policy.AllowAttrs("itemscope", "itemtype").OnElements("div")
|
||||
policy.AllowAttrs("itemscope", "itemtype").OnElements("div")
|
||||
|
||||
// FIXME: Need to handle longdesc in img but there is no easy way to do it
|
||||
|
||||
// Custom keyword markup
|
||||
for _, rule := range setting.ExternalSanitizerRules {
|
||||
if rule.Regexp != nil {
|
||||
sanitizer.policy.AllowAttrs(rule.AllowAttr).Matching(rule.Regexp).OnElements(rule.Element)
|
||||
} else {
|
||||
sanitizer.policy.AllowAttrs(rule.AllowAttr).OnElements(rule.Element)
|
||||
addSanitizerRules(policy, setting.ExternalSanitizerRules)
|
||||
|
||||
return policy
|
||||
}
|
||||
|
||||
func addSanitizerRules(policy *bluemonday.Policy, rules []setting.MarkupSanitizerRule) {
|
||||
for _, rule := range rules {
|
||||
if rule.AllowDataURIImages {
|
||||
policy.AllowDataURIImages()
|
||||
}
|
||||
if rule.Element != "" {
|
||||
if rule.Regexp != nil {
|
||||
policy.AllowAttrs(rule.AllowAttr).Matching(rule.Regexp).OnElements(rule.Element)
|
||||
} else {
|
||||
policy.AllowAttrs(rule.AllowAttr).OnElements(rule.Element)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -120,11 +142,15 @@ func ReplaceSanitizer() {
|
|||
// Sanitize takes a string that contains a HTML fragment or document and applies policy whitelist.
|
||||
func Sanitize(s string) string {
|
||||
NewSanitizer()
|
||||
return sanitizer.policy.Sanitize(s)
|
||||
return sanitizer.defaultPolicy.Sanitize(s)
|
||||
}
|
||||
|
||||
// SanitizeReader sanitizes a Reader
|
||||
func SanitizeReader(r io.Reader) *bytes.Buffer {
|
||||
func SanitizeReader(r io.Reader, renderer string) *bytes.Buffer {
|
||||
NewSanitizer()
|
||||
return sanitizer.policy.SanitizeReader(r)
|
||||
policy, exist := sanitizer.rendererPolicies[renderer]
|
||||
if !exist {
|
||||
policy = sanitizer.defaultPolicy
|
||||
}
|
||||
return policy.SanitizeReader(r)
|
||||
}
|
||||
|
|
|
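
Callers of the sanitizer now pass the renderer name, and names with no registered policy silently fall back to the default policy. A rough usage sketch, with made-up input and "markdown" as an example renderer name:

// Sketch only: calling the new two-argument SanitizeReader directly.
package main

import (
    "fmt"
    "strings"

    "code.gitea.io/gitea/modules/markup"
)

func main() {
    raw := `<span class="emoji">:)</span><script>alert(1)</script>`

    // Uses the policy registered for "markdown" if one exists,
    // otherwise the default policy; either way the <script> element is dropped.
    out := markup.SanitizeReader(strings.NewReader(raw), "markdown")
    fmt.Println(out.String())
}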

@@ -54,7 +54,7 @@ type HookOptions struct {
    GitAlternativeObjectDirectories string
    GitQuarantinePath               string
    GitPushOptions                  GitPushOptions
    ProtectedBranchID               int64
    PullRequestID                   int64
    IsDeployKey                     bool
}

@@ -58,7 +58,6 @@ type ServCommandResults struct {
// ErrServCommand is an error returned from ServCommmand.
type ErrServCommand struct {
    Results    ServCommandResults
    Type       string
    Err        string
    StatusCode int
}

@@ -5,6 +5,7 @@
package references

import (
    "bytes"
    "net/url"
    "regexp"
    "strconv"

@@ -14,6 +15,8 @@ import (
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/markup/mdstripper"
    "code.gitea.io/gitea/modules/setting"

    "github.com/yuin/goldmark/util"
)

var (

@@ -321,7 +324,7 @@ func FindRenderizableReferenceNumeric(content string, prOnly bool) (bool, *Rende
            return false, nil
        }
    }
    r := getCrossReference([]byte(content), match[2], match[3], false, prOnly)
    r := getCrossReference(util.StringToReadOnlyBytes(content), match[2], match[3], false, prOnly)
    if r == nil {
        return false, nil
    }

@@ -465,18 +468,17 @@ func findAllIssueReferencesBytes(content []byte, links []string) []*rawReference
}

func getCrossReference(content []byte, start, end int, fromLink bool, prOnly bool) *rawReference {
    refid := string(content[start:end])
    sep := strings.IndexAny(refid, "#!")
    sep := bytes.IndexAny(content[start:end], "#!")
    if sep < 0 {
        return nil
    }
    isPull := refid[sep] == '!'
    isPull := content[start+sep] == '!'
    if prOnly && !isPull {
        return nil
    }
    repo := refid[:sep]
    issue := refid[sep+1:]
    index, err := strconv.ParseInt(issue, 10, 64)
    repo := string(content[start : start+sep])
    issue := string(content[start+sep+1 : end])
    index, err := strconv.ParseInt(string(issue), 10, 64)
    if err != nil {
        return nil
    }
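
The rewritten getCrossReference works directly on the matched byte span instead of building an intermediate string. A small worked example of the index arithmetic, with made-up content and offsets:

// Sketch only: the byte-level splitting used for a reference like "org/repo!123".
package main

import (
    "bytes"
    "fmt"
    "strconv"
)

func main() {
    content := []byte("see org/repo!123 for details")
    start, end := 4, 16 // span of the matched reference "org/repo!123"

    sep := bytes.IndexAny(content[start:end], "#!") // 8, relative to start
    isPull := content[start+sep] == '!'             // '!' marks a pull request
    repo := string(content[start : start+sep])      // "org/repo"
    index, _ := strconv.ParseInt(string(content[start+sep+1:end]), 10, 64)

    fmt.Println(repo, index, isPull) // org/repo 123 true
}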

@@ -7,7 +7,6 @@ package setting
import (
    "time"

    "code.gitea.io/gitea/modules/git"
    "code.gitea.io/gitea/modules/log"
)

@@ -19,8 +18,8 @@ var (
        MaxGitDiffLines int
        MaxGitDiffLineCharacters int
        MaxGitDiffFiles int
        CommitsRangeSize int
        BranchesRangeSize int
        CommitsRangeSize int // CommitsRangeSize the default commits range size
        BranchesRangeSize int // BranchesRangeSize the default branches range size
        VerbosePush bool
        VerbosePushDelay time.Duration
        GCArgs []string `ini:"GC_ARGS" delim:" "`

@@ -54,7 +53,7 @@ var (
            Pull int
            GC int `ini:"GC"`
        }{
            Default: int(git.DefaultCommandExecutionTimeout / time.Second),
            Default: 360,
            Migrate: 600,
            Mirror: 300,
            Clone: 300,

@@ -68,35 +67,4 @@ func newGit() {
    if err := Cfg.Section("git").MapTo(&Git); err != nil {
        log.Fatal("Failed to map Git settings: %v", err)
    }
    if err := git.SetExecutablePath(Git.Path); err != nil {
        log.Fatal("Failed to initialize Git settings: %v", err)
    }
    git.DefaultCommandExecutionTimeout = time.Duration(Git.Timeout.Default) * time.Second

    version, err := git.LocalVersion()
    if err != nil {
        log.Fatal("Error retrieving git version: %v", err)
    }

    // force cleanup args
    git.GlobalCommandArgs = []string{}

    if git.CheckGitVersionAtLeast("2.9") == nil {
        // Explicitly disable credential helper, otherwise Git credentials might leak
        git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "credential.helper=")
    }

    var format = "Git Version: %s"
    var args = []interface{}{version.Original()}
    // Since git wire protocol has been released from git v2.18
    if Git.EnableAutoGitWireProtocol && git.CheckGitVersionAtLeast("2.18") == nil {
        git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "protocol.version=2")
        format += ", Wire Protocol %s Enabled"
        args = append(args, "Version 2") // for focus color
    }

    git.CommitsRangeSize = Git.CommitsRangeSize
    git.BranchesRangeSize = Git.BranchesRangeSize

    log.Info(format, args...)
}

@@ -9,7 +9,6 @@ import (
    "time"

    "code.gitea.io/gitea/modules/generate"
    "code.gitea.io/gitea/modules/git"
    "code.gitea.io/gitea/modules/log"

    ini "gopkg.in/ini.v1"

@@ -67,24 +66,3 @@ func newLFSService() {
        }
    }
}

// CheckLFSVersion will check lfs version, if not satisfied, then disable it.
func CheckLFSVersion() {
    if LFS.StartServer {
        //Disable LFS client hooks if installed for the current OS user
        //Needs at least git v2.1.2

        err := git.LoadGitVersion()
        if err != nil {
            log.Fatal("Error retrieving git version: %v", err)
        }

        if git.CheckGitVersionAtLeast("2.1.2") != nil {
            LFS.StartServer = false
            log.Error("LFS server support needs at least Git v2.1.2")
        } else {
            git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "filter.lfs.required=",
                "-c", "filter.lfs.smudge=", "-c", "filter.lfs.clean=")
        }
    }
}

@@ -15,31 +15,34 @@ import (

// ExternalMarkupRenderers represents the external markup renderers
var (
    ExternalMarkupRenderers []MarkupRenderer
    ExternalMarkupRenderers []*MarkupRenderer
    ExternalSanitizerRules  []MarkupSanitizerRule
)

// MarkupRenderer defines the external parser configured in ini
type MarkupRenderer struct {
    Enabled         bool
    MarkupName      string
    Command         string
    FileExtensions  []string
    IsInputFile     bool
    NeedPostProcess bool
    Enabled              bool
    MarkupName           string
    Command              string
    FileExtensions       []string
    IsInputFile          bool
    NeedPostProcess      bool
    MarkupSanitizerRules []MarkupSanitizerRule
}

// MarkupSanitizerRule defines the policy for whitelisting attributes on
// certain elements.
type MarkupSanitizerRule struct {
    Element   string
    AllowAttr string
    Regexp    *regexp.Regexp
    Element            string
    AllowAttr          string
    Regexp             *regexp.Regexp
    AllowDataURIImages bool
}

func newMarkup() {
    ExternalMarkupRenderers = make([]MarkupRenderer, 0, 10)
    ExternalMarkupRenderers = make([]*MarkupRenderer, 0, 10)
    ExternalSanitizerRules = make([]MarkupSanitizerRule, 0, 10)

    for _, sec := range Cfg.Section("markup").ChildSections() {
        name := strings.TrimPrefix(sec.Name(), "markup.")
        if name == "" {

@@ -56,50 +59,62 @@ func newMarkup() {
}

func newMarkupSanitizer(name string, sec *ini.Section) {
    haveElement := sec.HasKey("ELEMENT")
    haveAttr := sec.HasKey("ALLOW_ATTR")
    haveRegexp := sec.HasKey("REGEXP")
    rule, ok := createMarkupSanitizerRule(name, sec)
    if ok {
        if strings.HasPrefix(name, "sanitizer.") {
            names := strings.SplitN(strings.TrimPrefix(name, "sanitizer."), ".", 2)
            name = names[0]
        }
        for _, renderer := range ExternalMarkupRenderers {
            if name == renderer.MarkupName {
                renderer.MarkupSanitizerRules = append(renderer.MarkupSanitizerRules, rule)
                return
            }
        }
        ExternalSanitizerRules = append(ExternalSanitizerRules, rule)
    }
}

    if !haveElement && !haveAttr && !haveRegexp {
        log.Warn("Skipping empty section: markup.%s.", name)
        return
func createMarkupSanitizerRule(name string, sec *ini.Section) (MarkupSanitizerRule, bool) {
    var rule MarkupSanitizerRule

    ok := false
    if sec.HasKey("ALLOW_DATA_URI_IMAGES") {
        rule.AllowDataURIImages = sec.Key("ALLOW_DATA_URI_IMAGES").MustBool(false)
        ok = true
    }

    if !haveElement || !haveAttr || !haveRegexp {
        log.Error("Missing required keys from markup.%s. Must have all three of ELEMENT, ALLOW_ATTR, and REGEXP defined!", name)
        return
    }
    if sec.HasKey("ELEMENT") || sec.HasKey("ALLOW_ATTR") {
        rule.Element = sec.Key("ELEMENT").Value()
        rule.AllowAttr = sec.Key("ALLOW_ATTR").Value()

    elements := sec.Key("ELEMENT").Value()
    allowAttrs := sec.Key("ALLOW_ATTR").Value()
    regexpStr := sec.Key("REGEXP").Value()

    if regexpStr == "" {
        rule := MarkupSanitizerRule{
            Element: elements,
            AllowAttr: allowAttrs,
            Regexp: nil,
        if rule.Element == "" || rule.AllowAttr == "" {
            log.Error("Missing required values from markup.%s. Must have ELEMENT and ALLOW_ATTR defined!", name)
            return rule, false
        }

        ExternalSanitizerRules = append(ExternalSanitizerRules, rule)
        return
        regexpStr := sec.Key("REGEXP").Value()
        if regexpStr != "" {
            // Validate when parsing the config that this is a valid regular
            // expression. Then we can use regexp.MustCompile(...) later.
            compiled, err := regexp.Compile(regexpStr)
            if err != nil {
                log.Error("In markup.%s: REGEXP (%s) failed to compile: %v", name, regexpStr, err)
                return rule, false
            }

            rule.Regexp = compiled
        }

        ok = true
    }

    // Validate when parsing the config that this is a valid regular
    // expression. Then we can use regexp.MustCompile(...) later.
    compiled, err := regexp.Compile(regexpStr)
    if err != nil {
        log.Error("In module.%s: REGEXP (%s) at definition %d failed to compile: %v", regexpStr, name, err)
        return
    if !ok {
        log.Error("Missing required keys from markup.%s. Must have ELEMENT and ALLOW_ATTR or ALLOW_DATA_URI_IMAGES defined!", name)
        return rule, false
    }

    rule := MarkupSanitizerRule{
        Element: elements,
        AllowAttr: allowAttrs,
        Regexp: compiled,
    }

    ExternalSanitizerRules = append(ExternalSanitizerRules, rule)
    return rule, true
}

func newMarkupRenderer(name string, sec *ini.Section) {

@@ -126,7 +141,7 @@ func newMarkupRenderer(name string, sec *ini.Section) {
        return
    }

    ExternalMarkupRenderers = append(ExternalMarkupRenderers, MarkupRenderer{
    ExternalMarkupRenderers = append(ExternalMarkupRenderers, &MarkupRenderer{
        Enabled: sec.Key("ENABLED").MustBool(false),
        MarkupName: name,
        FileExtensions: exts,
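
With these settings changes a sanitizer section can carry an ALLOW_DATA_URI_IMAGES switch, and a section named with a "sanitizer.<renderer>." prefix is attached to the external renderer whose MarkupName matches instead of the global list. A sketch of the rule values such sections would produce; the section names and attribute values are invented for the example:

// Sketch only: rule values equivalent to two hypothetical [markup.sanitizer.*] sections.
package main

import (
    "fmt"
    "regexp"

    "code.gitea.io/gitea/modules/setting"
)

func main() {
    // Roughly what [markup.sanitizer.asciidoc.rule-1] with
    // ELEMENT = span, ALLOW_ATTR = class, REGEXP = ^badge$ would parse into;
    // it would be appended to the "asciidoc" renderer's MarkupSanitizerRules
    // when such a renderer is configured, and to ExternalSanitizerRules otherwise.
    attrRule := setting.MarkupSanitizerRule{
        Element:   "span",
        AllowAttr: "class",
        Regexp:    regexp.MustCompile(`^badge$`),
    }

    // A section that only sets ALLOW_DATA_URI_IMAGES = true is now accepted as well.
    dataURIRule := setting.MarkupSanitizerRule{AllowDataURIImages: true}

    fmt.Printf("%+v\n%+v\n", attrRule, dataURIRule)
}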

@@ -251,6 +251,10 @@ var (
    }
    RepoRootPath string
    ScriptType = "bash"

    RepoArchive = struct {
        Storage
    }{}
)

func newRepository() {

@@ -328,4 +332,6 @@ func newRepository() {
    if !filepath.IsAbs(Repository.Upload.TempPath) {
        Repository.Upload.TempPath = path.Join(AppWorkPath, Repository.Upload.TempPath)
    }

    RepoArchive.Storage = getStorage("repo-archive", "", nil)
}

@@ -6,6 +6,7 @@ package setting

import (
    "regexp"
    "strings"
    "time"

    "code.gitea.io/gitea/modules/log"

@@ -14,6 +15,8 @@ import (

// Service settings
var Service struct {
    DefaultUserVisibility string
    DefaultUserVisibilityMode structs.VisibleType
    DefaultOrgVisibility string
    DefaultOrgVisibilityMode structs.VisibleType
    ActiveCodeLives int

@@ -55,6 +58,7 @@ var Service struct {
    AutoWatchOnChanges bool
    DefaultOrgMemberVisible bool
    UserDeleteWithCommentsMaxTime time.Duration
    ValidSiteURLSchemes []string

    // OpenID settings
    EnableOpenIDSignIn bool

@@ -116,10 +120,22 @@ func newService() {
    Service.EnableUserHeatmap = sec.Key("ENABLE_USER_HEATMAP").MustBool(true)
    Service.AutoWatchNewRepos = sec.Key("AUTO_WATCH_NEW_REPOS").MustBool(true)
    Service.AutoWatchOnChanges = sec.Key("AUTO_WATCH_ON_CHANGES").MustBool(false)
    Service.DefaultUserVisibility = sec.Key("DEFAULT_USER_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes))
    Service.DefaultUserVisibilityMode = structs.VisibilityModes[Service.DefaultUserVisibility]
    Service.DefaultOrgVisibility = sec.Key("DEFAULT_ORG_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes))
    Service.DefaultOrgVisibilityMode = structs.VisibilityModes[Service.DefaultOrgVisibility]
    Service.DefaultOrgMemberVisible = sec.Key("DEFAULT_ORG_MEMBER_VISIBLE").MustBool()
    Service.UserDeleteWithCommentsMaxTime = sec.Key("USER_DELETE_WITH_COMMENTS_MAX_TIME").MustDuration(0)
    sec.Key("VALID_SITE_URL_SCHEMES").MustString("http,https")
    Service.ValidSiteURLSchemes = sec.Key("VALID_SITE_URL_SCHEMES").Strings(",")
    schemes := make([]string, len(Service.ValidSiteURLSchemes))
    for _, scheme := range Service.ValidSiteURLSchemes {
        scheme = strings.ToLower(strings.TrimSpace(scheme))
        if scheme != "" {
            schemes = append(schemes, scheme)
        }
    }
    Service.ValidSiteURLSchemes = schemes

    if err := Cfg.Section("service.explore").MapTo(&Service.Explore); err != nil {
        log.Fatal("Failed to map service.explore settings: %v", err)

@@ -469,7 +469,8 @@ func getWorkPath(appPath string) string {
func init() {
    IsWindows = runtime.GOOS == "windows"
    // We can rely on log.CanColorStdout being set properly because modules/log/console_windows.go comes before modules/setting/setting.go lexicographically
    log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "trace", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))
    // By default set this logger at Info - we'll change it later but we need to start with something.
    log.NewLogger(0, "console", "console", fmt.Sprintf(`{"level": "info", "colorize": %t, "stacktraceLevel": "none"}`, log.CanColorStdout))

    var err error
    if AppPath, err = getAppPath(); err != nil {

@@ -1158,6 +1159,19 @@ func CreateOrAppendToCustomConf(callback func(cfg *ini.File)) {
    if err := cfg.SaveTo(CustomConf); err != nil {
        log.Fatal("error saving to custom config: %v", err)
    }

    // Change permissions to be more restrictive
    fi, err := os.Stat(CustomConf)
    if err != nil {
        log.Error("Failed to determine current conf file permissions: %v", err)
        return
    }

    if fi.Mode().Perm() > 0o600 {
        if err = os.Chmod(CustomConf, 0o600); err != nil {
            log.Warn("Failed changing conf file permissions to -rw-------. Consider changing them manually.")
        }
    }
}

// NewServices initializes the services

@@ -43,6 +43,10 @@ func getStorage(name, typ string, targetSec *ini.Section) Storage {
    sec.Key("MINIO_LOCATION").MustString("us-east-1")
    sec.Key("MINIO_USE_SSL").MustBool(false)

    if targetSec == nil {
        targetSec, _ = Cfg.NewSection(name)
    }

    var storage Storage
    storage.Section = targetSec
    storage.Type = typ

@@ -114,6 +114,9 @@ var (
    Avatars ObjectStorage
    // RepoAvatars represents repository avatars storage
    RepoAvatars ObjectStorage

    // RepoArchives represents repository archives storage
    RepoArchives ObjectStorage
)

// Init init the stoarge

@@ -130,7 +133,11 @@ func Init() error {
        return err
    }

    return initLFS()
    if err := initLFS(); err != nil {
        return err
    }

    return initRepoArchives()
}

// NewStorage takes a storage type and some config and returns an ObjectStorage or an error

@@ -169,3 +176,9 @@ func initRepoAvatars() (err error) {
    RepoAvatars, err = NewStorage(setting.RepoAvatar.Storage.Type, &setting.RepoAvatar.Storage)
    return
}

func initRepoArchives() (err error) {
    log.Info("Initialising Repository Archive storage with type: %s", setting.RepoArchive.Storage.Type)
    RepoArchives, err = NewStorage(setting.RepoArchive.Storage.Type, &setting.RepoArchive.Storage)
    return
}

@@ -19,6 +19,7 @@ type CreateUserOption struct {
    Password string `json:"password" binding:"Required;MaxSize(255)"`
    MustChangePassword *bool `json:"must_change_password"`
    SendNotify bool `json:"send_notify"`
    Visibility string `json:"visibility" binding:"In(,public,limited,private)"`
}

// EditUserOption edit user options

@@ -43,4 +44,5 @@ type EditUserOption struct {
    ProhibitLogin *bool `json:"prohibit_login"`
    AllowCreateOrganization *bool `json:"allow_create_organization"`
    Restricted *bool `json:"restricted"`
    Visibility string `json:"visibility" binding:"In(,public,limited,private)"`
}

@@ -43,6 +43,8 @@ type User struct {
    Website string `json:"website"`
    // the user's description
    Description string `json:"description"`
    // User visibility level option: public, limited, private
    Visibility string `json:"visibility"`

    // user counts
    Followers int `json:"followers_count"`

@@ -60,3 +62,33 @@ func (u User) MarshalJSON() ([]byte, error) {
        CompatUserName string `json:"username"`
    }{shadow(u), u.UserName})
}

// UserSettings represents user settings
// swagger:model
type UserSettings struct {
    FullName string `json:"full_name"`
    Website string `json:"website"`
    Description string `json:"description"`
    Location string `json:"location"`
    Language string `json:"language"`
    Theme string `json:"theme"`
    DiffViewStyle string `json:"diff_view_style"`
    // Privacy
    HideEmail bool `json:"hide_email"`
    HideActivity bool `json:"hide_activity"`
}

// UserSettingsOptions represents options to change user settings
// swagger:model
type UserSettingsOptions struct {
    FullName *string `json:"full_name" binding:"MaxSize(100)"`
    Website *string `json:"website" binding:"OmitEmpty;ValidUrl;MaxSize(255)"`
    Description *string `json:"description" binding:"MaxSize(255)"`
    Location *string `json:"location" binding:"MaxSize(50)"`
    Language *string `json:"language"`
    Theme *string `json:"theme"`
    DiffViewStyle *string `json:"diff_view_style"`
    // Privacy
    HideEmail *bool `json:"hide_email"`
    HideActivity *bool `json:"hide_activity"`
}

@@ -19,6 +19,9 @@ const (

    // ErrGlobPattern is returned when glob pattern is invalid
    ErrGlobPattern = "GlobPattern"

    // ErrRegexPattern is returned when a regex pattern is invalid
    ErrRegexPattern = "RegexPattern"
)

var (

@@ -52,7 +55,10 @@ func CheckGitRefAdditionalRulesValid(name string) bool {
func AddBindingRules() {
    addGitRefNameBindingRule()
    addValidURLBindingRule()
    addValidSiteURLBindingRule()
    addGlobPatternRule()
    addRegexPatternRule()
    addGlobOrRegexPatternRule()
}

func addGitRefNameBindingRule() {

@@ -97,22 +103,78 @@ func addValidURLBindingRule() {
    })
}

func addValidSiteURLBindingRule() {
    // URL validation rule
    binding.AddRule(&binding.Rule{
        IsMatch: func(rule string) bool {
            return strings.HasPrefix(rule, "ValidSiteUrl")
        },
        IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
            str := fmt.Sprintf("%v", val)
            if len(str) != 0 && !IsValidSiteURL(str) {
                errs.Add([]string{name}, binding.ERR_URL, "Url")
                return false, errs
            }

            return true, errs
        },
    })
}

func addGlobPatternRule() {
    binding.AddRule(&binding.Rule{
        IsMatch: func(rule string) bool {
            return rule == "GlobPattern"
        },
        IsValid: globPatternValidator,
    })
}

func globPatternValidator(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
    str := fmt.Sprintf("%v", val)

    if len(str) != 0 {
        if _, err := glob.Compile(str); err != nil {
            errs.Add([]string{name}, ErrGlobPattern, err.Error())
            return false, errs
        }
    }

    return true, errs
}

func addRegexPatternRule() {
    binding.AddRule(&binding.Rule{
        IsMatch: func(rule string) bool {
            return rule == "RegexPattern"
        },
        IsValid: regexPatternValidator,
    })
}

func regexPatternValidator(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
    str := fmt.Sprintf("%v", val)

    if _, err := regexp.Compile(str); err != nil {
        errs.Add([]string{name}, ErrRegexPattern, err.Error())
        return false, errs
    }

    return true, errs
}

func addGlobOrRegexPatternRule() {
    binding.AddRule(&binding.Rule{
        IsMatch: func(rule string) bool {
            return rule == "GlobOrRegexPattern"
        },
        IsValid: func(errs binding.Errors, name string, val interface{}) (bool, binding.Errors) {
            str := fmt.Sprintf("%v", val)
            str := strings.TrimSpace(fmt.Sprintf("%v", val))

            if len(str) != 0 {
                if _, err := glob.Compile(str); err != nil {
                    errs.Add([]string{name}, ErrGlobPattern, err.Error())
                    return false, errs
                }
            if len(str) >= 2 && strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") {
                return regexPatternValidator(errs, name, str[1:len(str)-1])
            }

            return true, errs
            return globPatternValidator(errs, name, val)
        },
    })
}
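
The new GlobOrRegexPattern binding treats a value wrapped in slashes as a regular expression and anything else as a glob. A standalone sketch of that decision, assuming the gobwas/glob package the validators use; the sample patterns are invented:

// Sketch only: mirrors the slash-delimited regex vs. glob choice made by the binding rule.
package main

import (
    "fmt"
    "regexp"
    "strings"

    "github.com/gobwas/glob"
)

func classify(pattern string) string {
    str := strings.TrimSpace(pattern)
    if len(str) >= 2 && strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") {
        if _, err := regexp.Compile(str[1 : len(str)-1]); err != nil {
            return "invalid regex: " + err.Error()
        }
        return "regex"
    }
    if _, err := glob.Compile(str); err != nil {
        return "invalid glob: " + err.Error()
    }
    return "glob"
}

func main() {
    fmt.Println(classify(`/^release-v\d+$/`)) // regex
    fmt.Println(classify("release-*"))        // glob
}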

@@ -26,9 +26,10 @@ type (
    }

    TestForm struct {
        BranchName  string `form:"BranchName" binding:"GitRefName"`
        URL         string `form:"ValidUrl" binding:"ValidUrl"`
        GlobPattern string `form:"GlobPattern" binding:"GlobPattern"`
        BranchName   string `form:"BranchName" binding:"GitRefName"`
        URL          string `form:"ValidUrl" binding:"ValidUrl"`
        GlobPattern  string `form:"GlobPattern" binding:"GlobPattern"`
        RegexPattern string `form:"RegexPattern" binding:"RegexPattern"`
    }
)

@@ -52,6 +52,25 @@ func IsValidURL(uri string) bool {
    return true
}

// IsValidSiteURL checks if URL is valid
func IsValidSiteURL(uri string) bool {
    u, err := url.ParseRequestURI(uri)
    if err != nil {
        return false
    }

    if !validPort(portOnly(u.Host)) {
        return false
    }

    for _, scheme := range setting.Service.ValidSiteURLSchemes {
        if scheme == u.Scheme {
            return true
        }
    }
    return false
}

// IsAPIURL checks if URL is current Gitea instance API URL
func IsAPIURL(uri string) bool {
    return strings.HasPrefix(strings.ToLower(uri), strings.ToLower(setting.AppURL+"api"))
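
IsValidSiteURL accepts only schemes listed in Service.ValidSiteURLSchemes, which newService fills from VALID_SITE_URL_SCHEMES (defaulting to http,https). A rough usage sketch that fills the whitelist by hand instead of going through newService:

// Sketch only: exercising the new helper outside the normal startup path.
package main

import (
    "fmt"

    "code.gitea.io/gitea/modules/setting"
    "code.gitea.io/gitea/modules/validation"
)

func main() {
    setting.Service.ValidSiteURLSchemes = []string{"http", "https"}

    fmt.Println(validation.IsValidSiteURL("https://gitea.example.com")) // true
    fmt.Println(validation.IsValidSiteURL("javascript:alert(1)"))       // false: scheme not allowed
}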
Some files were not shown because too many files have changed in this diff.