Mirror of https://github.com/go-gitea/gitea.git, synced 2025-12-13 18:32:54 +00:00

Compare commits: 7d6861ac54...main (38 commits)
Commits (SHA1):

29057ea55f
ac8308b5cb
1e72b15639
3102c04c1e
3e57ba5b36
4c06c98dda
87b855bd15
906adff0c1
4cbcb91b7b
bfbc38f40c
d2a372fc59
f25409fab8
01351cc6c7
a440116a16
24b81ac8b9
1c69fdccdd
ed698d1a61
d83a071db9
69700f9cdd
98ef79d73a
b41ccb0627
c287a8cdb5
ca8c4ebecd
6675ddc117
5fdc84841a
64960a18f9
cb5082f8fe
ee365f5100
b49dd8e32f
ee6e371e44
e30a130b9a
97cb4409fb
46d7adefe0
9f268edd2f
ca4b21c305
a04a16dc2b
1e777f92c7
5340db4dbe
.github/workflows/cron-licenses.yml (vendored, 4 changed lines)

@@ -9,8 +9,10 @@ jobs:
cron-licenses:
runs-on: ubuntu-latest
if: github.repository == 'go-gitea/gitea'
permissions:
contents: write
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod
.github/workflows/cron-translations.yml (vendored, 4 changed lines)

@@ -9,8 +9,10 @@ jobs:
crowdin-pull:
runs-on: ubuntu-latest
if: github.repository == 'go-gitea/gitea'
permissions:
contents: write
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: crowdin/github-action@v1
with:
upload_sources: true
.github/workflows/files-changed.yml (vendored, 4 changed lines)

@@ -24,6 +24,8 @@ jobs:
detect:
runs-on: ubuntu-latest
timeout-minutes: 3
permissions:
contents: read
outputs:
backend: ${{ steps.changes.outputs.backend }}
frontend: ${{ steps.changes.outputs.frontend }}

@@ -34,7 +36,7 @@ jobs:
swagger: ${{ steps.changes.outputs.swagger }}
yaml: ${{ steps.changes.outputs.yaml }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: dorny/paths-filter@v3
id: changes
with:
.github/workflows/pull-compliance.yml (vendored, 50 changed lines)

@@ -10,13 +10,17 @@ concurrency:
jobs:
files-changed:
uses: ./.github/workflows/files-changed.yml
permissions:
contents: read

lint-backend:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -30,8 +34,10 @@ jobs:
if: needs.files-changed.outputs.templates == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: astral-sh/setup-uv@v6
- run: uv python install 3.12
- uses: pnpm/action-setup@v4

@@ -46,8 +52,10 @@ jobs:
if: needs.files-changed.outputs.yaml == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: astral-sh/setup-uv@v6
- run: uv python install 3.12
- run: make deps-py

@@ -57,8 +65,10 @@ jobs:
if: needs.files-changed.outputs.swagger == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v4
- uses: actions/setup-node@v5
with:

@@ -70,8 +80,10 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.templates == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -82,8 +94,10 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -99,8 +113,10 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -114,8 +130,10 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -127,8 +145,10 @@ jobs:
if: needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v4
- uses: actions/setup-node@v5
with:

@@ -143,8 +163,10 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -175,8 +197,10 @@ jobs:
if: needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v4
- uses: actions/setup-node@v5
with:

@@ -188,8 +212,10 @@ jobs:
if: needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod
.github/workflows/pull-db-tests.yml (vendored, 22 changed lines)

@@ -10,11 +10,15 @@ concurrency:
jobs:
files-changed:
uses: ./.github/workflows/files-changed.yml
permissions:
contents: read

test-pgsql:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
pgsql:
image: postgres:14

@@ -38,7 +42,7 @@ jobs:
ports:
- "9000:9000"
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -65,8 +69,10 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -90,6 +96,8 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
elasticsearch:
image: elasticsearch:7.5.0

@@ -124,7 +132,7 @@ jobs:
ports:
- 10000:10000
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -152,6 +160,8 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
mysql:
# the bitnami mysql image has more options than the official one, it's easier to customize

@@ -177,7 +187,7 @@ jobs:
- "587:587"
- "993:993"
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod

@@ -203,6 +213,8 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
mssql:
image: mcr.microsoft.com/mssql/server:2019-latest

@@ -217,7 +229,7 @@ jobs:
ports:
- 10000:10000
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version-file: go.mod
.github/workflows/pull-docker-dryrun.yml (vendored, 6 changed lines)

@@ -10,13 +10,17 @@ concurrency:
jobs:
files-changed:
uses: ./.github/workflows/files-changed.yml
permissions:
contents: read

container:
if: needs.files-changed.outputs.docker == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: docker/setup-buildx-action@v3
- name: Build regular container image
uses: docker/build-push-action@v5
.github/workflows/release-nightly.yml (vendored, 8 changed lines)

@@ -11,8 +11,10 @@ concurrency:
jobs:
nightly-binary:
runs-on: namespace-profile-gitea-release-binary
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
# fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
- run: git fetch --unshallow --quiet --tags --force

@@ -56,12 +58,14 @@ jobs:
- name: upload binaries to s3
run: |
aws s3 sync dist/release s3://${{ secrets.AWS_S3_BUCKET }}/gitea/${{ steps.clean_name.outputs.branch }} --no-progress

nightly-container:
runs-on: namespace-profile-gitea-release-docker
permissions:
contents: read
packages: write # to publish to ghcr.io
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
# fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
- run: git fetch --unshallow --quiet --tags --force
.github/workflows/release-tag-rc.yml (vendored, 8 changed lines)

@@ -12,8 +12,10 @@ concurrency:
jobs:
binary:
runs-on: namespace-profile-gitea-release-binary
permissions:
contents: read
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
# fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
- run: git fetch --unshallow --quiet --tags --force

@@ -66,12 +68,14 @@ jobs:
gh release create ${{ github.ref_name }} --title ${{ github.ref_name }} --draft --notes-from-tag dist/release/*
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}

container:
runs-on: namespace-profile-gitea-release-docker
permissions:
contents: read
packages: write # to publish to ghcr.io
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
# fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
- run: git fetch --unshallow --quiet --tags --force
.github/workflows/release-tag-version.yml (vendored, 7 changed lines)

@@ -15,9 +15,10 @@ jobs:
binary:
runs-on: namespace-profile-gitea-release-binary
permissions:
contents: read
packages: write # to publish to ghcr.io
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
# fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
- run: git fetch --unshallow --quiet --tags --force

@@ -70,12 +71,14 @@ jobs:
gh release create ${{ github.ref_name }} --title ${{ github.ref_name }} --notes-from-tag dist/release/*
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}

container:
runs-on: namespace-profile-gitea-release-docker
permissions:
contents: read
packages: write # to publish to ghcr.io
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# fetch all commits instead of only the last as some branches are long lived and could have many between versions
# fetch all tags to ensure that "git describe" reports expected Gitea version, eg. v1.21.0-dev-1-g1234567
- run: git fetch --unshallow --quiet --tags --force
Makefile (2 changed lines)

@@ -32,7 +32,7 @@ XGO_VERSION := go-1.25.x
AIR_PACKAGE ?= github.com/air-verse/air@v1
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.9.2
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.6.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.0
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.15
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.7.0
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.33.1
@@ -205,7 +205,7 @@ export default defineConfig([
'@typescript-eslint/no-non-null-asserted-optional-chain': [2],
'@typescript-eslint/no-non-null-assertion': [0],
'@typescript-eslint/no-redeclare': [0],
'@typescript-eslint/no-redundant-type-constituents': [0], // rule does not properly work without strickNullChecks
'@typescript-eslint/no-redundant-type-constituents': [2],
'@typescript-eslint/no-require-imports': [2],
'@typescript-eslint/no-restricted-imports': [0],
'@typescript-eslint/no-restricted-types': [0],
go.mod (2 changed lines)

@@ -2,7 +2,7 @@ module code.gitea.io/gitea

go 1.25.0

toolchain go1.25.4
toolchain go1.25.5

// rfc5280 said: "The serial number is an integer assigned by the CA to each certificate."
// But some CAs use negative serial number, just relax the check. related:
@@ -276,8 +276,14 @@ func GetActionsUserRepoPermission(ctx context.Context, repo *repo_model.Reposito
if !actionsCfg.IsCollaborativeOwner(taskRepo.OwnerID) || !taskRepo.IsPrivate {
// The task repo can access the current repo only if the task repo is private and
// the owner of the task repo is a collaborative owner of the current repo.
// FIXME allow public repo read access if tokenless pull is enabled
// FIXME should owner's visibility also be considered here?

// check permission like simple user but limit to read-only
perm, err = GetUserRepoPermission(ctx, repo, user_model.NewActionsUser())
if err != nil {
return perm, err
}
perm.AccessMode = min(perm.AccessMode, perm_model.AccessModeRead)
return perm, nil
}
accessMode = perm_model.AccessModeRead
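The hunk above resolves permissions for the Actions token like a normal user and then caps the result at read access. A minimal sketch of that clamping pattern, using a made-up `AccessMode` type rather than Gitea's `perm_model` (the `min` builtin needs Go 1.21+):

```go
package main

import "fmt"

// AccessMode is a hypothetical ordered permission level; higher values grant more access.
type AccessMode int

const (
	AccessModeNone AccessMode = iota
	AccessModeRead
	AccessModeWrite
	AccessModeAdmin
)

// clampToRead resolves whatever level the normal lookup produced,
// then caps it at read-only for an automation token.
func clampToRead(resolved AccessMode) AccessMode {
	return min(resolved, AccessModeRead)
}

func main() {
	fmt.Println(clampToRead(AccessModeAdmin) == AccessModeRead) // true
	fmt.Println(clampToRead(AccessModeNone) == AccessModeNone)  // true
}
```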
@@ -869,16 +869,6 @@ func GetRepositoriesMapByIDs(ctx context.Context, ids []int64) (map[int64]*Repos
return repos, db.GetEngine(ctx).In("id", ids).Find(&repos)
}

// IsRepositoryModelOrDirExist returns true if the repository with given name under user has already existed.
func IsRepositoryModelOrDirExist(ctx context.Context, u *user_model.User, repoName string) (bool, error) {
has, err := IsRepositoryModelExist(ctx, u, repoName)
if err != nil {
return false, err
}
isDir, err := util.IsDir(RepoPath(u.Name, repoName))
return has || isDir, err
}

func IsRepositoryModelExist(ctx context.Context, u *user_model.User, repoName string) (bool, error) {
return db.GetEngine(ctx).Get(&Repository{
OwnerID: u.ID,
@@ -9,8 +9,6 @@ import (
"time"

"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
)
@@ -106,35 +104,6 @@ func (err ErrRepoFilesAlreadyExist) Unwrap() error {
return util.ErrAlreadyExist
}

// CheckCreateRepository check if doer could create a repository in new owner
func CheckCreateRepository(ctx context.Context, doer, owner *user_model.User, name string, overwriteOrAdopt bool) error {
if !doer.CanCreateRepoIn(owner) {
return ErrReachLimitOfRepo{owner.MaxRepoCreation}
}

if err := IsUsableRepoName(name); err != nil {
return err
}

has, err := IsRepositoryModelOrDirExist(ctx, owner, name)
if err != nil {
return fmt.Errorf("IsRepositoryExist: %w", err)
} else if has {
return ErrRepoAlreadyExist{owner.Name, name}
}

repoPath := RepoPath(owner.Name, name)
isExist, err := util.IsExist(repoPath)
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
return err
}
if !overwriteOrAdopt && isExist {
return ErrRepoFilesAlreadyExist{owner.Name, name}
}
return nil
}

// UpdateRepoSize updates the repository size, calculating it using getDirectorySize
func UpdateRepoSize(ctx context.Context, repoID, gitSize, lfsSize int64) error {
_, err := db.GetEngine(ctx).ID(repoID).Cols("size", "git_size", "lfs_size").NoAutoTime().Update(&Repository{
@@ -18,6 +18,23 @@ import (
"xorm.io/xorm"
)

// AdminUserOrderByMap represents all possible admin user search orders
// This should only be used for admin API endpoints as we should not expose "updated" ordering which could expose recent user activity including logins.
var AdminUserOrderByMap = map[string]map[string]db.SearchOrderBy{
"asc": {
"name": db.SearchOrderByAlphabetically,
"created": db.SearchOrderByOldest,
"updated": db.SearchOrderByLeastUpdated,
"id": db.SearchOrderByID,
},
"desc": {
"name": db.SearchOrderByAlphabeticallyReverse,
"created": db.SearchOrderByNewest,
"updated": db.SearchOrderByRecentUpdated,
"id": db.SearchOrderByIDReverse,
},
}

// SearchUserOptions contains the options for searching
type SearchUserOptions struct {
db.ListOptions
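AdminUserOrderByMap is keyed first by direction and then by column, so only whitelisted orderings can ever reach the database. A hedged sketch of the same lookup pattern with illustrative ORDER BY strings (not Gitea's db.SearchOrderBy values):

```go
package main

import "fmt"

// orderByMap whitelists the ORDER BY fragments a caller may select;
// anything not in the map falls back to a safe default.
var orderByMap = map[string]map[string]string{
	"asc":  {"name": "name ASC", "created": "created_unix ASC", "id": "id ASC"},
	"desc": {"name": "name DESC", "created": "created_unix DESC", "id": "id DESC"},
}

// resolveOrderBy maps untrusted query parameters onto a fixed clause.
func resolveOrderBy(direction, column string) string {
	if cols, ok := orderByMap[direction]; ok {
		if clause, ok := cols[column]; ok {
			return clause
		}
	}
	return "id ASC" // default ordering for anything unrecognised
}

func main() {
	fmt.Println(resolveOrderBy("desc", "created"))  // created_unix DESC
	fmt.Println(resolveOrderBy("desc", "password")) // id ASC (rejected)
}
```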
@@ -5,12 +5,10 @@ package charset

import (
"bytes"
"fmt"
"io"
"strings"
"unicode/utf8"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
@@ -23,60 +21,39 @@
var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'}

type ConvertOpts struct {
KeepBOM bool
KeepBOM bool
ErrorReplacement []byte
ErrorReturnOrigin bool
}

var ToUTF8WithFallbackReaderPrefetchSize = 16 * 1024

// ToUTF8WithFallbackReader detects the encoding of content and converts to UTF-8 reader if possible
func ToUTF8WithFallbackReader(rd io.Reader, opts ConvertOpts) io.Reader {
buf := make([]byte, 2048)
buf := make([]byte, ToUTF8WithFallbackReaderPrefetchSize)
n, err := util.ReadAtMost(rd, buf)
if err != nil {
return io.MultiReader(bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)), rd)
}

charsetLabel, err := DetectEncoding(buf[:n])
if err != nil || charsetLabel == "UTF-8" {
return io.MultiReader(bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)), rd)
}

encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
// read error occurs, don't do any processing
return io.MultiReader(bytes.NewReader(buf[:n]), rd)
}

return transform.NewReader(
io.MultiReader(
bytes.NewReader(MaybeRemoveBOM(buf[:n], opts)),
rd,
),
encoding.NewDecoder(),
)
}

// ToUTF8 converts content to UTF8 encoding
func ToUTF8(content []byte, opts ConvertOpts) (string, error) {
charsetLabel, err := DetectEncoding(content)
if err != nil {
return "", err
} else if charsetLabel == "UTF-8" {
return string(MaybeRemoveBOM(content, opts)), nil
charsetLabel, _ := DetectEncoding(buf[:n])
if charsetLabel == "UTF-8" {
// is utf-8, try to remove BOM and read it as-is
return io.MultiReader(bytes.NewReader(maybeRemoveBOM(buf[:n], opts)), rd)
}

encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return string(content), fmt.Errorf("Unknown encoding: %s", charsetLabel)
// unknown charset, don't do any processing
return io.MultiReader(bytes.NewReader(buf[:n]), rd)
}

// If there is an error, we concatenate the nicely decoded part and the
// original left over. This way we won't lose much data.
result, n, err := transform.Bytes(encoding.NewDecoder(), content)
if err != nil {
result = append(result, content[n:]...)
}

result = MaybeRemoveBOM(result, opts)

return string(result), err
// convert from charset to utf-8
return transform.NewReader(
io.MultiReader(bytes.NewReader(buf[:n]), rd),
encoding.NewDecoder(),
)
}

// ToUTF8WithFallback detects the encoding of content and converts to UTF-8 if possible
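The reworked ToUTF8WithFallbackReader prefetches a configurable chunk, sniffs it, and then glues the chunk back in front of the unread remainder before wrapping the whole stream in a decoder. A self-contained sketch of that prefetch-then-wrap pattern, using a fixed ISO-8859-1 decoder in place of DetectEncoding:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"unicode/utf8"

	"golang.org/x/text/encoding/charmap"
	"golang.org/x/text/transform"
)

// utf8OrLatin1Reader reads a small prefix to decide whether the stream already
// looks like UTF-8; if not, it decodes the whole stream (prefix included) as ISO-8859-1.
func utf8OrLatin1Reader(rd io.Reader, prefetchSize int) io.Reader {
	buf := make([]byte, prefetchSize)
	n, _ := io.ReadFull(rd, buf) // a short read is fine; we only sniff what arrived
	prefix := buf[:n]

	// Re-assemble the original byte stream: prefix first, then the unread remainder.
	full := io.MultiReader(bytes.NewReader(prefix), rd)
	if utf8.Valid(prefix) {
		return full // treat as UTF-8 and pass through unchanged
	}
	return transform.NewReader(full, charmap.ISO8859_1.NewDecoder())
}

func main() {
	latin1 := []byte{'d', 0xe9, 'c', 'o', 'r'} // "décor" in ISO-8859-1
	out, _ := io.ReadAll(utf8OrLatin1Reader(bytes.NewReader(latin1), 4))
	fmt.Println(string(out)) // décor
}
```

A prefix that happens to cut a multi-byte UTF-8 character in half would fool the plain utf8.Valid check here; that truncation case is exactly what DetectEncoding's fast path below takes care of.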
@@ -85,73 +62,84 @@ func ToUTF8WithFallback(content []byte, opts ConvertOpts) []byte {
return bs
}

// ToUTF8DropErrors makes sure the return string is valid utf-8; attempts conversion if possible
func ToUTF8DropErrors(content []byte, opts ConvertOpts) []byte {
charsetLabel, err := DetectEncoding(content)
if err != nil || charsetLabel == "UTF-8" {
return MaybeRemoveBOM(content, opts)
func ToUTF8DropErrors(content []byte) []byte {
return ToUTF8(content, ConvertOpts{ErrorReplacement: []byte{' '}})
}

func ToUTF8(content []byte, opts ConvertOpts) []byte {
charsetLabel, _ := DetectEncoding(content)
if charsetLabel == "UTF-8" {
return maybeRemoveBOM(content, opts)
}

encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
setting.PanicInDevOrTesting("unsupported detected charset %q, it shouldn't happen", charsetLabel)
return content
}

// We ignore any non-decodable parts from the file.
// Some parts might be lost
var decoded []byte
decoder := encoding.NewDecoder()
idx := 0
for {
for idx < len(content) {
result, n, err := transform.Bytes(decoder, content[idx:])
decoded = append(decoded, result...)
if err == nil {
break
}
decoded = append(decoded, ' ')
idx = idx + n + 1
if idx >= len(content) {
break
if opts.ErrorReturnOrigin {
return content
}
if opts.ErrorReplacement == nil {
decoded = append(decoded, content[idx+n])
} else {
decoded = append(decoded, opts.ErrorReplacement...)
}
idx += n + 1
}

return MaybeRemoveBOM(decoded, opts)
return maybeRemoveBOM(decoded, opts)
}

// MaybeRemoveBOM removes a UTF-8 BOM from a []byte when opts.KeepBOM is false
func MaybeRemoveBOM(content []byte, opts ConvertOpts) []byte {
// maybeRemoveBOM removes a UTF-8 BOM from a []byte when opts.KeepBOM is false
func maybeRemoveBOM(content []byte, opts ConvertOpts) []byte {
if opts.KeepBOM {
return content
}
if len(content) > 2 && bytes.Equal(content[0:3], UTF8BOM) {
return content[3:]
}
return content
return bytes.TrimPrefix(content, UTF8BOM)
}

// DetectEncoding detect the encoding of content
func DetectEncoding(content []byte) (string, error) {
// it always returns a detected or guessed "encoding" string, no matter error happens or not
func DetectEncoding(content []byte) (encoding string, _ error) {
// First we check if the content represents valid utf8 content excepting a truncated character at the end.

// Now we could decode all the runes in turn but this is not necessarily the cheapest thing to do
// instead we walk backwards from the end to trim off a the incomplete character
// instead we walk backwards from the end to trim off the incomplete character
toValidate := content
end := len(toValidate) - 1

if end < 0 {
// no-op
} else if toValidate[end]>>5 == 0b110 {
// Incomplete 1 byte extension e.g. © <c2><a9> which has been truncated to <c2>
toValidate = toValidate[:end]
} else if end > 0 && toValidate[end]>>6 == 0b10 && toValidate[end-1]>>4 == 0b1110 {
// Incomplete 2 byte extension e.g. ⛔ <e2><9b><94> which has been truncated to <e2><9b>
toValidate = toValidate[:end-1]
} else if end > 1 && toValidate[end]>>6 == 0b10 && toValidate[end-1]>>6 == 0b10 && toValidate[end-2]>>3 == 0b11110 {
// Incomplete 3 byte extension e.g. 💩 <f0><9f><92><a9> which has been truncated to <f0><9f><92>
toValidate = toValidate[:end-2]
// U+0000 U+007F 0yyyzzzz
// U+0080 U+07FF 110xxxyy 10yyzzzz
// U+0800 U+FFFF 1110wwww 10xxxxyy 10yyzzzz
// U+010000 U+10FFFF 11110uvv 10vvwwww 10xxxxyy 10yyzzzz
cnt := 0
for end >= 0 && cnt < 4 {
c := toValidate[end]
if c>>5 == 0b110 || c>>4 == 0b1110 || c>>3 == 0b11110 {
// a leading byte
toValidate = toValidate[:end]
break
} else if c>>6 == 0b10 {
// a continuation byte
end--
} else {
// not an utf-8 byte
break
}
cnt++
}

if utf8.Valid(toValidate) {
log.Debug("Detected encoding: utf-8 (fast)")
return "UTF-8", nil
}

@@ -160,7 +148,7 @@ func DetectEncoding(content []byte) (string, error) {
if len(content) < 1024 {
// Check if original content is valid
if _, err := textDetector.DetectBest(content); err != nil {
return "", err
return util.IfZero(setting.Repository.AnsiCharset, "UTF-8"), err
}
times := 1024 / len(content)
detectContent = make([]byte, 0, times*len(content))

@@ -171,14 +159,10 @@ func DetectEncoding(content []byte) (string, error) {
detectContent = content
}

// Now we can't use DetectBest or just results[0] because the result isn't stable - so we need a tie break
// Now we can't use DetectBest or just results[0] because the result isn't stable - so we need a tie-break
results, err := textDetector.DetectAll(detectContent)
if err != nil {
if err == chardet.NotDetectedError && len(setting.Repository.AnsiCharset) > 0 {
log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
return setting.Repository.AnsiCharset, nil
}
return "", err
return util.IfZero(setting.Repository.AnsiCharset, "UTF-8"), err
}

topConfidence := results[0].Confidence

@@ -201,11 +185,9 @@ func DetectEncoding(content []byte) (string, error) {
}

// FIXME: to properly decouple this function the fallback ANSI charset should be passed as an argument
if topResult.Charset != "UTF-8" && len(setting.Repository.AnsiCharset) > 0 {
log.Debug("Using default AnsiCharset: %s", setting.Repository.AnsiCharset)
if topResult.Charset != "UTF-8" && setting.Repository.AnsiCharset != "" {
return setting.Repository.AnsiCharset, err
}

log.Debug("Detected encoding: %s", topResult.Charset)
return topResult.Charset, err
return topResult.Charset, nil
}
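DetectEncoding's fast path wants to call utf8.Valid even when the prefetch window may have cut the final character in half, so it first walks backwards over the trailing bytes. A standalone sketch of that trimming step (not the package's actual helper):

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

// trimPartialRune conservatively drops a trailing multi-byte UTF-8 sequence
// (complete or truncated) so that a buffer cut off mid-rune can still be
// recognised as UTF-8 by utf8.Valid.
func trimPartialRune(b []byte) []byte {
	end := len(b)
	// Walk back over at most 3 continuation bytes (10xxxxxx).
	for i := 0; i < 3 && end > 0 && b[end-1]>>6 == 0b10; i++ {
		end--
	}
	if end == 0 {
		return b[:0]
	}
	c := b[end-1]
	// If the byte before the continuation run is a multi-byte leading byte,
	// the final sequence may have been truncated: cut it off entirely.
	if c>>5 == 0b110 || c>>4 == 0b1110 || c>>3 == 0b11110 {
		return b[:end-1]
	}
	return b // the tail is not a multi-byte sequence; leave it alone
}

func main() {
	full := []byte("a💩") // 'a' followed by a 4-byte emoji
	cut := full[:3]       // emoji truncated after 2 of its 4 bytes
	fmt.Println(utf8.Valid(cut))                  // false
	fmt.Println(utf8.Valid(trimPartialRune(cut))) // true: only "a" remains
}
```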
@@ -4,108 +4,89 @@
|
||||
package charset
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/test"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func resetDefaultCharsetsOrder() {
|
||||
defaultDetectedCharsetsOrder := make([]string, 0, len(setting.Repository.DetectedCharsetsOrder))
|
||||
for _, charset := range setting.Repository.DetectedCharsetsOrder {
|
||||
defaultDetectedCharsetsOrder = append(defaultDetectedCharsetsOrder, strings.ToLower(strings.TrimSpace(charset)))
|
||||
}
|
||||
func TestMain(m *testing.M) {
|
||||
setting.Repository.DetectedCharsetScore = map[string]int{}
|
||||
i := 0
|
||||
for _, charset := range defaultDetectedCharsetsOrder {
|
||||
canonicalCharset := strings.ToLower(strings.TrimSpace(charset))
|
||||
if _, has := setting.Repository.DetectedCharsetScore[canonicalCharset]; !has {
|
||||
setting.Repository.DetectedCharsetScore[canonicalCharset] = i
|
||||
i++
|
||||
}
|
||||
for i, charset := range setting.Repository.DetectedCharsetsOrder {
|
||||
setting.Repository.DetectedCharsetScore[strings.ToLower(charset)] = i
|
||||
}
|
||||
os.Exit(m.Run())
|
||||
}
|
||||
|
||||
func TestMaybeRemoveBOM(t *testing.T) {
|
||||
res := MaybeRemoveBOM([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res := maybeRemoveBOM([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
res = MaybeRemoveBOM([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res = maybeRemoveBOM([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
}
|
||||
|
||||
func TestToUTF8(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
|
||||
// Note: golang compiler seems so behave differently depending on the current
|
||||
// locale, so some conversions might behave differently. For that reason, we don't
|
||||
// depend on particular conversions but in expected behaviors.
|
||||
|
||||
res, err := ToUTF8([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "ABC", res)
|
||||
res := ToUTF8([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
assert.Equal(t, "ABC", string(res))
|
||||
|
||||
// "áéíóú"
|
||||
res, err = ToUTF8([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, []byte(res))
|
||||
res = ToUTF8([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
// "áéíóú"
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3,
|
||||
0xc3, 0xba,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, []byte(res))
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
|
||||
0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
stringMustStartWith(t, "Hola,", res)
|
||||
stringMustEndWith(t, "AAA.", res)
|
||||
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
|
||||
0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
stringMustStartWith(t, "Hola,", res)
|
||||
stringMustEndWith(t, "AAA.", res)
|
||||
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63,
|
||||
0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73, 0x41, 0x41, 0x41, 0x2e,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
stringMustStartWith(t, "Hola,", res)
|
||||
stringMustEndWith(t, "AAA.", res)
|
||||
|
||||
// Japanese (Shift-JIS)
|
||||
// 日属秘ぞしちゅ。
|
||||
res, err = ToUTF8([]byte{
|
||||
res = ToUTF8([]byte{
|
||||
0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82,
|
||||
0xBF, 0x82, 0xE3, 0x81, 0x42,
|
||||
}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{
|
||||
0xE6, 0x97, 0xA5, 0xE5, 0xB1, 0x9E, 0xE7, 0xA7, 0x98, 0xE3,
|
||||
0x81, 0x9E, 0xE3, 0x81, 0x97, 0xE3, 0x81, 0xA1, 0xE3, 0x82, 0x85, 0xE3, 0x80, 0x82,
|
||||
},
|
||||
[]byte(res))
|
||||
}, res)
|
||||
|
||||
res, err = ToUTF8([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, []byte(res))
|
||||
res = ToUTF8([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
|
||||
}
|
||||
|
||||
func TestToUTF8WithFallback(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
// "ABC"
|
||||
res := ToUTF8WithFallback([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
assert.Equal(t, []byte{0x41, 0x42, 0x43}, res)
|
||||
@@ -152,54 +133,58 @@ func TestToUTF8WithFallback(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestToUTF8DropErrors(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
// "ABC"
|
||||
res := ToUTF8DropErrors([]byte{0x41, 0x42, 0x43}, ConvertOpts{})
|
||||
res := ToUTF8DropErrors([]byte{0x41, 0x42, 0x43})
|
||||
assert.Equal(t, []byte{0x41, 0x42, 0x43}, res)
|
||||
|
||||
// "áéíóú"
|
||||
res = ToUTF8DropErrors([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
// UTF8 BOM + "áéíóú"
|
||||
res = ToUTF8DropErrors([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0xef, 0xbb, 0xbf, 0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba})
|
||||
assert.Equal(t, []byte{0xc3, 0xa1, 0xc3, 0xa9, 0xc3, 0xad, 0xc3, 0xb3, 0xc3, 0xba}, res)
|
||||
|
||||
// "Hola, así cómo ños"
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0xF1, 0x6F, 0x73})
|
||||
assert.Equal(t, []byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73}, res[:8])
|
||||
assert.Equal(t, []byte{0x73}, res[len(res)-1:])
|
||||
|
||||
// "Hola, así cómo "
|
||||
minmatch := []byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xC3, 0xAD, 0x20, 0x63, 0xC3, 0xB3, 0x6D, 0x6F, 0x20}
|
||||
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x07, 0xA4, 0x6F, 0x73})
|
||||
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
|
||||
assert.Equal(t, minmatch, res[0:len(minmatch)])
|
||||
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x48, 0x6F, 0x6C, 0x61, 0x2C, 0x20, 0x61, 0x73, 0xED, 0x20, 0x63, 0xF3, 0x6D, 0x6F, 0x20, 0x81, 0xA4, 0x6F, 0x73})
|
||||
// Do not fail for differences in invalid cases, as the library might change the conversion criteria for those
|
||||
assert.Equal(t, minmatch, res[0:len(minmatch)])
|
||||
|
||||
// Japanese (Shift-JIS)
|
||||
// "日属秘ぞしちゅ。"
|
||||
res = ToUTF8DropErrors([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x93, 0xFA, 0x91, 0xAE, 0x94, 0xE9, 0x82, 0xBC, 0x82, 0xB5, 0x82, 0xBF, 0x82, 0xE3, 0x81, 0x42})
|
||||
assert.Equal(t, []byte{
|
||||
0xE6, 0x97, 0xA5, 0xE5, 0xB1, 0x9E, 0xE7, 0xA7, 0x98, 0xE3,
|
||||
0x81, 0x9E, 0xE3, 0x81, 0x97, 0xE3, 0x81, 0xA1, 0xE3, 0x82, 0x85, 0xE3, 0x80, 0x82,
|
||||
}, res)
|
||||
|
||||
res = ToUTF8DropErrors([]byte{0x00, 0x00, 0x00, 0x00}, ConvertOpts{})
|
||||
res = ToUTF8DropErrors([]byte{0x00, 0x00, 0x00, 0x00})
|
||||
assert.Equal(t, []byte{0x00, 0x00, 0x00, 0x00}, res)
|
||||
}
|
||||
|
||||
func TestDetectEncoding(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
testSuccess := func(b []byte, expected string) {
|
||||
encoding, err := DetectEncoding(b)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expected, encoding)
|
||||
}
|
||||
|
||||
// invalid bytes
|
||||
encoding, err := DetectEncoding([]byte{0xfa})
|
||||
assert.Error(t, err)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
// utf-8
|
||||
b := []byte("just some ascii")
|
||||
testSuccess(b, "UTF-8")
|
||||
@@ -214,169 +199,49 @@ func TestDetectEncoding(t *testing.T) {
|
||||
|
||||
// iso-8859-1: d<accented e>cor<newline>
|
||||
b = []byte{0x44, 0xe9, 0x63, 0x6f, 0x72, 0x0a}
|
||||
encoding, err := DetectEncoding(b)
|
||||
encoding, err = DetectEncoding(b)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, encoding, "ISO-8859-1")
|
||||
|
||||
old := setting.Repository.AnsiCharset
|
||||
setting.Repository.AnsiCharset = "placeholder"
|
||||
defer func() {
|
||||
setting.Repository.AnsiCharset = old
|
||||
}()
|
||||
testSuccess(b, "placeholder")
|
||||
|
||||
// invalid bytes
|
||||
b = []byte{0xfa}
|
||||
_, err = DetectEncoding(b)
|
||||
assert.Error(t, err)
|
||||
defer test.MockVariableValue(&setting.Repository.AnsiCharset, "MyEncoding")()
|
||||
testSuccess(b, "MyEncoding")
|
||||
}
|
||||
|
||||
func stringMustStartWith(t *testing.T, expected, value string) {
|
||||
assert.Equal(t, expected, value[:len(expected)])
|
||||
func stringMustStartWith(t *testing.T, expected string, value []byte) {
|
||||
assert.Equal(t, expected, string(value[:len(expected)]))
|
||||
}
|
||||
|
||||
func stringMustEndWith(t *testing.T, expected, value string) {
|
||||
assert.Equal(t, expected, value[len(value)-len(expected):])
|
||||
func stringMustEndWith(t *testing.T, expected string, value []byte) {
|
||||
assert.Equal(t, expected, string(value[len(value)-len(expected):]))
|
||||
}
|
||||
|
||||
func TestToUTF8WithFallbackReader(t *testing.T) {
|
||||
resetDefaultCharsetsOrder()
|
||||
test.MockVariableValue(&ToUTF8WithFallbackReaderPrefetchSize)
|
||||
|
||||
for testLen := range 2048 {
|
||||
pattern := " test { () }\n"
|
||||
input := ""
|
||||
for len(input) < testLen {
|
||||
input += pattern
|
||||
}
|
||||
input = input[:testLen]
|
||||
input += "// Выключаем"
|
||||
rd := ToUTF8WithFallbackReader(bytes.NewReader([]byte(input)), ConvertOpts{})
|
||||
block := "aá啊🤔"
|
||||
runes := []rune(block)
|
||||
assert.Len(t, string(runes[0]), 1)
|
||||
assert.Len(t, string(runes[1]), 2)
|
||||
assert.Len(t, string(runes[2]), 3)
|
||||
assert.Len(t, string(runes[3]), 4)
|
||||
|
||||
content := strings.Repeat(block, 2)
|
||||
for i := 1; i < len(content); i++ {
|
||||
encoding, err := DetectEncoding([]byte(content[:i]))
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
ToUTF8WithFallbackReaderPrefetchSize = i
|
||||
rd := ToUTF8WithFallbackReader(strings.NewReader(content), ConvertOpts{})
|
||||
r, _ := io.ReadAll(rd)
|
||||
assert.Equalf(t, input, string(r), "testing string len=%d", testLen)
|
||||
assert.Equal(t, content, string(r))
|
||||
}
|
||||
for _, r := range runes {
|
||||
content = "abc abc " + string(r) + string(r) + string(r)
|
||||
for i := 0; i < len(content); i++ {
|
||||
encoding, err := DetectEncoding([]byte(content[:i]))
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
}
|
||||
}
|
||||
|
||||
truncatedOneByteExtension := failFastBytes
|
||||
encoding, _ := DetectEncoding(truncatedOneByteExtension)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
truncatedTwoByteExtension := failFastBytes
|
||||
truncatedTwoByteExtension[len(failFastBytes)-1] = 0x9b
|
||||
truncatedTwoByteExtension[len(failFastBytes)-2] = 0xe2
|
||||
|
||||
encoding, _ = DetectEncoding(truncatedTwoByteExtension)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
|
||||
truncatedThreeByteExtension := failFastBytes
|
||||
truncatedThreeByteExtension[len(failFastBytes)-1] = 0x92
|
||||
truncatedThreeByteExtension[len(failFastBytes)-2] = 0x9f
|
||||
truncatedThreeByteExtension[len(failFastBytes)-3] = 0xf0
|
||||
|
||||
encoding, _ = DetectEncoding(truncatedThreeByteExtension)
|
||||
assert.Equal(t, "UTF-8", encoding)
|
||||
}
|
||||
|
||||
var failFastBytes = []byte{
|
||||
0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x70, 0x61, 0x63, 0x68, 0x65, 0x2e, 0x74, 0x6f,
|
||||
0x6f, 0x6c, 0x73, 0x2e, 0x61, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x73, 0x6b, 0x64, 0x65, 0x66, 0x73, 0x2e, 0x63, 0x6f, 0x6e,
|
||||
0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x4f, 0x73, 0x0a, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6f, 0x72, 0x67,
|
||||
0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f,
|
||||
0x74, 0x2e, 0x67, 0x72, 0x61, 0x64, 0x6c, 0x65, 0x2e, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x2e, 0x72, 0x75, 0x6e, 0x2e, 0x42,
|
||||
0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x0a, 0x0a, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x20, 0x7b, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x64, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d,
|
||||
0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x22, 0x29, 0x0a, 0x7d, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x65,
|
||||
0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65,
|
||||
0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a,
|
||||
0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x61, 0x70, 0x69, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d,
|
||||
0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74,
|
||||
0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x61, 0x70, 0x69, 0x2d, 0x64, 0x6f, 0x63, 0x73, 0x22, 0x29,
|
||||
0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
|
||||
0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x64, 0x62,
|
||||
0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69,
|
||||
0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a,
|
||||
0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65,
|
||||
0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a,
|
||||
0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x3a, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x66,
|
||||
0x73, 0x22, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74,
|
||||
0x69, 0x6f, 0x6e, 0x28, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x22, 0x3a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
|
||||
0x3a, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x6d, 0x71, 0x22, 0x29, 0x29, 0x0a, 0x0a,
|
||||
0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22,
|
||||
0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e,
|
||||
0x2d, 0x61, 0x75, 0x74, 0x68, 0x2d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2d, 0x73, 0x74, 0x61, 0x72, 0x74,
|
||||
0x65, 0x72, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74,
|
||||
0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63,
|
||||
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d, 0x68, 0x61, 0x6c, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c,
|
||||
0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a, 0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e,
|
||||
0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x29, 0x0a,
|
||||
0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28,
|
||||
0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b,
|
||||
0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d, 0x73, 0x74,
|
||||
0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x77, 0x65, 0x62, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c,
|
||||
0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69,
|
||||
0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72,
|
||||
0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x61, 0x6f, 0x70,
|
||||
0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f,
|
||||
0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f,
|
||||
0x72, 0x6b, 0x2e, 0x62, 0x6f, 0x6f, 0x74, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x2d,
|
||||
0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x61, 0x63, 0x74, 0x75, 0x61, 0x74, 0x6f, 0x72, 0x22, 0x29, 0x0a, 0x20,
|
||||
0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f,
|
||||
0x72, 0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63,
|
||||
0x6c, 0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74,
|
||||
0x61, 0x72, 0x74, 0x65, 0x72, 0x2d, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x22, 0x29, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72,
|
||||
0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63, 0x6c,
|
||||
0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74, 0x61,
|
||||
0x72, 0x74, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2d, 0x61, 0x6c, 0x6c, 0x22, 0x29, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72,
|
||||
0x67, 0x2e, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x63, 0x6c,
|
||||
0x6f, 0x75, 0x64, 0x3a, 0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2d, 0x73, 0x74, 0x61,
|
||||
0x72, 0x74, 0x65, 0x72, 0x2d, 0x73, 0x6c, 0x65, 0x75, 0x74, 0x68, 0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d,
|
||||
0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6f, 0x72, 0x67, 0x2e, 0x73, 0x70,
|
||||
0x72, 0x69, 0x6e, 0x67, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x77, 0x6f, 0x72, 0x6b, 0x2e, 0x72, 0x65, 0x74, 0x72, 0x79, 0x3a,
|
||||
0x73, 0x70, 0x72, 0x69, 0x6e, 0x67, 0x2d, 0x72, 0x65, 0x74, 0x72, 0x79, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
||||
0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x63, 0x68, 0x2e, 0x71,
|
||||
0x6f, 0x73, 0x2e, 0x6c, 0x6f, 0x67, 0x62, 0x61, 0x63, 0x6b, 0x3a, 0x6c, 0x6f, 0x67, 0x62, 0x61, 0x63, 0x6b, 0x2d, 0x63,
|
||||
0x6c, 0x61, 0x73, 0x73, 0x69, 0x63, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d,
|
||||
0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x69, 0x6f, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6d, 0x65,
|
||||
0x74, 0x65, 0x72, 0x3a, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2d, 0x72, 0x65, 0x67, 0x69, 0x73,
|
||||
0x74, 0x72, 0x79, 0x2d, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6b, 0x6f, 0x74,
|
||||
0x6c, 0x69, 0x6e, 0x28, 0x22, 0x73, 0x74, 0x64, 0x6c, 0x69, 0x62, 0x22, 0x29, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
||||
0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f,
|
||||
0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x65, 0x73, 0x74, 0x20, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64,
|
||||
0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f,
|
||||
0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74,
|
||||
0x65, 0x73, 0x74, 0x49, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x22, 0x6a,
|
||||
0x66, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x65, 0x3a, 0x70, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2d,
|
||||
0x74, 0x65, 0x73, 0x74, 0x22, 0x29, 0x0a, 0x7d, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x20, 0x70, 0x61, 0x74, 0x63, 0x68, 0x4a,
|
||||
0x61, 0x72, 0x20, 0x62, 0x79, 0x20, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x2e, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72,
|
||||
0x69, 0x6e, 0x67, 0x28, 0x4a, 0x61, 0x72, 0x3a, 0x3a, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20,
|
||||
0x20, 0x20, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x65, 0x72, 0x2e,
|
||||
0x73, 0x65, 0x74, 0x28, 0x22, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x64, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20,
|
||||
0x76, 0x61, 0x6c, 0x20, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x70, 0x61, 0x74, 0x68,
|
||||
0x20, 0x62, 0x79, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x67,
|
||||
0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74,
|
||||
0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
|
||||
0x73, 0x28, 0x22, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x2d, 0x50, 0x61, 0x74, 0x68, 0x22, 0x20, 0x74, 0x6f, 0x20, 0x6f, 0x62,
|
||||
0x6a, 0x65, 0x63, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70,
|
||||
0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x20, 0x76, 0x61, 0x6c, 0x20, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x20, 0x3d,
|
||||
0x20, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x3a, 0x2f, 0x2b, 0x22, 0x2e, 0x74, 0x6f, 0x52, 0x65, 0x67, 0x65, 0x78, 0x28, 0x29,
|
||||
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64,
|
||||
0x65, 0x20, 0x66, 0x75, 0x6e, 0x20, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x3a, 0x20, 0x53, 0x74,
|
||||
0x72, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x70,
|
||||
0x61, 0x74, 0x68, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x6f, 0x53, 0x74, 0x72, 0x69,
|
||||
0x6e, 0x67, 0x28, 0x22, 0x20, 0x22, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x74, 0x2e, 0x74, 0x6f, 0x55, 0x52, 0x49, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x55,
|
||||
0x52, 0x4c, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c,
|
||||
0x61, 0x63, 0x65, 0x46, 0x69, 0x72, 0x73, 0x74, 0x28, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x2c, 0x20, 0x22, 0x2f,
|
||||
0x22, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20,
|
||||
0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a, 0x0a, 0x74, 0x61, 0x73,
|
||||
0x6b, 0x73, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x3c, 0x42, 0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x3e, 0x28, 0x22, 0x62,
|
||||
0x6f, 0x6f, 0x74, 0x52, 0x75, 0x6e, 0x22, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x4f,
|
||||
0x73, 0x2e, 0x69, 0x73, 0x46, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x28, 0x4f, 0x73, 0x2e, 0x46, 0x41, 0x4d, 0x49, 0x4c, 0x59,
|
||||
0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x53, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20,
|
||||
0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x70, 0x61, 0x74, 0x68, 0x20, 0x3d, 0x20, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x28, 0x73,
|
||||
0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x65, 0x74, 0x73, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x64, 0x28, 0x22, 0x6d, 0x61, 0x69,
|
||||
0x6e, 0x22, 0x29, 0x2e, 0x6d, 0x61, 0x70, 0x20, 0x7b, 0x20, 0x69, 0x74, 0x2e, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x20,
|
||||
0x7d, 0x2c, 0x20, 0x70, 0x61, 0x74, 0x63, 0x68, 0x4a, 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a,
|
||||
0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0xd0,
|
||||
}
|
||||
|
||||
@@ -76,7 +76,7 @@ func (m *MaterialIconProvider) renderFileIconSVG(p *RenderedIconPool, name, svg,
|
||||
if p.IconSVGs[svgID] == "" {
|
||||
p.IconSVGs[svgID] = svgHTML
|
||||
}
|
||||
return template.HTML(`<svg ` + svgCommonAttrs + `><use xlink:href="#` + svgID + `"></use></svg>`)
|
||||
return template.HTML(`<svg ` + svgCommonAttrs + `><use href="#` + svgID + `"></use></svg>`)
|
||||
}
|
||||
|
||||
func (m *MaterialIconProvider) EntryIconHTML(p *RenderedIconPool, entry *EntryInfo) template.HTML {
|
||||
|
||||
@@ -25,7 +25,7 @@ func (p *RenderedIconPool) RenderToHTML() template.HTML {
|
||||
return ""
|
||||
}
|
||||
sb := &strings.Builder{}
|
||||
sb.WriteString(`<div class=tw-hidden>`)
|
||||
sb.WriteString(`<div class="svg-icon-container">`)
|
||||
for _, icon := range p.IconSVGs {
|
||||
sb.WriteString(string(icon))
|
||||
}
|
||||
|
||||
@@ -96,8 +96,8 @@ func (attrs *Attributes) GetGitlabLanguage() optional.Option[string] {
|
||||
// gitlab-language may have additional parameters after the language
|
||||
// ignore them and just use the main language
|
||||
// https://docs.gitlab.com/ee/user/project/highlighting.html#override-syntax-highlighting-for-a-file-type
|
||||
if idx := strings.IndexByte(raw, '?'); idx >= 0 {
|
||||
return optional.Some(raw[:idx])
|
||||
if before, _, ok := strings.Cut(raw, "?"); ok {
|
||||
return optional.Some(before)
|
||||
}
|
||||
}
|
||||
return attrStr
|
||||
|
||||
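Several hunks in this changeset replace manual strings.IndexByte plus slice arithmetic with strings.Cut (available since Go 1.18). A minimal stand-alone sketch of the equivalence, using an illustrative gitlab-language value rather than anything taken from this diff:

package main

import (
	"fmt"
	"strings"
)

func main() {
	raw := "ruby?parent=json" // illustrative value with trailing parameters

	// Old pattern: locate the separator and slice manually.
	if idx := strings.IndexByte(raw, '?'); idx >= 0 {
		fmt.Println(raw[:idx]) // "ruby"
	}

	// New pattern: strings.Cut returns both halves plus an ok flag in one call.
	if before, _, ok := strings.Cut(raw, "?"); ok {
		fmt.Println(before) // "ruby"
	}
}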
@@ -5,17 +5,13 @@
|
||||
package git
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/git/gitcmd"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
@@ -130,65 +126,6 @@ func CommitChanges(ctx context.Context, repoPath string, opts CommitChangesOptio
|
||||
return err
|
||||
}
|
||||
|
||||
// AllCommitsCount returns count of all commits in repository
|
||||
func AllCommitsCount(ctx context.Context, repoPath string, hidePRRefs bool, files ...string) (int64, error) {
|
||||
cmd := gitcmd.NewCommand("rev-list")
|
||||
if hidePRRefs {
|
||||
cmd.AddArguments("--exclude=" + PullPrefix + "*")
|
||||
}
|
||||
cmd.AddArguments("--all", "--count")
|
||||
if len(files) > 0 {
|
||||
cmd.AddDashesAndList(files...)
|
||||
}
|
||||
|
||||
stdout, _, err := cmd.WithDir(repoPath).RunStdString(ctx)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return strconv.ParseInt(strings.TrimSpace(stdout), 10, 64)
|
||||
}
|
||||
|
||||
// CommitsCountOptions the options when counting commits
|
||||
type CommitsCountOptions struct {
|
||||
RepoPath string
|
||||
Not string
|
||||
Revision []string
|
||||
RelPath []string
|
||||
Since string
|
||||
Until string
|
||||
}
|
||||
|
||||
// CommitsCount returns the total number of commits up to the given revision.
|
||||
func CommitsCount(ctx context.Context, opts CommitsCountOptions) (int64, error) {
|
||||
cmd := gitcmd.NewCommand("rev-list", "--count")
|
||||
|
||||
cmd.AddDynamicArguments(opts.Revision...)
|
||||
|
||||
if opts.Not != "" {
|
||||
cmd.AddOptionValues("--not", opts.Not)
|
||||
}
|
||||
|
||||
if len(opts.RelPath) > 0 {
|
||||
cmd.AddDashesAndList(opts.RelPath...)
|
||||
}
|
||||
|
||||
stdout, _, err := cmd.WithDir(opts.RepoPath).RunStdString(ctx)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return strconv.ParseInt(strings.TrimSpace(stdout), 10, 64)
|
||||
}
|
||||
|
||||
// CommitsCount returns the total number of commits up to the current revision.
|
||||
func (c *Commit) CommitsCount() (int64, error) {
|
||||
return CommitsCount(c.repo.Ctx, CommitsCountOptions{
|
||||
RepoPath: c.repo.Path,
|
||||
Revision: []string{c.ID.String()},
|
||||
})
|
||||
}
|
||||
|
||||
// CommitsByRange returns the commits on the given page before the current revision; the page size defaults to CommitsRangeSize
|
||||
func (c *Commit) CommitsByRange(page, pageSize int, not, since, until string) ([]*Commit, error) {
|
||||
return c.repo.commitsByRangeWithTime(c.ID, page, pageSize, not, since, until)
|
||||
@@ -371,85 +308,6 @@ func (c *Commit) GetBranchName() (string, error) {
|
||||
return strings.SplitN(strings.TrimSpace(data), "~", 2)[0], nil
|
||||
}
|
||||
|
||||
// CommitFileStatus represents status of files in a commit.
|
||||
type CommitFileStatus struct {
|
||||
Added []string
|
||||
Removed []string
|
||||
Modified []string
|
||||
}
|
||||
|
||||
// NewCommitFileStatus creates a CommitFileStatus
|
||||
func NewCommitFileStatus() *CommitFileStatus {
|
||||
return &CommitFileStatus{
|
||||
[]string{}, []string{}, []string{},
|
||||
}
|
||||
}
|
||||
|
||||
func parseCommitFileStatus(fileStatus *CommitFileStatus, stdout io.Reader) {
|
||||
rd := bufio.NewReader(stdout)
|
||||
peek, err := rd.Peek(1)
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
if peek[0] == '\n' || peek[0] == '\x00' {
|
||||
_, _ = rd.Discard(1)
|
||||
}
|
||||
for {
|
||||
modifier, err := rd.ReadString('\x00')
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
file, err := rd.ReadString('\x00')
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
file = file[:len(file)-1]
|
||||
switch modifier[0] {
|
||||
case 'A':
|
||||
fileStatus.Added = append(fileStatus.Added, file)
|
||||
case 'D':
|
||||
fileStatus.Removed = append(fileStatus.Removed, file)
|
||||
case 'M':
|
||||
fileStatus.Modified = append(fileStatus.Modified, file)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GetCommitFileStatus returns file status of commit in given repository.
|
||||
func GetCommitFileStatus(ctx context.Context, repoPath, commitID string) (*CommitFileStatus, error) {
|
||||
stdout, w := io.Pipe()
|
||||
done := make(chan struct{})
|
||||
fileStatus := NewCommitFileStatus()
|
||||
go func() {
|
||||
parseCommitFileStatus(fileStatus, stdout)
|
||||
close(done)
|
||||
}()
|
||||
|
||||
stderr := new(bytes.Buffer)
|
||||
err := gitcmd.NewCommand("log", "--name-status", "-m", "--pretty=format:", "--first-parent", "--no-renames", "-z", "-1").
|
||||
AddDynamicArguments(commitID).
|
||||
WithDir(repoPath).
|
||||
WithStdout(w).
|
||||
WithStderr(stderr).
|
||||
Run(ctx)
|
||||
w.Close() // Close writer to exit parsing goroutine
|
||||
if err != nil {
|
||||
return nil, gitcmd.ConcatenateError(err, stderr.String())
|
||||
}
|
||||
|
||||
<-done
|
||||
return fileStatus, nil
|
||||
}
|
||||
|
||||
// GetFullCommitID returns the full-length commit ID for the given short SHA in a repository.
|
||||
func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, error) {
|
||||
commitID, _, err := gitcmd.NewCommand("rev-parse").
|
||||
|
||||
@@ -14,33 +14,6 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestCommitsCountSha256(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare_sha256")
|
||||
|
||||
commitsCount, err := CommitsCount(t.Context(),
|
||||
CommitsCountOptions{
|
||||
RepoPath: bareRepo1Path,
|
||||
Revision: []string{"f004f41359117d319dedd0eaab8c5259ee2263da839dcba33637997458627fdc"},
|
||||
})
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(3), commitsCount)
|
||||
}
|
||||
|
||||
func TestCommitsCountWithoutBaseSha256(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare_sha256")
|
||||
|
||||
commitsCount, err := CommitsCount(t.Context(),
|
||||
CommitsCountOptions{
|
||||
RepoPath: bareRepo1Path,
|
||||
Not: "main",
|
||||
Revision: []string{"branch1"},
|
||||
})
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(2), commitsCount)
|
||||
}
|
||||
|
||||
func TestGetFullCommitIDSha256(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare_sha256")
|
||||
|
||||
@@ -157,39 +130,3 @@ func TestHasPreviousCommitSha256(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
assert.False(t, selfNot)
|
||||
}
|
||||
|
||||
func TestGetCommitFileStatusMergesSha256(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo6_merge_sha256")
|
||||
|
||||
commitFileStatus, err := GetCommitFileStatus(t.Context(), bareRepo1Path, "d2e5609f630dd8db500f5298d05d16def282412e3e66ed68cc7d0833b29129a1")
|
||||
assert.NoError(t, err)
|
||||
|
||||
expected := CommitFileStatus{
|
||||
[]string{
|
||||
"add_file.txt",
|
||||
},
|
||||
[]string{},
|
||||
[]string{
|
||||
"to_modify.txt",
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected.Added, commitFileStatus.Added)
|
||||
assert.Equal(t, expected.Removed, commitFileStatus.Removed)
|
||||
assert.Equal(t, expected.Modified, commitFileStatus.Modified)
|
||||
|
||||
expected = CommitFileStatus{
|
||||
[]string{},
|
||||
[]string{
|
||||
"to_remove.txt",
|
||||
},
|
||||
[]string{},
|
||||
}
|
||||
|
||||
commitFileStatus, err = GetCommitFileStatus(t.Context(), bareRepo1Path, "da1ded40dc8e5b7c564171f4bf2fc8370487decfb1cb6a99ef28f3ed73d09172")
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, expected.Added, commitFileStatus.Added)
|
||||
assert.Equal(t, expected.Removed, commitFileStatus.Removed)
|
||||
assert.Equal(t, expected.Modified, commitFileStatus.Modified)
|
||||
}
|
||||
|
||||
@@ -13,33 +13,6 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestCommitsCount(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
|
||||
|
||||
commitsCount, err := CommitsCount(t.Context(),
|
||||
CommitsCountOptions{
|
||||
RepoPath: bareRepo1Path,
|
||||
Revision: []string{"8006ff9adbf0cb94da7dad9e537e53817f9fa5c0"},
|
||||
})
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(3), commitsCount)
|
||||
}
|
||||
|
||||
func TestCommitsCountWithoutBase(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
|
||||
|
||||
commitsCount, err := CommitsCount(t.Context(),
|
||||
CommitsCountOptions{
|
||||
RepoPath: bareRepo1Path,
|
||||
Not: "master",
|
||||
Revision: []string{"branch1"},
|
||||
})
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(2), commitsCount)
|
||||
}
|
||||
|
||||
func TestGetFullCommitID(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
|
||||
|
||||
@@ -212,134 +185,6 @@ func TestHasPreviousCommit(t *testing.T) {
|
||||
assert.False(t, selfNot)
|
||||
}
|
||||
|
||||
func TestParseCommitFileStatus(t *testing.T) {
|
||||
type testcase struct {
|
||||
output string
|
||||
added []string
|
||||
removed []string
|
||||
modified []string
|
||||
}
|
||||
|
||||
kases := []testcase{
|
||||
{
|
||||
// Merge commit
|
||||
output: "MM\x00options/locale/locale_en-US.ini\x00",
|
||||
modified: []string{
|
||||
"options/locale/locale_en-US.ini",
|
||||
},
|
||||
added: []string{},
|
||||
removed: []string{},
|
||||
},
|
||||
{
|
||||
// Spaces commit
|
||||
output: "D\x00b\x00D\x00b b/b\x00A\x00b b/b b/b b/b\x00A\x00b b/b b/b b/b b/b\x00",
|
||||
removed: []string{
|
||||
"b",
|
||||
"b b/b",
|
||||
},
|
||||
modified: []string{},
|
||||
added: []string{
|
||||
"b b/b b/b b/b",
|
||||
"b b/b b/b b/b b/b",
|
||||
},
|
||||
},
|
||||
{
|
||||
// larger commit
|
||||
output: "M\x00go.mod\x00M\x00go.sum\x00M\x00modules/ssh/ssh.go\x00M\x00vendor/github.com/gliderlabs/ssh/circle.yml\x00M\x00vendor/github.com/gliderlabs/ssh/context.go\x00A\x00vendor/github.com/gliderlabs/ssh/go.mod\x00A\x00vendor/github.com/gliderlabs/ssh/go.sum\x00M\x00vendor/github.com/gliderlabs/ssh/server.go\x00M\x00vendor/github.com/gliderlabs/ssh/session.go\x00M\x00vendor/github.com/gliderlabs/ssh/ssh.go\x00M\x00vendor/golang.org/x/sys/unix/mkerrors.sh\x00M\x00vendor/golang.org/x/sys/unix/syscall_darwin.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_linux.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_arm64.go\x00M\x00vendor/modules.txt\x00",
|
||||
modified: []string{
|
||||
"go.mod",
|
||||
"go.sum",
|
||||
"modules/ssh/ssh.go",
|
||||
"vendor/github.com/gliderlabs/ssh/circle.yml",
|
||||
"vendor/github.com/gliderlabs/ssh/context.go",
|
||||
"vendor/github.com/gliderlabs/ssh/server.go",
|
||||
"vendor/github.com/gliderlabs/ssh/session.go",
|
||||
"vendor/github.com/gliderlabs/ssh/ssh.go",
|
||||
"vendor/golang.org/x/sys/unix/mkerrors.sh",
|
||||
"vendor/golang.org/x/sys/unix/syscall_darwin.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_linux.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_arm64.go",
|
||||
"vendor/modules.txt",
|
||||
},
|
||||
added: []string{
|
||||
"vendor/github.com/gliderlabs/ssh/go.mod",
|
||||
"vendor/github.com/gliderlabs/ssh/go.sum",
|
||||
},
|
||||
removed: []string{},
|
||||
},
|
||||
{
|
||||
// git 1.7.2 adds an unnecessary \x00 on merge commit
|
||||
output: "\x00MM\x00options/locale/locale_en-US.ini\x00",
|
||||
modified: []string{
|
||||
"options/locale/locale_en-US.ini",
|
||||
},
|
||||
added: []string{},
|
||||
removed: []string{},
|
||||
},
|
||||
{
|
||||
// git 1.7.2 adds an unnecessary \n on normal commit
|
||||
output: "\nD\x00b\x00D\x00b b/b\x00A\x00b b/b b/b b/b\x00A\x00b b/b b/b b/b b/b\x00",
|
||||
removed: []string{
|
||||
"b",
|
||||
"b b/b",
|
||||
},
|
||||
modified: []string{},
|
||||
added: []string{
|
||||
"b b/b b/b b/b",
|
||||
"b b/b b/b b/b b/b",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, kase := range kases {
|
||||
fileStatus := NewCommitFileStatus()
|
||||
parseCommitFileStatus(fileStatus, strings.NewReader(kase.output))
|
||||
|
||||
assert.Equal(t, kase.added, fileStatus.Added)
|
||||
assert.Equal(t, kase.removed, fileStatus.Removed)
|
||||
assert.Equal(t, kase.modified, fileStatus.Modified)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCommitFileStatusMerges(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo6_merge")
|
||||
|
||||
commitFileStatus, err := GetCommitFileStatus(t.Context(), bareRepo1Path, "022f4ce6214973e018f02bf363bf8a2e3691f699")
|
||||
assert.NoError(t, err)
|
||||
|
||||
expected := CommitFileStatus{
|
||||
[]string{
|
||||
"add_file.txt",
|
||||
},
|
||||
[]string{
|
||||
"to_remove.txt",
|
||||
},
|
||||
[]string{
|
||||
"to_modify.txt",
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected.Added, commitFileStatus.Added)
|
||||
assert.Equal(t, expected.Removed, commitFileStatus.Removed)
|
||||
assert.Equal(t, expected.Modified, commitFileStatus.Modified)
|
||||
}
|
||||
|
||||
func Test_GetCommitBranchStart(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
|
||||
repo, err := OpenRepository(t.Context(), bareRepo1Path)
|
||||
|
||||
@@ -32,20 +32,6 @@ func GetRawDiff(repo *Repository, commitID string, diffType RawDiffType, writer
|
||||
return GetRepoRawDiffForFile(repo, "", commitID, diffType, "", writer)
|
||||
}
|
||||
|
||||
// GetReverseRawDiff dumps the reverse diff of the repository at the given commit ID to io.Writer.
|
||||
func GetReverseRawDiff(ctx context.Context, repoPath, commitID string, writer io.Writer) error {
|
||||
stderr := new(bytes.Buffer)
|
||||
if err := gitcmd.NewCommand("show", "--pretty=format:revert %H%n", "-R").
|
||||
AddDynamicArguments(commitID).
|
||||
WithDir(repoPath).
|
||||
WithStdout(writer).
|
||||
WithStderr(stderr).
|
||||
Run(ctx); err != nil {
|
||||
return fmt.Errorf("Run: %w - %s", err, stderr)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetRepoRawDiffForFile dumps the diff of a file at the given commit ID to io.Writer for the given repository
|
||||
func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string, writer io.Writer) error {
|
||||
commit, err := repo.GetCommit(endCommit)
|
||||
|
||||
@@ -113,10 +113,10 @@ func (p *Parser) parseRef(refBlock string) (map[string]string, error) {
|
||||
|
||||
var fieldKey string
|
||||
var fieldVal string
|
||||
firstSpace := strings.Index(field, " ")
|
||||
if firstSpace > 0 {
|
||||
fieldKey = field[:firstSpace]
|
||||
fieldVal = field[firstSpace+1:]
|
||||
before, after, ok := strings.Cut(field, " ")
|
||||
if ok {
|
||||
fieldKey = before
|
||||
fieldVal = after
|
||||
} else {
|
||||
// could be the case if the requested field had no value
|
||||
fieldKey = field
|
||||
|
||||
@@ -27,15 +27,15 @@ func parseLsTreeLine(line []byte) (*LsTreeEntry, error) {
|
||||
// <mode> <type> <sha>\t<filename>
|
||||
|
||||
var err error
|
||||
posTab := bytes.IndexByte(line, '\t')
|
||||
if posTab == -1 {
|
||||
before, after, ok := bytes.Cut(line, []byte{'\t'})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid ls-tree output (no tab): %q", line)
|
||||
}
|
||||
|
||||
entry := new(LsTreeEntry)
|
||||
|
||||
entryAttrs := line[:posTab]
|
||||
entryName := line[posTab+1:]
|
||||
entryAttrs := before
|
||||
entryName := after
|
||||
|
||||
entryMode, entryAttrs, _ := bytes.Cut(entryAttrs, sepSpace)
|
||||
_ /* entryType */, entryAttrs, _ = bytes.Cut(entryAttrs, sepSpace) // the type is not used, the mode is enough to determine the type
|
||||
|
||||
@@ -32,11 +32,6 @@ type GPGSettings struct {
|
||||
|
||||
const prettyLogFormat = `--pretty=format:%H`
|
||||
|
||||
// GetAllCommitsCount returns count of all commits in repository
|
||||
func (repo *Repository) GetAllCommitsCount() (int64, error) {
|
||||
return AllCommitsCount(repo.Ctx, repo.Path, false)
|
||||
}
|
||||
|
||||
func (repo *Repository) ShowPrettyFormatLogToList(ctx context.Context, revisionRange string) ([]*Commit, error) {
|
||||
// avoid: ambiguous argument 'refs/a...refs/b': unknown revision or path not in the working tree. Use '--': 'git <command> [<revision>...] -- [<file>...]'
|
||||
logs, _, err := gitcmd.NewCommand("log").AddArguments(prettyLogFormat).
|
||||
@@ -128,6 +123,8 @@ type CloneRepoOptions struct {
|
||||
Depth int
|
||||
Filter string
|
||||
SkipTLSVerify bool
|
||||
SingleBranch bool
|
||||
Env []string
|
||||
}
|
||||
|
||||
// Clone clones original repository to target path.
|
||||
@@ -162,6 +159,9 @@ func Clone(ctx context.Context, from, to string, opts CloneRepoOptions) error {
|
||||
if opts.Filter != "" {
|
||||
cmd.AddArguments("--filter").AddDynamicArguments(opts.Filter)
|
||||
}
|
||||
if opts.SingleBranch {
|
||||
cmd.AddArguments("--single-branch")
|
||||
}
|
||||
if len(opts.Branch) > 0 {
|
||||
cmd.AddArguments("-b").AddDynamicArguments(opts.Branch)
|
||||
}
|
||||
@@ -172,13 +172,17 @@ func Clone(ctx context.Context, from, to string, opts CloneRepoOptions) error {
|
||||
}
|
||||
|
||||
envs := os.Environ()
|
||||
u, err := url.Parse(from)
|
||||
if err == nil {
|
||||
envs = proxy.EnvWithProxy(u)
|
||||
if opts.Env != nil {
|
||||
envs = opts.Env
|
||||
} else {
|
||||
u, err := url.Parse(from)
|
||||
if err == nil {
|
||||
envs = proxy.EnvWithProxy(u)
|
||||
}
|
||||
}
|
||||
|
||||
stderr := new(bytes.Buffer)
|
||||
if err = cmd.
|
||||
if err := cmd.
|
||||
WithTimeout(opts.Timeout).
|
||||
WithEnv(envs).
|
||||
WithStdout(io.Discard).
|
||||
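With the change above, a caller-supplied Env replaces the default os.Environ() plus proxy environment entirely. A hedged usage sketch of that behavior; the URL, target path, and variables are placeholders, not taken from this changeset:

// Assumed imports: context, os, time, "code.gitea.io/gitea/modules/git"
func cloneWithEnv(ctx context.Context) error {
	return git.Clone(ctx, "https://example.com/org/repo.git", "/tmp/repo-clone", git.CloneRepoOptions{
		Timeout: 5 * time.Minute,
		Depth:   1,
		// When Env is set it is used verbatim; proxy variables derived from the
		// source URL are not added automatically, so append them here if needed.
		Env: append(os.Environ(), "GIT_TERMINAL_PROMPT=0"),
	})
}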
@@ -191,18 +195,21 @@ func Clone(ctx context.Context, from, to string, opts CloneRepoOptions) error {
|
||||
|
||||
// PushOptions options when push to remote
|
||||
type PushOptions struct {
|
||||
Remote string
|
||||
Branch string
|
||||
Force bool
|
||||
Mirror bool
|
||||
Env []string
|
||||
Timeout time.Duration
|
||||
Remote string
|
||||
Branch string
|
||||
Force bool
|
||||
ForceWithLease string
|
||||
Mirror bool
|
||||
Env []string
|
||||
Timeout time.Duration
|
||||
}
|
||||
|
||||
// Push pushs local commits to given remote branch.
|
||||
func Push(ctx context.Context, repoPath string, opts PushOptions) error {
|
||||
cmd := gitcmd.NewCommand("push")
|
||||
if opts.Force {
|
||||
if opts.ForceWithLease != "" {
|
||||
cmd.AddOptionFormat("--force-with-lease=%s", opts.ForceWithLease)
|
||||
} else if opts.Force {
|
||||
cmd.AddArguments("-f")
|
||||
}
|
||||
if opts.Mirror {
|
||||
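A hedged usage sketch of the new ForceWithLease field; the repository path, branch, and expected object ID are placeholders. Unlike a plain -f, --force-with-lease=<ref>:<expect> only rewrites the remote ref if it still points at the expected value:

// Assumed imports: context, time, "code.gitea.io/gitea/modules/git"
func pushWithLease(ctx context.Context) error {
	return git.Push(ctx, "/data/gitea-repositories/org/repo.git", git.PushOptions{
		Remote: "origin",
		Branch: "main",
		// Refuse the push if origin/main no longer points at the commit we last saw.
		ForceWithLease: "main:6f1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f90",
		Timeout:        time.Minute,
	})
}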
@@ -230,14 +237,3 @@ func Push(ctx context.Context, repoPath string, opts PushOptions) error {
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetLatestCommitTime returns time for latest commit in repository (across all branches)
|
||||
func GetLatestCommitTime(ctx context.Context, repoPath string) (time.Time, error) {
|
||||
cmd := gitcmd.NewCommand("for-each-ref", "--sort=-committerdate", BranchPrefix, "--count", "1", "--format=%(committerdate)")
|
||||
stdout, _, err := cmd.WithDir(repoPath).RunStdString(ctx)
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
commitTime := strings.TrimSpace(stdout)
|
||||
return time.Parse("Mon Jan _2 15:04:05 2006 -0700", commitTime)
|
||||
}
|
||||
|
||||
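The committerdate string produced by for-each-ref uses git's default date output, which GetLatestCommitTime parses with Go's reference layout. A minimal stand-alone sketch; the sample date is the one referenced in the tests below:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Same layout string as GetLatestCommitTime above.
	lct, err := time.Parse("Mon Jan _2 15:04:05 2006 -0700", "Sun Nov 13 16:40:14 2022 +0100")
	if err != nil {
		panic(err)
	}
	fmt.Println(lct.Unix()) // 1668354014
}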
@@ -11,7 +11,6 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/cache"
|
||||
"code.gitea.io/gitea/modules/git/gitcmd"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
)
|
||||
@@ -216,16 +215,6 @@ func (repo *Repository) FileChangedBetweenCommits(filename, id1, id2 string) (bo
|
||||
return len(strings.TrimSpace(string(stdout))) > 0, nil
|
||||
}
|
||||
|
||||
// FileCommitsCount returns the number of commits that touch the given file at the given revision
|
||||
func (repo *Repository) FileCommitsCount(revision, file string) (int64, error) {
|
||||
return CommitsCount(repo.Ctx,
|
||||
CommitsCountOptions{
|
||||
RepoPath: repo.Path,
|
||||
Revision: []string{revision},
|
||||
RelPath: []string{file},
|
||||
})
|
||||
}
|
||||
|
||||
type CommitsByFileAndRangeOptions struct {
|
||||
Revision string
|
||||
File string
|
||||
@@ -433,25 +422,6 @@ func (repo *Repository) CommitsBetweenIDs(last, before string) ([]*Commit, error
|
||||
return repo.CommitsBetween(lastCommit, beforeCommit)
|
||||
}
|
||||
|
||||
// CommitsCountBetween returns the number of commits between two commits
|
||||
func (repo *Repository) CommitsCountBetween(start, end string) (int64, error) {
|
||||
count, err := CommitsCount(repo.Ctx, CommitsCountOptions{
|
||||
RepoPath: repo.Path,
|
||||
Revision: []string{start + ".." + end},
|
||||
})
|
||||
|
||||
if err != nil && strings.Contains(err.Error(), "no merge base") {
|
||||
// future versions of git >= 2.28 are likely to return an error if before and last have become unrelated.
|
||||
// previously it would return the results of git rev-list before last so let's try that...
|
||||
return CommitsCount(repo.Ctx, CommitsCountOptions{
|
||||
RepoPath: repo.Path,
|
||||
Revision: []string{start, end},
|
||||
})
|
||||
}
|
||||
|
||||
return count, err
|
||||
}
|
||||
|
||||
// commitsBefore returns commits before the given commit; the limit is the depth, not the total number of returned commits.
|
||||
func (repo *Repository) commitsBefore(id ObjectID, limit int) ([]*Commit, error) {
|
||||
cmd := gitcmd.NewCommand("log", prettyLogFormat)
|
||||
@@ -564,23 +534,6 @@ func (repo *Repository) IsCommitInBranch(commitID, branch string) (r bool, err e
|
||||
return len(stdout) > 0, err
|
||||
}
|
||||
|
||||
func (repo *Repository) AddLastCommitCache(cacheKey, fullName, sha string) error {
|
||||
if repo.LastCommitCache == nil {
|
||||
commitsCount, err := cache.GetInt64(cacheKey, func() (int64, error) {
|
||||
commit, err := repo.GetCommit(sha)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return commit.CommitsCount()
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
repo.LastCommitCache = NewLastCommitCache(commitsCount, fullName, repo, cache.GetCache())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetCommitBranchStart returns the commit where the branch diverged
|
||||
func (repo *Repository) GetCommitBranchStart(env []string, branch, endCommitID string) (string, error) {
|
||||
cmd := gitcmd.NewCommand("log", prettyLogFormat)
|
||||
|
||||
@@ -10,16 +10,6 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestGetLatestCommitTime(t *testing.T) {
|
||||
bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")
|
||||
lct, err := GetLatestCommitTime(t.Context(), bareRepo1Path)
|
||||
assert.NoError(t, err)
|
||||
// Time is Sun Nov 13 16:40:14 2022 +0100
|
||||
// which is the time of commit
|
||||
// ce064814f4a0d337b333e646ece456cd39fab612 (refs/heads/master)
|
||||
assert.EqualValues(t, 1668354014, lct.Unix())
|
||||
}
|
||||
|
||||
func TestRepoIsEmpty(t *testing.T) {
|
||||
emptyRepo2Path := filepath.Join(testReposDir, "repo2_empty")
|
||||
repo, err := OpenRepository(t.Context(), emptyRepo2Path)
|
||||
|
||||
14
modules/gitrepo/cat_file.go
Normal file
@@ -0,0 +1,14 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gitrepo
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
)
|
||||
|
||||
func NewBatch(ctx context.Context, repo Repository) (*git.Batch, error) {
|
||||
return git.NewBatch(ctx, repoPath(repo))
|
||||
}
|
||||
@@ -18,3 +18,7 @@ func CloneExternalRepo(ctx context.Context, fromRemoteURL string, toRepo Reposit
|
||||
func CloneRepoToLocal(ctx context.Context, fromRepo Repository, toLocalPath string, opts git.CloneRepoOptions) error {
|
||||
return git.Clone(ctx, repoPath(fromRepo), toLocalPath, opts)
|
||||
}
|
||||
|
||||
func Clone(ctx context.Context, fromRepo, toRepo Repository, opts git.CloneRepoOptions) error {
|
||||
return git.Clone(ctx, repoPath(fromRepo), repoPath(toRepo), opts)
|
||||
}
|
||||
|
||||
112
modules/gitrepo/commit.go
Normal file
@@ -0,0 +1,112 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gitrepo
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/git/gitcmd"
|
||||
)
|
||||
|
||||
// CommitsCountOptions the options when counting commits
|
||||
type CommitsCountOptions struct {
|
||||
Not string
|
||||
Revision []string
|
||||
RelPath []string
|
||||
Since string
|
||||
Until string
|
||||
}
|
||||
|
||||
// CommitsCount returns the total number of commits up to the given revision.
|
||||
func CommitsCount(ctx context.Context, repo Repository, opts CommitsCountOptions) (int64, error) {
|
||||
cmd := gitcmd.NewCommand("rev-list", "--count")
|
||||
|
||||
cmd.AddDynamicArguments(opts.Revision...)
|
||||
|
||||
if opts.Not != "" {
|
||||
cmd.AddOptionValues("--not", opts.Not)
|
||||
}
|
||||
|
||||
if len(opts.RelPath) > 0 {
|
||||
cmd.AddDashesAndList(opts.RelPath...)
|
||||
}
|
||||
|
||||
stdout, _, err := cmd.WithDir(repoPath(repo)).RunStdString(ctx)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return strconv.ParseInt(strings.TrimSpace(stdout), 10, 64)
|
||||
}
|
||||
|
||||
// CommitsCountBetween returns the number of commits between two commits
|
||||
func CommitsCountBetween(ctx context.Context, repo Repository, start, end string) (int64, error) {
|
||||
count, err := CommitsCount(ctx, repo, CommitsCountOptions{
|
||||
Revision: []string{start + ".." + end},
|
||||
})
|
||||
|
||||
if err != nil && strings.Contains(err.Error(), "no merge base") {
|
||||
// future versions of git >= 2.28 are likely to return an error if before and last have become unrelated.
|
||||
// previously it would return the results of git rev-list before last so let's try that...
|
||||
return CommitsCount(ctx, repo, CommitsCountOptions{
|
||||
Revision: []string{start, end},
|
||||
})
|
||||
}
|
||||
|
||||
return count, err
|
||||
}
|
||||
|
||||
// FileCommitsCount returns the number of commits that touch the given file at the given revision
|
||||
func FileCommitsCount(ctx context.Context, repo Repository, revision, file string) (int64, error) {
|
||||
return CommitsCount(ctx, repo,
|
||||
CommitsCountOptions{
|
||||
Revision: []string{revision},
|
||||
RelPath: []string{file},
|
||||
})
|
||||
}
|
||||
|
||||
// CommitsCountOfCommit returns the total number of commits up to the given commit.
|
||||
func CommitsCountOfCommit(ctx context.Context, repo Repository, commitID string) (int64, error) {
|
||||
return CommitsCount(ctx, repo, CommitsCountOptions{
|
||||
Revision: []string{commitID},
|
||||
})
|
||||
}
|
||||
|
||||
// AllCommitsCount returns count of all commits in repository
|
||||
func AllCommitsCount(ctx context.Context, repo Repository, hidePRRefs bool, files ...string) (int64, error) {
|
||||
cmd := gitcmd.NewCommand("rev-list")
|
||||
if hidePRRefs {
|
||||
cmd.AddArguments("--exclude=" + git.PullPrefix + "*")
|
||||
}
|
||||
cmd.AddArguments("--all", "--count")
|
||||
if len(files) > 0 {
|
||||
cmd.AddDashesAndList(files...)
|
||||
}
|
||||
|
||||
stdout, err := RunCmdString(ctx, repo, cmd)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return strconv.ParseInt(strings.TrimSpace(stdout), 10, 64)
|
||||
}
|
||||
|
||||
func GetFullCommitID(ctx context.Context, repo Repository, shortID string) (string, error) {
|
||||
return git.GetFullCommitID(ctx, repoPath(repo), shortID)
|
||||
}
|
||||
|
||||
// GetLatestCommitTime returns time for latest commit in repository (across all branches)
|
||||
func GetLatestCommitTime(ctx context.Context, repo Repository) (time.Time, error) {
|
||||
stdout, err := RunCmdString(ctx, repo,
|
||||
gitcmd.NewCommand("for-each-ref", "--sort=-committerdate", git.BranchPrefix, "--count", "1", "--format=%(committerdate)"))
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
commitTime := strings.TrimSpace(stdout)
|
||||
return time.Parse("Mon Jan _2 15:04:05 2006 -0700", commitTime)
|
||||
}
|
||||
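A hedged sketch of calling the new package-level counters; repo stands for any value satisfying the gitrepo.Repository interface, and the branch and path are illustrative only:

// Assumed imports: context, fmt, "code.gitea.io/gitea/modules/gitrepo"
func countReadmeCommits(ctx context.Context, repo gitrepo.Repository) error {
	count, err := gitrepo.CommitsCount(ctx, repo, gitrepo.CommitsCountOptions{
		Revision: []string{"main"},      // illustrative branch name
		RelPath:  []string{"README.md"}, // optional: restrict the count to one path
	})
	if err != nil {
		return err
	}
	fmt.Printf("main has %d commits touching README.md\n", count)
	return nil
}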
93
modules/gitrepo/commit_file.go
Normal file
@@ -0,0 +1,93 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gitrepo
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"io"
|
||||
|
||||
"code.gitea.io/gitea/modules/git/gitcmd"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
)
|
||||
|
||||
// CommitFileStatus represents status of files in a commit.
|
||||
type CommitFileStatus struct {
|
||||
Added []string
|
||||
Removed []string
|
||||
Modified []string
|
||||
}
|
||||
|
||||
// NewCommitFileStatus creates a CommitFileStatus
|
||||
func NewCommitFileStatus() *CommitFileStatus {
|
||||
return &CommitFileStatus{
|
||||
[]string{}, []string{}, []string{},
|
||||
}
|
||||
}
|
||||
|
||||
func parseCommitFileStatus(fileStatus *CommitFileStatus, stdout io.Reader) {
|
||||
rd := bufio.NewReader(stdout)
|
||||
peek, err := rd.Peek(1)
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
if peek[0] == '\n' || peek[0] == '\x00' {
|
||||
_, _ = rd.Discard(1)
|
||||
}
|
||||
for {
|
||||
modifier, err := rd.ReadString('\x00')
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
file, err := rd.ReadString('\x00')
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
file = file[:len(file)-1]
|
||||
switch modifier[0] {
|
||||
case 'A':
|
||||
fileStatus.Added = append(fileStatus.Added, file)
|
||||
case 'D':
|
||||
fileStatus.Removed = append(fileStatus.Removed, file)
|
||||
case 'M':
|
||||
fileStatus.Modified = append(fileStatus.Modified, file)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GetCommitFileStatus returns file status of commit in given repository.
|
||||
func GetCommitFileStatus(ctx context.Context, repo Repository, commitID string) (*CommitFileStatus, error) {
|
||||
stdout, w := io.Pipe()
|
||||
done := make(chan struct{})
|
||||
fileStatus := NewCommitFileStatus()
|
||||
go func() {
|
||||
parseCommitFileStatus(fileStatus, stdout)
|
||||
close(done)
|
||||
}()
|
||||
|
||||
stderr := new(bytes.Buffer)
|
||||
err := gitcmd.NewCommand("log", "--name-status", "-m", "--pretty=format:", "--first-parent", "--no-renames", "-z", "-1").
|
||||
AddDynamicArguments(commitID).
|
||||
WithDir(repoPath(repo)).
|
||||
WithStdout(w).
|
||||
WithStderr(stderr).
|
||||
Run(ctx)
|
||||
w.Close() // Close writer to exit parsing goroutine
|
||||
if err != nil {
|
||||
return nil, gitcmd.ConcatenateError(err, stderr.String())
|
||||
}
|
||||
|
||||
<-done
|
||||
return fileStatus, nil
|
||||
}
|
||||
175
modules/gitrepo/commit_file_test.go
Normal file
@@ -0,0 +1,175 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gitrepo
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestParseCommitFileStatus(t *testing.T) {
|
||||
type testcase struct {
|
||||
output string
|
||||
added []string
|
||||
removed []string
|
||||
modified []string
|
||||
}
|
||||
|
||||
kases := []testcase{
|
||||
{
|
||||
// Merge commit
|
||||
output: "MM\x00options/locale/locale_en-US.ini\x00",
|
||||
modified: []string{
|
||||
"options/locale/locale_en-US.ini",
|
||||
},
|
||||
added: []string{},
|
||||
removed: []string{},
|
||||
},
|
||||
{
|
||||
// Spaces commit
|
||||
output: "D\x00b\x00D\x00b b/b\x00A\x00b b/b b/b b/b\x00A\x00b b/b b/b b/b b/b\x00",
|
||||
removed: []string{
|
||||
"b",
|
||||
"b b/b",
|
||||
},
|
||||
modified: []string{},
|
||||
added: []string{
|
||||
"b b/b b/b b/b",
|
||||
"b b/b b/b b/b b/b",
|
||||
},
|
||||
},
|
||||
{
|
||||
// larger commit
|
||||
output: "M\x00go.mod\x00M\x00go.sum\x00M\x00modules/ssh/ssh.go\x00M\x00vendor/github.com/gliderlabs/ssh/circle.yml\x00M\x00vendor/github.com/gliderlabs/ssh/context.go\x00A\x00vendor/github.com/gliderlabs/ssh/go.mod\x00A\x00vendor/github.com/gliderlabs/ssh/go.sum\x00M\x00vendor/github.com/gliderlabs/ssh/server.go\x00M\x00vendor/github.com/gliderlabs/ssh/session.go\x00M\x00vendor/github.com/gliderlabs/ssh/ssh.go\x00M\x00vendor/golang.org/x/sys/unix/mkerrors.sh\x00M\x00vendor/golang.org/x/sys/unix/syscall_darwin.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_freebsd_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/zerrors_linux.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go\x00M\x00vendor/golang.org/x/sys/unix/ztypes_netbsd_arm64.go\x00M\x00vendor/modules.txt\x00",
|
||||
modified: []string{
|
||||
"go.mod",
|
||||
"go.sum",
|
||||
"modules/ssh/ssh.go",
|
||||
"vendor/github.com/gliderlabs/ssh/circle.yml",
|
||||
"vendor/github.com/gliderlabs/ssh/context.go",
|
||||
"vendor/github.com/gliderlabs/ssh/server.go",
|
||||
"vendor/github.com/gliderlabs/ssh/session.go",
|
||||
"vendor/github.com/gliderlabs/ssh/ssh.go",
|
||||
"vendor/golang.org/x/sys/unix/mkerrors.sh",
|
||||
"vendor/golang.org/x/sys/unix/syscall_darwin.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_freebsd_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/zerrors_linux.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go",
|
||||
"vendor/golang.org/x/sys/unix/ztypes_netbsd_arm64.go",
|
||||
"vendor/modules.txt",
|
||||
},
|
||||
added: []string{
|
||||
"vendor/github.com/gliderlabs/ssh/go.mod",
|
||||
"vendor/github.com/gliderlabs/ssh/go.sum",
|
||||
},
|
||||
removed: []string{},
|
||||
},
|
||||
{
|
||||
// git 1.7.2 adds an unnecessary \x00 on merge commit
|
||||
output: "\x00MM\x00options/locale/locale_en-US.ini\x00",
|
||||
modified: []string{
|
||||
"options/locale/locale_en-US.ini",
|
||||
},
|
||||
added: []string{},
|
||||
removed: []string{},
|
||||
},
|
||||
{
|
||||
// git 1.7.2 adds an unnecessary \n on normal commit
|
||||
output: "\nD\x00b\x00D\x00b b/b\x00A\x00b b/b b/b b/b\x00A\x00b b/b b/b b/b b/b\x00",
|
||||
removed: []string{
|
||||
"b",
|
||||
"b b/b",
|
||||
},
|
||||
modified: []string{},
|
||||
added: []string{
|
||||
"b b/b b/b b/b",
|
||||
"b b/b b/b b/b b/b",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, kase := range kases {
|
||||
fileStatus := NewCommitFileStatus()
|
||||
parseCommitFileStatus(fileStatus, strings.NewReader(kase.output))
|
||||
|
||||
assert.Equal(t, kase.added, fileStatus.Added)
|
||||
assert.Equal(t, kase.removed, fileStatus.Removed)
|
||||
assert.Equal(t, kase.modified, fileStatus.Modified)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCommitFileStatusMerges(t *testing.T) {
|
||||
bareRepo6 := &mockRepository{path: "repo6_merge"}
|
||||
|
||||
commitFileStatus, err := GetCommitFileStatus(t.Context(), bareRepo6, "022f4ce6214973e018f02bf363bf8a2e3691f699")
|
||||
assert.NoError(t, err)
|
||||
|
||||
expected := CommitFileStatus{
|
||||
[]string{
|
||||
"add_file.txt",
|
||||
},
|
||||
[]string{
|
||||
"to_remove.txt",
|
||||
},
|
||||
[]string{
|
||||
"to_modify.txt",
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected.Added, commitFileStatus.Added)
|
||||
assert.Equal(t, expected.Removed, commitFileStatus.Removed)
|
||||
assert.Equal(t, expected.Modified, commitFileStatus.Modified)
|
||||
}
|
||||
|
||||
func TestGetCommitFileStatusMergesSha256(t *testing.T) {
|
||||
bareRepo6Sha256 := &mockRepository{path: "repo6_merge_sha256"}
|
||||
|
||||
commitFileStatus, err := GetCommitFileStatus(t.Context(), bareRepo6Sha256, "d2e5609f630dd8db500f5298d05d16def282412e3e66ed68cc7d0833b29129a1")
|
||||
assert.NoError(t, err)
|
||||
|
||||
expected := CommitFileStatus{
|
||||
[]string{
|
||||
"add_file.txt",
|
||||
},
|
||||
[]string{},
|
||||
[]string{
|
||||
"to_modify.txt",
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected.Added, commitFileStatus.Added)
|
||||
assert.Equal(t, expected.Removed, commitFileStatus.Removed)
|
||||
assert.Equal(t, expected.Modified, commitFileStatus.Modified)
|
||||
|
||||
expected = CommitFileStatus{
|
||||
[]string{},
|
||||
[]string{
|
||||
"to_remove.txt",
|
||||
},
|
||||
[]string{},
|
||||
}
|
||||
|
||||
commitFileStatus, err = GetCommitFileStatus(t.Context(), bareRepo6Sha256, "da1ded40dc8e5b7c564171f4bf2fc8370487decfb1cb6a99ef28f3ed73d09172")
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, expected.Added, commitFileStatus.Added)
|
||||
assert.Equal(t, expected.Removed, commitFileStatus.Removed)
|
||||
assert.Equal(t, expected.Modified, commitFileStatus.Modified)
|
||||
}
|
||||
45
modules/gitrepo/commit_test.go
Normal file
@@ -0,0 +1,45 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gitrepo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestCommitsCount(t *testing.T) {
|
||||
bareRepo1 := &mockRepository{path: "repo1_bare"}
|
||||
|
||||
commitsCount, err := CommitsCount(t.Context(), bareRepo1,
|
||||
CommitsCountOptions{
|
||||
Revision: []string{"8006ff9adbf0cb94da7dad9e537e53817f9fa5c0"},
|
||||
})
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(3), commitsCount)
|
||||
}
|
||||
|
||||
func TestCommitsCountWithoutBase(t *testing.T) {
|
||||
bareRepo1 := &mockRepository{path: "repo1_bare"}
|
||||
|
||||
commitsCount, err := CommitsCount(t.Context(), bareRepo1,
|
||||
CommitsCountOptions{
|
||||
Not: "master",
|
||||
Revision: []string{"branch1"},
|
||||
})
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(2), commitsCount)
|
||||
}
|
||||
|
||||
func TestGetLatestCommitTime(t *testing.T) {
|
||||
bareRepo1 := &mockRepository{path: "repo1_bare"}
|
||||
lct, err := GetLatestCommitTime(t.Context(), bareRepo1)
|
||||
assert.NoError(t, err)
|
||||
// Time is Sun Nov 13 16:40:14 2022 +0100
|
||||
// which is the time of commit
|
||||
// ce064814f4a0d337b333e646ece456cd39fab612 (refs/heads/master)
|
||||
assert.EqualValues(t, 1668354014, lct.Unix())
|
||||
}
|
||||
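The tests above construct a mockRepository helper that is not part of these hunks. A minimal sketch of what such a helper could look like, assuming the gitrepo.Repository interface only requires a relative path and that repoPath() joins it onto the test fixtures directory; both the name and the interface shape are assumptions, so the real helper may differ:

// Hypothetical test helper; names and interface shape are assumptions.
type mockRepository struct {
	path string // relative path under the test repository root, e.g. "repo1_bare"
}

func (r *mockRepository) RelativePath() string {
	return r.path
}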
@@ -4,8 +4,10 @@
|
||||
package gitrepo
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"regexp"
|
||||
"strconv"
|
||||
|
||||
@@ -60,3 +62,15 @@ func parseDiffStat(stdout string) (numFiles, totalAdditions, totalDeletions int,
|
||||
}
|
||||
return numFiles, totalAdditions, totalDeletions, err
|
||||
}
|
||||
|
||||
// GetReverseRawDiff dumps the reverse diff of the repository at the given commit ID to io.Writer.
|
||||
func GetReverseRawDiff(ctx context.Context, repo Repository, commitID string, writer io.Writer) error {
|
||||
stderr := new(bytes.Buffer)
|
||||
if err := RunCmd(ctx, repo, gitcmd.NewCommand("show", "--pretty=format:revert %H%n", "-R").
|
||||
AddDynamicArguments(commitID).
|
||||
WithStdout(writer).
|
||||
WithStderr(stderr)); err != nil {
|
||||
return fmt.Errorf("GetReverseRawDiff: %w - %s", err, stderr)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
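A hedged usage sketch of the relocated GetReverseRawDiff; repo is any gitrepo.Repository and the commit ID is a placeholder:

// Assumed imports: bytes, context, "code.gitea.io/gitea/modules/gitrepo"
func revertPatch(ctx context.Context, repo gitrepo.Repository, commitID string) (string, error) {
	var buf bytes.Buffer
	if err := gitrepo.GetReverseRawDiff(ctx, repo, commitID, &buf); err != nil {
		return "", err
	}
	// The output starts with "revert <commit>" followed by the reversed diff.
	return buf.String(), nil
}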
@@ -98,3 +98,23 @@ func UpdateServerInfo(ctx context.Context, repo Repository) error {
|
||||
func GetRepoFS(repo Repository) fs.FS {
|
||||
return os.DirFS(repoPath(repo))
|
||||
}
|
||||
|
||||
func IsRepoFileExist(ctx context.Context, repo Repository, relativeFilePath string) (bool, error) {
|
||||
absoluteFilePath := filepath.Join(repoPath(repo), relativeFilePath)
|
||||
return util.IsExist(absoluteFilePath)
|
||||
}
|
||||
|
||||
func IsRepoDirExist(ctx context.Context, repo Repository, relativeDirPath string) (bool, error) {
|
||||
absoluteDirPath := filepath.Join(repoPath(repo), relativeDirPath)
|
||||
return util.IsDir(absoluteDirPath)
|
||||
}
|
||||
|
||||
func RemoveRepoFile(ctx context.Context, repo Repository, relativeFilePath string) error {
|
||||
absoluteFilePath := filepath.Join(repoPath(repo), relativeFilePath)
|
||||
return util.Remove(absoluteFilePath)
|
||||
}
|
||||
|
||||
func CreateRepoFile(ctx context.Context, repo Repository, relativeFilePath string) (io.WriteCloser, error) {
|
||||
absoluteFilePath := filepath.Join(repoPath(repo), relativeFilePath)
|
||||
return os.Create(absoluteFilePath)
|
||||
}
|
||||
|
||||
@@ -9,6 +9,19 @@ import (
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
)
|
||||
|
||||
func Push(ctx context.Context, repo Repository, opts git.PushOptions) error {
|
||||
// PushToExternal pushes a managed repository to an external remote.
|
||||
func PushToExternal(ctx context.Context, repo Repository, opts git.PushOptions) error {
|
||||
return git.Push(ctx, repoPath(repo), opts)
|
||||
}
|
||||
|
||||
// Push pushes from one managed repository to another managed repository.
|
||||
func Push(ctx context.Context, fromRepo, toRepo Repository, opts git.PushOptions) error {
|
||||
opts.Remote = repoPath(toRepo)
|
||||
return git.Push(ctx, repoPath(fromRepo), opts)
|
||||
}
|
||||
|
||||
// PushFromLocal pushes from a local path to a managed repository.
|
||||
func PushFromLocal(ctx context.Context, fromLocalPath string, toRepo Repository, opts git.PushOptions) error {
|
||||
opts.Remote = repoPath(toRepo)
|
||||
return git.Push(ctx, fromLocalPath, opts)
|
||||
}
|
||||
|
||||
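The split into Push, PushFromLocal, and PushToExternal makes the source and destination explicit. A hedged sketch of the managed-to-managed variant; the repositories and branch name are placeholders, not taken from this changeset:

// Assumed imports: context, "code.gitea.io/gitea/modules/git", "code.gitea.io/gitea/modules/gitrepo"
func pushBranchToBase(ctx context.Context, forkRepo, baseRepo gitrepo.Repository) error {
	// Managed-to-managed push: the destination's storage path becomes the remote.
	return gitrepo.Push(ctx, forkRepo, baseRepo, git.PushOptions{
		Branch: "main", // illustrative branch name
	})
}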
@@ -77,8 +77,8 @@ func Code(fileName, language, code string) (output template.HTML, lexerName stri
|
||||
|
||||
if lexer == nil {
|
||||
// Attempt stripping off the '?'
|
||||
if idx := strings.IndexByte(language, '?'); idx > 0 {
|
||||
lexer = lexers.Get(language[:idx])
|
||||
if before, _, ok := strings.Cut(language, "?"); ok {
|
||||
lexer = lexers.Get(before)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,7 +19,6 @@ import (
|
||||
charsetModule "code.gitea.io/gitea/modules/charset"
|
||||
"code.gitea.io/gitea/modules/container"
|
||||
"code.gitea.io/gitea/modules/httpcache"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/typesniffer"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
@@ -109,11 +108,7 @@ func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, mineBuf []byt
|
||||
}
|
||||
|
||||
if isPlain {
|
||||
charset, err := charsetModule.DetectEncoding(mineBuf)
|
||||
if err != nil {
|
||||
log.Error("Detect raw file %s charset failed: %v, using by default utf-8", opts.Filename, err)
|
||||
charset = "utf-8"
|
||||
}
|
||||
charset, _ := charsetModule.DetectEncoding(mineBuf)
|
||||
opts.ContentTypeCharset = strings.ToLower(charset)
|
||||
}
|
||||
|
||||
|
||||
@@ -203,7 +203,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
|
||||
RepoID: repo.ID,
|
||||
CommitID: commitSha,
|
||||
Filename: update.Filename,
|
||||
Content: string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
|
||||
Content: string(charset.ToUTF8DropErrors(fileContents)),
|
||||
Language: analyze.GetCodeLanguage(update.Filename, fileContents),
|
||||
UpdatedAt: time.Now().UTC(),
|
||||
})
|
||||
@@ -218,7 +218,7 @@ func (b *Indexer) addDelete(filename string, repo *repo_model.Repository, batch
|
||||
func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *internal.RepoChanges) error {
|
||||
batch := inner_bleve.NewFlushingBatch(b.inner.Indexer, maxBatchSize)
|
||||
if len(changes.Updates) > 0 {
|
||||
gitBatch, err := git.NewBatch(ctx, repo.RepoPath())
|
||||
gitBatch, err := gitrepo.NewBatch(ctx, repo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -191,7 +191,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
|
||||
Doc(map[string]any{
|
||||
"repo_id": repo.ID,
|
||||
"filename": update.Filename,
|
||||
"content": string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
|
||||
"content": string(charset.ToUTF8DropErrors(fileContents)),
|
||||
"commit_id": sha,
|
||||
"language": analyze.GetCodeLanguage(update.Filename, fileContents),
|
||||
"updated_at": timeutil.TimeStampNow(),
|
||||
@@ -210,7 +210,7 @@ func (b *Indexer) addDelete(filename string, repo *repo_model.Repository) elasti
|
||||
func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *internal.RepoChanges) error {
|
||||
reqs := make([]elastic.BulkableRequest, 0)
|
||||
if len(changes.Updates) > 0 {
|
||||
batch, err := git.NewBatch(ctx, repo.RepoPath())
|
||||
batch, err := gitrepo.NewBatch(ctx, repo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -17,20 +17,20 @@ func FilenameIndexerID(repoID int64, filename string) string {
|
||||
}
|
||||
|
||||
func ParseIndexerID(indexerID string) (int64, string) {
|
||||
index := strings.IndexByte(indexerID, '_')
|
||||
if index == -1 {
|
||||
before, after, ok := strings.Cut(indexerID, "_")
|
||||
if !ok {
|
||||
log.Error("Unexpected ID in repo indexer: %s", indexerID)
|
||||
}
|
||||
repoID, _ := internal.ParseBase36(indexerID[:index])
|
||||
return repoID, indexerID[index+1:]
|
||||
repoID, _ := internal.ParseBase36(before)
|
||||
return repoID, after
|
||||
}
|
||||
|
||||
func FilenameOfIndexerID(indexerID string) string {
|
||||
index := strings.IndexByte(indexerID, '_')
|
||||
if index == -1 {
|
||||
_, after, ok := strings.Cut(indexerID, "_")
|
||||
if !ok {
|
||||
log.Error("Unexpected ID in repo indexer: %s", indexerID)
|
||||
}
|
||||
return indexerID[index+1:]
|
||||
return after
|
||||
}
|
||||
|
||||
// FilenameMatchIndexPos returns the boundaries of its first seven lines.
|
||||
|
||||
@@ -33,7 +33,7 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
||||
// Of text and link contents
|
||||
sl := strings.SplitSeq(content, "|")
|
||||
for v := range sl {
|
||||
if equalPos := strings.IndexByte(v, '='); equalPos == -1 {
|
||||
if found := strings.Contains(v, "="); !found {
|
||||
// There is no equal in this argument; this is a mandatory arg
|
||||
if props["name"] == "" {
|
||||
if IsFullURLString(v) {
|
||||
@@ -55,8 +55,8 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
||||
} else {
|
||||
// There is an equal; optional argument.
|
||||
|
||||
sep := strings.IndexByte(v, '=')
|
||||
key, val := v[:sep], html.UnescapeString(v[sep+1:])
|
||||
before, after, _ := strings.Cut(v, "=")
|
||||
key, val := before, html.UnescapeString(after)
|
||||
|
||||
// When parsing HTML, x/net/html will change all quotes which are
|
||||
// not used for syntax into UTF-8 quotes. So checking val[0] won't
|
||||
|
||||
@@ -30,6 +30,10 @@ func TestMathRender(t *testing.T) {
|
||||
"$ a $",
|
||||
`<p><code class="language-math">a</code></p>` + nl,
|
||||
},
|
||||
{
|
||||
"$a$$b$",
|
||||
`<p><code class="language-math">a</code><code class="language-math">b</code></p>` + nl,
|
||||
},
|
||||
{
|
||||
"$a$ $b$",
|
||||
`<p><code class="language-math">a</code> <code class="language-math">b</code></p>` + nl,
|
||||
@@ -59,7 +63,7 @@ func TestMathRender(t *testing.T) {
|
||||
`<p>a$b $a a$b b$</p>` + nl,
|
||||
},
|
||||
{
|
||||
"a$x$",
|
||||
"a$x$", // Pattern: "word$other$" The real world example is: "Price is between US$1 and US$2.", so don't parse this.
|
||||
`<p>a$x$</p>` + nl,
|
||||
},
|
||||
{
|
||||
@@ -70,6 +74,10 @@ func TestMathRender(t *testing.T) {
|
||||
"$a$ ($b$) [$c$] {$d$}",
|
||||
`<p><code class="language-math">a</code> (<code class="language-math">b</code>) [$c$] {$d$}</p>` + nl,
|
||||
},
|
||||
{
|
||||
"[$a$](link)",
|
||||
`<p><a href="/link" rel="nofollow"><code class="language-math">a</code></a></p>` + nl,
|
||||
},
|
||||
{
|
||||
"$$a$$",
|
||||
`<p><code class="language-math">a</code></p>` + nl,
|
||||
|
||||
@@ -54,6 +54,10 @@ func isAlphanumeric(b byte) bool {
|
||||
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
|
||||
}
|
||||
|
||||
func isInMarkdownLinkText(block text.Reader, lineAfter []byte) bool {
|
||||
return block.PrecendingCharacter() == '[' && bytes.HasPrefix(lineAfter, []byte("]("))
|
||||
}
|
||||
|
||||
// Parse parses the current line and returns a result of parsing.
|
||||
func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node {
|
||||
line, _ := block.PeekLine()
|
||||
@@ -115,7 +119,9 @@ func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.
|
||||
}
|
||||
// check valid ending character
|
||||
isValidEndingChar := isPunctuation(succeedingCharacter) || isParenthesesClose(succeedingCharacter) ||
|
||||
succeedingCharacter == ' ' || succeedingCharacter == '\n' || succeedingCharacter == 0
|
||||
succeedingCharacter == ' ' || succeedingCharacter == '\n' || succeedingCharacter == 0 ||
|
||||
succeedingCharacter == '$' ||
|
||||
isInMarkdownLinkText(block, line[i+len(stopMark):])
|
||||
if checkSurrounding && !isValidEndingChar {
|
||||
break
|
||||
}
|
||||
|
||||
@@ -62,7 +62,28 @@ type PackageMetadata struct {
|
||||
Author User `json:"author"`
|
||||
ReadmeFilename string `json:"readmeFilename,omitempty"`
|
||||
Users map[string]bool `json:"users,omitempty"`
|
||||
License string `json:"license,omitempty"`
|
||||
License License `json:"license,omitempty"`
|
||||
}
|
||||
|
||||
type License string
|
||||
|
||||
func (l *License) UnmarshalJSON(data []byte) error {
|
||||
switch data[0] {
|
||||
case '"':
|
||||
var value string
|
||||
if err := json.Unmarshal(data, &value); err != nil {
|
||||
return err
|
||||
}
|
||||
*l = License(value)
|
||||
case '{':
|
||||
var values map[string]any
|
||||
if err := json.Unmarshal(data, &values); err != nil {
|
||||
return err
|
||||
}
|
||||
value, _ := values["type"].(string)
|
||||
*l = License(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// PackageMetadataVersion documentation: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#version
|
||||
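A minimal stand-alone sketch of the two license shapes the new custom unmarshaller accepts; it uses the standard library encoding/json rather than the module's json wrapper, and the values are illustrative:

package main

import (
	"encoding/json"
	"fmt"
)

// License mirrors the type added above: a string that also accepts {"type": "..."}.
type License string

func (l *License) UnmarshalJSON(data []byte) error {
	switch data[0] {
	case '"':
		var value string
		if err := json.Unmarshal(data, &value); err != nil {
			return err
		}
		*l = License(value)
	case '{':
		var values map[string]any
		if err := json.Unmarshal(data, &values); err != nil {
			return err
		}
		value, _ := values["type"].(string)
		*l = License(value)
	}
	return nil
}

func main() {
	var a, b License
	_ = json.Unmarshal([]byte(`"MIT"`), &a)          // plain string form
	_ = json.Unmarshal([]byte(`{"type":"MIT"}`), &b) // legacy object form
	fmt.Println(a, b)                                // MIT MIT
}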
@@ -74,7 +95,7 @@ type PackageMetadataVersion struct {
|
||||
Description string `json:"description"`
|
||||
Author User `json:"author"`
|
||||
Homepage string `json:"homepage,omitempty"`
|
||||
License string `json:"license,omitempty"`
|
||||
License License `json:"license,omitempty"`
|
||||
Repository Repository `json:"repository"`
|
||||
Keywords []string `json:"keywords,omitempty"`
|
||||
Dependencies map[string]string `json:"dependencies,omitempty"`
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/json"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestParsePackage(t *testing.T) {
|
||||
@@ -291,11 +292,36 @@ func TestParsePackage(t *testing.T) {
|
||||
assert.Equal(t, packageDescription, p.Metadata.Readme)
|
||||
assert.Equal(t, packageAuthor, p.Metadata.Author)
|
||||
assert.Equal(t, packageBin, p.Metadata.Bin["bin"])
|
||||
assert.Equal(t, "MIT", p.Metadata.License)
|
||||
assert.Equal(t, "MIT", string(p.Metadata.License))
|
||||
assert.Equal(t, "https://gitea.io/", p.Metadata.ProjectURL)
|
||||
assert.Contains(t, p.Metadata.Dependencies, "package")
|
||||
assert.Equal(t, "1.2.0", p.Metadata.Dependencies["package"])
|
||||
assert.Equal(t, repository.Type, p.Metadata.Repository.Type)
|
||||
assert.Equal(t, repository.URL, p.Metadata.Repository.URL)
|
||||
})
|
||||
|
||||
t.Run("ValidLicenseMap", func(t *testing.T) {
|
||||
packageJSON := `{
|
||||
"versions": {
|
||||
"0.1.1": {
|
||||
"name": "dev-null",
|
||||
"version": "0.1.1",
|
||||
"license": {
|
||||
"type": "MIT"
|
||||
},
|
||||
"dist": {
|
||||
"integrity": "sha256-"
|
||||
}
|
||||
}
|
||||
},
|
||||
"_attachments": {
|
||||
"foo": {
|
||||
"data": "AAAA"
|
||||
}
|
||||
}
|
||||
}`
|
||||
p, err := ParsePackage(strings.NewReader(packageJSON))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "MIT", string(p.Metadata.License))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ type Metadata struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Description string `json:"description,omitempty"`
|
||||
Author string `json:"author,omitempty"`
|
||||
License string `json:"license,omitempty"`
|
||||
License License `json:"license,omitempty"`
|
||||
ProjectURL string `json:"project_url,omitempty"`
|
||||
Keywords []string `json:"keywords,omitempty"`
|
||||
Dependencies map[string]string `json:"dependencies,omitempty"`
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/cache"
|
||||
"code.gitea.io/gitea/modules/cachegroup"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
@@ -72,7 +73,7 @@ func ToAPIPayloadCommit(ctx context.Context, emailUsers map[string]*user_model.U
|
||||
committerUsername = committer.Name
|
||||
}
|
||||
|
||||
fileStatus, err := git.GetCommitFileStatus(ctx, repo.RepoPath(), commit.Sha1)
|
||||
fileStatus, err := gitrepo.GetCommitFileStatus(ctx, repo, commit.Sha1)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("FileStatus [commit_sha1: %s]: %w", commit.Sha1, err)
|
||||
}
|
||||
|
||||
@@ -51,10 +51,10 @@ func decodeEnvSectionKey(encoded string) (ok bool, section, key string) {
|
||||
for _, unescapeIdx := range escapeStringIndices {
|
||||
preceding := encoded[last:unescapeIdx[0]]
|
||||
if !inKey {
|
||||
if splitter := strings.Index(preceding, "__"); splitter > -1 {
|
||||
section += preceding[:splitter]
|
||||
if before, after, cutOk := strings.Cut(preceding, "__"); cutOk {
|
||||
section += before
|
||||
inKey = true
|
||||
key += preceding[splitter+2:]
|
||||
key += after
|
||||
} else {
|
||||
section += preceding
|
||||
}
|
||||
@@ -77,9 +77,9 @@ func decodeEnvSectionKey(encoded string) (ok bool, section, key string) {
|
||||
}
|
||||
remaining := encoded[last:]
|
||||
if !inKey {
|
||||
if splitter := strings.Index(remaining, "__"); splitter > -1 {
|
||||
section += remaining[:splitter]
|
||||
key += remaining[splitter+2:]
|
||||
if before, after, cutOk := strings.Cut(remaining, "__"); cutOk {
|
||||
section += before
|
||||
key += after
|
||||
} else {
|
||||
section += remaining
|
||||
}
|
||||
@@ -111,21 +111,21 @@ func decodeEnvironmentKey(prefixGitea, suffixFile, envKey string) (ok bool, sect
|
||||
|
||||
func EnvironmentToConfig(cfg ConfigProvider, envs []string) (changed bool) {
|
||||
for _, kv := range envs {
|
||||
idx := strings.IndexByte(kv, '=')
|
||||
if idx < 0 {
|
||||
before, after, ok := strings.Cut(kv, "=")
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// parse the environment variable to config section name and key name
|
||||
envKey := kv[:idx]
|
||||
envValue := kv[idx+1:]
|
||||
envKey := before
|
||||
envValue := after
|
||||
ok, sectionName, keyName, useFileValue := decodeEnvironmentKey(EnvConfigKeyPrefixGitea, EnvConfigKeySuffixFile, envKey)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// use environment value as config value, or read the file content as value if the key indicates a file
|
||||
keyValue := envValue
|
||||
keyValue := envValue //nolint:staticcheck // false positive
|
||||
if useFileValue {
|
||||
fileContent, err := os.ReadFile(envValue)
|
||||
if err != nil {
|
||||
|
||||
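
Both hunks above replace manual strings.Index bookkeeping with strings.Cut, once for the SECTION__KEY split and once for KEY=VALUE environment entries. A simplified, hedged sketch of that split (the real decodeEnvSectionKey also handles the escape sequences tracked by escapeStringIndices, which are omitted here; the sample values are illustrative):

package main

import (
	"fmt"
	"strings"
)

// splitSectionKey is a simplified stand-in for the "__" handling above:
// everything before the first "__" is the section, everything after is the key.
func splitSectionKey(encoded string) (section, key string, ok bool) {
	before, after, found := strings.Cut(encoded, "__")
	if !found {
		return "", encoded, false // no section part, treat the whole name as a key
	}
	return before, after, true
}

func main() {
	fmt.Println(splitSectionKey("REPOSITORY__DEFAULT_BRANCH")) // REPOSITORY DEFAULT_BRANCH true
	fmt.Println(splitSectionKey("APP_NAME"))                   //  APP_NAME false

	// The same pattern replaces strings.IndexByte when splitting KEY=VALUE pairs.
	key, value, _ := strings.Cut("GITEA__server__HTTP_PORT=3000", "=")
	fmt.Println(key, value)
}
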
@@ -337,14 +337,14 @@ func LogStartupProblem(skip int, level log.Level, format string, args ...any) {
|
||||
|
||||
func deprecatedSetting(rootCfg ConfigProvider, oldSection, oldKey, newSection, newKey, version string) {
|
||||
if rootCfg.Section(oldSection).HasKey(oldKey) {
|
||||
LogStartupProblem(1, log.ERROR, "Deprecation: config option `[%s].%s` presents, please use `[%s].%s` instead because this fallback will be/has been removed in %s", oldSection, oldKey, newSection, newKey, version)
|
||||
LogStartupProblem(1, log.ERROR, "Deprecation: config option `[%s].%s` present, please use `[%s].%s` instead because this fallback will be/has been removed in %s", oldSection, oldKey, newSection, newKey, version)
|
||||
}
|
||||
}
|
||||
|
||||
// deprecatedSettingDB add a hint that the configuration has been moved to database but still kept in app.ini
|
||||
func deprecatedSettingDB(rootCfg ConfigProvider, oldSection, oldKey string) {
|
||||
if rootCfg.Section(oldSection).HasKey(oldKey) {
|
||||
LogStartupProblem(1, log.ERROR, "Deprecation: config option `[%s].%s` presents but it won't take effect because it has been moved to admin panel -> config setting", oldSection, oldKey)
|
||||
LogStartupProblem(1, log.ERROR, "Deprecation: config option `[%s].%s` present but it won't take effect because it has been moved to admin panel -> config setting", oldSection, oldKey)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -370,6 +370,10 @@ func loadServerFrom(rootCfg ConfigProvider) {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: GOLANG-HTTP-TMPDIR: Some Golang packages (like "http") use os.TempDir() to create temporary files when uploading files.
|
||||
// So ideally we should set the TMPDIR environment variable to make them use our managed temp directory.
|
||||
// But there is no clear place to set it currently, for example: when running "install" page, the AppDataPath is not ready yet, then AppDataTempDir won't work
|
||||
|
||||
EnableGzip = sec.Key("ENABLE_GZIP").MustBool()
|
||||
EnablePprof = sec.Key("ENABLE_PPROF").MustBool(false)
|
||||
PprofDataPath = sec.Key("PPROF_DATA_PATH").MustString(filepath.Join(AppWorkPath, "data/tmp/pprof"))
|
||||
|
||||
@@ -240,4 +240,5 @@ func PanicInDevOrTesting(msg string, a ...any) {
|
||||
if !IsProd || IsInTesting {
|
||||
panic(fmt.Sprintf(msg, a...))
|
||||
}
|
||||
log.Error(msg, a...)
|
||||
}
|
||||
|
||||
@@ -292,6 +292,21 @@ type RenameBranchRepoOption struct {
|
||||
Name string `json:"name" binding:"Required;GitRefName;MaxSize(100)"`
|
||||
}
|
||||
|
||||
// UpdateBranchRepoOption options when updating a branch reference in a repository
|
||||
// swagger:model
|
||||
type UpdateBranchRepoOption struct {
|
||||
// New commit SHA (or any ref) the branch should point to
|
||||
//
|
||||
// required: true
|
||||
NewCommitID string `json:"new_commit_id" binding:"Required"`
|
||||
|
||||
// Expected old commit SHA of the branch; if provided it must match the current tip
|
||||
OldCommitID string `json:"old_commit_id"`
|
||||
|
||||
// Force update even if the change is not a fast-forward
|
||||
Force bool `json:"force"`
|
||||
}
|
||||
|
||||
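
The new UpdateBranchRepoOption backs the PUT /repos/{owner}/{repo}/branches/{branch} endpoint wired up later in this diff. A hedged client-side sketch of calling it; the host, token and repository names are placeholders:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Field names come from the struct tags above; the SHAs are placeholders.
	body, _ := json.Marshal(map[string]any{
		"new_commit_id": "0123456789abcdef0123456789abcdef01234567",
		"old_commit_id": "89abcdef0123456789abcdef0123456789abcdef", // optional compare-and-swap guard
		"force":         false,
	})
	req, _ := http.NewRequest(http.MethodPut,
		"https://gitea.example.com/api/v1/repos/owner/repo/branches/main",
		bytes.NewReader(body))
	req.Header.Set("Authorization", "token <personal-access-token>")
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // 204 No Content on success, per the swagger responses below
}
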
// TransferRepoOption options when transfer a repository's ownership
|
||||
// swagger:model
|
||||
type TransferRepoOption struct {
|
||||
|
||||
@@ -215,8 +215,8 @@ func addValidGroupTeamMapRule() {
|
||||
}
|
||||
|
||||
func portOnly(hostport string) string {
|
||||
colon := strings.IndexByte(hostport, ':')
|
||||
if colon == -1 {
|
||||
_, after, ok := strings.Cut(hostport, ":")
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
if i := strings.Index(hostport, "]:"); i != -1 {
|
||||
@@ -225,7 +225,7 @@ func portOnly(hostport string) string {
|
||||
if strings.Contains(hostport, "]") {
|
||||
return ""
|
||||
}
|
||||
return hostport[colon+len(":"):]
|
||||
return after
|
||||
}
|
||||
|
||||
func validPort(p string) bool {
|
||||
|
||||
@@ -46,11 +46,15 @@ func RouterMockPoint(pointName string) func(next http.Handler) http.Handler {
|
||||
//
|
||||
// Then the mock function will be executed as a middleware at the mock point.
|
||||
// It only takes effect in testing mode (setting.IsInTesting == true).
|
||||
func RouteMock(pointName string, h any) {
|
||||
func RouteMock(pointName string, h any) func() {
|
||||
if _, ok := routeMockPoints[pointName]; !ok {
|
||||
panic("route mock point not found: " + pointName)
|
||||
}
|
||||
old := routeMockPoints[pointName]
|
||||
routeMockPoints[pointName] = toHandlerProvider(h)
|
||||
return func() {
|
||||
routeMockPoints[pointName] = old
|
||||
}
|
||||
}
|
||||
|
||||
// RouteMockReset resets all mock points (no mock anymore)
|
||||
|
||||
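
RouteMock now returns a restore function instead of relying solely on RouteMockReset, so a test can scope a mock with a single deferred call. A standalone sketch of the save-and-restore closure pattern it adopts (the map and handler types below are illustrative, not Gitea's):

package main

import "fmt"

var handlers = map[string]func(){}

// mockHandler swaps a handler in and returns a closure that restores the
// previous one, enabling the `defer mockHandler(...)()` idiom.
func mockHandler(name string, h func()) (restore func()) {
	old := handlers[name]
	handlers[name] = h
	return func() { handlers[name] = old }
}

func main() {
	handlers["point"] = func() { fmt.Println("real handler") }
	func() {
		defer mockHandler("point", func() { fmt.Println("mock handler") })()
		handlers["point"]() // mock handler
	}()
	handlers["point"]() // real handler (restored)
}
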
@@ -55,7 +55,7 @@ func NewRouter() *Router {
|
||||
// Use supports two middlewares
|
||||
func (r *Router) Use(middlewares ...any) {
|
||||
for _, m := range middlewares {
|
||||
if m != nil {
|
||||
if !isNilOrFuncNil(m) {
|
||||
r.chiRouter.Use(toHandlerProvider(m))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -215,6 +215,7 @@ more = More
|
||||
buttons.heading.tooltip = Add heading
|
||||
buttons.bold.tooltip = Add bold text
|
||||
buttons.italic.tooltip = Add italic text
|
||||
buttons.strikethrough.tooltip = Add strikethrough text
|
||||
buttons.quote.tooltip = Quote text
|
||||
buttons.code.tooltip = Add code
|
||||
buttons.link.tooltip = Add a link
|
||||
|
||||
@@ -215,6 +215,7 @@ more=Plus
|
||||
buttons.heading.tooltip=Ajouter un en-tête
|
||||
buttons.bold.tooltip=Ajouter du texte en gras
|
||||
buttons.italic.tooltip=Ajouter du texte en italique
|
||||
buttons.strikethrough.tooltip=Ajouté un texte barré
|
||||
buttons.quote.tooltip=Citer le texte
|
||||
buttons.code.tooltip=Ajouter du code
|
||||
buttons.link.tooltip=Ajouter un lien
|
||||
@@ -1354,8 +1355,11 @@ editor.this_file_locked=Le fichier est verrouillé
|
||||
editor.must_be_on_a_branch=Vous devez être sur une branche pour appliquer ou proposer des modifications à ce fichier.
|
||||
editor.fork_before_edit=Vous devez faire bifurquer ce dépôt pour appliquer ou proposer des modifications à ce fichier.
|
||||
editor.delete_this_file=Supprimer le fichier
|
||||
editor.delete_this_directory=Supprimer le répertoire
|
||||
editor.must_have_write_access=Vous devez avoir un accès en écriture pour appliquer ou proposer des modifications à ce fichier.
|
||||
editor.file_delete_success=Le fichier "%s" a été supprimé.
|
||||
editor.directory_delete_success=Le répertoire « %s » a été supprimé.
|
||||
editor.delete_directory=Supprimer le répertoire « %s »
|
||||
editor.name_your_file=Nommez votre fichier…
|
||||
editor.filename_help=Ajoutez un dossier en entrant son nom suivi d'une barre oblique ('/'). Supprimez un dossier avec un retour arrière au début du champ.
|
||||
editor.or=ou
|
||||
@@ -1482,6 +1486,7 @@ projects.column.new_submit=Créer une colonne
|
||||
projects.column.new=Nouvelle colonne
|
||||
projects.column.set_default=Définir par défaut
|
||||
projects.column.set_default_desc=Les tickets et demandes d’ajout non-catégorisés seront placés dans cette colonne.
|
||||
projects.column.default_column_hint=Les nouveaux tickets ajoutés à ce projet seront ajoutés dans cette colonne
|
||||
projects.column.delete=Supprimer la colonne
|
||||
projects.column.deletion_desc=La suppression d’une colonne déplace tous ses tickets dans la colonne par défaut. Continuer ?
|
||||
projects.column.color=Couleur
|
||||
|
||||
@@ -215,6 +215,7 @@ more=Níos mó
|
||||
buttons.heading.tooltip=Cuir ceannteideal leis
|
||||
buttons.bold.tooltip=Cuir téacs trom leis
|
||||
buttons.italic.tooltip=Cuir téacs iodálach leis
|
||||
buttons.strikethrough.tooltip=Cuir téacs trína chéile
|
||||
buttons.quote.tooltip=Téacs luaigh
|
||||
buttons.code.tooltip=Cuir cód leis
|
||||
buttons.link.tooltip=Cuir nasc leis
|
||||
@@ -1354,8 +1355,11 @@ editor.this_file_locked=Tá an comhad faoi ghlas
|
||||
editor.must_be_on_a_branch=Caithfidh tú a bheith ar bhrainse chun athruithe a dhéanamh nó a mholadh ar an gcomhad seo.
|
||||
editor.fork_before_edit=Ní mór duit an stór seo a fhorcáil chun athruithe a dhéanamh nó a mholadh ar an gcomhad seo.
|
||||
editor.delete_this_file=Scrios Comhad
|
||||
editor.delete_this_directory=Scrios Eolaire
|
||||
editor.must_have_write_access=Caithfidh rochtain scríofa a bheith agat chun athruithe a dhéanamh nó a mholadh ar an gcomhad seo.
|
||||
editor.file_delete_success=Tá an comhad "%s" scriosta.
|
||||
editor.directory_delete_success=Scriosadh an eolaire "%s".
|
||||
editor.delete_directory=Scrios an eolaire '%s'
|
||||
editor.name_your_file=Ainmnigh do chomhad…
|
||||
editor.filename_help=Cuir eolaire leis trína ainm a chlóscríobh ina dhiaidh sin le slash ('/'). Bain eolaire trí backspace a chlóscríobh ag tús an réimse ionchuir.
|
||||
editor.or=nó
|
||||
@@ -1482,6 +1486,7 @@ projects.column.new_submit=Cruthaigh Colún
|
||||
projects.column.new=Colún Nua
|
||||
projects.column.set_default=Socraigh Réamhshocrú
|
||||
projects.column.set_default_desc=Socraigh an colún seo mar réamhshocrú le haghaidh saincheisteanna agus tarraingtí gan chatagóir
|
||||
projects.column.default_column_hint=Cuirfear saincheisteanna nua a chuirtear leis an tionscadal seo leis an gcolún seo
|
||||
projects.column.delete=Scrios Colún
|
||||
projects.column.deletion_desc=Ag scriosadh colún tionscadail aistríonn gach saincheist ghaolmhar chuig an gcolún. Lean ar aghaidh?
|
||||
projects.column.color=Dath
|
||||
@@ -3038,6 +3043,7 @@ dashboard.update_migration_poster_id=Nuashonraigh ID póstaer imir
|
||||
dashboard.git_gc_repos=Bailitheoir bruscair gach stórais
|
||||
dashboard.resync_all_sshkeys=Nuashonraigh an comhad '.ssh/authorized_keys' le heochracha SSH Gitea
|
||||
dashboard.resync_all_sshprincipals=Nuashonraigh an comhad '.ssh/authorized_principals' le príomhoidí SSH Gitea
|
||||
dashboard.resync_all_hooks=Athshioncrónaigh crúcaí git na stórtha uile (réamhghlacadh, nuashonrú, iarghlacadh, próiseasghlacadh, ...)
|
||||
dashboard.reinit_missing_repos=Aththosaigh gach stórais Git atá in easnamh a bhfuil taifid ann dóibh
|
||||
dashboard.sync_external_users=Sioncrónaigh sonraí úsáideoirí seachtracha
|
||||
dashboard.cleanup_hook_task_table=Glan suas an tábla hook_task
|
||||
|
||||
@@ -1354,8 +1354,11 @@ editor.this_file_locked=ファイルはロックされています
|
||||
editor.must_be_on_a_branch=このファイルを変更したり変更の提案をするには、ブランチ上にいる必要があります。
|
||||
editor.fork_before_edit=このファイルを変更したり変更の提案をするには、リポジトリをフォークする必要があります。
|
||||
editor.delete_this_file=ファイルを削除
|
||||
editor.delete_this_directory=ディレクトリを削除
|
||||
editor.must_have_write_access=このファイルを変更したり変更の提案をするには、書き込み権限が必要です。
|
||||
editor.file_delete_success=ファイル "%s" を削除しました。
|
||||
editor.directory_delete_success=ディレクトリ "%s" を削除しました。
|
||||
editor.delete_directory=ディレクトリ'%s'を削除
|
||||
editor.name_your_file=ファイル名を指定…
|
||||
editor.filename_help=ディレクトリを追加するにはディレクトリ名に続けてスラッシュ('/')を入力します。 ディレクトリを削除するには入力欄の先頭でbackspaceキーを押します。
|
||||
editor.or=または
|
||||
@@ -1482,6 +1485,7 @@ projects.column.new_submit=列を作成
|
||||
projects.column.new=新しい列
|
||||
projects.column.set_default=デフォルトに設定
|
||||
projects.column.set_default_desc=この列を未分類のイシューやプルリクエストが入るデフォルトの列にします
|
||||
projects.column.default_column_hint=このプロジェクトに追加された新しいイシューがこの列に追加されます
|
||||
projects.column.delete=列を削除
|
||||
projects.column.deletion_desc=プロジェクト列を削除すると、関連するすべてのイシューがデフォルトの列に移動します。 続行しますか?
|
||||
projects.column.color=カラー
|
||||
@@ -3038,6 +3042,7 @@ dashboard.update_migration_poster_id=移行する投稿者IDの更新
|
||||
dashboard.git_gc_repos=すべてのリポジトリでガベージコレクションを実行
|
||||
dashboard.resync_all_sshkeys='.ssh/authorized_keys' ファイルをGitea上のSSHキーで更新
|
||||
dashboard.resync_all_sshprincipals='.ssh/authorized_principals' ファイルをGitea上のSSHプリンシパルで更新
|
||||
dashboard.resync_all_hooks=すべてのリポジトリのGitフックを再同期する (pre-receive, update, post-receive, proc-receive, ...)
|
||||
dashboard.reinit_missing_repos=レコードが存在するが見当たらないすべてのGitリポジトリを再初期化する
|
||||
dashboard.sync_external_users=外部ユーザーデータの同期
|
||||
dashboard.cleanup_hook_task_table=hook_taskテーブルのクリーンアップ
|
||||
|
||||
@@ -215,6 +215,7 @@ more=Mais
|
||||
buttons.heading.tooltip=Adicionar cabeçalho
|
||||
buttons.bold.tooltip=Adicionar texto em negrito
|
||||
buttons.italic.tooltip=Adicionar texto em itálico
|
||||
buttons.strikethrough.tooltip=Adicionar texto rasurado
|
||||
buttons.quote.tooltip=Citar texto
|
||||
buttons.code.tooltip=Adicionar código-fonte
|
||||
buttons.link.tooltip=Adicionar uma ligação
|
||||
@@ -1354,8 +1355,11 @@ editor.this_file_locked=Ficheiro bloqueado
|
||||
editor.must_be_on_a_branch=Tem que estar num ramo para fazer ou propor modificações neste ficheiro.
|
||||
editor.fork_before_edit=Tem que fazer uma derivação deste repositório para fazer ou propor modificações neste ficheiro.
|
||||
editor.delete_this_file=Eliminar ficheiro
|
||||
editor.delete_this_directory=Eliminar pasta
|
||||
editor.must_have_write_access=Tem que ter permissões de escrita para fazer ou propor modificações neste ficheiro.
|
||||
editor.file_delete_success=O ficheiro "%s" foi eliminado.
|
||||
editor.directory_delete_success=A pasta "%s" foi eliminada.
|
||||
editor.delete_directory=Eliminar a pasta '%s'
|
||||
editor.name_your_file=Nomeie o seu ficheiro…
|
||||
editor.filename_help=Adicione uma pasta escrevendo o nome dessa pasta seguido de uma barra('/'). Remova uma pasta carregando na tecla de apagar ('←') no início do campo.
|
||||
editor.or=ou
|
||||
|
||||
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"type": "module",
|
||||
"packageManager": "pnpm@10.23.0",
|
||||
"packageManager": "pnpm@10.24.0",
|
||||
"engines": {
|
||||
"node": ">= 22.6.0",
|
||||
"pnpm": ">= 10.0.0"
|
||||
@@ -15,7 +15,7 @@
|
||||
"@github/relative-time-element": "4.5.1",
|
||||
"@github/text-expander-element": "2.9.2",
|
||||
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
|
||||
"@primer/octicons": "19.21.0",
|
||||
"@primer/octicons": "19.21.1",
|
||||
"@resvg/resvg-wasm": "2.6.2",
|
||||
"@silverwind/vue3-calendar-heatmap": "2.0.6",
|
||||
"@techknowlogick/license-checker-webpack-plugin": "0.3.0",
|
||||
@@ -36,7 +36,7 @@
|
||||
"idiomorph": "0.7.4",
|
||||
"jquery": "3.7.1",
|
||||
"katex": "0.16.25",
|
||||
"mermaid": "11.12.1",
|
||||
"mermaid": "11.12.2",
|
||||
"mini-css-extract-plugin": "2.9.4",
|
||||
"monaco-editor": "0.55.1",
|
||||
"monaco-editor-webpack-plugin": "7.1.1",
|
||||
@@ -79,38 +79,38 @@
|
||||
"@types/throttle-debounce": "5.0.2",
|
||||
"@types/tinycolor2": "1.4.6",
|
||||
"@types/toastify-js": "1.12.4",
|
||||
"@typescript-eslint/parser": "8.48.0",
|
||||
"@typescript-eslint/parser": "8.48.1",
|
||||
"@vitejs/plugin-vue": "6.0.2",
|
||||
"@vitest/eslint-plugin": "1.5.0",
|
||||
"@vitest/eslint-plugin": "1.5.1",
|
||||
"eslint": "9.39.1",
|
||||
"eslint-import-resolver-typescript": "4.4.4",
|
||||
"eslint-plugin-array-func": "5.1.0",
|
||||
"eslint-plugin-github": "6.0.0",
|
||||
"eslint-plugin-import-x": "4.16.1",
|
||||
"eslint-plugin-playwright": "2.3.0",
|
||||
"eslint-plugin-playwright": "2.4.0",
|
||||
"eslint-plugin-regexp": "2.10.0",
|
||||
"eslint-plugin-sonarjs": "3.0.5",
|
||||
"eslint-plugin-unicorn": "62.0.0",
|
||||
"eslint-plugin-vue": "10.6.1",
|
||||
"eslint-plugin-vue": "10.6.2",
|
||||
"eslint-plugin-vue-scoped-css": "2.12.0",
|
||||
"eslint-plugin-wc": "3.0.2",
|
||||
"globals": "16.5.0",
|
||||
"happy-dom": "20.0.10",
|
||||
"happy-dom": "20.0.11",
|
||||
"markdownlint-cli": "0.46.0",
|
||||
"material-icon-theme": "5.29.0",
|
||||
"nolyfill": "1.0.44",
|
||||
"postcss-html": "1.8.0",
|
||||
"spectral-cli-bundle": "1.0.3",
|
||||
"stylelint": "16.26.0",
|
||||
"stylelint": "16.26.1",
|
||||
"stylelint-config-recommended": "17.0.0",
|
||||
"stylelint-declaration-block-no-ignored-properties": "2.8.0",
|
||||
"stylelint-declaration-strict-value": "1.10.11",
|
||||
"stylelint-value-no-unknown-custom-properties": "6.0.1",
|
||||
"svgo": "4.0.0",
|
||||
"typescript-eslint": "8.48.0",
|
||||
"updates": "16.9.2",
|
||||
"typescript-eslint": "8.48.1",
|
||||
"updates": "17.0.4",
|
||||
"vite-string-plugin": "1.4.9",
|
||||
"vitest": "4.0.14",
|
||||
"vitest": "4.0.15",
|
||||
"vue-tsc": "3.1.5"
|
||||
},
|
||||
"browserslist": [
|
||||
|
||||
pnpm-lock.yaml (generated): file diff suppressed because it is too large.
@@ -216,9 +216,12 @@ func EditUser(ctx *context.APIContext) {
|
||||
}
|
||||
|
||||
if form.Email != nil {
|
||||
if err := user_service.AdminAddOrSetPrimaryEmailAddress(ctx, ctx.ContextUser, *form.Email); err != nil {
|
||||
if err := user_service.ReplacePrimaryEmailAddress(ctx, ctx.ContextUser, *form.Email); err != nil {
|
||||
switch {
|
||||
case user_model.IsErrEmailCharIsNotSupported(err), user_model.IsErrEmailInvalid(err):
|
||||
if !user_model.IsEmailDomainAllowed(*form.Email) {
|
||||
err = fmt.Errorf("the domain of user email %s conflicts with EMAIL_DOMAIN_ALLOWLIST or EMAIL_DOMAIN_BLOCKLIST", *form.Email)
|
||||
}
|
||||
ctx.APIError(http.StatusBadRequest, err)
|
||||
case user_model.IsErrEmailAlreadyUsed(err):
|
||||
ctx.APIError(http.StatusBadRequest, err)
|
||||
@@ -227,10 +230,6 @@ func EditUser(ctx *context.APIContext) {
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if !user_model.IsEmailDomainAllowed(*form.Email) {
|
||||
ctx.Resp.Header().Add("X-Gitea-Warning", fmt.Sprintf("the domain of user email %s conflicts with EMAIL_DOMAIN_ALLOWLIST or EMAIL_DOMAIN_BLOCKLIST", *form.Email))
|
||||
}
|
||||
}
|
||||
|
||||
opts := &user_service.UpdateOptions{
|
||||
@@ -415,22 +414,116 @@ func SearchUsers(ctx *context.APIContext) {
|
||||
// in: query
|
||||
// description: page size of results
|
||||
// type: integer
|
||||
// - name: sort
|
||||
// in: query
|
||||
// description: sort users by attribute. Supported values are
|
||||
// "name", "created", "updated" and "id".
|
||||
// Default is "name"
|
||||
// type: string
|
||||
// - name: order
|
||||
// in: query
|
||||
// description: sort order, either "asc" (ascending) or "desc" (descending).
|
||||
// Default is "asc", ignored if "sort" is not specified.
|
||||
// type: string
|
||||
// - name: q
|
||||
// in: query
|
||||
// description: search term (username, full name, email)
|
||||
// type: string
|
||||
// - name: visibility
|
||||
// in: query
|
||||
// description: visibility filter. Supported values are
|
||||
// "public", "limited" and "private".
|
||||
// type: string
|
||||
// - name: is_active
|
||||
// in: query
|
||||
// description: filter active users
|
||||
// type: boolean
|
||||
// - name: is_admin
|
||||
// in: query
|
||||
// description: filter admin users
|
||||
// type: boolean
|
||||
// - name: is_restricted
|
||||
// in: query
|
||||
// description: filter restricted users
|
||||
// type: boolean
|
||||
// - name: is_2fa_enabled
|
||||
// in: query
|
||||
// description: filter 2FA enabled users
|
||||
// type: boolean
|
||||
// - name: is_prohibit_login
|
||||
// in: query
|
||||
// description: filter login prohibited users
|
||||
// type: boolean
|
||||
// responses:
|
||||
// "200":
|
||||
// "$ref": "#/responses/UserList"
|
||||
// "403":
|
||||
// "$ref": "#/responses/forbidden"
|
||||
// "422":
|
||||
// "$ref": "#/responses/validationError"
|
||||
|
||||
listOptions := utils.GetListOptions(ctx)
|
||||
|
||||
users, maxResults, err := user_model.SearchUsers(ctx, user_model.SearchUserOptions{
|
||||
Actor: ctx.Doer,
|
||||
Types: []user_model.UserType{user_model.UserTypeIndividual},
|
||||
LoginName: ctx.FormTrim("login_name"),
|
||||
SourceID: ctx.FormInt64("source_id"),
|
||||
OrderBy: db.SearchOrderByAlphabetically,
|
||||
ListOptions: listOptions,
|
||||
})
|
||||
orderBy := db.SearchOrderByAlphabetically
|
||||
sortMode := ctx.FormString("sort")
|
||||
if len(sortMode) > 0 {
|
||||
sortOrder := ctx.FormString("order")
|
||||
if len(sortOrder) == 0 {
|
||||
sortOrder = "asc"
|
||||
}
|
||||
if searchModeMap, ok := user_model.AdminUserOrderByMap[sortOrder]; ok {
|
||||
if order, ok := searchModeMap[sortMode]; ok {
|
||||
orderBy = order
|
||||
} else {
|
||||
ctx.APIError(http.StatusUnprocessableEntity, fmt.Errorf("Invalid sort mode: \"%s\"", sortMode))
|
||||
return
|
||||
}
|
||||
} else {
|
||||
ctx.APIError(http.StatusUnprocessableEntity, fmt.Errorf("Invalid sort order: \"%s\"", sortOrder))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
var visible []api.VisibleType
|
||||
visibilityParam := ctx.FormString("visibility")
|
||||
if len(visibilityParam) > 0 {
|
||||
if visibility, ok := api.VisibilityModes[visibilityParam]; ok {
|
||||
visible = []api.VisibleType{visibility}
|
||||
} else {
|
||||
ctx.APIError(http.StatusUnprocessableEntity, fmt.Errorf("Invalid visibility: \"%s\"", visibilityParam))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
searchOpts := user_model.SearchUserOptions{
|
||||
Actor: ctx.Doer,
|
||||
Types: []user_model.UserType{user_model.UserTypeIndividual},
|
||||
LoginName: ctx.FormTrim("login_name"),
|
||||
SourceID: ctx.FormInt64("source_id"),
|
||||
Keyword: ctx.FormTrim("q"),
|
||||
Visible: visible,
|
||||
OrderBy: orderBy,
|
||||
ListOptions: listOptions,
|
||||
SearchByEmail: true,
|
||||
}
|
||||
|
||||
if ctx.FormString("is_active") != "" {
|
||||
searchOpts.IsActive = optional.Some(ctx.FormBool("is_active"))
|
||||
}
|
||||
if ctx.FormString("is_admin") != "" {
|
||||
searchOpts.IsAdmin = optional.Some(ctx.FormBool("is_admin"))
|
||||
}
|
||||
if ctx.FormString("is_restricted") != "" {
|
||||
searchOpts.IsRestricted = optional.Some(ctx.FormBool("is_restricted"))
|
||||
}
|
||||
if ctx.FormString("is_2fa_enabled") != "" {
|
||||
searchOpts.IsTwoFactorEnabled = optional.Some(ctx.FormBool("is_2fa_enabled"))
|
||||
}
|
||||
if ctx.FormString("is_prohibit_login") != "" {
|
||||
searchOpts.IsProhibitLogin = optional.Some(ctx.FormBool("is_prohibit_login"))
|
||||
}
|
||||
|
||||
users, maxResults, err := user_model.SearchUsers(ctx, searchOpts)
|
||||
if err != nil {
|
||||
ctx.APIErrorInternal(err)
|
||||
return
|
||||
|
||||
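
The hunk above adds sort/order, keyword, visibility and several boolean filters to the admin user search. A hedged sketch of composing those query parameters; the endpoint path below is an assumption, since it is not shown in this hunk:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("sort", "created")       // "name", "created", "updated" or "id"
	q.Set("order", "desc")         // "asc" or "desc"
	q.Set("q", "alice")            // username, full name or email
	q.Set("visibility", "private") // "public", "limited" or "private"
	q.Set("is_active", "true")
	q.Set("is_2fa_enabled", "false")
	fmt.Println("/api/v1/admin/users?" + q.Encode())
}
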
@@ -1242,6 +1242,7 @@ func Routes() *web.Router {
|
||||
m.Get("/*", repo.GetBranch)
|
||||
m.Delete("/*", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, repo.DeleteBranch)
|
||||
m.Post("", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, bind(api.CreateBranchRepoOption{}), repo.CreateBranch)
|
||||
m.Put("/*", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, bind(api.UpdateBranchRepoOption{}), repo.UpdateBranch)
|
||||
m.Patch("/*", reqToken(), reqRepoWriter(unit.TypeCode), mustNotBeArchived, bind(api.RenameBranchRepoOption{}), repo.RenameBranch)
|
||||
}, context.ReferencesGitRepo(), reqRepoReader(unit.TypeCode))
|
||||
m.Group("/branch_protections", func() {
|
||||
|
||||
@@ -380,6 +380,81 @@ func ListBranches(ctx *context.APIContext) {
|
||||
ctx.JSON(http.StatusOK, apiBranches)
|
||||
}
|
||||
|
||||
// UpdateBranch moves a branch reference to a new commit.
|
||||
func UpdateBranch(ctx *context.APIContext) {
|
||||
// swagger:operation PUT /repos/{owner}/{repo}/branches/{branch} repository repoUpdateBranch
|
||||
// ---
|
||||
// summary: Update a branch reference to a new commit
|
||||
// consumes:
|
||||
// - application/json
|
||||
// produces:
|
||||
// - application/json
|
||||
// parameters:
|
||||
// - name: owner
|
||||
// in: path
|
||||
// description: owner of the repo
|
||||
// type: string
|
||||
// required: true
|
||||
// - name: repo
|
||||
// in: path
|
||||
// description: name of the repo
|
||||
// type: string
|
||||
// required: true
|
||||
// - name: branch
|
||||
// in: path
|
||||
// description: name of the branch
|
||||
// type: string
|
||||
// required: true
|
||||
// - name: body
|
||||
// in: body
|
||||
// schema:
|
||||
// "$ref": "#/definitions/UpdateBranchRepoOption"
|
||||
// responses:
|
||||
// "204":
|
||||
// "$ref": "#/responses/empty"
|
||||
// "403":
|
||||
// "$ref": "#/responses/forbidden"
|
||||
// "404":
|
||||
// "$ref": "#/responses/notFound"
|
||||
// "409":
|
||||
// "$ref": "#/responses/conflict"
|
||||
// "422":
|
||||
// "$ref": "#/responses/validationError"
|
||||
|
||||
opt := web.GetForm(ctx).(*api.UpdateBranchRepoOption)
|
||||
|
||||
branchName := ctx.PathParam("*")
|
||||
repo := ctx.Repo.Repository
|
||||
|
||||
if repo.IsEmpty {
|
||||
ctx.APIError(http.StatusNotFound, "Git Repository is empty.")
|
||||
return
|
||||
}
|
||||
|
||||
if repo.IsMirror {
|
||||
ctx.APIError(http.StatusForbidden, "Git Repository is a mirror.")
|
||||
return
|
||||
}
|
||||
|
||||
// permission check has been done in api.go
|
||||
if err := repo_service.UpdateBranch(ctx, repo, ctx.Repo.GitRepo, ctx.Doer, branchName, opt.NewCommitID, opt.OldCommitID, opt.Force); err != nil {
|
||||
switch {
|
||||
case git_model.IsErrBranchNotExist(err):
|
||||
ctx.APIErrorNotFound(err)
|
||||
case errors.Is(err, util.ErrInvalidArgument):
|
||||
ctx.APIError(http.StatusUnprocessableEntity, err)
|
||||
case git.IsErrPushRejected(err):
|
||||
rej := err.(*git.ErrPushRejected)
|
||||
ctx.APIError(http.StatusForbidden, rej.Message)
|
||||
default:
|
||||
ctx.APIErrorInternal(err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Status(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// RenameBranch renames a repository's branch.
|
||||
func RenameBranch(ctx *context.APIContext) {
|
||||
// swagger:operation PATCH /repos/{owner}/{repo}/branches/{branch} repository repoRenameBranch
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
issues_model "code.gitea.io/gitea/models/issues"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/routers/api/v1/utils"
|
||||
@@ -222,8 +223,7 @@ func GetAllCommits(ctx *context.APIContext) {
|
||||
}
|
||||
|
||||
// Total commit count
|
||||
commitsCountTotal, err = git.CommitsCount(ctx.Repo.GitRepo.Ctx, git.CommitsCountOptions{
|
||||
RepoPath: ctx.Repo.GitRepo.Path,
|
||||
commitsCountTotal, err = gitrepo.CommitsCount(ctx, ctx.Repo.Repository, gitrepo.CommitsCountOptions{
|
||||
Not: not,
|
||||
Revision: []string{baseCommit.ID.String()},
|
||||
Since: since,
|
||||
@@ -245,9 +245,8 @@ func GetAllCommits(ctx *context.APIContext) {
|
||||
sha = ctx.Repo.Repository.DefaultBranch
|
||||
}
|
||||
|
||||
commitsCountTotal, err = git.CommitsCount(ctx,
|
||||
git.CommitsCountOptions{
|
||||
RepoPath: ctx.Repo.GitRepo.Path,
|
||||
commitsCountTotal, err = gitrepo.CommitsCount(ctx, ctx.Repo.Repository,
|
||||
gitrepo.CommitsCountOptions{
|
||||
Not: not,
|
||||
Revision: []string{sha},
|
||||
RelPath: []string{path},
|
||||
|
||||
@@ -1165,7 +1165,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
|
||||
baseRef := ctx.Repo.GitRepo.UnstableGuessRefByShortName(baseRefToGuess)
|
||||
headRef := headGitRepo.UnstableGuessRefByShortName(headRefToGuess)
|
||||
|
||||
log.Trace("Repo path: %q, base ref: %q->%q, head ref: %q->%q", ctx.Repo.GitRepo.Path, baseRefToGuess, baseRef, headRefToGuess, headRef)
|
||||
log.Trace("Repo path: %q, base ref: %q->%q, head ref: %q->%q", ctx.Repo.Repository.RelativePath(), baseRefToGuess, baseRef, headRefToGuess, headRef)
|
||||
|
||||
baseRefValid := baseRef.IsBranch() || baseRef.IsTag() || git.IsStringLikelyCommitID(git.ObjectFormatFromName(ctx.Repo.Repository.ObjectFormatName), baseRef.ShortName())
|
||||
headRefValid := headRef.IsBranch() || headRef.IsTag() || git.IsStringLikelyCommitID(git.ObjectFormatFromName(headRepo.ObjectFormatName), headRef.ShortName())
|
||||
|
||||
@@ -193,7 +193,7 @@ func getWikiPage(ctx *context.APIContext, wikiName wiki_service.WebPath) *api.Wi
|
||||
}
|
||||
|
||||
// get commit count - wiki revisions
|
||||
commitsCount, _ := wikiRepo.FileCommitsCount(ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
commitsCount, _ := gitrepo.FileCommitsCount(ctx, ctx.Repo.Repository.WikiStorageRepo(), ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
|
||||
// Get last change information.
|
||||
lastCommit, err := wikiRepo.GetCommitByPath(pageFilename)
|
||||
@@ -429,7 +429,7 @@ func ListPageRevisions(ctx *context.APIContext) {
|
||||
}
|
||||
|
||||
// get commit count - wiki revisions
|
||||
commitsCount, _ := wikiRepo.FileCommitsCount(ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
commitsCount, _ := gitrepo.FileCommitsCount(ctx, ctx.Repo.Repository.WikiStorageRepo(), ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
|
||||
page := max(ctx.FormInt("page"), 1)
|
||||
|
||||
|
||||
@@ -147,6 +147,8 @@ type swaggerParameterBodies struct {
|
||||
|
||||
// in:body
|
||||
CreateBranchRepoOption api.CreateBranchRepoOption
|
||||
// in:body
|
||||
UpdateBranchRepoOption api.UpdateBranchRepoOption
|
||||
|
||||
// in:body
|
||||
CreateBranchProtectionOption api.CreateBranchProtectionOption
|
||||
|
||||
@@ -72,8 +72,13 @@ func RequestContextHandler() func(h http.Handler) http.Handler {
|
||||
req = req.WithContext(cache.WithCacheContext(ctx))
|
||||
ds.SetContextValue(httplib.RequestContextKey, req)
|
||||
ds.AddCleanUp(func() {
|
||||
if req.MultipartForm != nil {
|
||||
_ = req.MultipartForm.RemoveAll() // remove the temp files buffered to tmp directory
|
||||
// TODO: GOLANG-HTTP-TMPDIR: Golang saves the uploaded files to temp directory (TMPDIR) when parsing multipart-form.
|
||||
// The "req" might have changed due to the new "req.WithContext" calls
|
||||
// For example: in NewBaseContext, a new "req" with context is created, and the multipart-form is parsed there.
|
||||
// So we always use the latest "req" from the data store.
|
||||
ctxReq := ds.GetContextValue(httplib.RequestContextKey).(*http.Request)
|
||||
if ctxReq.MultipartForm != nil {
|
||||
_ = ctxReq.MultipartForm.RemoveAll() // remove the temp files buffered to tmp directory
|
||||
}
|
||||
})
|
||||
next.ServeHTTP(respWriter, req)
|
||||
|
||||
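
The cleanup above stops using the req captured at registration time and re-reads the latest *http.Request from the data store. A minimal sketch of why: req.WithContext hands back a copy, so fields set later on the copy (such as MultipartForm after parsing) never show up on the earlier pointer. URL and field values below are illustrative:

package main

import (
	"context"
	"fmt"
	"mime/multipart"
	"net/http"
)

type ctxKey struct{}

func main() {
	orig, _ := http.NewRequest(http.MethodPost, "https://example.com/upload", nil)
	// WithContext returns a shallow copy of the request.
	copied := orig.WithContext(context.WithValue(orig.Context(), ctxKey{}, "v"))
	copied.MultipartForm = &multipart.Form{} // e.g. set by ParseMultipartForm on the copy
	fmt.Println(orig.MultipartForm == nil, copied.MultipartForm != nil) // true true
}
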
@@ -108,21 +108,19 @@ func ServCommand(ctx *context.PrivateContext) {
|
||||
results.RepoName = repoName[:len(repoName)-5]
|
||||
}
|
||||
|
||||
// Check if there is a user redirect for the requested owner
|
||||
redirectedUserID, err := user_model.LookupUserRedirect(ctx, results.OwnerName)
|
||||
if err == nil {
|
||||
owner, err := user_model.GetUserByID(ctx, redirectedUserID)
|
||||
if err == nil {
|
||||
log.Info("User %s has been redirected to %s", results.OwnerName, owner.Name)
|
||||
results.OwnerName = owner.Name
|
||||
} else {
|
||||
log.Warn("User %s has a redirect to user with ID %d, but no user with this ID could be found. Trying without redirect...", results.OwnerName, redirectedUserID)
|
||||
}
|
||||
}
|
||||
|
||||
owner, err := user_model.GetUserByName(ctx, results.OwnerName)
|
||||
if err != nil {
|
||||
if user_model.IsErrUserNotExist(err) {
|
||||
if !user_model.IsErrUserNotExist(err) {
|
||||
log.Error("Unable to get repository owner: %s/%s Error: %v", results.OwnerName, results.RepoName, err)
|
||||
ctx.JSON(http.StatusForbidden, private.Response{
|
||||
UserMsg: fmt.Sprintf("Unable to get repository owner: %s/%s %v", results.OwnerName, results.RepoName, err),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Check if there is a user redirect for the requested owner
|
||||
redirectedUserID, err := user_model.LookupUserRedirect(ctx, results.OwnerName)
|
||||
if err != nil {
|
||||
// User is fetching/cloning a non-existent repository
|
||||
log.Warn("Failed authentication attempt (cannot find repository: %s/%s) from %s", results.OwnerName, results.RepoName, ctx.RemoteAddr())
|
||||
ctx.JSON(http.StatusNotFound, private.Response{
|
||||
@@ -130,11 +128,20 @@ func ServCommand(ctx *context.PrivateContext) {
|
||||
})
|
||||
return
|
||||
}
|
||||
log.Error("Unable to get repository owner: %s/%s Error: %v", results.OwnerName, results.RepoName, err)
|
||||
ctx.JSON(http.StatusForbidden, private.Response{
|
||||
UserMsg: fmt.Sprintf("Unable to get repository owner: %s/%s %v", results.OwnerName, results.RepoName, err),
|
||||
})
|
||||
return
|
||||
|
||||
redirectUser, err := user_model.GetUserByID(ctx, redirectedUserID)
|
||||
if err != nil {
|
||||
// User is fetching/cloning a non-existent repository
|
||||
log.Warn("Failed authentication attempt (cannot find repository: %s/%s) from %s", results.OwnerName, results.RepoName, ctx.RemoteAddr())
|
||||
ctx.JSON(http.StatusNotFound, private.Response{
|
||||
UserMsg: fmt.Sprintf("Cannot find repository: %s/%s", results.OwnerName, results.RepoName),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
log.Info("User %s has been redirected to %s", results.OwnerName, redirectUser.Name)
|
||||
results.OwnerName = redirectUser.Name
|
||||
owner = redirectUser
|
||||
}
|
||||
if !owner.IsOrganization() && !owner.IsActive {
|
||||
ctx.JSON(http.StatusForbidden, private.Response{
|
||||
@@ -143,24 +150,33 @@ func ServCommand(ctx *context.PrivateContext) {
|
||||
return
|
||||
}
|
||||
|
||||
redirectedRepoID, err := repo_model.LookupRedirect(ctx, owner.ID, results.RepoName)
|
||||
if err == nil {
|
||||
redirectedRepo, err := repo_model.GetRepositoryByID(ctx, redirectedRepoID)
|
||||
if err == nil {
|
||||
log.Info("Repository %s/%s has been redirected to %s/%s", results.OwnerName, results.RepoName, redirectedRepo.OwnerName, redirectedRepo.Name)
|
||||
results.RepoName = redirectedRepo.Name
|
||||
results.OwnerName = redirectedRepo.OwnerName
|
||||
owner.ID = redirectedRepo.OwnerID
|
||||
} else {
|
||||
log.Warn("Repo %s/%s has a redirect to repo with ID %d, but no repo with this ID could be found. Trying without redirect...", results.OwnerName, results.RepoName, redirectedRepoID)
|
||||
}
|
||||
}
|
||||
|
||||
// Now get the Repository and set the results section
|
||||
repoExist := true
|
||||
repo, err := repo_model.GetRepositoryByName(ctx, owner.ID, results.RepoName)
|
||||
if err != nil {
|
||||
if repo_model.IsErrRepoNotExist(err) {
|
||||
if !repo_model.IsErrRepoNotExist(err) {
|
||||
log.Error("Unable to get repository: %s/%s Error: %v", results.OwnerName, results.RepoName, err)
|
||||
ctx.JSON(http.StatusInternalServerError, private.Response{
|
||||
Err: fmt.Sprintf("Unable to get repository: %s/%s %v", results.OwnerName, results.RepoName, err),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
redirectedRepoID, err := repo_model.LookupRedirect(ctx, owner.ID, results.RepoName)
|
||||
if err == nil {
|
||||
redirectedRepo, err := repo_model.GetRepositoryByID(ctx, redirectedRepoID)
|
||||
if err == nil {
|
||||
log.Info("Repository %s/%s has been redirected to %s/%s", results.OwnerName, results.RepoName, redirectedRepo.OwnerName, redirectedRepo.Name)
|
||||
results.RepoName = redirectedRepo.Name
|
||||
results.OwnerName = redirectedRepo.OwnerName
|
||||
repo = redirectedRepo
|
||||
owner.ID = redirectedRepo.OwnerID
|
||||
} else {
|
||||
log.Warn("Repo %s/%s has a redirect to repo with ID %d, but no repo with this ID could be found. Trying without redirect...", results.OwnerName, results.RepoName, redirectedRepoID)
|
||||
}
|
||||
}
|
||||
|
||||
if repo == nil {
|
||||
repoExist = false
|
||||
if mode == perm.AccessModeRead {
|
||||
// User is fetching/cloning a non-existent repository
|
||||
@@ -170,13 +186,6 @@ func ServCommand(ctx *context.PrivateContext) {
|
||||
})
|
||||
return
|
||||
}
|
||||
// else fallthrough (push-to-create may kick in below)
|
||||
} else {
|
||||
log.Error("Unable to get repository: %s/%s Error: %v", results.OwnerName, results.RepoName, err)
|
||||
ctx.JSON(http.StatusInternalServerError, private.Response{
|
||||
Err: fmt.Sprintf("Unable to get repository: %s/%s %v", results.OwnerName, results.RepoName, err),
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
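
The ServCommand hunks reorder the owner and repository lookups so the redirect tables are only consulted after a direct lookup fails with a not-found error; any other error still aborts immediately. A standalone sketch of that fallback shape (names and data are illustrative):

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("not found")

var users = map[string]string{"new-name": "user#1"}
var redirects = map[string]string{"old-name": "new-name"}

func lookup(name string) (string, error) {
	if u, ok := users[name]; ok {
		return u, nil
	}
	return "", errNotFound
}

func lookupWithRedirect(name string) (string, error) {
	u, err := lookup(name)
	if !errors.Is(err, errNotFound) {
		return u, err // found, or a real error: no redirect handling
	}
	target, ok := redirects[name]
	if !ok {
		return "", errNotFound
	}
	return lookup(target)
}

func main() {
	fmt.Println(lookupWithRedirect("old-name")) // user#1 <nil>
	fmt.Println(lookupWithRedirect("missing"))  //  not found
}
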
@@ -409,7 +409,7 @@ func EditUserPost(ctx *context.Context) {
|
||||
}
|
||||
|
||||
if form.Email != "" {
|
||||
if err := user_service.AdminAddOrSetPrimaryEmailAddress(ctx, u, form.Email); err != nil {
|
||||
if err := user_service.ReplacePrimaryEmailAddress(ctx, u, form.Email); err != nil {
|
||||
switch {
|
||||
case user_model.IsErrEmailCharIsNotSupported(err), user_model.IsErrEmailInvalid(err):
|
||||
ctx.Data["Err_Email"] = true
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
"code.gitea.io/gitea/models/unit"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/optional"
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
@@ -133,8 +134,7 @@ func RestoreBranchPost(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
if err := git.Push(ctx, ctx.Repo.Repository.RepoPath(), git.PushOptions{
|
||||
Remote: ctx.Repo.Repository.RepoPath(),
|
||||
if err := gitrepo.Push(ctx, ctx.Repo.Repository, ctx.Repo.Repository, git.PushOptions{
|
||||
Branch: fmt.Sprintf("%s:%s%s", deletedBranch.CommitID, git.BranchPrefix, deletedBranch.Name),
|
||||
Env: repo_module.PushingEnvironment(ctx.Doer, ctx.Repo.Repository),
|
||||
}); err != nil {
|
||||
|
||||
@@ -222,7 +222,7 @@ func FileHistory(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
commitsCount, err := ctx.Repo.GitRepo.FileCommitsCount(ctx.Repo.RefFullName.ShortName(), ctx.Repo.TreePath)
|
||||
commitsCount, err := gitrepo.FileCommitsCount(ctx, ctx.Repo.Repository, ctx.Repo.RefFullName.ShortName(), ctx.Repo.TreePath)
|
||||
if err != nil {
|
||||
ctx.ServerError("FileCommitsCount", err)
|
||||
return
|
||||
|
||||
@@ -317,11 +317,7 @@ func EditFile(ctx *context.Context) {
|
||||
ctx.ServerError("ReadAll", err)
|
||||
return
|
||||
}
|
||||
if content, err := charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true}); err != nil {
|
||||
ctx.Data["FileContent"] = string(buf)
|
||||
} else {
|
||||
ctx.Data["FileContent"] = content
|
||||
}
|
||||
ctx.Data["FileContent"] = string(charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true, ErrorReturnOrigin: true}))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/services/context"
|
||||
"code.gitea.io/gitea/services/forms"
|
||||
@@ -35,9 +36,7 @@ func CherryPick(ctx *context.Context) {
|
||||
ctx.Data["commit_message"] = "revert " + cherryPickCommit.Message()
|
||||
} else {
|
||||
ctx.Data["CherryPickType"] = "cherry-pick"
|
||||
splits := strings.SplitN(cherryPickCommit.Message(), "\n", 2)
|
||||
ctx.Data["commit_summary"] = splits[0]
|
||||
ctx.Data["commit_message"] = splits[1]
|
||||
ctx.Data["commit_summary"], ctx.Data["commit_message"], _ = strings.Cut(cherryPickCommit.Message(), "\n")
|
||||
}
|
||||
|
||||
ctx.HTML(http.StatusOK, tplCherryPick)
|
||||
@@ -66,7 +65,7 @@ func CherryPickPost(ctx *context.Context) {
|
||||
// Drop through to the "apply" method
|
||||
buf := &bytes.Buffer{}
|
||||
if parsed.form.Revert {
|
||||
err = git.GetReverseRawDiff(ctx, ctx.Repo.Repository.RepoPath(), fromCommitID, buf)
|
||||
err = gitrepo.GetReverseRawDiff(ctx, ctx.Repo.Repository, fromCommitID, buf)
|
||||
} else {
|
||||
err = git.GetRawDiff(ctx.Repo.GitRepo, fromCommitID, "patch", buf)
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
repo_model "code.gitea.io/gitea/models/repo"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/json"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
@@ -102,8 +103,7 @@ func getUniqueRepositoryName(ctx context.Context, ownerID int64, name string) st
|
||||
}
|
||||
|
||||
func editorPushBranchToForkedRepository(ctx context.Context, doer *user_model.User, baseRepo *repo_model.Repository, baseBranchName string, targetRepo *repo_model.Repository, targetBranchName string) error {
|
||||
return git.Push(ctx, baseRepo.RepoPath(), git.PushOptions{
|
||||
Remote: targetRepo.RepoPath(),
|
||||
return gitrepo.Push(ctx, baseRepo, targetRepo, git.PushOptions{
|
||||
Branch: baseBranchName + ":" + targetBranchName,
|
||||
Env: repo_module.PushingEnvironment(doer, targetRepo),
|
||||
})
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"code.gitea.io/gitea/models/renderhelper"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/htmlutil"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/markup/markdown"
|
||||
@@ -110,7 +111,7 @@ func NewComment(ctx *context.Context) {
|
||||
ctx.ServerError("Unable to load base repo", err)
|
||||
return
|
||||
}
|
||||
prHeadCommitID, err := git.GetFullCommitID(ctx, pull.BaseRepo.RepoPath(), prHeadRef)
|
||||
prHeadCommitID, err := gitrepo.GetFullCommitID(ctx, pull.BaseRepo, prHeadRef)
|
||||
if err != nil {
|
||||
ctx.ServerError("Get head commit Id of pr fail", err)
|
||||
return
|
||||
@@ -127,7 +128,7 @@ func NewComment(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
headBranchRef := git.RefNameFromBranch(pull.HeadBranch)
|
||||
headBranchCommitID, err := git.GetFullCommitID(ctx, pull.HeadRepo.RepoPath(), headBranchRef.String())
|
||||
headBranchCommitID, err := gitrepo.GetFullCommitID(ctx, pull.HeadRepo, headBranchRef.String())
|
||||
if err != nil {
|
||||
ctx.ServerError("Get head commit Id of head branch fail", err)
|
||||
return
|
||||
@@ -141,8 +142,7 @@ func NewComment(ctx *context.Context) {
|
||||
|
||||
if prHeadCommitID != headBranchCommitID {
|
||||
// force push to base repo
|
||||
err := git.Push(ctx, pull.HeadRepo.RepoPath(), git.PushOptions{
|
||||
Remote: pull.BaseRepo.RepoPath(),
|
||||
err := gitrepo.Push(ctx, pull.HeadRepo, pull.BaseRepo, git.PushOptions{
|
||||
Branch: pull.HeadBranch + ":" + prHeadRef,
|
||||
Force: true,
|
||||
Env: repo_module.InternalPushingEnvironment(pull.Issue.Poster, pull.BaseRepo),
|
||||
|
||||
@@ -25,6 +25,7 @@ import (
|
||||
"code.gitea.io/gitea/services/context"
|
||||
"code.gitea.io/gitea/services/forms"
|
||||
"code.gitea.io/gitea/services/migrations"
|
||||
repo_service "code.gitea.io/gitea/services/repository"
|
||||
"code.gitea.io/gitea/services/task"
|
||||
)
|
||||
|
||||
@@ -237,7 +238,7 @@ func MigratePost(ctx *context.Context) {
|
||||
opts.AWSSecretAccessKey = form.AWSSecretAccessKey
|
||||
}
|
||||
|
||||
err = repo_model.CheckCreateRepository(ctx, ctx.Doer, ctxUser, opts.RepoName, false)
|
||||
err = repo_service.CheckCreateRepository(ctx, ctx.Doer, ctxUser, opts.RepoName, false)
|
||||
if err != nil {
|
||||
handleMigrateError(ctx, ctxUser, err, "MigratePost", tpl, form)
|
||||
return
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
package repo
|
||||
|
||||
import (
|
||||
stdCtx "context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
@@ -18,6 +19,7 @@ import (
|
||||
"code.gitea.io/gitea/models/unit"
|
||||
user_model "code.gitea.io/gitea/models/user"
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/markup/markdown"
|
||||
"code.gitea.io/gitea/modules/optional"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
@@ -39,7 +41,7 @@ const (
|
||||
)
|
||||
|
||||
// calReleaseNumCommitsBehind calculates given release has how many commits behind release target.
|
||||
func calReleaseNumCommitsBehind(repoCtx *context.Repository, release *repo_model.Release, countCache map[string]int64) error {
|
||||
func calReleaseNumCommitsBehind(ctx stdCtx.Context, repoCtx *context.Repository, release *repo_model.Release, countCache map[string]int64) error {
|
||||
target := release.Target
|
||||
if target == "" {
|
||||
target = repoCtx.Repository.DefaultBranch
|
||||
@@ -59,7 +61,7 @@ func calReleaseNumCommitsBehind(repoCtx *context.Repository, release *repo_model
|
||||
return fmt.Errorf("GetBranchCommit(DefaultBranch): %w", err)
|
||||
}
|
||||
}
|
||||
countCache[target], err = commit.CommitsCount()
|
||||
countCache[target], err = gitrepo.CommitsCountOfCommit(ctx, repoCtx.Repository, commit.ID.String())
|
||||
if err != nil {
|
||||
return fmt.Errorf("CommitsCount: %w", err)
|
||||
}
|
||||
@@ -122,7 +124,7 @@ func getReleaseInfos(ctx *context.Context, opts *repo_model.FindReleasesOptions)
|
||||
}
|
||||
|
||||
if !r.IsDraft {
|
||||
if err := calReleaseNumCommitsBehind(ctx.Repo, r, countCache); err != nil {
|
||||
if err := calReleaseNumCommitsBehind(ctx, ctx.Repo, r, countCache); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
@@ -158,7 +158,7 @@ func TestCalReleaseNumCommitsBehind(t *testing.T) {
|
||||
|
||||
countCache := make(map[string]int64)
|
||||
for _, release := range releases {
|
||||
err := calReleaseNumCommitsBehind(ctx.Repo, release, countCache)
|
||||
err := calReleaseNumCommitsBehind(ctx, ctx.Repo, release, countCache)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
|
||||
@@ -70,7 +70,7 @@ func CommitInfoCache(ctx *context.Context) {
|
||||
ctx.ServerError("GetBranchCommit", err)
|
||||
return
|
||||
}
|
||||
ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount()
|
||||
ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount(ctx)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetCommitsCount", err)
|
||||
return
|
||||
|
||||
@@ -20,6 +20,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/git/attribute"
|
||||
"code.gitea.io/gitea/modules/git/pipeline"
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/lfs"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
repo_module "code.gitea.io/gitea/modules/repository"
|
||||
@@ -112,7 +113,7 @@ func LFSLocks(ctx *context.Context) {
|
||||
}
|
||||
defer cleanup()
|
||||
|
||||
if err := git.Clone(ctx, ctx.Repo.Repository.RepoPath(), tmpBasePath, git.CloneRepoOptions{
|
||||
if err := gitrepo.CloneRepoToLocal(ctx, ctx.Repo.Repository, tmpBasePath, git.CloneRepoOptions{
|
||||
Bare: true,
|
||||
Shared: true,
|
||||
}); err != nil {
|
||||
|
||||
@@ -33,7 +33,7 @@ func TestTransformDiffTreeForWeb(t *testing.T) {
|
||||
})
|
||||
|
||||
mockIconForFile := func(id string) template.HTML {
|
||||
return template.HTML(`<svg class="svg git-entry-icon octicon-file" width="16" height="16" aria-hidden="true"><use xlink:href="#` + id + `"></use></svg>`)
|
||||
return template.HTML(`<svg class="svg git-entry-icon octicon-file" width="16" height="16" aria-hidden="true"><use href="#` + id + `"></use></svg>`)
|
||||
}
|
||||
assert.Equal(t, WebDiffFileTree{
|
||||
TreeRoot: WebDiffFileItem{
|
||||
|
||||
@@ -310,7 +310,7 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
|
||||
}
|
||||
|
||||
// get commit count - wiki revisions
|
||||
commitsCount, _ := wikiGitRepo.FileCommitsCount(ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
commitsCount, _ := gitrepo.FileCommitsCount(ctx, ctx.Repo.Repository.WikiStorageRepo(), ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
ctx.Data["CommitCount"] = commitsCount
|
||||
|
||||
return wikiGitRepo, entry
|
||||
@@ -350,7 +350,7 @@ func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry)
|
||||
}
|
||||
|
||||
// get commit count - wiki revisions
|
||||
commitsCount, _ := wikiGitRepo.FileCommitsCount(ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
commitsCount, _ := gitrepo.FileCommitsCount(ctx, ctx.Repo.Repository.WikiStorageRepo(), ctx.Repo.Repository.DefaultWikiBranch, pageFilename)
|
||||
ctx.Data["CommitCount"] = commitsCount
|
||||
|
||||
// get page
|
||||
|
||||
@@ -227,6 +227,8 @@ func ctxDataSet(args ...any) func(ctx *context.Context) {
|
||||
}
|
||||
}
|
||||
|
||||
const RouterMockPointBeforeWebRoutes = "before-web-routes"
|
||||
|
||||
// Routes returns all web routes
|
||||
func Routes() *web.Router {
|
||||
routes := web.NewRouter()
|
||||
@@ -285,7 +287,7 @@ func Routes() *web.Router {
|
||||
|
||||
webRoutes := web.NewRouter()
|
||||
webRoutes.Use(mid...)
|
||||
webRoutes.Group("", func() { registerWebRoutes(webRoutes) }, common.BlockExpensive(), common.QoS())
|
||||
webRoutes.Group("", func() { registerWebRoutes(webRoutes) }, common.BlockExpensive(), common.QoS(), web.RouterMockPoint(RouterMockPointBeforeWebRoutes))
|
||||
routes.Mount("", webRoutes)
|
||||
return routes
|
||||
}
|
||||
|
||||
@@ -35,9 +35,9 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u
|
||||
// Allow PAM sources with `@` in their name, like from Active Directory
|
||||
username := pamLogin
|
||||
email := pamLogin
|
||||
idx := strings.Index(pamLogin, "@")
|
||||
if idx > -1 {
|
||||
username = pamLogin[:idx]
|
||||
before, _, ok := strings.Cut(pamLogin, "@")
|
||||
if ok {
|
||||
username = before
|
||||
}
|
||||
if user_model.ValidateEmail(email) != nil {
|
||||
if source.EmailDomain != "" {
|
||||
|
||||
@@ -21,10 +21,10 @@ import (
|
||||
func (source *Source) Authenticate(ctx context.Context, user *user_model.User, userName, password string) (*user_model.User, error) {
|
||||
// Verify allowed domains.
|
||||
if len(source.AllowedDomains) > 0 {
|
||||
idx := strings.Index(userName, "@")
|
||||
if idx == -1 {
|
||||
_, after, ok := strings.Cut(userName, "@")
|
||||
if !ok {
|
||||
return nil, user_model.ErrUserNotExist{Name: userName}
|
||||
} else if !util.SliceContainsString(strings.Split(source.AllowedDomains, ","), userName[idx+1:], true) {
|
||||
} else if !util.SliceContainsString(strings.Split(source.AllowedDomains, ","), after, true) {
|
||||
return nil, user_model.ErrUserNotExist{Name: userName}
|
||||
}
|
||||
}
|
||||
@@ -61,9 +61,9 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u
|
||||
}
|
||||
|
||||
username := userName
|
||||
idx := strings.Index(userName, "@")
|
||||
if idx > -1 {
|
||||
username = userName[:idx]
|
||||
before, _, ok := strings.Cut(userName, "@")
|
||||
if ok {
|
||||
username = before
|
||||
}
|
||||
|
||||
user = &user_model.User{
|
||||
|
||||
@@ -43,8 +43,10 @@ type Base struct {
|
||||
Locale translation.Locale
|
||||
}
|
||||
|
||||
var ParseMultipartFormMaxMemory = int64(32 << 20)
|
||||
|
||||
func (b *Base) ParseMultipartForm() bool {
|
||||
err := b.Req.ParseMultipartForm(32 << 20)
|
||||
err := b.Req.ParseMultipartForm(ParseMultipartFormMaxMemory)
|
||||
if err != nil {
|
||||
// TODO: all errors caused by client side should be ignored (connection closed).
|
||||
if !errors.Is(err, io.EOF) && !errors.Is(err, io.ErrUnexpectedEOF) {
|
||||
|
||||
@@ -118,7 +118,7 @@ func (c *csrfProtector) PrepareForSessionUser(ctx *Context) {
|
||||
if uidChanged {
|
||||
_ = ctx.Session.Set(c.opt.oldSessionKey, c.id)
|
||||
} else if cookieToken != "" {
|
||||
// If cookie token presents, re-use existing unexpired token, else generate a new one.
|
||||
// If cookie token present, re-use existing unexpired token, else generate a new one.
|
||||
if issueTime, ok := ParseCsrfToken(cookieToken); ok {
|
||||
dur := time.Since(issueTime) // issueTime is not a monotonic-clock, the server time may change a lot to an early time.
|
||||
if dur >= -CsrfTokenRegenerationInterval && dur <= CsrfTokenRegenerationInterval {
|
||||
|
||||
@@ -202,14 +202,14 @@ func (r *Repository) CanCreateIssueDependencies(ctx context.Context, user *user_
|
||||
}
|
||||
|
||||
// GetCommitsCount returns cached commit count for current view
|
||||
func (r *Repository) GetCommitsCount() (int64, error) {
|
||||
func (r *Repository) GetCommitsCount(ctx context.Context) (int64, error) {
|
||||
if r.Commit == nil {
|
||||
return 0, nil
|
||||
}
|
||||
contextName := r.RefFullName.ShortName()
|
||||
isRef := r.RefFullName.IsBranch() || r.RefFullName.IsTag()
|
||||
return cache.GetInt64(r.Repository.GetCommitsCountCacheKey(contextName, isRef), func() (int64, error) {
|
||||
return r.Commit.CommitsCount()
|
||||
return gitrepo.CommitsCountOfCommit(ctx, r.Repository, r.Commit.ID.String())
|
||||
})
|
||||
}
|
||||
|
||||
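
GetCommitsCount above wraps the new gitrepo.CommitsCountOfCommit call in cache.GetInt64. A standalone sketch of that cache-or-compute pattern; the in-memory map below stands in for Gitea's cache layer:

package main

import "fmt"

var cache = map[string]int64{}

// getInt64 returns the cached value for key, or computes and stores it once.
func getInt64(key string, compute func() (int64, error)) (int64, error) {
	if v, ok := cache[key]; ok {
		return v, nil
	}
	v, err := compute()
	if err != nil {
		return 0, err
	}
	cache[key] = v
	return v, nil
}

func main() {
	n, _ := getInt64("commits:main", func() (int64, error) {
		fmt.Println("counting commits (expensive)")
		return 42, nil
	})
	m, _ := getInt64("commits:main", func() (int64, error) { return 0, nil }) // served from cache
	fmt.Println(n, m) // 42 42
}
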
@@ -219,11 +219,10 @@ func (r *Repository) GetCommitGraphsCount(ctx context.Context, hidePRRefs bool,

	return cache.GetInt64(cacheKey, func() (int64, error) {
		if len(branches) == 0 {
			return git.AllCommitsCount(ctx, r.Repository.RepoPath(), hidePRRefs, files...)
			return gitrepo.AllCommitsCount(ctx, r.Repository, hidePRRefs, files...)
		}
		return git.CommitsCount(ctx,
			git.CommitsCountOptions{
				RepoPath: r.Repository.RepoPath(),
		return gitrepo.CommitsCount(ctx, r.Repository,
			gitrepo.CommitsCountOptions{
				Revision: branches,
				RelPath: files,
			})
@@ -821,7 +820,7 @@ func RepoRefByDefaultBranch() func(*Context) {
		ctx.Repo.RefFullName = git.RefNameFromBranch(ctx.Repo.Repository.DefaultBranch)
		ctx.Repo.BranchName = ctx.Repo.Repository.DefaultBranch
		ctx.Repo.Commit, _ = ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.BranchName)
		ctx.Repo.CommitsCount, _ = ctx.Repo.GetCommitsCount()
		ctx.Repo.CommitsCount, _ = ctx.Repo.GetCommitsCount(ctx)
		ctx.Data["RefFullName"] = ctx.Repo.RefFullName
		ctx.Data["BranchName"] = ctx.Repo.BranchName
		ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
@@ -858,7 +857,7 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
			if err == nil && len(brs) != 0 {
				refShortName = brs[0]
			} else if len(brs) == 0 {
				log.Error("No branches in non-empty repository %s", ctx.Repo.GitRepo.Path)
				log.Error("No branches in non-empty repository %s", ctx.Repo.Repository.RelativePath())
			} else {
				log.Error("GetBranches error: %v", err)
			}
@@ -970,7 +969,7 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {

		ctx.Data["CanCreateBranch"] = ctx.Repo.CanCreateBranch() // only used by the branch selector dropdown: AllowCreateNewRef

		ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount()
		ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount(ctx)
		if err != nil {
			ctx.ServerError("GetCommitsCount", err)
			return
@@ -11,6 +11,7 @@ import (
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/gitrepo"
	"code.gitea.io/gitea/modules/log"
	api "code.gitea.io/gitea/modules/structs"
	"code.gitea.io/gitea/modules/util"
@@ -190,7 +191,7 @@ func ToCommit(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Rep

	// Retrieve files affected by the commit
	if opts.Files {
		fileStatus, err := git.GetCommitFileStatus(gitRepo.Ctx, repo.RepoPath(), commit.ID.String())
		fileStatus, err := gitrepo.GetCommitFileStatus(ctx, repo, commit.ID.String())
		if err != nil {
			return nil, err
		}
@@ -6,9 +6,7 @@ package doctor
import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strings"

	"code.gitea.io/gitea/models"
@@ -20,7 +18,6 @@ import (
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/structs"
	"code.gitea.io/gitea/modules/util"

	lru "github.com/hashicorp/golang-lru/v2"
	"xorm.io/builder"
@@ -142,10 +139,10 @@ func checkDaemonExport(ctx context.Context, logger log.Logger, autofix bool) err
		}

		// Create/Remove git-daemon-export-ok for git-daemon...
		daemonExportFile := filepath.Join(repo.RepoPath(), `git-daemon-export-ok`)
		isExist, err := util.IsExist(daemonExportFile)
		daemonExportFile := `git-daemon-export-ok`
		isExist, err := gitrepo.IsRepoFileExist(ctx, repo, daemonExportFile)
		if err != nil {
			log.Error("Unable to check if %s exists. Error: %v", daemonExportFile, err)
			log.Error("Unable to check if %s:%s exists. Error: %v", repo.FullName(), daemonExportFile, err)
			return err
		}
		isPublic := !repo.IsPrivate && repo.Owner.Visibility == structs.VisibleTypePublic
@@ -154,12 +151,12 @@ func checkDaemonExport(ctx context.Context, logger log.Logger, autofix bool) err
			numNeedUpdate++
			if autofix {
				if !isPublic && isExist {
					if err = util.Remove(daemonExportFile); err != nil {
						log.Error("Failed to remove %s: %v", daemonExportFile, err)
					if err = gitrepo.RemoveRepoFile(ctx, repo, daemonExportFile); err != nil {
						log.Error("Failed to remove %s:%s: %v", repo.FullName(), daemonExportFile, err)
					}
				} else if isPublic && !isExist {
					if f, err := os.Create(daemonExportFile); err != nil {
						log.Error("Failed to create %s: %v", daemonExportFile, err)
					if f, err := gitrepo.CreateRepoFile(ctx, repo, daemonExportFile); err != nil {
						log.Error("Failed to create %s:%s: %v", repo.FullName(), daemonExportFile, err)
					} else {
						f.Close()
					}
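
Aside: the autofix branch above reconciles the git-daemon-export-ok marker with repository visibility. A toy walkthrough of that rule, with invented inputs.

package main

import "fmt"

// The marker file should exist exactly when the repository is public:
// create it for public repos that lack it, remove it for private repos that have it.
func main() {
	cases := []struct{ isPublic, markerExists bool }{
		{true, true}, {true, false}, {false, true}, {false, false},
	}
	for _, c := range cases {
		switch {
		case c.isPublic && !c.markerExists:
			fmt.Println(c, "-> create marker")
		case !c.isPublic && c.markerExists:
			fmt.Println(c, "-> remove marker")
		default:
			fmt.Println(c, "-> already consistent")
		}
	}
}
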
@@ -190,16 +187,16 @@ func checkCommitGraph(ctx context.Context, logger log.Logger, autofix bool) erro

		commitGraphExists := func() (bool, error) {
			// Check commit-graph exists
			commitGraphFile := filepath.Join(repo.RepoPath(), `objects/info/commit-graph`)
			isExist, err := util.IsExist(commitGraphFile)
			commitGraphFile := `objects/info/commit-graph`
			isExist, err := gitrepo.IsRepoFileExist(ctx, repo, commitGraphFile)
			if err != nil {
				logger.Error("Unable to check if %s exists. Error: %v", commitGraphFile, err)
				return false, err
			}

			if !isExist {
				commitGraphsDir := filepath.Join(repo.RepoPath(), `objects/info/commit-graphs`)
				isExist, err = util.IsExist(commitGraphsDir)
				commitGraphsDir := `objects/info/commit-graphs`
				isExist, err = gitrepo.IsRepoDirExist(ctx, repo, commitGraphsDir)
				if err != nil {
					logger.Error("Unable to check if %s exists. Error: %v", commitGraphsDir, err)
					return false, err
@@ -215,7 +212,7 @@ func checkCommitGraph(ctx context.Context, logger log.Logger, autofix bool) erro
		if !isExist {
			numNeedUpdate++
			if autofix {
				if err := git.WriteCommitGraph(ctx, repo.RepoPath()); err != nil {
				if err := gitrepo.WriteCommitGraph(ctx, repo); err != nil {
					logger.Error("Unable to write commit-graph in %s. Error: %v", repo.FullName(), err)
					return err
				}
@@ -835,11 +835,11 @@ parsingLoop:
			if buffer.Len() == 0 {
				continue
			}
			charsetLabel, err := charset.DetectEncoding(buffer.Bytes())
			if charsetLabel != "UTF-8" && err == nil {
				encoding, _ := stdcharset.Lookup(charsetLabel)
				if encoding != nil {
					diffLineTypeDecoders[lineType] = encoding.NewDecoder()
			charsetLabel, _ := charset.DetectEncoding(buffer.Bytes())
			if charsetLabel != "UTF-8" {
				charsetEncoding, _ := stdcharset.Lookup(charsetLabel)
				if charsetEncoding != nil {
					diffLineTypeDecoders[lineType] = charsetEncoding.NewDecoder()
				}
			}
		}
@@ -1325,10 +1325,10 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
		shouldFullFileHighlight := !setting.Git.DisableDiffHighlight && attrDiff.Value() == ""
		if shouldFullFileHighlight {
			if limitedContent.LeftContent != nil && limitedContent.LeftContent.buf.Len() < MaxDiffHighlightEntireFileSize {
				diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.String())
				diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.Bytes())
			}
			if limitedContent.RightContent != nil && limitedContent.RightContent.buf.Len() < MaxDiffHighlightEntireFileSize {
				diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.String())
				diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.Bytes())
			}
		}
	}
@@ -1336,9 +1336,34 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
	return diff, nil
}

func highlightCodeLines(diffFile *DiffFile, isLeft bool, content string) map[int]template.HTML {
func splitHighlightLines(buf []byte) (ret [][]byte) {
	lineCount := bytes.Count(buf, []byte("\n")) + 1
	ret = make([][]byte, 0, lineCount)
	nlTagClose := []byte("\n</")
	for {
		pos := bytes.IndexByte(buf, '\n')
		if pos == -1 {
			ret = append(ret, buf)
			return ret
		}
		// Chroma highlighting output sometimes have "</span>" right after \n, sometimes before.
		// * "<span>text\n</span>"
		// * "<span>text</span>\n"
		if bytes.HasPrefix(buf[pos:], nlTagClose) {
			pos1 := bytes.IndexByte(buf[pos:], '>')
			if pos1 != -1 {
				pos += pos1
			}
		}
		ret = append(ret, buf[:pos+1])
		buf = buf[pos+1:]
	}
}

func highlightCodeLines(diffFile *DiffFile, isLeft bool, rawContent []byte) map[int]template.HTML {
	content := util.UnsafeBytesToString(charset.ToUTF8(rawContent, charset.ConvertOpts{}))
	highlightedNewContent, _ := highlight.Code(diffFile.Name, diffFile.Language, content)
	splitLines := strings.Split(string(highlightedNewContent), "\n")
	splitLines := splitHighlightLines([]byte(highlightedNewContent))
	lines := make(map[int]template.HTML, len(splitLines))
	// only save the highlighted lines we need, but not the whole file, to save memory
	for _, sec := range diffFile.Sections {
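
Aside: a hypothetical table test (not part of the change set) for splitHighlightLines, covering the two Chroma output shapes its comment mentions; it assumes the same package and the testify assert helper these tests already use.

func TestSplitHighlightLines(t *testing.T) {
	cases := []struct {
		in   string
		want []string
	}{
		// closing tag right after the newline stays attached to the first line
		{"<span>text\n</span>", []string{"<span>text\n</span>", ""}},
		// closing tag before the newline: split right after the newline
		{"<span>text</span>\n", []string{"<span>text</span>\n", ""}},
	}
	for _, c := range cases {
		got := splitHighlightLines([]byte(c.in))
		for i := range c.want {
			assert.Equal(t, c.want[i], string(got[i]))
		}
	}
}
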
@@ -5,6 +5,7 @@
package gitdiff

import (
	"html/template"
	"strconv"
	"strings"
	"testing"
@@ -1106,3 +1107,41 @@ func TestDiffLine_GetExpandDirection(t *testing.T) {
		assert.Equal(t, c.direction, c.diffLine.GetExpandDirection(), "case %s expected direction: %s", c.name, c.direction)
	}
}

func TestHighlightCodeLines(t *testing.T) {
	t.Run("CharsetDetecting", func(t *testing.T) {
		diffFile := &DiffFile{
			Name: "a.c",
			Language: "c",
			Sections: []*DiffSection{
				{
					Lines: []*DiffLine{{LeftIdx: 1}},
				},
			},
		}
		ret := highlightCodeLines(diffFile, true, []byte("// abc\xcc def\xcd")) // ISO-8859-1 bytes
		assert.Equal(t, "<span class=\"c1\">// abcÌ defÍ\n</span>", string(ret[0]))
	})

	t.Run("LeftLines", func(t *testing.T) {
		diffFile := &DiffFile{
			Name: "a.c",
			Language: "c",
			Sections: []*DiffSection{
				{
					Lines: []*DiffLine{
						{LeftIdx: 1},
						{LeftIdx: 2},
						{LeftIdx: 3},
					},
				},
			},
		}
		const nl = "\n"
		ret := highlightCodeLines(diffFile, true, []byte("a\nb\n"))
		assert.Equal(t, map[int]template.HTML{
			0: `<span class="n">a</span>` + nl,
			1: `<span class="n">b</span>`,
		}, ret)
	})
}
@@ -25,12 +25,12 @@ func TestDiffWithHighlight(t *testing.T) {

	t.Run("CleanUp", func(t *testing.T) {
		hcd := newHighlightCodeDiff()
		codeA := template.HTML(`<span class="cm>this is a comment</span>`)
		codeB := template.HTML(`<span class="cm>this is updated comment</span>`)
		codeA := template.HTML(`<span class="cm">this is a comment</span>`)
		codeB := template.HTML(`<span class="cm">this is updated comment</span>`)
		outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
		assert.Equal(t, `<span class="cm>this is <span class="removed-code">a</span> comment</span>`, string(outDel))
		assert.Equal(t, `<span class="cm">this is <span class="removed-code">a</span> comment</span>`, string(outDel))
		outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
		assert.Equal(t, `<span class="cm>this is <span class="added-code">updated</span> comment</span>`, string(outAdd))
		assert.Equal(t, `<span class="cm">this is <span class="added-code">updated</span> comment</span>`, string(outAdd))
	})

	t.Run("OpenCloseTags", func(t *testing.T) {
Some files were not shown because too many files have changed in this diff.