Merge branch 'main' into lunny/add_composor_source

Lunny Xiao 2025-02-16 19:40:54 -08:00
commit 5c1d4c4f66
229 changed files with 8479 additions and 5039 deletions

View File

@ -1,3 +1,4 @@
const vitestPlugin = require('@vitest/eslint-plugin');
const restrictedSyntax = ['WithStatement', 'ForInStatement', 'LabeledStatement', 'SequenceExpression'];
module.exports = {
@ -37,8 +38,6 @@ module.exports = {
'eslint-plugin-regexp',
'eslint-plugin-sonarjs',
'eslint-plugin-unicorn',
'eslint-plugin-vitest',
'eslint-plugin-vitest-globals',
'eslint-plugin-wc',
],
env: {
@ -46,6 +45,13 @@ module.exports = {
node: true,
},
overrides: [
{
files: ['**/*.cjs'],
rules: {
'import-x/no-commonjs': [0],
'@typescript-eslint/no-require-imports': [0],
},
},
{
files: ['web_src/**/*'],
globals: {
@ -82,59 +88,58 @@ module.exports = {
},
{
files: ['**/*.test.*', 'web_src/js/test/setup.ts'],
env: {
'vitest-globals/env': true,
},
plugins: ['@vitest/eslint-plugin'],
globals: vitestPlugin.environments.env.globals,
rules: {
'vitest/consistent-test-filename': [0],
'vitest/consistent-test-it': [0],
'vitest/expect-expect': [0],
'vitest/max-expects': [0],
'vitest/max-nested-describe': [0],
'vitest/no-alias-methods': [0],
'vitest/no-commented-out-tests': [0],
'vitest/no-conditional-expect': [0],
'vitest/no-conditional-in-test': [0],
'vitest/no-conditional-tests': [0],
'vitest/no-disabled-tests': [0],
'vitest/no-done-callback': [0],
'vitest/no-duplicate-hooks': [0],
'vitest/no-focused-tests': [0],
'vitest/no-hooks': [0],
'vitest/no-identical-title': [2],
'vitest/no-interpolation-in-snapshots': [0],
'vitest/no-large-snapshots': [0],
'vitest/no-mocks-import': [0],
'vitest/no-restricted-matchers': [0],
'vitest/no-restricted-vi-methods': [0],
'vitest/no-standalone-expect': [0],
'vitest/no-test-prefixes': [0],
'vitest/no-test-return-statement': [0],
'vitest/prefer-called-with': [0],
'vitest/prefer-comparison-matcher': [0],
'vitest/prefer-each': [0],
'vitest/prefer-equality-matcher': [0],
'vitest/prefer-expect-resolves': [0],
'vitest/prefer-hooks-in-order': [0],
'vitest/prefer-hooks-on-top': [2],
'vitest/prefer-lowercase-title': [0],
'vitest/prefer-mock-promise-shorthand': [0],
'vitest/prefer-snapshot-hint': [0],
'vitest/prefer-spy-on': [0],
'vitest/prefer-strict-equal': [0],
'vitest/prefer-to-be': [0],
'vitest/prefer-to-be-falsy': [0],
'vitest/prefer-to-be-object': [0],
'vitest/prefer-to-be-truthy': [0],
'vitest/prefer-to-contain': [0],
'vitest/prefer-to-have-length': [0],
'vitest/prefer-todo': [0],
'vitest/require-hook': [0],
'vitest/require-to-throw-message': [0],
'vitest/require-top-level-describe': [0],
'vitest/valid-describe-callback': [2],
'vitest/valid-expect': [2],
'vitest/valid-title': [2],
'@vitest/consistent-test-filename': [0],
'@vitest/consistent-test-it': [0],
'@vitest/expect-expect': [0],
'@vitest/max-expects': [0],
'@vitest/max-nested-describe': [0],
'@vitest/no-alias-methods': [0],
'@vitest/no-commented-out-tests': [0],
'@vitest/no-conditional-expect': [0],
'@vitest/no-conditional-in-test': [0],
'@vitest/no-conditional-tests': [0],
'@vitest/no-disabled-tests': [0],
'@vitest/no-done-callback': [0],
'@vitest/no-duplicate-hooks': [0],
'@vitest/no-focused-tests': [0],
'@vitest/no-hooks': [0],
'@vitest/no-identical-title': [2],
'@vitest/no-interpolation-in-snapshots': [0],
'@vitest/no-large-snapshots': [0],
'@vitest/no-mocks-import': [0],
'@vitest/no-restricted-matchers': [0],
'@vitest/no-restricted-vi-methods': [0],
'@vitest/no-standalone-expect': [0],
'@vitest/no-test-prefixes': [0],
'@vitest/no-test-return-statement': [0],
'@vitest/prefer-called-with': [0],
'@vitest/prefer-comparison-matcher': [0],
'@vitest/prefer-each': [0],
'@vitest/prefer-equality-matcher': [0],
'@vitest/prefer-expect-resolves': [0],
'@vitest/prefer-hooks-in-order': [0],
'@vitest/prefer-hooks-on-top': [2],
'@vitest/prefer-lowercase-title': [0],
'@vitest/prefer-mock-promise-shorthand': [0],
'@vitest/prefer-snapshot-hint': [0],
'@vitest/prefer-spy-on': [0],
'@vitest/prefer-strict-equal': [0],
'@vitest/prefer-to-be': [0],
'@vitest/prefer-to-be-falsy': [0],
'@vitest/prefer-to-be-object': [0],
'@vitest/prefer-to-be-truthy': [0],
'@vitest/prefer-to-contain': [0],
'@vitest/prefer-to-have-length': [0],
'@vitest/prefer-todo': [0],
'@vitest/require-hook': [0],
'@vitest/require-to-throw-message': [0],
'@vitest/require-top-level-describe': [0],
'@vitest/valid-describe-callback': [2],
'@vitest/valid-expect': [2],
'@vitest/valid-title': [2],
},
},
{
@ -163,7 +168,7 @@ module.exports = {
{
files: ['tests/e2e/**'],
plugins: [
'eslint-plugin-playwright'
'eslint-plugin-playwright',
],
extends: [
'plugin:playwright/recommended',

View File

@ -1,8 +1,8 @@
name: cron-licenses
on:
#schedule:
# - cron: "7 0 * * 1" # every Monday at 00:07 UTC
# schedule:
# - cron: "7 0 * * 1" # every Monday at 00:07 UTC
workflow_dispatch:
jobs:

.mailmap Normal file
View File

@ -0,0 +1,2 @@
Unknwon <u@gogs.io> <joe2010xtmf@163.com>
Unknwon <u@gogs.io> 无闻 <u@gogs.io>

View File

@ -1,5 +1,5 @@
# Build stage
FROM docker.io/library/golang:1.23-alpine3.21 AS build-env
FROM docker.io/library/golang:1.24-alpine3.21 AS build-env
ARG GOPROXY
ENV GOPROXY=${GOPROXY:-direct}

View File

@ -1,5 +1,5 @@
# Build stage
FROM docker.io/library/golang:1.23-alpine3.21 AS build-env
FROM docker.io/library/golang:1.24-alpine3.21 AS build-env
ARG GOPROXY
ENV GOPROXY=${GOPROXY:-direct}

View File

@ -63,3 +63,4 @@ Kemal Zebari <kemalzebra@gmail.com> (@kemzeb)
Rowan Bohde <rowan.bohde@gmail.com> (@bohde)
hiifong <i@hiif.ong> (@hiifong)
metiftikci <metiftikci@hotmail.com> (@metiftikci)
Christopher Homberger <christopher.homberger@web.de> (@ChristopherHX)

View File

@ -23,7 +23,7 @@ SHASUM ?= shasum -a 256
HAS_GO := $(shell hash $(GO) > /dev/null 2>&1 && echo yes)
COMMA := ,
XGO_VERSION := go-1.23.x
XGO_VERSION := go-1.24.x
AIR_PACKAGE ?= github.com/air-verse/air@v1
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3.1.2
@ -144,7 +144,7 @@ TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(FOMAN
GO_DIRS := build cmd models modules routers services tests
WEB_DIRS := web_src/js web_src/css
ESLINT_FILES := web_src/js tools *.js *.ts tests/e2e
ESLINT_FILES := web_src/js tools *.js *.ts *.cjs tests/e2e
STYLELINT_FILES := web_src/css web_src/js/components/*.vue
SPELLCHECK_FILES := $(GO_DIRS) $(WEB_DIRS) templates options/locale/locale_en-US.ini .github $(filter-out CHANGELOG.md, $(wildcard *.go *.js *.md *.yml *.yaml *.toml))
EDITORCONFIG_FILES := templates .github/workflows options/locale/locale_en-US.ini
@ -393,7 +393,7 @@ lint-templates: .venv node_modules ## lint template files
.PHONY: lint-yaml
lint-yaml: .venv ## lint yaml files
@poetry run yamllint .
@poetry run yamllint -s .
.PHONY: watch
watch: ## watch everything and continuously rebuild

View File

@ -31,6 +31,11 @@ var microcmdUserCreate = &cli.Command{
Name: "username",
Usage: "Username",
},
&cli.StringFlag{
Name: "user-type",
Usage: "Set user's type: individual or bot",
Value: "individual",
},
&cli.StringFlag{
Name: "password",
Usage: "User password",
@ -77,6 +82,22 @@ func runCreateUser(c *cli.Context) error {
return err
}
userTypes := map[string]user_model.UserType{
"individual": user_model.UserTypeIndividual,
"bot": user_model.UserTypeBot,
}
userType, ok := userTypes[c.String("user-type")]
if !ok {
return fmt.Errorf("invalid user type: %s", c.String("user-type"))
}
if userType != user_model.UserTypeIndividual {
// Some other commands like "change-password" also only support individual users.
// It needs to clarify the "password" behavior for bot users in the future.
// At the moment, we do not allow setting password for bot users.
if c.IsSet("password") || c.IsSet("random-password") {
return errors.New("password can only be set for individual users")
}
}
if c.IsSet("name") && c.IsSet("username") {
return errors.New("cannot set both --name and --username flags")
}
@ -118,16 +139,19 @@ func runCreateUser(c *cli.Context) error {
return err
}
fmt.Printf("generated random password is '%s'\n", password)
} else {
} else if userType == user_model.UserTypeIndividual {
return errors.New("must set either password or random-password flag")
}
isAdmin := c.Bool("admin")
mustChangePassword := true // always default to true
if c.IsSet("must-change-password") {
if userType != user_model.UserTypeIndividual {
return errors.New("must-change-password flag can only be set for individual users")
}
// if the flag is set, use the value provided by the user
mustChangePassword = c.Bool("must-change-password")
} else {
} else if userType == user_model.UserTypeIndividual {
// check whether there are users in the database
hasUserRecord, err := db.IsTableNotEmpty(&user_model.User{})
if err != nil {
@ -151,8 +175,9 @@ func runCreateUser(c *cli.Context) error {
u := &user_model.User{
Name: username,
Email: c.String("email"),
Passwd: password,
IsAdmin: isAdmin,
Type: userType,
Passwd: password,
MustChangePassword: mustChangePassword,
Visibility: visibility,
}

View File

@ -13,32 +13,54 @@ import (
user_model "code.gitea.io/gitea/models/user"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestAdminUserCreate(t *testing.T) {
app := NewMainApp(AppVersion{})
reset := func() {
assert.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.User{}))
assert.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.EmailAddress{}))
require.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.User{}))
require.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.EmailAddress{}))
}
type createCheck struct{ IsAdmin, MustChangePassword bool }
createUser := func(name, args string) createCheck {
assert.NoError(t, app.Run(strings.Fields(fmt.Sprintf("./gitea admin user create --username %s --email %s@gitea.local %s --password foobar", name, name, args))))
u := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: name})
return createCheck{u.IsAdmin, u.MustChangePassword}
}
reset()
assert.Equal(t, createCheck{IsAdmin: false, MustChangePassword: false}, createUser("u", ""), "first non-admin user doesn't need to change password")
t.Run("MustChangePassword", func(t *testing.T) {
type check struct {
IsAdmin bool
MustChangePassword bool
}
createCheck := func(name, args string) check {
require.NoError(t, app.Run(strings.Fields(fmt.Sprintf("./gitea admin user create --username %s --email %s@gitea.local %s --password foobar", name, name, args))))
u := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: name})
return check{IsAdmin: u.IsAdmin, MustChangePassword: u.MustChangePassword}
}
reset()
assert.Equal(t, check{IsAdmin: false, MustChangePassword: false}, createCheck("u", ""), "first non-admin user doesn't need to change password")
reset()
assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: false}, createUser("u", "--admin"), "first admin user doesn't need to change password")
reset()
assert.Equal(t, check{IsAdmin: true, MustChangePassword: false}, createCheck("u", "--admin"), "first admin user doesn't need to change password")
reset()
assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: true}, createUser("u", "--admin --must-change-password"))
assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: true}, createUser("u2", "--admin"))
assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: false}, createUser("u3", "--admin --must-change-password=false"))
assert.Equal(t, createCheck{IsAdmin: false, MustChangePassword: true}, createUser("u4", ""))
assert.Equal(t, createCheck{IsAdmin: false, MustChangePassword: false}, createUser("u5", "--must-change-password=false"))
reset()
assert.Equal(t, check{IsAdmin: true, MustChangePassword: true}, createCheck("u", "--admin --must-change-password"))
assert.Equal(t, check{IsAdmin: true, MustChangePassword: true}, createCheck("u2", "--admin"))
assert.Equal(t, check{IsAdmin: true, MustChangePassword: false}, createCheck("u3", "--admin --must-change-password=false"))
assert.Equal(t, check{IsAdmin: false, MustChangePassword: true}, createCheck("u4", ""))
assert.Equal(t, check{IsAdmin: false, MustChangePassword: false}, createCheck("u5", "--must-change-password=false"))
})
t.Run("UserType", func(t *testing.T) {
createUser := func(name, args string) error {
return app.Run(strings.Fields(fmt.Sprintf("./gitea admin user create --username %s --email %s@gitea.local %s", name, name, args)))
}
reset()
assert.ErrorContains(t, createUser("u", "--user-type invalid"), "invalid user type")
assert.ErrorContains(t, createUser("u", "--user-type bot --password 123"), "can only be set for individual users")
assert.ErrorContains(t, createUser("u", "--user-type bot --must-change-password"), "can only be set for individual users")
assert.NoError(t, createUser("u", "--user-type bot"))
u := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: "u"})
assert.Equal(t, user_model.UserTypeBot, u.Type)
assert.Equal(t, "", u.Passwd)
})
}

View File

@ -196,7 +196,7 @@ func migrateActionsLog(ctx context.Context, dstStorage storage.ObjectStorage) er
func migrateActionsArtifacts(ctx context.Context, dstStorage storage.ObjectStorage) error {
return db.Iterate(ctx, nil, func(ctx context.Context, artifact *actions_model.ActionArtifact) error {
if artifact.Status == int64(actions_model.ArtifactStatusExpired) {
if artifact.Status == actions_model.ArtifactStatusExpired {
return nil
}

flake.lock generated
View File

@ -20,11 +20,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1736798957,
"narHash": "sha256-qwpCtZhSsSNQtK4xYGzMiyEDhkNzOCz/Vfu4oL2ETsQ=",
"lastModified": 1739214665,
"narHash": "sha256-26L8VAu3/1YRxS8MHgBOyOM8xALdo6N0I04PgorE7UM=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "9abb87b552b7f55ac8916b6fc9e5cb486656a2f3",
"rev": "64e75cd44acf21c7933d61d7721e812eac1b5a0a",
"type": "github"
},
"original": {

View File

@ -29,13 +29,13 @@
poetry
# backend
go_1_23
go_1_24
gofumpt
sqlite
];
shellHook = ''
export GO="${pkgs.go_1_23}/bin/go"
export GOROOT="${pkgs.go_1_23}/share/go"
export GO="${pkgs.go_1_24}/bin/go"
export GOROOT="${pkgs.go_1_24}/share/go"
'';
};
}

go.mod
View File

@ -1,6 +1,6 @@
module code.gitea.io/gitea
go 1.23
go 1.24
// rfc5280 said: "The serial number is an integer assigned by the CA to each certificate."
// But some CAs use negative serial number, just relax the check. related:

View File

@ -48,7 +48,7 @@ type ActionArtifact struct {
ContentEncoding string // The content encoding of the artifact
ArtifactPath string `xorm:"index unique(runid_name_path)"` // The path to the artifact when runner uploads it
ArtifactName string `xorm:"index unique(runid_name_path)"` // The name of the artifact when runner uploads it
Status int64 `xorm:"index"` // The status of the artifact, uploading, expired or need-delete
Status ArtifactStatus `xorm:"index"` // The status of the artifact, uploading, expired or need-delete
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated index"`
ExpiredUnix timeutil.TimeStamp `xorm:"index"` // The time when the artifact will be expired
@ -68,7 +68,7 @@ func CreateArtifact(ctx context.Context, t *ActionTask, artifactName, artifactPa
RepoID: t.RepoID,
OwnerID: t.OwnerID,
CommitSHA: t.CommitSHA,
Status: int64(ArtifactStatusUploadPending),
Status: ArtifactStatusUploadPending,
ExpiredUnix: timeutil.TimeStamp(time.Now().Unix() + timeutil.Day*expiredDays),
}
if _, err := db.GetEngine(ctx).Insert(artifact); err != nil {
@ -108,12 +108,19 @@ func UpdateArtifactByID(ctx context.Context, id int64, art *ActionArtifact) erro
type FindArtifactsOptions struct {
db.ListOptions
RepoID int64
RunID int64
ArtifactName string
Status int
RepoID int64
RunID int64
ArtifactName string
Status int
FinalizedArtifactsV4 bool
}
func (opts FindArtifactsOptions) ToOrders() string {
return "id"
}
var _ db.FindOptionsOrder = (*FindArtifactsOptions)(nil)
func (opts FindArtifactsOptions) ToConds() builder.Cond {
cond := builder.NewCond()
if opts.RepoID > 0 {
@ -128,11 +135,15 @@ func (opts FindArtifactsOptions) ToConds() builder.Cond {
if opts.Status > 0 {
cond = cond.And(builder.Eq{"status": opts.Status})
}
if opts.FinalizedArtifactsV4 {
cond = cond.And(builder.Eq{"status": ArtifactStatusUploadConfirmed}.Or(builder.Eq{"status": ArtifactStatusExpired}))
cond = cond.And(builder.Eq{"content_encoding": "application/zip"})
}
return cond
}
// ActionArtifactMeta is the meta data of an artifact
// ActionArtifactMeta is the meta-data of an artifact
type ActionArtifactMeta struct {
ArtifactName string
FileSize int64
@ -166,18 +177,18 @@ func ListPendingDeleteArtifacts(ctx context.Context, limit int) ([]*ActionArtifa
// SetArtifactExpired sets an artifact to expired
func SetArtifactExpired(ctx context.Context, artifactID int64) error {
_, err := db.GetEngine(ctx).Where("id=? AND status = ?", artifactID, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusExpired)})
_, err := db.GetEngine(ctx).Where("id=? AND status = ?", artifactID, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusExpired})
return err
}
// SetArtifactNeedDelete sets an artifact to need-delete, cron job will delete it
func SetArtifactNeedDelete(ctx context.Context, runID int64, name string) error {
_, err := db.GetEngine(ctx).Where("run_id=? AND artifact_name=? AND status = ?", runID, name, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusPendingDeletion)})
_, err := db.GetEngine(ctx).Where("run_id=? AND artifact_name=? AND status = ?", runID, name, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusPendingDeletion})
return err
}
// SetArtifactDeleted sets an artifact to deleted
func SetArtifactDeleted(ctx context.Context, artifactID int64) error {
_, err := db.GetEngine(ctx).ID(artifactID).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusDeleted)})
_, err := db.GetEngine(ctx).ID(artifactID).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusDeleted})
return err
}
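
FindArtifactsOptions now carries a FinalizedArtifactsV4 switch and a fixed "id" ordering, and Status is a typed ArtifactStatus throughout. A minimal sketch, assuming the db.Find generic helper already used elsewhere in this file, of how a caller might list a run's finalized v4 artifacts (the function name and variables are placeholders):

// Sketch (illustrative, not part of the change): list the artifacts of one
// run that the v4 backend has finalized, skipping the ones already expired.
func listDownloadableV4Artifacts(ctx context.Context, runID int64) ([]*ActionArtifact, error) {
    arts, err := db.Find[ActionArtifact](ctx, FindArtifactsOptions{
        RunID:                runID,
        FinalizedArtifactsV4: true, // status confirmed/expired and content_encoding "application/zip"
    })
    if err != nil {
        return nil, err
    }
    finalized := arts[:0]
    for _, art := range arts {
        // Status is now the typed ArtifactStatus, so no int64 casts are needed.
        if art.Status != ArtifactStatusExpired {
            finalized = append(finalized, art)
        }
    }
    return finalized, nil
}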

View File

@ -167,6 +167,7 @@ func init() {
type FindRunnerOptions struct {
db.ListOptions
IDs []int64
RepoID int64
OwnerID int64 // it will be ignored if RepoID is set
Sort string
@ -178,6 +179,14 @@ type FindRunnerOptions struct {
func (opts FindRunnerOptions) ToConds() builder.Cond {
cond := builder.NewCond()
if len(opts.IDs) > 0 {
if len(opts.IDs) == 1 {
cond = cond.And(builder.Eq{"id": opts.IDs[0]})
} else {
cond = cond.And(builder.In("id", opts.IDs))
}
}
if opts.RepoID > 0 {
c := builder.NewCond().And(builder.Eq{"repo_id": opts.RepoID})
if opts.WithAvailable {

View File

@ -58,6 +58,7 @@ func InsertVariable(ctx context.Context, ownerID, repoID int64, name, data strin
type FindVariablesOpts struct {
db.ListOptions
IDs []int64
RepoID int64
OwnerID int64 // it will be ignored if RepoID is set
Name string
@ -65,6 +66,15 @@ type FindVariablesOpts struct {
func (opts FindVariablesOpts) ToConds() builder.Cond {
cond := builder.NewCond()
if len(opts.IDs) > 0 {
if len(opts.IDs) == 1 {
cond = cond.And(builder.Eq{"id": opts.IDs[0]})
} else {
cond = cond.And(builder.In("id", opts.IDs))
}
}
// Since we now support instance-level variables,
// there is no need to check for null values for `owner_id` and `repo_id`
cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
@ -85,12 +95,12 @@ func FindVariables(ctx context.Context, opts FindVariablesOpts) ([]*ActionVariab
return db.Find[ActionVariable](ctx, opts)
}
func UpdateVariable(ctx context.Context, variable *ActionVariable) (bool, error) {
count, err := db.GetEngine(ctx).ID(variable.ID).Cols("name", "data").
Update(&ActionVariable{
Name: variable.Name,
Data: variable.Data,
})
func UpdateVariableCols(ctx context.Context, variable *ActionVariable, cols ...string) (bool, error) {
variable.Name = strings.ToUpper(variable.Name)
count, err := db.GetEngine(ctx).
ID(variable.ID).
Cols(cols...).
Update(variable)
return count != 0, err
}
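
The IDs filter and UpdateVariableCols together make targeted updates simpler: find by ID, change a field, write back only the columns that changed. A hedged sketch of a rename helper built on them (the helper itself is invented for illustration):

// Sketch (hypothetical helper): rename one variable and persist only the
// "name" column via the new UpdateVariableCols.
func renameVariable(ctx context.Context, repoID, variableID int64, newName string) error {
    vars, err := FindVariables(ctx, FindVariablesOpts{RepoID: repoID, IDs: []int64{variableID}})
    if err != nil {
        return err
    }
    if len(vars) == 0 {
        return nil // nothing to rename
    }
    v := vars[0]
    v.Name = newName // UpdateVariableCols upper-cases the name before updating
    _, err = UpdateVariableCols(ctx, v, "name")
    return err
}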

View File

@ -106,7 +106,7 @@ func GPGKeyToEntity(ctx context.Context, k *GPGKey) (*openpgp.Entity, error) {
if err != nil {
return nil, err
}
keys, err := checkArmoredGPGKeyString(impKey.Content)
keys, err := CheckArmoredGPGKeyString(impKey.Content)
if err != nil {
return nil, err
}
@ -115,7 +115,7 @@ func GPGKeyToEntity(ctx context.Context, k *GPGKey) (*openpgp.Entity, error) {
// parseSubGPGKey parse a sub Key
func parseSubGPGKey(ownerID int64, primaryID string, pubkey *packet.PublicKey, expiry time.Time) (*GPGKey, error) {
content, err := base64EncPubKey(pubkey)
content, err := Base64EncPubKey(pubkey)
if err != nil {
return nil, err
}
@ -183,7 +183,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified
}
}
content, err := base64EncPubKey(pubkey)
content, err := Base64EncPubKey(pubkey)
if err != nil {
return nil, err
}
@ -239,33 +239,3 @@ func DeleteGPGKey(ctx context.Context, doer *user_model.User, id int64) (err err
return committer.Commit()
}
func checkKeyEmails(ctx context.Context, email string, keys ...*GPGKey) (bool, string) {
uid := int64(0)
var userEmails []*user_model.EmailAddress
var user *user_model.User
for _, key := range keys {
for _, e := range key.Emails {
if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
return true, e.Email
}
}
if key.Verified && key.OwnerID != 0 {
if uid != key.OwnerID {
userEmails, _ = user_model.GetEmailAddresses(ctx, key.OwnerID)
uid = key.OwnerID
user = &user_model.User{ID: uid}
_, _ = user_model.GetUser(ctx, user)
}
for _, e := range userEmails {
if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
return true, e.Email
}
}
if user.KeepEmailPrivate && strings.EqualFold(email, user.GetEmail()) {
return true, user.GetEmail()
}
}
}
return false, email
}

View File

@ -67,7 +67,7 @@ func addGPGSubKey(ctx context.Context, key *GPGKey) (err error) {
// AddGPGKey adds new public key to database.
func AddGPGKey(ctx context.Context, ownerID int64, content, token, signature string) ([]*GPGKey, error) {
ekeys, err := checkArmoredGPGKeyString(content)
ekeys, err := CheckArmoredGPGKeyString(content)
if err != nil {
return nil, err
}

View File

@ -4,17 +4,12 @@
package asymkey
import (
"context"
"fmt"
"hash"
"strings"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"github.com/ProtonMail/go-crypto/openpgp/packet"
)
@ -70,263 +65,6 @@ const (
NoKeyFound = "gpg.error.no_gpg_keys_found"
)
// ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys.
func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error)) []*SignCommit {
newCommits := make([]*SignCommit, 0, len(oldCommits))
keyMap := map[string]bool{}
for _, c := range oldCommits {
signCommit := &SignCommit{
UserCommit: c,
Verification: ParseCommitWithSignature(ctx, c.Commit),
}
_ = CalculateTrustStatus(signCommit.Verification, repoTrustModel, isOwnerMemberCollaborator, &keyMap)
newCommits = append(newCommits, signCommit)
}
return newCommits
}
// ParseCommitWithSignature check if signature is good against keystore.
func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *CommitVerification {
var committer *user_model.User
if c.Committer != nil {
var err error
// Find Committer account
committer, err = user_model.GetUserByEmail(ctx, c.Committer.Email) // This finds the user by primary email or activated email so commit will not be valid if email is not
if err != nil { // Skipping not user for committer
committer = &user_model.User{
Name: c.Committer.Name,
Email: c.Committer.Email,
}
// We can expect this to often be an ErrUserNotExist. in the case
// it is not, however, it is important to log it.
if !user_model.IsErrUserNotExist(err) {
log.Error("GetUserByEmail: %v", err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.no_committer_account",
}
}
}
}
// If no signature just report the committer
if c.Signature == nil {
return &CommitVerification{
CommittingUser: committer,
Verified: false, // Default value
Reason: "gpg.error.not_signed_commit", // Default value
}
}
// If this a SSH signature handle it differently
if strings.HasPrefix(c.Signature.Signature, "-----BEGIN SSH SIGNATURE-----") {
return ParseCommitWithSSHSignature(ctx, c, committer)
}
// Parsing signature
sig, err := extractSignature(c.Signature.Signature)
if err != nil { // Skipping failed to extract sign
log.Error("SignatureRead err: %v", err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.extract_sign",
}
}
keyID := tryGetKeyIDFromSignature(sig)
defaultReason := NoKeyFound
// First check if the sig has a keyID and if so just look at that
if commitVerification := hashAndVerifyForKeyID(
ctx,
sig,
c.Signature.Payload,
committer,
keyID,
setting.AppName,
""); commitVerification != nil {
if commitVerification.Reason == BadSignature {
defaultReason = BadSignature
} else {
return commitVerification
}
}
// Now try to associate the signature with the committer, if present
if committer.ID != 0 {
keys, err := db.Find[GPGKey](ctx, FindGPGKeyOptions{
OwnerID: committer.ID,
})
if err != nil { // Skipping failed to get gpg keys of user
log.Error("ListGPGKeys: %v", err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
if err := GPGKeyList(keys).LoadSubKeys(ctx); err != nil {
log.Error("LoadSubKeys: %v", err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
committerEmailAddresses, _ := user_model.GetEmailAddresses(ctx, committer.ID)
activated := false
for _, e := range committerEmailAddresses {
if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
activated = true
break
}
}
for _, k := range keys {
// Pre-check (& optimization) that emails attached to key can be attached to the committer email and can validate
canValidate := false
email := ""
if k.Verified && activated {
canValidate = true
email = c.Committer.Email
}
if !canValidate {
for _, e := range k.Emails {
if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
canValidate = true
email = e.Email
break
}
}
}
if !canValidate {
continue // Skip this key
}
commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, c.Signature.Payload, k, committer, committer, email)
if commitVerification != nil {
return commitVerification
}
}
}
if setting.Repository.Signing.SigningKey != "" && setting.Repository.Signing.SigningKey != "default" && setting.Repository.Signing.SigningKey != "none" {
// OK we should try the default key
gpgSettings := git.GPGSettings{
Sign: true,
KeyID: setting.Repository.Signing.SigningKey,
Name: setting.Repository.Signing.SigningName,
Email: setting.Repository.Signing.SigningEmail,
}
if err := gpgSettings.LoadPublicKeyContent(); err != nil {
log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err)
} else if commitVerification := verifyWithGPGSettings(ctx, &gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
if commitVerification.Reason == BadSignature {
defaultReason = BadSignature
} else {
return commitVerification
}
}
}
defaultGPGSettings, err := c.GetRepositoryDefaultPublicGPGKey(false)
if err != nil {
log.Error("Error getting default public gpg key: %v", err)
} else if defaultGPGSettings == nil {
log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String())
} else if defaultGPGSettings.Sign {
if commitVerification := verifyWithGPGSettings(ctx, defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
if commitVerification.Reason == BadSignature {
defaultReason = BadSignature
} else {
return commitVerification
}
}
}
return &CommitVerification{ // Default at this stage
CommittingUser: committer,
Verified: false,
Warning: defaultReason != NoKeyFound,
Reason: defaultReason,
SigningKey: &GPGKey{
KeyID: keyID,
},
}
}
func verifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *user_model.User, keyID string) *CommitVerification {
// First try to find the key in the db
if commitVerification := hashAndVerifyForKeyID(ctx, sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil {
return commitVerification
}
// Otherwise we have to parse the key
ekeys, err := checkArmoredGPGKeyString(gpgSettings.PublicKeyContent)
if err != nil {
log.Error("Unable to get default signing key: %v", err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.generate_hash",
}
}
for _, ekey := range ekeys {
pubkey := ekey.PrimaryKey
content, err := base64EncPubKey(pubkey)
if err != nil {
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.generate_hash",
}
}
k := &GPGKey{
Content: content,
CanSign: pubkey.CanSign(),
KeyID: pubkey.KeyIdString(),
}
for _, subKey := range ekey.Subkeys {
content, err := base64EncPubKey(subKey.PublicKey)
if err != nil {
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.generate_hash",
}
}
k.SubsKey = append(k.SubsKey, &GPGKey{
Content: content,
CanSign: subKey.PublicKey.CanSign(),
KeyID: subKey.PublicKey.KeyIdString(),
})
}
if commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, k, committer, &user_model.User{
Name: gpgSettings.Name,
Email: gpgSettings.Email,
}, gpgSettings.Email); commitVerification != nil {
return commitVerification
}
if keyID == k.KeyID {
// This is a bad situation ... We have a key id that matches our default key but the signature doesn't match.
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Warning: true,
Reason: BadSignature,
}
}
}
return nil
}
func verifySign(s *packet.Signature, h hash.Hash, k *GPGKey) error {
// Check if key can sign
if !k.CanSign {
@ -369,7 +107,7 @@ func hashAndVerifyWithSubKeys(sig *packet.Signature, payload string, k *GPGKey)
return nil, nil
}
func hashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *user_model.User, email string) *CommitVerification {
func HashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *user_model.User, email string) *CommitVerification {
key, err := hashAndVerifyWithSubKeys(sig, payload, k)
if err != nil { // Skipping failed to generate hash
return &CommitVerification{
@ -392,78 +130,6 @@ func hashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload s
return nil
}
func hashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload string, committer *user_model.User, keyID, name, email string) *CommitVerification {
if keyID == "" {
return nil
}
keys, err := db.Find[GPGKey](ctx, FindGPGKeyOptions{
KeyID: keyID,
IncludeSubKeys: true,
})
if err != nil {
log.Error("GetGPGKeysByKeyID: %v", err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
if len(keys) == 0 {
return nil
}
for _, key := range keys {
var primaryKeys []*GPGKey
if key.PrimaryKeyID != "" {
primaryKeys, err = db.Find[GPGKey](ctx, FindGPGKeyOptions{
KeyID: key.PrimaryKeyID,
IncludeSubKeys: true,
})
if err != nil {
log.Error("GetGPGKeysByKeyID: %v", err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
}
activated, email := checkKeyEmails(ctx, email, append([]*GPGKey{key}, primaryKeys...)...)
if !activated {
continue
}
signer := &user_model.User{
Name: name,
Email: email,
}
if key.OwnerID != 0 {
owner, err := user_model.GetUserByID(ctx, key.OwnerID)
if err == nil {
signer = owner
} else if !user_model.IsErrUserNotExist(err) {
log.Error("Failed to user_model.GetUserByID: %d for key ID: %d (%s) %v", key.OwnerID, key.ID, key.KeyID, err)
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.no_committer_account",
}
}
}
commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, key, committer, signer, email)
if commitVerification != nil {
return commitVerification
}
}
// This is a bad situation ... We have a key id that is in our database but the signature doesn't match.
return &CommitVerification{
CommittingUser: committer,
Verified: false,
Warning: true,
Reason: BadSignature,
}
}
// CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository
// There are several trust models in Gitea
func CalculateTrustStatus(verification *CommitVerification, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error), keyMap *map[string]bool) error {

View File

@ -33,9 +33,9 @@ import (
// This file provides common functions relating to GPG Keys
// checkArmoredGPGKeyString checks if the given key string is a valid GPG armored key.
// CheckArmoredGPGKeyString checks if the given key string is a valid GPG armored key.
// The function returns the actual public key on success
func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) {
func CheckArmoredGPGKeyString(content string) (openpgp.EntityList, error) {
list, err := openpgp.ReadArmoredKeyRing(strings.NewReader(content))
if err != nil {
return nil, ErrGPGKeyParsing{err}
@ -43,8 +43,8 @@ func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) {
return list, nil
}
// base64EncPubKey encode public key content to base 64
func base64EncPubKey(pubkey *packet.PublicKey) (string, error) {
// Base64EncPubKey encode public key content to base 64
func Base64EncPubKey(pubkey *packet.PublicKey) (string, error) {
var w bytes.Buffer
err := pubkey.Serialize(&w)
if err != nil {
@ -119,7 +119,7 @@ func readArmoredSign(r io.Reader) (body io.Reader, err error) {
return block.Body, nil
}
func extractSignature(s string) (*packet.Signature, error) {
func ExtractSignature(s string) (*packet.Signature, error) {
r, err := readArmoredSign(strings.NewReader(s))
if err != nil {
return nil, fmt.Errorf("Failed to read signature armor")
@ -135,7 +135,7 @@ func extractSignature(s string) (*packet.Signature, error) {
return sig, nil
}
func tryGetKeyIDFromSignature(sig *packet.Signature) string {
func TryGetKeyIDFromSignature(sig *packet.Signature) string {
if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 {
return fmt.Sprintf("%016X", *sig.IssuerKeyId)
}
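
Exporting these helpers lets the commit-verification code that moved out of this package keep calling them. A rough sketch of the intended call sequence, with the armored key and signature strings assumed to come from the caller:

// Sketch of the call sequence with the newly exported helpers.
func verifySketch(armoredPublicKey, armoredSignature string) error {
    sig, err := ExtractSignature(armoredSignature)
    if err != nil {
        return err
    }
    _ = TryGetKeyIDFromSignature(sig) // empty if the signature carries no issuer information

    entities, err := CheckArmoredGPGKeyString(armoredPublicKey)
    if err != nil {
        return err
    }
    // Base64EncPubKey produces the storable form of the primary public key.
    _, err = Base64EncPubKey(entities[0].PrimaryKey)
    return err
}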

View File

@ -51,7 +51,7 @@ MkM/fdpyc2hY7Dl/+qFmN5MG5yGmMpQcX+RNNR222ibNC1D3wg==
=i9b7
-----END PGP PUBLIC KEY BLOCK-----`
key, err := checkArmoredGPGKeyString(testGPGArmor)
key, err := CheckArmoredGPGKeyString(testGPGArmor)
assert.NoError(t, err, "Could not parse a valid GPG public armored rsa key", key)
// TODO verify value of key
}
@ -72,7 +72,7 @@ OyjLLnFQiVmq7kEA/0z0CQe3ZQiQIq5zrs7Nh1XRkFAo8GlU/SGC9XFFi722
=ZiSe
-----END PGP PUBLIC KEY BLOCK-----`
key, err := checkArmoredGPGKeyString(testGPGArmor)
key, err := CheckArmoredGPGKeyString(testGPGArmor)
assert.NoError(t, err, "Could not parse a valid GPG public armored brainpoolP256r1 key", key)
// TODO verify value of key
}
@ -108,14 +108,14 @@ Av844q/BfRuVsJsK1NDNG09LC30B0l3LKBqlrRmRTUMHtgchdX2dY+p7GPOoSzlR
MkM/fdpyc2hY7Dl/+qFmN5MG5yGmMpQcX+RNNR222ibNC1D3wg==
=i9b7
-----END PGP PUBLIC KEY BLOCK-----`
keys, err := checkArmoredGPGKeyString(testGPGArmor)
keys, err := CheckArmoredGPGKeyString(testGPGArmor)
require.NotEmpty(t, keys)
ekey := keys[0]
assert.NoError(t, err, "Could not parse a valid GPG armored key", ekey)
pubkey := ekey.PrimaryKey
content, err := base64EncPubKey(pubkey)
content, err := Base64EncPubKey(pubkey)
assert.NoError(t, err, "Could not base64 encode a valid PublicKey content", ekey)
key := &GPGKey{
@ -176,9 +176,9 @@ committer Antoine GIRARD <sapk@sapk.fr> 1489013107 +0100
Unknown GPG key with good email
`
// Reading Sign
goodSig, err := extractSignature(testGoodSigArmor)
goodSig, err := ExtractSignature(testGoodSigArmor)
assert.NoError(t, err, "Could not parse a valid GPG armored signature", testGoodSigArmor)
badSig, err := extractSignature(testBadSigArmor)
badSig, err := ExtractSignature(testBadSigArmor)
assert.NoError(t, err, "Could not parse a valid GPG armored signature", testBadSigArmor)
// Generating hash of commit
@ -386,7 +386,7 @@ epiDVQ==
=VSKJ
-----END PGP PUBLIC KEY BLOCK-----
`
keys, err := checkArmoredGPGKeyString(testIssue6599)
keys, err := CheckArmoredGPGKeyString(testIssue6599)
assert.NoError(t, err)
if assert.NotEmpty(t, keys) {
ekey := keys[0]
@ -396,11 +396,11 @@ epiDVQ==
}
func TestTryGetKeyIDFromSignature(t *testing.T) {
assert.Empty(t, tryGetKeyIDFromSignature(&packet.Signature{}))
assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
assert.Empty(t, TryGetKeyIDFromSignature(&packet.Signature{}))
assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{
IssuerKeyId: util.ToPointer(uint64(0x38D1A3EADDBEA9C)),
}))
assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{
IssuerFingerprint: []uint8{0xb, 0x23, 0x24, 0xc7, 0xe6, 0xfe, 0x4f, 0x3a, 0x6, 0x26, 0xc1, 0x21, 0x3, 0x8d, 0x1a, 0x3e, 0xad, 0xdb, 0xea, 0x9c},
}))
}

View File

@ -50,7 +50,7 @@ func VerifyGPGKey(ctx context.Context, ownerID int64, keyID, token, signature st
return "", err
}
sig, err := extractSignature(signature)
sig, err := ExtractSignature(signature)
if err != nil {
return "", ErrGPGInvalidTokenSignature{
ID: key.KeyID,

View File

@ -69,3 +69,21 @@
created_unix: 1730330775
updated_unix: 1730330775
expired_unix: 1738106775
-
id: 23
run_id: 793
runner_id: 1
repo_id: 2
owner_id: 2
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
storage_path: "27/5/1730330775594233150.chunk"
file_size: 1024
file_compressed_size: 1024
content_encoding: "application/zip"
artifact_path: "artifact-v4-download.zip"
artifact_name: "artifact-v4-download"
status: 2
created_unix: 1730330775
updated_unix: 1730330775
expired_unix: 1738106775

View File

@ -496,47 +496,11 @@ type SignCommitWithStatuses struct {
*asymkey_model.SignCommit
}
// ParseCommitsWithStatus checks commits latest statuses and calculates its worst status state
func ParseCommitsWithStatus(ctx context.Context, oldCommits []*asymkey_model.SignCommit, repo *repo_model.Repository) []*SignCommitWithStatuses {
newCommits := make([]*SignCommitWithStatuses, 0, len(oldCommits))
for _, c := range oldCommits {
commit := &SignCommitWithStatuses{
SignCommit: c,
}
statuses, _, err := GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptions{})
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
} else {
commit.Statuses = statuses
commit.Status = CalcCommitStatus(statuses)
}
newCommits = append(newCommits, commit)
}
return newCommits
}
// hashCommitStatusContext hash context
func hashCommitStatusContext(context string) string {
return fmt.Sprintf("%x", sha1.Sum([]byte(context)))
}
// ConvertFromGitCommit converts git commits into SignCommitWithStatuses
func ConvertFromGitCommit(ctx context.Context, commits []*git.Commit, repo *repo_model.Repository) []*SignCommitWithStatuses {
return ParseCommitsWithStatus(ctx,
asymkey_model.ParseCommitsWithSignature(
ctx,
user_model.ValidateCommitsWithEmails(ctx, commits),
repo.GetTrustModel(),
func(user *user_model.User) (bool, error) {
return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID)
},
),
repo,
)
}
// CommitStatusesHideActionsURL hide Gitea Actions urls
func CommitStatusesHideActionsURL(ctx context.Context, statuses []*CommitStatus) {
idToRepos := make(map[int64]*repo_model.Repository)

View File

@ -19,8 +19,6 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/references"
@ -774,41 +772,6 @@ func (c *Comment) CodeCommentLink(ctx context.Context) string {
return fmt.Sprintf("%s/files#%s", c.Issue.Link(), c.HashTag())
}
// LoadPushCommits Load push commits
func (c *Comment) LoadPushCommits(ctx context.Context) (err error) {
if c.Content == "" || c.Commits != nil || c.Type != CommentTypePullRequestPush {
return nil
}
var data PushActionContent
err = json.Unmarshal([]byte(c.Content), &data)
if err != nil {
return err
}
c.IsForcePush = data.IsForcePush
if c.IsForcePush {
if len(data.CommitIDs) != 2 {
return nil
}
c.OldCommit = data.CommitIDs[0]
c.NewCommit = data.CommitIDs[1]
} else {
gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, c.Issue.Repo)
if err != nil {
return err
}
defer closer.Close()
c.Commits = git_model.ConvertFromGitCommit(ctx, gitRepo.GetCommitsFromIDs(data.CommitIDs), c.Issue.Repo)
c.CommitsNum = int64(len(c.Commits))
}
return err
}
// CreateComment creates comment with context
func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) {
ctx, committer, err := db.TxContext(ctx)

View File

@ -86,8 +86,10 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
ids = append(ids, comment.ReviewID)
}
}
if err := e.In("id", ids).Find(&reviews); err != nil {
return nil, err
if len(ids) > 0 {
if err := e.In("id", ids).Find(&reviews); err != nil {
return nil, err
}
}
n := 0

View File

@ -17,6 +17,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
@ -501,6 +502,45 @@ func GetIssueByIndex(ctx context.Context, repoID, index int64) (*Issue, error) {
return issue, nil
}
func isPullToCond(isPull optional.Option[bool]) builder.Cond {
if isPull.Has() {
return builder.Eq{"is_pull": isPull.Value()}
}
return builder.NewCond()
}
func FindLatestUpdatedIssues(ctx context.Context, repoID int64, isPull optional.Option[bool], pageSize int) (IssueList, error) {
issues := make([]*Issue, 0, pageSize)
err := db.GetEngine(ctx).Where("repo_id = ?", repoID).
And(isPullToCond(isPull)).
OrderBy("updated_unix DESC").
Limit(pageSize).
Find(&issues)
return issues, err
}
func FindIssuesSuggestionByKeyword(ctx context.Context, repoID int64, keyword string, isPull optional.Option[bool], excludedID int64, pageSize int) (IssueList, error) {
cond := builder.NewCond()
if excludedID > 0 {
cond = cond.And(builder.Neq{"`id`": excludedID})
}
// It seems that GitHub searches both title and content (maybe sorting by the search engine's ranking system?)
// The first PR (https://github.com/go-gitea/gitea/pull/32327) uses "search indexer" to search "name(title) + content"
// But it seems that searching "content" (especially LIKE by DB engine) generates worse (unusable) results.
// So now (https://github.com/go-gitea/gitea/pull/33538) it only searches "name(title)", leave the improvements to the future.
cond = cond.And(db.BuildCaseInsensitiveLike("`name`", keyword))
issues := make([]*Issue, 0, pageSize)
err := db.GetEngine(ctx).Where("repo_id = ?", repoID).
And(isPullToCond(isPull)).
And(cond).
OrderBy("updated_unix DESC, `index` DESC").
Limit(pageSize).
Find(&issues)
return issues, err
}
// GetIssueWithAttrsByIndex returns issue by index in a repository.
func GetIssueWithAttrsByIndex(ctx context.Context, repoID, index int64) (*Issue, error) {
issue, err := GetIssueByIndex(ctx, repoID, index)
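
FindLatestUpdatedIssues and FindIssuesSuggestionByKeyword look intended for a title-based issue-suggestion lookup (the PRs linked in the comment point that way). A hedged sketch of how a suggestion endpoint might combine them; the wrapper function and page size are invented:

// Sketch (names invented): gather suggestion candidates for an issue picker.
func findSuggestions(ctx context.Context, repoID, currentIssueID int64, keyword string) (IssueList, error) {
    if keyword == "" {
        // no keyword yet: show the most recently updated issues and pulls
        return FindLatestUpdatedIssues(ctx, repoID, optional.None[bool](), 5)
    }
    // otherwise match the keyword against titles, excluding the issue being edited
    return FindIssuesSuggestionByKeyword(ctx, repoID, keyword, optional.None[bool](), currentIssueID, 5)
}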

View File

@ -228,6 +228,11 @@ func SetRepositoryLink(ctx context.Context, packageID, repoID int64) error {
return err
}
func UnlinkRepository(ctx context.Context, packageID int64) error {
_, err := db.GetEngine(ctx).ID(packageID).Cols("repo_id").Update(&Package{RepoID: 0})
return err
}
// UnlinkRepositoryFromAllPackages unlinks every package from the repository
func UnlinkRepositoryFromAllPackages(ctx context.Context, repoID int64) error {
_, err := db.GetEngine(ctx).Where("repo_id = ?", repoID).Cols("repo_id").Update(&Package{})

View File

@ -244,6 +244,10 @@ func GetSearchOrderByBySortType(sortType string) db.SearchOrderBy {
return db.SearchOrderByRecentUpdated
case "leastupdate":
return db.SearchOrderByLeastUpdated
case "alphabetically":
return "title ASC"
case "reversealphabetically":
return "title DESC"
default:
return db.SearchOrderByNewest
}

View File

@ -385,11 +385,12 @@ func (u *User) ValidatePassword(passwd string) bool {
}
// IsPasswordSet checks if the password is set or left empty
// TODO: It's better to clarify the "password" behavior for different types (individual, bot)
func (u *User) IsPasswordSet() bool {
return len(u.Passwd) != 0
return u.Passwd != ""
}
// IsOrganization returns true if user is actually a organization.
// IsOrganization returns true if user is actually an organization.
func (u *User) IsOrganization() bool {
return u.Type == UserTypeOrganization
}
@ -399,13 +400,14 @@ func (u *User) IsIndividual() bool {
return u.Type == UserTypeIndividual
}
func (u *User) IsUser() bool {
return u.Type == UserTypeIndividual || u.Type == UserTypeBot
// IsTypeBot returns whether the user is of type bot
func (u *User) IsTypeBot() bool {
return u.Type == UserTypeBot
}
// IsBot returns whether or not the user is of type bot
func (u *User) IsBot() bool {
return u.Type == UserTypeBot
// IsTokenAccessAllowed returns whether the user is an individual or a bot (which allows for token access)
func (u *User) IsTokenAccessAllowed() bool {
return u.Type == UserTypeIndividual || u.Type == UserTypeBot
}
// DisplayName returns full name if it's not empty,
@ -1127,28 +1129,89 @@ func ValidateCommitWithEmail(ctx context.Context, c *git.Commit) *User {
}
// ValidateCommitsWithEmails checks if authors' e-mails of commits are corresponding to users.
func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) []*UserCommit {
func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) ([]*UserCommit, error) {
var (
emails = make(map[string]*User)
newCommits = make([]*UserCommit, 0, len(oldCommits))
emailSet = make(container.Set[string])
)
for _, c := range oldCommits {
var u *User
if c.Author != nil {
if v, ok := emails[c.Author.Email]; !ok {
u, _ = GetUserByEmail(ctx, c.Author.Email)
emails[c.Author.Email] = u
} else {
u = v
emailSet.Add(c.Author.Email)
}
}
emailUserMap, err := GetUsersByEmails(ctx, emailSet.Values())
if err != nil {
return nil, err
}
for _, c := range oldCommits {
user, ok := emailUserMap[c.Author.Email]
if !ok {
user = &User{
Name: c.Author.Name,
Email: c.Author.Email,
}
}
newCommits = append(newCommits, &UserCommit{
User: u,
User: user,
Commit: c,
})
}
return newCommits
return newCommits, nil
}
func GetUsersByEmails(ctx context.Context, emails []string) (map[string]*User, error) {
if len(emails) == 0 {
return nil, nil
}
needCheckEmails := make(container.Set[string])
needCheckUserNames := make(container.Set[string])
for _, email := range emails {
if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) {
username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress))
needCheckUserNames.Add(username)
} else {
needCheckEmails.Add(strings.ToLower(email))
}
}
emailAddresses := make([]*EmailAddress, 0, len(needCheckEmails))
if err := db.GetEngine(ctx).In("lower_email", needCheckEmails.Values()).
And("is_activated=?", true).
Find(&emailAddresses); err != nil {
return nil, err
}
userIDs := make(container.Set[int64])
for _, email := range emailAddresses {
userIDs.Add(email.UID)
}
users, err := GetUsersMapByIDs(ctx, userIDs.Values())
if err != nil {
return nil, err
}
results := make(map[string]*User, len(emails))
for _, email := range emailAddresses {
user := users[email.UID]
if user != nil {
if user.KeepEmailPrivate {
results[user.LowerName+"@"+setting.Service.NoReplyAddress] = user
} else {
results[email.Email] = user
}
}
}
users = make(map[int64]*User, len(needCheckUserNames))
if err := db.GetEngine(ctx).In("lower_name", needCheckUserNames.Values()).Find(&users); err != nil {
return nil, err
}
for _, user := range users {
results[user.LowerName+"@"+setting.Service.NoReplyAddress] = user
}
return results, nil
}
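
GetUsersByEmails batches what used to be one lookup per commit author: plain addresses are matched against activated e-mail rows, noreply addresses of the form <lowername>@<NoReplyAddress> are resolved by user name, and users with KeepEmailPrivate are keyed by their noreply address. A caller-side sketch of the changed ValidateCommitsWithEmails (the wrapper is illustrative; unresolved authors come back as placeholder users with ID 0):

// Sketch: count how many commit authors resolve to registered users.
func countResolvedAuthors(ctx context.Context, commits []*git.Commit) (int, error) {
    userCommits, err := user_model.ValidateCommitsWithEmails(ctx, commits)
    if err != nil {
        return 0, err
    }
    resolved := 0
    for _, uc := range userCommits {
        if uc.User.ID > 0 { // unresolved authors get a placeholder User with ID 0
            resolved++
        }
    }
    return resolved, nil
}
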
// GetUserByEmail returns the user object by given e-mail if exists.

View File

@ -56,7 +56,7 @@ func NewActionsUser() *User {
Email: ActionsUserEmail,
KeepEmailPrivate: true,
LoginName: ActionsUserName,
Type: UserTypeIndividual,
Type: UserTypeBot,
AllowCreateOrganization: true,
Visibility: structs.VisibleTypePublic,
}
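
Making the Actions user a bot pairs with the IsTokenAccessAllowed helper introduced earlier in this change. A tiny hedged sketch of the kind of gate a token-authentication path might apply; the function and error message are invented:

// Sketch: reject tokens for user types that do not allow token access.
func checkTokenUser(u *user_model.User) error {
    if !u.IsTokenAccessAllowed() { // individuals and bots only
        return errors.New("access token not allowed for this user type")
    }
    return nil
}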

View File

@ -0,0 +1,48 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"net/http"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/services/context"
)
// Artifacts using the v4 backend are stored as a single combined zip file per artifact on the backend
// The v4 backend ensures ContentEncoding is set to "application/zip", which is not the case for the old backend
func IsArtifactV4(art *actions_model.ActionArtifact) bool {
return art.ArtifactName+".zip" == art.ArtifactPath && art.ContentEncoding == "application/zip"
}
func DownloadArtifactV4ServeDirectOnly(ctx *context.Base, art *actions_model.ActionArtifact) (bool, error) {
if setting.Actions.ArtifactStorage.ServeDirect() {
u, err := storage.ActionsArtifacts.URL(art.StoragePath, art.ArtifactPath, nil)
if u != nil && err == nil {
ctx.Redirect(u.String(), http.StatusFound)
return true, nil
}
}
return false, nil
}
func DownloadArtifactV4Fallback(ctx *context.Base, art *actions_model.ActionArtifact) error {
f, err := storage.ActionsArtifacts.Open(art.StoragePath)
if err != nil {
return err
}
defer f.Close()
http.ServeContent(ctx.Resp, ctx.Req, art.ArtifactName+".zip", art.CreatedUnix.AsLocalTime(), f)
return nil
}
func DownloadArtifactV4(ctx *context.Base, art *actions_model.ActionArtifact) error {
ok, err := DownloadArtifactV4ServeDirectOnly(ctx, art)
if ok || err != nil {
return err
}
return DownloadArtifactV4Fallback(ctx, art)
}
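
A hedged sketch of how a download handler might use these helpers, preferring the direct object-storage redirect and falling back to streaming the zip; the handler shape is assumed, not taken from the change:

// Sketch (handler shape assumed): serve a v4 artifact.
func serveArtifact(ctx *context.Base, art *actions_model.ActionArtifact) error {
    if !IsArtifactV4(art) {
        return errors.New("not a v4 artifact") // pre-v4 artifacts are stored chunk-per-file
    }
    // redirects when ServeDirect is enabled, otherwise streams the combined zip
    return DownloadArtifactV4(ctx, art)
}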

View File

@ -46,19 +46,9 @@ func parseLsTreeLine(line []byte) (*LsTreeEntry, error) {
entry.Size = optional.Some(size)
}
switch string(entryMode) {
case "100644":
entry.EntryMode = EntryModeBlob
case "100755":
entry.EntryMode = EntryModeExec
case "120000":
entry.EntryMode = EntryModeSymlink
case "160000":
entry.EntryMode = EntryModeCommit
case "040000", "040755": // git uses 040000 for tree object, but some users may get 040755 for unknown reasons
entry.EntryMode = EntryModeTree
default:
return nil, fmt.Errorf("unknown type: %v", string(entryMode))
entry.EntryMode, err = ParseEntryMode(string(entryMode))
if err != nil || entry.EntryMode == EntryModeNoEntry {
return nil, fmt.Errorf("invalid ls-tree output (invalid mode): %q, err: %w", line, err)
}
entry.ID, err = NewIDFromString(string(entryObjectID))

View File

@ -3,7 +3,10 @@
package git
import "strconv"
import (
"fmt"
"strconv"
)
// EntryMode the type of the object in the git tree
type EntryMode int
@ -11,6 +14,9 @@ type EntryMode int
// There are only a few file modes in Git. They look like unix file modes, but they can only be
// one of these.
const (
// EntryModeNoEntry is possible if the file was added or removed in a commit. In the case of
// added the base commit will not have the file in its tree so a mode of 0o000000 is used.
EntryModeNoEntry EntryMode = 0o000000
// EntryModeBlob
EntryModeBlob EntryMode = 0o100644
// EntryModeExec
@ -33,3 +39,22 @@ func ToEntryMode(value string) EntryMode {
v, _ := strconv.ParseInt(value, 8, 32)
return EntryMode(v)
}
func ParseEntryMode(mode string) (EntryMode, error) {
switch mode {
case "000000":
return EntryModeNoEntry, nil
case "100644":
return EntryModeBlob, nil
case "100755":
return EntryModeExec, nil
case "120000":
return EntryModeSymlink, nil
case "160000":
return EntryModeCommit, nil
case "040000", "040755": // git uses 040000 for tree object, but some users may get 040755 for unknown reasons
return EntryModeTree, nil
default:
return 0, fmt.Errorf("unparsable entry mode: %s", mode)
}
}
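
ParseEntryMode centralizes the mode-string switch that parseLsTreeLine used to inline and adds the explicit 000000 no-entry case seen in diffs of added or removed files. A small usage sketch with an invented wrapper:

// Sketch: map a raw mode string from git output to the typed constant.
func describeMode(raw string) (string, error) {
    mode, err := ParseEntryMode(raw)
    if err != nil {
        return "", err
    }
    switch mode {
    case EntryModeNoEntry:
        return "absent on this side of the diff", nil
    case EntryModeBlob, EntryModeExec:
        return "regular file", nil
    case EntryModeSymlink:
        return "symlink", nil
    case EntryModeCommit:
        return "submodule", nil
    default: // EntryModeTree
        return "directory", nil
    }
}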

View File

@ -260,17 +260,28 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
var (
indexerQuery query.Query
keywordQuery query.Query
contentQuery query.Query
)
pathQuery := bleve.NewPrefixQuery(strings.ToLower(opts.Keyword))
pathQuery.FieldVal = "Filename"
pathQuery.SetBoost(10)
contentQuery := bleve.NewMatchQuery(opts.Keyword)
contentQuery.FieldVal = "Content"
if opts.IsKeywordFuzzy {
contentQuery.Fuzziness = inner_bleve.GuessFuzzinessByKeyword(opts.Keyword)
keywordAsPhrase, isPhrase := internal.ParseKeywordAsPhrase(opts.Keyword)
if isPhrase {
q := bleve.NewMatchPhraseQuery(keywordAsPhrase)
q.FieldVal = "Content"
if opts.IsKeywordFuzzy {
q.Fuzziness = inner_bleve.GuessFuzzinessByKeyword(keywordAsPhrase)
}
contentQuery = q
} else {
q := bleve.NewMatchQuery(opts.Keyword)
q.FieldVal = "Content"
if opts.IsKeywordFuzzy {
q.Fuzziness = inner_bleve.GuessFuzzinessByKeyword(opts.Keyword)
}
contentQuery = q
}
keywordQuery = bleve.NewDisjunctionQuery(contentQuery, pathQuery)

View File

@ -24,6 +24,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/typesniffer"
"code.gitea.io/gitea/modules/util"
"github.com/go-enry/go-enry/v2"
"github.com/olivere/elastic/v7"
@ -359,13 +360,19 @@ func extractAggs(searchResult *elastic.SearchResult) []*internal.SearchResultLan
// Search searches for codes and language stats by given conditions.
func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
searchType := esMultiMatchTypePhrasePrefix
if opts.IsKeywordFuzzy {
searchType = esMultiMatchTypeBestFields
var contentQuery elastic.Query
keywordAsPhrase, isPhrase := internal.ParseKeywordAsPhrase(opts.Keyword)
if isPhrase {
contentQuery = elastic.NewMatchPhraseQuery("content", keywordAsPhrase)
} else {
// TODO: this is the old logic, but not really using "fuzziness"
// * IsKeywordFuzzy=true: "best_fields"
// * IsKeywordFuzzy=false: "phrase_prefix"
contentQuery = elastic.NewMultiMatchQuery("content", opts.Keyword).
Type(util.Iif(opts.IsKeywordFuzzy, esMultiMatchTypeBestFields, esMultiMatchTypePhrasePrefix))
}
kwQuery := elastic.NewBoolQuery().Should(
elastic.NewMultiMatchQuery(opts.Keyword, "content").Type(searchType),
contentQuery,
elastic.NewMultiMatchQuery(opts.Keyword, "filename^10").Type(esMultiMatchTypePhrasePrefix),
)
query := elastic.NewBoolQuery()

View File

@ -0,0 +1,59 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package gitgrep
import (
"context"
"fmt"
"strings"
"code.gitea.io/gitea/modules/git"
code_indexer "code.gitea.io/gitea/modules/indexer/code"
"code.gitea.io/gitea/modules/setting"
)
func indexSettingToGitGrepPathspecList() (list []string) {
for _, expr := range setting.Indexer.IncludePatterns {
list = append(list, ":(glob)"+expr.PatternString())
}
for _, expr := range setting.Indexer.ExcludePatterns {
list = append(list, ":(glob,exclude)"+expr.PatternString())
}
return list
}
func PerformSearch(ctx context.Context, page int, repoID int64, gitRepo *git.Repository, ref git.RefName, keyword string, isFuzzy bool) (searchResults []*code_indexer.Result, total int, err error) {
// TODO: it should also respect ParseKeywordAsPhrase and clarify the "fuzzy" behavior
res, err := git.GrepSearch(ctx, gitRepo, keyword, git.GrepOptions{
ContextLineNumber: 1,
IsFuzzy: isFuzzy,
RefName: ref.String(),
PathspecList: indexSettingToGitGrepPathspecList(),
})
if err != nil {
// TODO: if no branch exists, it reports: exit status 128, fatal: this operation must be run in a work tree.
return nil, 0, fmt.Errorf("git.GrepSearch: %w", err)
}
commitID, err := gitRepo.GetRefCommitID(ref.String())
if err != nil {
return nil, 0, fmt.Errorf("gitRepo.GetRefCommitID: %w", err)
}
total = len(res)
pageStart := min((page-1)*setting.UI.RepoSearchPagingNum, len(res))
pageEnd := min(page*setting.UI.RepoSearchPagingNum, len(res))
res = res[pageStart:pageEnd]
for _, r := range res {
searchResults = append(searchResults, &code_indexer.Result{
RepoID: repoID,
Filename: r.Filename,
CommitID: commitID,
// UpdatedUnix: not supported yet
// Language: not supported yet
// Color: not supported yet
Lines: code_indexer.HighlightSearchResultCode(r.Filename, "", r.LineNumbers, strings.Join(r.LineCodes, "\n")),
})
}
return searchResults, total, nil
}
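A hypothetical caller sketch for PerformSearch (the branch name, keyword, and logging are illustrative, and git.RefNameFromBranch is assumed to be the existing helper for building a branch ref name):

	// Search for "TODO" on the main branch of an already-open repository,
	// first page, honoring the indexer include/exclude globs via the
	// pathspec list built above.
	results, total, err := gitgrep.PerformSearch(ctx, 1, repo.ID, gitRepo,
		git.RefNameFromBranch("main"), "TODO", false /* isFuzzy */)
	if err != nil {
		return err
	}
	log.Info("git grep found %d results in total, %d on this page", total, len(results))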

View File

@ -1,7 +1,7 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repo
package gitgrep
import (
"testing"

View File

@ -29,13 +29,11 @@ var (
// When the real indexer is not ready, it will be a dummy indexer which will return an error to explain it's not ready.
// So it's always safe to use it as *globalIndexer.Load() and call its methods.
globalIndexer atomic.Pointer[internal.Indexer]
dummyIndexer *internal.Indexer
)
func init() {
i := internal.NewDummyIndexer()
dummyIndexer = &i
globalIndexer.Store(dummyIndexer)
dummyIndexer := internal.NewDummyIndexer()
globalIndexer.Store(&dummyIndexer)
}
func index(ctx context.Context, indexer internal.Indexer, repoID int64) error {

View File

@ -35,7 +35,7 @@ func FilenameOfIndexerID(indexerID string) string {
return indexerID[index+1:]
}
// Given the contents of file, returns the boundaries of its first seven lines.
// FilenameMatchIndexPos returns the boundaries of its first seven lines.
func FilenameMatchIndexPos(content string) (int, int) {
count := 1
for i, c := range content {
@ -48,3 +48,11 @@ func FilenameMatchIndexPos(content string) (int, int) {
}
return 0, len(content)
}
func ParseKeywordAsPhrase(keyword string) (string, bool) {
if strings.HasPrefix(keyword, `"`) && strings.HasSuffix(keyword, `"`) && len(keyword) > 1 {
// only remove the prefix and suffix quotes, no need to decode the content at the moment
return keyword[1 : len(keyword)-1], true
}
return "", false
}

View File

@ -0,0 +1,30 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package internal
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestParseKeywordAsPhrase(t *testing.T) {
cases := []struct {
keyword string
phrase string
isPhrase bool
}{
{``, "", false},
{`a`, "", false},
{`"`, "", false},
{`"a`, "", false},
{`"a"`, "a", true},
{`""\"""`, `"\""`, true},
}
for _, c := range cases {
phrase, isPhrase := ParseKeywordAsPhrase(c.keyword)
assert.Equal(t, c.phrase, phrase, "keyword=%q", c.keyword)
assert.Equal(t, c.isPhrase, isPhrase, "keyword=%q", c.keyword)
}
}

View File

@ -12,18 +12,17 @@ import (
// Downloader downloads the site repo information
type Downloader interface {
SetContext(context.Context)
GetRepoInfo() (*Repository, error)
GetTopics() ([]string, error)
GetMilestones() ([]*Milestone, error)
GetReleases() ([]*Release, error)
GetLabels() ([]*Label, error)
GetIssues(page, perPage int) ([]*Issue, bool, error)
GetComments(commentable Commentable) ([]*Comment, bool, error)
GetAllComments(page, perPage int) ([]*Comment, bool, error)
GetRepoInfo(ctx context.Context) (*Repository, error)
GetTopics(ctx context.Context) ([]string, error)
GetMilestones(ctx context.Context) ([]*Milestone, error)
GetReleases(ctx context.Context) ([]*Release, error)
GetLabels(ctx context.Context) ([]*Label, error)
GetIssues(ctx context.Context, page, perPage int) ([]*Issue, bool, error)
GetComments(ctx context.Context, commentable Commentable) ([]*Comment, bool, error)
GetAllComments(ctx context.Context, page, perPage int) ([]*Comment, bool, error)
SupportGetRepoComments() bool
GetPullRequests(page, perPage int) ([]*PullRequest, bool, error)
GetReviews(reviewable Reviewable) ([]*Review, error)
GetPullRequests(ctx context.Context, page, perPage int) ([]*PullRequest, bool, error)
GetReviews(ctx context.Context, reviewable Reviewable) ([]*Review, error)
FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error)
}
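The practical effect of the new signatures is that callers pass their context on every call instead of stashing it via SetContext. A rough sketch of a migration loop under the updated interface (error handling trimmed; the variable names and the Repository.Name field are illustrative):

	// Context now flows explicitly through every downloader call.
	repoInfo, err := downloader.GetRepoInfo(ctx)
	if err != nil {
		return err
	}
	log.Info("migrating %s", repoInfo.Name)
	for page := 1; ; page++ {
		issues, isEnd, err := downloader.GetIssues(ctx, page, perPage)
		if err != nil {
			return err
		}
		if err := uploader.CreateIssues(ctx, issues...); err != nil {
			return err
		}
		if isEnd {
			break
		}
	}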

View File

@ -13,56 +13,53 @@ type NullDownloader struct{}
var _ Downloader = &NullDownloader{}
// SetContext sets the context
func (n NullDownloader) SetContext(_ context.Context) {}
// GetRepoInfo returns repository information
func (n NullDownloader) GetRepoInfo() (*Repository, error) {
func (n NullDownloader) GetRepoInfo(_ context.Context) (*Repository, error) {
return nil, ErrNotSupported{Entity: "RepoInfo"}
}
// GetTopics returns repository topics
func (n NullDownloader) GetTopics() ([]string, error) {
func (n NullDownloader) GetTopics(_ context.Context) ([]string, error) {
return nil, ErrNotSupported{Entity: "Topics"}
}
// GetMilestones returns milestones
func (n NullDownloader) GetMilestones() ([]*Milestone, error) {
func (n NullDownloader) GetMilestones(_ context.Context) ([]*Milestone, error) {
return nil, ErrNotSupported{Entity: "Milestones"}
}
// GetReleases returns releases
func (n NullDownloader) GetReleases() ([]*Release, error) {
func (n NullDownloader) GetReleases(_ context.Context) ([]*Release, error) {
return nil, ErrNotSupported{Entity: "Releases"}
}
// GetLabels returns labels
func (n NullDownloader) GetLabels() ([]*Label, error) {
func (n NullDownloader) GetLabels(_ context.Context) ([]*Label, error) {
return nil, ErrNotSupported{Entity: "Labels"}
}
// GetIssues returns issues according to page and perPage
func (n NullDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) {
func (n NullDownloader) GetIssues(_ context.Context, page, perPage int) ([]*Issue, bool, error) {
return nil, false, ErrNotSupported{Entity: "Issues"}
}
// GetComments returns comments of an issue or PR
func (n NullDownloader) GetComments(commentable Commentable) ([]*Comment, bool, error) {
func (n NullDownloader) GetComments(_ context.Context, commentable Commentable) ([]*Comment, bool, error) {
return nil, false, ErrNotSupported{Entity: "Comments"}
}
// GetAllComments returns paginated comments
func (n NullDownloader) GetAllComments(page, perPage int) ([]*Comment, bool, error) {
func (n NullDownloader) GetAllComments(_ context.Context, page, perPage int) ([]*Comment, bool, error) {
return nil, false, ErrNotSupported{Entity: "AllComments"}
}
// GetPullRequests returns pull requests according to page and perPage
func (n NullDownloader) GetPullRequests(page, perPage int) ([]*PullRequest, bool, error) {
func (n NullDownloader) GetPullRequests(_ context.Context, page, perPage int) ([]*PullRequest, bool, error) {
return nil, false, ErrNotSupported{Entity: "PullRequests"}
}
// GetReviews returns pull request reviews
func (n NullDownloader) GetReviews(reviewable Reviewable) ([]*Review, error) {
func (n NullDownloader) GetReviews(_ context.Context, reviewable Reviewable) ([]*Review, error) {
return nil, ErrNotSupported{Entity: "Reviews"}
}

View File

@ -49,21 +49,15 @@ func (d *RetryDownloader) retry(work func() error) error {
return err
}
// SetContext set context
func (d *RetryDownloader) SetContext(ctx context.Context) {
d.ctx = ctx
d.Downloader.SetContext(ctx)
}
// GetRepoInfo returns a repository information with retry
func (d *RetryDownloader) GetRepoInfo() (*Repository, error) {
func (d *RetryDownloader) GetRepoInfo(ctx context.Context) (*Repository, error) {
var (
repo *Repository
err error
)
err = d.retry(func() error {
repo, err = d.Downloader.GetRepoInfo()
repo, err = d.Downloader.GetRepoInfo(ctx)
return err
})
@ -71,14 +65,14 @@ func (d *RetryDownloader) GetRepoInfo() (*Repository, error) {
}
// GetTopics returns a repository's topics with retry
func (d *RetryDownloader) GetTopics() ([]string, error) {
func (d *RetryDownloader) GetTopics(ctx context.Context) ([]string, error) {
var (
topics []string
err error
)
err = d.retry(func() error {
topics, err = d.Downloader.GetTopics()
topics, err = d.Downloader.GetTopics(ctx)
return err
})
@ -86,14 +80,14 @@ func (d *RetryDownloader) GetTopics() ([]string, error) {
}
// GetMilestones returns a repository's milestones with retry
func (d *RetryDownloader) GetMilestones() ([]*Milestone, error) {
func (d *RetryDownloader) GetMilestones(ctx context.Context) ([]*Milestone, error) {
var (
milestones []*Milestone
err error
)
err = d.retry(func() error {
milestones, err = d.Downloader.GetMilestones()
milestones, err = d.Downloader.GetMilestones(ctx)
return err
})
@ -101,14 +95,14 @@ func (d *RetryDownloader) GetMilestones() ([]*Milestone, error) {
}
// GetReleases returns a repository's releases with retry
func (d *RetryDownloader) GetReleases() ([]*Release, error) {
func (d *RetryDownloader) GetReleases(ctx context.Context) ([]*Release, error) {
var (
releases []*Release
err error
)
err = d.retry(func() error {
releases, err = d.Downloader.GetReleases()
releases, err = d.Downloader.GetReleases(ctx)
return err
})
@ -116,14 +110,14 @@ func (d *RetryDownloader) GetReleases() ([]*Release, error) {
}
// GetLabels returns a repository's labels with retry
func (d *RetryDownloader) GetLabels() ([]*Label, error) {
func (d *RetryDownloader) GetLabels(ctx context.Context) ([]*Label, error) {
var (
labels []*Label
err error
)
err = d.retry(func() error {
labels, err = d.Downloader.GetLabels()
labels, err = d.Downloader.GetLabels(ctx)
return err
})
@ -131,7 +125,7 @@ func (d *RetryDownloader) GetLabels() ([]*Label, error) {
}
// GetIssues returns a repository's issues with retry
func (d *RetryDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) {
func (d *RetryDownloader) GetIssues(ctx context.Context, page, perPage int) ([]*Issue, bool, error) {
var (
issues []*Issue
isEnd bool
@ -139,7 +133,7 @@ func (d *RetryDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) {
)
err = d.retry(func() error {
issues, isEnd, err = d.Downloader.GetIssues(page, perPage)
issues, isEnd, err = d.Downloader.GetIssues(ctx, page, perPage)
return err
})
@ -147,7 +141,7 @@ func (d *RetryDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) {
}
// GetComments returns a repository's comments with retry
func (d *RetryDownloader) GetComments(commentable Commentable) ([]*Comment, bool, error) {
func (d *RetryDownloader) GetComments(ctx context.Context, commentable Commentable) ([]*Comment, bool, error) {
var (
comments []*Comment
isEnd bool
@ -155,7 +149,7 @@ func (d *RetryDownloader) GetComments(commentable Commentable) ([]*Comment, bool
)
err = d.retry(func() error {
comments, isEnd, err = d.Downloader.GetComments(commentable)
comments, isEnd, err = d.Downloader.GetComments(ctx, commentable)
return err
})
@ -163,7 +157,7 @@ func (d *RetryDownloader) GetComments(commentable Commentable) ([]*Comment, bool
}
// GetPullRequests returns a repository's pull requests with retry
func (d *RetryDownloader) GetPullRequests(page, perPage int) ([]*PullRequest, bool, error) {
func (d *RetryDownloader) GetPullRequests(ctx context.Context, page, perPage int) ([]*PullRequest, bool, error) {
var (
prs []*PullRequest
err error
@ -171,7 +165,7 @@ func (d *RetryDownloader) GetPullRequests(page, perPage int) ([]*PullRequest, bo
)
err = d.retry(func() error {
prs, isEnd, err = d.Downloader.GetPullRequests(page, perPage)
prs, isEnd, err = d.Downloader.GetPullRequests(ctx, page, perPage)
return err
})
@ -179,14 +173,13 @@ func (d *RetryDownloader) GetPullRequests(page, perPage int) ([]*PullRequest, bo
}
// GetReviews returns pull requests reviews
func (d *RetryDownloader) GetReviews(reviewable Reviewable) ([]*Review, error) {
func (d *RetryDownloader) GetReviews(ctx context.Context, reviewable Reviewable) ([]*Review, error) {
var (
reviews []*Review
err error
)
err = d.retry(func() error {
reviews, err = d.Downloader.GetReviews(reviewable)
reviews, err = d.Downloader.GetReviews(ctx, reviewable)
return err
})
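The retry wrapper around each call keeps the same overall shape. The sketch below is a generic simplification of the pattern, not the actual RetryDownloader.retry implementation, which also has its own rules for errors that should not be retried:

	// retrySketch runs work up to maxRetries times, waiting delay between
	// attempts and stopping early when the context is cancelled.
	// (Uses the standard library "context" and "time" packages.)
	func retrySketch(ctx context.Context, maxRetries int, delay time.Duration, work func() error) error {
		var err error
		for i := 0; i < maxRetries; i++ {
			if err = work(); err == nil {
				return nil
			}
			select {
			case <-ctx.Done():
				return ctx.Err()
			case <-time.After(delay):
			}
		}
		return err
	}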

View File

@ -4,20 +4,22 @@
package migration
import "context"
// Uploader uploads all the information of one repository
type Uploader interface {
MaxBatchInsertSize(tp string) int
CreateRepo(repo *Repository, opts MigrateOptions) error
CreateTopics(topic ...string) error
CreateMilestones(milestones ...*Milestone) error
CreateReleases(releases ...*Release) error
SyncTags() error
CreateLabels(labels ...*Label) error
CreateIssues(issues ...*Issue) error
CreateComments(comments ...*Comment) error
CreatePullRequests(prs ...*PullRequest) error
CreateReviews(reviews ...*Review) error
CreateRepo(ctx context.Context, repo *Repository, opts MigrateOptions) error
CreateTopics(ctx context.Context, topic ...string) error
CreateMilestones(ctx context.Context, milestones ...*Milestone) error
CreateReleases(ctx context.Context, releases ...*Release) error
SyncTags(ctx context.Context) error
CreateLabels(ctx context.Context, labels ...*Label) error
CreateIssues(ctx context.Context, issues ...*Issue) error
CreateComments(ctx context.Context, comments ...*Comment) error
CreatePullRequests(ctx context.Context, prs ...*PullRequest) error
CreateReviews(ctx context.Context, reviews ...*Review) error
Rollback() error
Finish() error
Finish(ctx context.Context) error
Close()
}

View File

@ -32,3 +32,67 @@ type ActionTaskResponse struct {
Entries []*ActionTask `json:"workflow_runs"`
TotalCount int64 `json:"total_count"`
}
// CreateActionWorkflowDispatch represents the payload for triggering a workflow dispatch event
// swagger:model
type CreateActionWorkflowDispatch struct {
// required: true
// example: refs/heads/main
Ref string `json:"ref" binding:"Required"`
// required: false
Inputs map[string]string `json:"inputs,omitempty"`
}
// ActionWorkflow represents an ActionWorkflow
type ActionWorkflow struct {
ID string `json:"id"`
Name string `json:"name"`
Path string `json:"path"`
State string `json:"state"`
// swagger:strfmt date-time
CreatedAt time.Time `json:"created_at"`
// swagger:strfmt date-time
UpdatedAt time.Time `json:"updated_at"`
URL string `json:"url"`
HTMLURL string `json:"html_url"`
BadgeURL string `json:"badge_url"`
// swagger:strfmt date-time
DeletedAt time.Time `json:"deleted_at,omitempty"`
}
// ActionWorkflowResponse returns a list of ActionWorkflow
type ActionWorkflowResponse struct {
Workflows []*ActionWorkflow `json:"workflows"`
TotalCount int64 `json:"total_count"`
}
// ActionArtifact represents an ActionArtifact
type ActionArtifact struct {
ID int64 `json:"id"`
Name string `json:"name"`
SizeInBytes int64 `json:"size_in_bytes"`
URL string `json:"url"`
ArchiveDownloadURL string `json:"archive_download_url"`
Expired bool `json:"expired"`
WorkflowRun *ActionWorkflowRun `json:"workflow_run"`
// swagger:strfmt date-time
CreatedAt time.Time `json:"created_at"`
// swagger:strfmt date-time
UpdatedAt time.Time `json:"updated_at"`
// swagger:strfmt date-time
ExpiresAt time.Time `json:"expires_at"`
}
// ActionWorkflowRun represents a WorkflowRun
type ActionWorkflowRun struct {
ID int64 `json:"id"`
RepositoryID int64 `json:"repository_id"`
HeadSha string `json:"head_sha"`
}
// ActionArtifactsResponse returns ActionArtifacts
type ActionArtifactsResponse struct {
Entries []*ActionArtifact `json:"artifacts"`
TotalCount int64 `json:"total_count"`
}
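As an illustration of how the new CreateActionWorkflowDispatch type is meant to be consumed by API clients (the input name and values are placeholders; the route comes from the router changes elsewhere in this diff):

	// POST /api/v1/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches
	payload, _ := json.Marshal(api.CreateActionWorkflowDispatch{
		Ref:    "refs/heads/main",
		Inputs: map[string]string{"environment": "staging"}, // optional
	})
	// Send payload as application/json with a token that has the repository
	// scope; the handler answers 204 No Content on success.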

View File

@ -36,6 +36,22 @@ func (w SilentWrap) Unwrap() error {
return w.Err
}
type LocaleWrap struct {
err error
TrKey string
TrArgs []any
}
// Error returns the message
func (w LocaleWrap) Error() string {
return w.err.Error()
}
// Unwrap returns the underlying error
func (w LocaleWrap) Unwrap() error {
return w.err
}
// NewSilentWrapErrorf returns an error that formats as the given text but unwraps as the provided error
func NewSilentWrapErrorf(unwrap error, message string, args ...any) error {
if len(args) == 0 {
@ -63,3 +79,16 @@ func NewAlreadyExistErrorf(message string, args ...any) error {
func NewNotExistErrorf(message string, args ...any) error {
return NewSilentWrapErrorf(ErrNotExist, message, args...)
}
// ErrWrapLocale wraps an err with a translation key and arguments
func ErrWrapLocale(err error, trKey string, trArgs ...any) error {
return LocaleWrap{err: err, TrKey: trKey, TrArgs: trArgs}
}
func ErrAsLocale(err error) *LocaleWrap {
var e LocaleWrap
if errors.As(err, &e) {
return &e
}
return nil
}
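A rough usage sketch for the new wrapper; the translation key and the handler wiring are hypothetical, and ErrAsLocale is simply errors.As over the error chain:

	// Service layer: attach a translation key to a sentinel error.
	err := util.ErrWrapLocale(util.ErrPermissionDenied, "some.translation.key", repoName) // key is a placeholder

	// Handler layer: prefer the localized message when one was attached.
	if wrapped := util.ErrAsLocale(err); wrapped != nil {
		ctx.Flash.Error(ctx.Locale.TrString(wrapped.TrKey, wrapped.TrArgs...))
		return
	}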

View File

@ -71,3 +71,10 @@ func KeysOfMap[K comparable, V any](m map[K]V) []K {
}
return keys
}
func SliceNilAsEmpty[T any](a []T) []T {
if a == nil {
return []T{}
}
return a
}
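The usual motivation for SliceNilAsEmpty is JSON output: a nil slice marshals to null while an empty slice marshals to []. A small illustrative use (the field name is made up):

	var topics []string // may be nil when nothing was found
	resp := map[string]any{
		"topics": util.SliceNilAsEmpty(topics), // serializes as [] instead of null
	}
	_ = resp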

View File

@ -385,6 +385,7 @@ show_only_public=Zobrazeny pouze veřejné
issues.in_your_repos=Ve vašich repozitářích
[explore]
repos=Repozitáře
users=Uživatelé

View File

@ -384,6 +384,7 @@ show_only_public=Nur öffentliche anzeigen
issues.in_your_repos=Eigene Repositories
[explore]
repos=Repositories
users=Benutzer

View File

@ -335,6 +335,7 @@ show_only_public=Εμφανίζονται μόνο δημόσια
issues.in_your_repos=Στα αποθετήρια σας
[explore]
repos=Αποθετήρια
users=Χρήστες

View File

@ -385,6 +385,13 @@ show_only_public = Showing only public
issues.in_your_repos = In your repositories
guide_title = No Activity
guide_desc = You are currently not following any repositories or users, so there is no content to display. You can explore repositories or users of interest from the links below.
explore_repos = Explore repositories
explore_users = Explore users
empty_org = There are no organizations yet.
empty_repo = There are no repositories yet.
[explore]
repos = Repositories
users = Users

View File

@ -333,6 +333,7 @@ show_only_public=Mostrar sólo repositorios públicos
issues.in_your_repos=En tus repositorios
[explore]
repos=Repositorios
users=Usuarios

View File

@ -256,6 +256,7 @@ show_only_public=نمایش دادن موارد عمومی
issues.in_your_repos=در مخازن شما
[explore]
repos=مخازن
users=کاربران

View File

@ -266,6 +266,7 @@ show_only_public=Näytetään vain julkiset
issues.in_your_repos=Repoissasi
[explore]
repos=Repot
users=Käyttäjät

View File

@ -385,6 +385,13 @@ show_only_public=Afficher uniquement les dépôts publics
issues.in_your_repos=Dans vos dépôts
guide_title=Aucune activité
guide_desc=Vous n’êtes actuellement abonné à aucun dépôt ou utilisateur et il n’y a donc aucun contenu à afficher. Les liens ci-dessous vous permettront d’explorer des dépôts ou des utilisateurs susceptibles de vous intéresser.
explore_repos=Explorer des dépôts
explore_users=Explorer des utilisateurs
empty_org=Il n’y a pas encore d’organisations.
empty_repo=Il n’y a pas encore de dépôts.
[explore]
repos=Dépôts
users=Utilisateurs
@ -2330,6 +2337,8 @@ settings.event_fork=Bifurcation
settings.event_fork_desc=Dépôt bifurqué.
settings.event_wiki=Wiki
settings.event_wiki_desc=Page wiki créée, renommée, modifiée ou supprimée.
settings.event_statuses=Statuts
settings.event_statuses_desc=Statut de validation mis à jour depuis l’API.
settings.event_release=Publication
settings.event_release_desc=Publication publiée, mise à jour ou supprimée.
settings.event_push=Soumission
@ -2877,6 +2886,14 @@ view_as_role=Voir en tant que %s
view_as_public_hint=Vous visualisez le README en tant qu’utilisateur public.
view_as_member_hint=Vous visualisez le README en tant que membre de cette organisation.
worktime=Temps de travail
worktime.date_range_start=Date de début
worktime.date_range_end=Date de fin
worktime.query=Demande
worktime.time=Durée
worktime.by_repositories=Par dépôts
worktime.by_milestones=Par jalons
worktime.by_members=Par membres
[admin]
maintenance=Maintenance

View File

@ -385,6 +385,13 @@ show_only_public=Ag taispeáint poiblí amháin
issues.in_your_repos=I do stórais
guide_title=Gan Ghníomhaíocht
guide_desc=Níl aon stórtha nó úsáideoirí á leanúint agat faoi láthair, mar sin níl aon ábhar le taispeáint. Is féidir leat stórtha nó úsáideoirí spéise a iniúchadh ó na naisc thíos.
explore_repos=Déan stórtha a iniúchadh
explore_users=Déan iniúchadh ar úsáideoirí
empty_org=Níl aon eagraíochtaí ann fós.
empty_repo=Níl aon stórtha ann fós.
[explore]
repos=Stórais
users=Úsáideoirí
@ -2330,6 +2337,8 @@ settings.event_fork=Forc
settings.event_fork_desc=Forcadh stóras.
settings.event_wiki=Vicí
settings.event_wiki_desc=Leathanach Vicí cruthaithe, athainmnithe, curtha in eagar nó scriosta.
settings.event_statuses=Stádais
settings.event_statuses_desc=Nuashonraíodh Stádas Commit ón API.
settings.event_release=Scaoileadh
settings.event_release_desc=Scaoileadh foilsithe, nuashonraithe nó scriosta i stóras.
settings.event_push=Brúigh

View File

@ -225,6 +225,7 @@ show_only_public=Csak publikus mutatása
issues.in_your_repos=A tárolóidban
[explore]
repos=Tárolók
users=Felhasználók

View File

@ -243,6 +243,7 @@ show_private=Pribadi
issues.in_your_repos=Dalam repositori anda
[explore]
repos=Repositori
users=Pengguna

View File

@ -240,6 +240,7 @@ show_only_public=Að sýna aðeins opinber
issues.in_your_repos=Í hugbúnaðarsöfnum þínum
[explore]
repos=Hugbúnaðarsöfn
users=Notendur

View File

@ -277,6 +277,7 @@ show_only_public=Mostrando solo pubblici
issues.in_your_repos=Nei tuoi repository
[explore]
repos=Repository
users=Utenti

View File

@ -385,6 +385,7 @@ show_only_public=公開のみ表示
issues.in_your_repos=あなたのリポジトリ
[explore]
repos=リポジトリ
users=ユーザー

View File

@ -212,6 +212,7 @@ show_private=비공개
issues.in_your_repos=당신의 저장소에
[explore]
repos=저장소
users=유저

View File

@ -338,6 +338,7 @@ show_only_public=Attēlot tikai publiskos
issues.in_your_repos=Jūsu repozitorijos
[explore]
repos=Repozitoriji
users=Lietotāji

View File

@ -276,6 +276,7 @@ show_only_public=Toon alleen opbenbaar
issues.in_your_repos=In uw repositories
[explore]
repos=Repositories
users=Gebruikers

View File

@ -272,6 +272,7 @@ show_only_public=Wyświetlanie tylko publicznych
issues.in_your_repos=W Twoich repozytoriach
[explore]
repos=Repozytoria
users=Użytkownicy

View File

@ -335,6 +335,7 @@ show_only_public=Mostrando somente públicos
issues.in_your_repos=Em seus repositórios
[explore]
repos=Repositórios
users=Usuários

View File

@ -385,6 +385,13 @@ show_only_public=Apresentando somente os públicos
issues.in_your_repos=Nos seus repositórios
guide_title=Sem trabalho
guide_desc=Neste momento não está a seguir repositórios nem utilizadores, por isso não há conteúdo a apresentar. Pode explorar repositórios ou utilizadores de interesse a partir das ligações abaixo.
explore_repos=Explorar repositórios
explore_users=Explorar utilizadores
empty_org=Ainda não há organizações.
empty_repo=Ainda não há repositórios.
[explore]
repos=Repositórios
users=Utilizadores
@ -2330,6 +2337,8 @@ settings.event_fork=Derivar
settings.event_fork_desc=Feita a derivação do repositório.
settings.event_wiki=Wiki
settings.event_wiki_desc=Página do wiki criada, renomeada, editada ou eliminada.
settings.event_statuses=Estados
settings.event_statuses_desc=Estado do cometimento modificado através da API.
settings.event_release=Lançamento
settings.event_release_desc=Lançamento publicado, modificado ou eliminado num repositório.
settings.event_push=Enviar

View File

@ -333,6 +333,7 @@ show_only_public=Показаны только публичные
issues.in_your_repos=В ваших репозиториях
[explore]
repos=Репозитории
users=Пользователи

View File

@ -246,6 +246,7 @@ show_only_public=ප්‍රසිද්ධ පමණක් පෙන්වය
issues.in_your_repos=ඔබගේ කෝෂ්ඨවල
[explore]
repos=කෝෂ්ඨ
users=පරිශීලකයින්

View File

@ -328,6 +328,7 @@ show_only_public=Zobrazuje sa iba verejné
issues.in_your_repos=Vo vašich repozitároch
[explore]
repos=Repozitáre
users=Používatelia

View File

@ -233,6 +233,7 @@ show_only_public=Visar endast publika
issues.in_your_repos=I dina utvecklingskataloger
[explore]
repos=Utvecklingskataloger
users=Användare

View File

@ -380,6 +380,7 @@ show_only_public=Yalnızca açık olanlar gösteriliyor
issues.in_your_repos=Depolarınızda
[explore]
repos=Depolar
users=Kullanıcılar

View File

@ -260,6 +260,7 @@ show_only_public=Показано тільки публічні
issues.in_your_repos=В ваших репозиторіях
[explore]
repos=Репозиторії
users=Користувачі

View File

@ -384,6 +384,7 @@ show_only_public=只显示公开的
issues.in_your_repos=在您的仓库中
[explore]
repos=仓库
users=用户

View File

@ -118,6 +118,7 @@ show_private=私有庫
issues.in_your_repos=屬於該用戶儲存庫的
[explore]
repos=儲存庫
users=使用者

View File

@ -383,6 +383,7 @@ show_only_public=只顯示公開
issues.in_your_repos=在您的儲存庫中
[explore]
repos=儲存庫
users=使用者

package-lock.json (generated, 4278 changed lines)

File diff suppressed because it is too large.

View File

@ -4,29 +4,29 @@
"node": ">= 18.0.0"
},
"dependencies": {
"@citation-js/core": "0.7.14",
"@citation-js/plugin-bibtex": "0.7.17",
"@citation-js/plugin-csl": "0.7.14",
"@citation-js/core": "0.7.18",
"@citation-js/plugin-bibtex": "0.7.18",
"@citation-js/plugin-csl": "0.7.18",
"@citation-js/plugin-software-formats": "0.6.1",
"@github/markdown-toolbar-element": "2.2.3",
"@github/relative-time-element": "4.4.5",
"@github/text-expander-element": "2.9.1",
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
"@primer/octicons": "19.14.0",
"@primer/octicons": "19.15.0",
"@silverwind/vue3-calendar-heatmap": "2.0.6",
"add-asset-webpack-plugin": "3.0.0",
"ansi_up": "6.0.2",
"asciinema-player": "3.8.2",
"asciinema-player": "3.9.0",
"chart.js": "4.4.7",
"chartjs-adapter-dayjs-4": "1.0.4",
"chartjs-plugin-zoom": "2.2.0",
"clippie": "4.1.4",
"clippie": "4.1.5",
"cropperjs": "1.6.2",
"css-loader": "7.1.2",
"dayjs": "1.11.13",
"dropzone": "6.0.0-beta.2",
"easymde": "2.18.0",
"esbuild-loader": "4.2.2",
"esbuild-loader": "4.3.0",
"escape-goat": "4.0.0",
"fast-glob": "3.3.3",
"htmx.org": "2.0.4",
@ -39,13 +39,13 @@
"minimatch": "10.0.1",
"monaco-editor": "0.52.2",
"monaco-editor-webpack-plugin": "7.1.0",
"pdfobject": "2.3.0",
"pdfobject": "2.3.1",
"perfect-debounce": "1.0.0",
"postcss": "8.5.1",
"postcss": "8.5.2",
"postcss-loader": "8.1.1",
"postcss-nesting": "13.0.1",
"sortablejs": "1.15.6",
"swagger-ui-dist": "5.18.2",
"swagger-ui-dist": "5.18.3",
"tailwindcss": "3.4.17",
"throttle-debounce": "5.0.2",
"tinycolor2": "1.6.0",
@ -59,7 +59,7 @@
"vue-bar-graph": "2.2.0",
"vue-chartjs": "5.3.2",
"vue-loader": "17.4.2",
"webpack": "5.97.1",
"webpack": "5.98.0",
"webpack-cli": "6.0.1",
"wrap-ansi": "9.0.0"
},
@ -67,8 +67,8 @@
"@eslint-community/eslint-plugin-eslint-comments": "4.4.1",
"@playwright/test": "1.49.1",
"@stoplight/spectral-cli": "6.14.2",
"@stylistic/eslint-plugin-js": "2.13.0",
"@stylistic/stylelint-plugin": "3.1.1",
"@stylistic/eslint-plugin-js": "3.1.0",
"@stylistic/stylelint-plugin": "3.1.2",
"@types/dropzone": "5.7.9",
"@types/jquery": "3.5.32",
"@types/katex": "0.16.7",
@ -79,41 +79,40 @@
"@types/throttle-debounce": "5.0.2",
"@types/tinycolor2": "1.4.6",
"@types/toastify-js": "1.12.3",
"@typescript-eslint/eslint-plugin": "8.21.0",
"@typescript-eslint/parser": "8.21.0",
"@typescript-eslint/eslint-plugin": "8.24.0",
"@typescript-eslint/parser": "8.24.0",
"@vitejs/plugin-vue": "5.2.1",
"@vitest/eslint-plugin": "1.1.31",
"eslint": "8.57.0",
"eslint-import-resolver-typescript": "3.7.0",
"eslint-import-resolver-typescript": "3.8.0",
"eslint-plugin-array-func": "4.0.0",
"eslint-plugin-github": "5.0.2",
"eslint-plugin-import-x": "4.6.1",
"eslint-plugin-no-jquery": "3.1.0",
"eslint-plugin-no-use-extend-native": "0.5.0",
"eslint-plugin-playwright": "2.1.0",
"eslint-plugin-playwright": "2.2.0",
"eslint-plugin-regexp": "2.7.0",
"eslint-plugin-sonarjs": "3.0.1",
"eslint-plugin-sonarjs": "3.0.2",
"eslint-plugin-unicorn": "56.0.1",
"eslint-plugin-vitest": "0.4.1",
"eslint-plugin-vitest-globals": "1.5.0",
"eslint-plugin-vue": "9.32.0",
"eslint-plugin-vue-scoped-css": "2.9.0",
"eslint-plugin-wc": "2.2.0",
"happy-dom": "16.7.2",
"markdownlint-cli": "0.43.0",
"happy-dom": "17.1.0",
"markdownlint-cli": "0.44.0",
"nolyfill": "1.0.43",
"postcss-html": "1.8.0",
"stylelint": "16.14.1",
"stylelint-config-recommended": "15.0.0",
"stylelint-declaration-block-no-ignored-properties": "2.8.0",
"stylelint-declaration-strict-value": "1.10.7",
"stylelint-define-config": "16.14.0",
"stylelint-define-config": "16.14.1",
"stylelint-value-no-unknown-custom-properties": "6.0.1",
"svgo": "3.3.2",
"type-fest": "4.33.0",
"updates": "16.4.1",
"vite-string-plugin": "1.4.3",
"vitest": "3.0.3",
"vue-tsc": "2.2.0"
"type-fest": "4.34.1",
"updates": "16.4.2",
"vite-string-plugin": "1.4.4",
"vitest": "3.0.5",
"vue-tsc": "2.2.2"
},
"browserslist": [
"defaults"

poetry.lock (generated, 10 changed lines)
View File

@ -29,13 +29,14 @@ files = [
[[package]]
name = "cssbeautifier"
version = "1.15.1"
version = "1.15.3"
description = "CSS unobfuscator and beautifier."
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "cssbeautifier-1.15.1.tar.gz", hash = "sha256:9f7064362aedd559c55eeecf6b6bed65e05f33488dcbe39044f0403c26e1c006"},
{file = "cssbeautifier-1.15.3-py3-none-any.whl", hash = "sha256:0dcaf5ce197743a79b3a160b84ea58fcbd9e3e767c96df1171e428125b16d410"},
{file = "cssbeautifier-1.15.3.tar.gz", hash = "sha256:406b04d09e7d62c0be084fbfa2cba5126fe37359ea0d8d9f7b963a6354fc8303"},
]
[package.dependencies]
@ -102,13 +103,14 @@ files = [
[[package]]
name = "jsbeautifier"
version = "1.15.1"
version = "1.15.3"
description = "JavaScript unobfuscator and beautifier."
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "jsbeautifier-1.15.1.tar.gz", hash = "sha256:ebd733b560704c602d744eafc839db60a1ee9326e30a2a80c4adb8718adc1b24"},
{file = "jsbeautifier-1.15.3-py3-none-any.whl", hash = "sha256:b207a15ab7529eee4a35ae7790e9ec4e32a2b5026d51e2d0386c3a65e6ecfc91"},
{file = "jsbeautifier-1.15.3.tar.gz", hash = "sha256:5f1baf3d4ca6a615bb5417ee861b34b77609eeb12875555f8bbfabd9bf2f3457"},
]
[package.dependencies]

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" class="svg octicon-square-circle" width="16" height="16" aria-hidden="true"><path d="M8 16A8 8 0 1 1 8 0a8 8 0 0 1 0 16m0-1.5a6.5 6.5 0 1 0 0-13 6.5 6.5 0 0 0 0 13"/><path d="M5 5.75A.75.75 0 0 1 5.75 5h4.5a.75.75 0 0 1 .75.75v4.5a.75.75 0 0 1-.75.75h-4.5a.75.75 0 0 1-.75-.75Z"/></svg>


View File

@ -292,7 +292,7 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st
}
artifact.StoragePath = storagePath
artifact.Status = int64(actions.ArtifactStatusUploadConfirmed)
artifact.Status = actions.ArtifactStatusUploadConfirmed
if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
return fmt.Errorf("update artifact error: %v", err)
}

View File

@ -25,7 +25,7 @@ package actions
// 1.3. Continue Upload Zip Content to Blobstorage (unauthenticated request), repeat until everything is uploaded
// PUT: http://localhost:3000/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact?sig=mO7y35r4GyjN7fwg0DTv3-Fv1NDXD84KLEgLpoPOtDI=&expires=2024-01-23+21%3A48%3A37.20833956+%2B0100+CET&artifactName=test&taskID=75&comp=appendBlock
// 1.4. BlockList xml payload to Blobstorage (unauthenticated request)
// Files of about 800MB are parallel in parallel and / or out of order, this file is needed to enshure the correct order
// Files of about 800MB are uploaded in parallel and/or out of order; this file is needed to ensure the correct order
// PUT: http://localhost:3000/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact?sig=mO7y35r4GyjN7fwg0DTv3-Fv1NDXD84KLEgLpoPOtDI=&expires=2024-01-23+21%3A48%3A37.20833956+%2B0100+CET&artifactName=test&taskID=75&comp=blockList
// Request
// <?xml version="1.0" encoding="UTF-8" standalone="yes"?>

View File

@ -156,7 +156,7 @@ func (s *Service) FetchTask(
// if the task version in request is not equal to the version in db,
// it means there may still be some tasks not yet assigned.
// try to pick a task for the runner that send the request.
if t, ok, err := pickTask(ctx, runner); err != nil {
if t, ok, err := actions_service.PickTask(ctx, runner); err != nil {
log.Error("pick task failed: %v", err)
return nil, status.Errorf(codes.Internal, "pick task: %v", err)
} else if ok {

View File

@ -1,95 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package runner
import (
"context"
"fmt"
actions_model "code.gitea.io/gitea/models/actions"
secret_model "code.gitea.io/gitea/models/secret"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/services/actions"
runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
"google.golang.org/protobuf/types/known/structpb"
)
func pickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv1.Task, bool, error) {
t, ok, err := actions_model.CreateTaskForRunner(ctx, runner)
if err != nil {
return nil, false, fmt.Errorf("CreateTaskForRunner: %w", err)
}
if !ok {
return nil, false, nil
}
secrets, err := secret_model.GetSecretsOfTask(ctx, t)
if err != nil {
return nil, false, fmt.Errorf("GetSecretsOfTask: %w", err)
}
vars, err := actions_model.GetVariablesOfRun(ctx, t.Job.Run)
if err != nil {
return nil, false, fmt.Errorf("GetVariablesOfRun: %w", err)
}
actions.CreateCommitStatus(ctx, t.Job)
task := &runnerv1.Task{
Id: t.ID,
WorkflowPayload: t.Job.WorkflowPayload,
Context: generateTaskContext(t),
Secrets: secrets,
Vars: vars,
}
if needs, err := findTaskNeeds(ctx, t); err != nil {
log.Error("Cannot find needs for task %v: %v", t.ID, err)
// Go on with empty needs.
// If return error, the task will be wild, which means the runner will never get it when it has been assigned to the runner.
// In contrast, missing needs is less serious.
// And the task will fail and the runner will report the error in the logs.
} else {
task.Needs = needs
}
return task, true, nil
}
func generateTaskContext(t *actions_model.ActionTask) *structpb.Struct {
giteaRuntimeToken, err := actions.CreateAuthorizationToken(t.ID, t.Job.RunID, t.JobID)
if err != nil {
log.Error("actions.CreateAuthorizationToken failed: %v", err)
}
gitCtx := actions.GenerateGiteaContext(t.Job.Run, t.Job)
gitCtx["token"] = t.Token
gitCtx["gitea_runtime_token"] = giteaRuntimeToken
taskContext, err := structpb.NewStruct(gitCtx)
if err != nil {
log.Error("structpb.NewStruct failed: %v", err)
}
return taskContext
}
func findTaskNeeds(ctx context.Context, task *actions_model.ActionTask) (map[string]*runnerv1.TaskNeed, error) {
if err := task.LoadAttributes(ctx); err != nil {
return nil, fmt.Errorf("task LoadAttributes: %w", err)
}
taskNeeds, err := actions.FindTaskNeeds(ctx, task.Job)
if err != nil {
return nil, err
}
ret := make(map[string]*runnerv1.TaskNeed, len(taskNeeds))
for jobID, taskNeed := range taskNeeds {
ret[jobID] = &runnerv1.TaskNeed{
Outputs: taskNeed.Outputs,
Result: runnerv1.Result(taskNeed.Result),
}
}
return ret, nil
}

View File

@ -268,12 +268,12 @@ func checkTokenPublicOnly() func(ctx *context.APIContext) {
return
}
case auth_model.ContainsCategory(requiredScopeCategories, auth_model.AccessTokenScopeCategoryUser):
if ctx.ContextUser != nil && ctx.ContextUser.IsUser() && ctx.ContextUser.Visibility != api.VisibleTypePublic {
if ctx.ContextUser != nil && ctx.ContextUser.IsTokenAccessAllowed() && ctx.ContextUser.Visibility != api.VisibleTypePublic {
ctx.Error(http.StatusForbidden, "reqToken", "token scope is limited to public users")
return
}
case auth_model.ContainsCategory(requiredScopeCategories, auth_model.AccessTokenScopeCategoryActivityPub):
if ctx.ContextUser != nil && ctx.ContextUser.IsUser() && ctx.ContextUser.Visibility != api.VisibleTypePublic {
if ctx.ContextUser != nil && ctx.ContextUser.IsTokenAccessAllowed() && ctx.ContextUser.Visibility != api.VisibleTypePublic {
ctx.Error(http.StatusForbidden, "reqToken", "token scope is limited to public activitypub")
return
}
@ -1155,11 +1155,17 @@ func Routes() *web.Router {
m.Post("/accept", repo.AcceptTransfer)
m.Post("/reject", repo.RejectTransfer)
}, reqToken())
addActionsRoutes(
m,
reqOwner(),
repo.NewAction(),
)
addActionsRoutes(m, reqOwner(), repo.NewAction()) // it adds the routes for secrets/variables and runner management
m.Group("/actions/workflows", func() {
m.Get("", repo.ActionsListRepositoryWorkflows)
m.Get("/{workflow_id}", repo.ActionsGetWorkflow)
m.Put("/{workflow_id}/disable", reqRepoWriter(unit.TypeActions), repo.ActionsDisableWorkflow)
m.Put("/{workflow_id}/enable", reqRepoWriter(unit.TypeActions), repo.ActionsEnableWorkflow)
m.Post("/{workflow_id}/dispatches", reqRepoWriter(unit.TypeActions), bind(api.CreateActionWorkflowDispatch{}), repo.ActionsDispatchWorkflow)
}, context.ReferencesGitRepo(), reqToken(), reqRepoReader(unit.TypeActions))
m.Group("/hooks/git", func() {
m.Combo("").Get(repo.ListGitHooks)
m.Group("/{id}", func() {
@ -1235,6 +1241,13 @@ func Routes() *web.Router {
}, reqToken(), reqAdmin())
m.Group("/actions", func() {
m.Get("/tasks", repo.ListActionTasks)
m.Get("/runs/{run}/artifacts", repo.GetArtifactsOfRun)
m.Get("/artifacts", repo.GetArtifacts)
m.Group("/artifacts/{artifact_id}", func() {
m.Get("", repo.GetArtifact)
m.Delete("", reqRepoWriter(unit.TypeActions), repo.DeleteArtifact)
})
m.Get("/artifacts/{artifact_id}/zip", repo.DownloadArtifact)
}, reqRepoReader(unit.TypeActions), context.ReferencesGitRepo(true))
m.Group("/keys", func() {
m.Combo("").Get(repo.ListDeployKeys).
@ -1395,6 +1408,10 @@ func Routes() *web.Router {
}, repoAssignment(), checkTokenPublicOnly())
}, tokenRequiresScopes(auth_model.AccessTokenScopeCategoryRepository))
// Artifacts direct download endpoint authenticates via signed url
// it is protected by the "sig" parameter (to help to access private repo), so no need to use other middlewares
m.Get("/repos/{username}/{reponame}/actions/artifacts/{artifact_id}/zip/raw", repo.DownloadArtifactRaw)
// Notifications (requires notifications scope)
m.Group("/repos", func() {
m.Group("/{username}/{reponame}", func() {
@ -1520,13 +1537,19 @@ func Routes() *web.Router {
// NOTE: these are Gitea package management API - see packages.CommonRoutes and packages.DockerContainerRoutes for endpoints that implement package manager APIs
m.Group("/packages/{username}", func() {
m.Group("/{type}/{name}/{version}", func() {
m.Get("", reqToken(), packages.GetPackage)
m.Delete("", reqToken(), reqPackageAccess(perm.AccessModeWrite), packages.DeletePackage)
m.Get("/files", reqToken(), packages.ListPackageFiles)
m.Group("/{type}/{name}", func() {
m.Group("/{version}", func() {
m.Get("", packages.GetPackage)
m.Delete("", reqPackageAccess(perm.AccessModeWrite), packages.DeletePackage)
m.Get("/files", packages.ListPackageFiles)
})
m.Post("/-/link/{repo_name}", reqPackageAccess(perm.AccessModeWrite), packages.LinkPackage)
m.Post("/-/unlink", reqPackageAccess(perm.AccessModeWrite), packages.UnlinkPackage)
})
m.Get("/", reqToken(), packages.ListPackages)
}, tokenRequiresScopes(auth_model.AccessTokenScopeCategoryPackage), context.UserAssignmentAPI(), context.PackageAssignmentAPI(), reqPackageAccess(perm.AccessModeRead), checkTokenPublicOnly())
m.Get("/", packages.ListPackages)
}, reqToken(), tokenRequiresScopes(auth_model.AccessTokenScopeCategoryPackage), context.UserAssignmentAPI(), context.PackageAssignmentAPI(), reqPackageAccess(perm.AccessModeRead), checkTokenPublicOnly())
// Organizations
m.Get("/user/orgs", reqToken(), tokenRequiresScopes(auth_model.AccessTokenScopeCategoryUser, auth_model.AccessTokenScopeCategoryOrganization), org.ListMyOrgs)

View File

@ -450,7 +450,11 @@ func (Action) UpdateVariable(ctx *context.APIContext) {
if opt.Name == "" {
opt.Name = ctx.PathParam("variablename")
}
if _, err := actions_service.UpdateVariable(ctx, v.ID, opt.Name, opt.Value); err != nil {
v.Name = opt.Name
v.Data = opt.Value
if _, err := actions_service.UpdateVariableNameData(ctx, v); err != nil {
if errors.Is(err, util.ErrInvalidArgument) {
ctx.Error(http.StatusBadRequest, "UpdateVariable", err)
} else {

View File

@ -4,11 +4,14 @@
package packages
import (
"errors"
"net/http"
"code.gitea.io/gitea/models/packages"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/optional"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/api/v1/utils"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
@ -213,3 +216,122 @@ func ListPackageFiles(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, apiPackageFiles)
}
// LinkPackage sets a repository link for a package
func LinkPackage(ctx *context.APIContext) {
// swagger:operation POST /packages/{owner}/{type}/{name}/-/link/{repo_name} package linkPackage
// ---
// summary: Link a package to a repository
// parameters:
// - name: owner
// in: path
// description: owner of the package
// type: string
// required: true
// - name: type
// in: path
// description: type of the package
// type: string
// required: true
// - name: name
// in: path
// description: name of the package
// type: string
// required: true
// - name: repo_name
// in: path
// description: name of the repository to link.
// type: string
// required: true
// responses:
// "201":
// "$ref": "#/responses/empty"
// "404":
// "$ref": "#/responses/notFound"
pkg, err := packages.GetPackageByName(ctx, ctx.ContextUser.ID, packages.Type(ctx.PathParam("type")), ctx.PathParam("name"))
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.Error(http.StatusNotFound, "GetPackageByName", err)
} else {
ctx.Error(http.StatusInternalServerError, "GetPackageByName", err)
}
return
}
repo, err := repo_model.GetRepositoryByName(ctx, ctx.ContextUser.ID, ctx.PathParam("repo_name"))
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.Error(http.StatusNotFound, "GetRepositoryByName", err)
} else {
ctx.Error(http.StatusInternalServerError, "GetRepositoryByName", err)
}
return
}
err = packages_service.LinkToRepository(ctx, pkg, repo, ctx.Doer)
if err != nil {
switch {
case errors.Is(err, util.ErrInvalidArgument):
ctx.Error(http.StatusBadRequest, "LinkToRepository", err)
case errors.Is(err, util.ErrPermissionDenied):
ctx.Error(http.StatusForbidden, "LinkToRepository", err)
default:
ctx.Error(http.StatusInternalServerError, "LinkToRepository", err)
}
return
}
ctx.Status(http.StatusCreated)
}
// UnlinkPackage removes the repository link from a package
func UnlinkPackage(ctx *context.APIContext) {
// swagger:operation POST /packages/{owner}/{type}/{name}/-/unlink package unlinkPackage
// ---
// summary: Unlink a package from a repository
// parameters:
// - name: owner
// in: path
// description: owner of the package
// type: string
// required: true
// - name: type
// in: path
// description: type of the package
// type: string
// required: true
// - name: name
// in: path
// description: name of the package
// type: string
// required: true
// responses:
// "201":
// "$ref": "#/responses/empty"
// "404":
// "$ref": "#/responses/notFound"
pkg, err := packages.GetPackageByName(ctx, ctx.ContextUser.ID, packages.Type(ctx.PathParam("type")), ctx.PathParam("name"))
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.Error(http.StatusNotFound, "GetPackageByName", err)
} else {
ctx.Error(http.StatusInternalServerError, "GetPackageByName", err)
}
return
}
err = packages_service.UnlinkFromRepository(ctx, pkg, ctx.Doer)
if err != nil {
switch {
case errors.Is(err, util.ErrPermissionDenied):
ctx.Error(http.StatusForbidden, "UnlinkFromRepository", err)
case errors.Is(err, util.ErrInvalidArgument):
ctx.Error(http.StatusBadRequest, "UnlinkFromRepository", err)
default:
ctx.Error(http.StatusInternalServerError, "UnlinkFromRepository", err)
}
return
}
ctx.Status(http.StatusNoContent)
}
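An illustrative client call for the two new endpoints; the host, owner, package, repository name, and token are placeholders:

	// Link the generic package "mypkg" owned by "alice" to her repository "tools".
	req, _ := http.NewRequest(http.MethodPost,
		"https://gitea.example.com/api/v1/packages/alice/generic/mypkg/-/link/tools", nil)
	req.Header.Set("Authorization", "token "+apiToken)
	resp, err := http.DefaultClient.Do(req)
	// Expect 201 Created on success; POST .../mypkg/-/unlink removes the link again.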

View File

@ -4,12 +4,25 @@
package repo
import (
go_context "context"
"crypto/hmac"
"crypto/sha256"
"encoding/base64"
"errors"
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"time"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
secret_model "code.gitea.io/gitea/models/secret"
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
@ -19,6 +32,8 @@ import (
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
secret_service "code.gitea.io/gitea/services/secrets"
"github.com/nektos/act/pkg/model"
)
// ListActionsSecrets list an repo's actions secrets
@ -414,7 +429,11 @@ func (Action) UpdateVariable(ctx *context.APIContext) {
if opt.Name == "" {
opt.Name = ctx.PathParam("variablename")
}
if _, err := actions_service.UpdateVariable(ctx, v.ID, opt.Name, opt.Value); err != nil {
v.Name = opt.Name
v.Data = opt.Value
if _, err := actions_service.UpdateVariableNameData(ctx, v); err != nil {
if errors.Is(err, util.ErrInvalidArgument) {
ctx.Error(http.StatusBadRequest, "UpdateVariable", err)
} else {
@ -581,3 +600,649 @@ func ListActionTasks(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, &res)
}
func ActionsListRepositoryWorkflows(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/actions/workflows repository ActionsListRepositoryWorkflows
// ---
// summary: List repository workflows
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// responses:
// "200":
// "$ref": "#/responses/ActionWorkflowList"
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/notFound"
// "422":
// "$ref": "#/responses/validationError"
// "500":
// "$ref": "#/responses/error"
workflows, err := actions_service.ListActionWorkflows(ctx)
if err != nil {
ctx.Error(http.StatusInternalServerError, "ListActionWorkflows", err)
return
}
ctx.JSON(http.StatusOK, &api.ActionWorkflowResponse{Workflows: workflows, TotalCount: int64(len(workflows))})
}
func ActionsGetWorkflow(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/actions/workflows/{workflow_id} repository ActionsGetWorkflow
// ---
// summary: Get a workflow
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: workflow_id
// in: path
// description: id of the workflow
// type: string
// required: true
// responses:
// "200":
// "$ref": "#/responses/ActionWorkflow"
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/notFound"
// "422":
// "$ref": "#/responses/validationError"
// "500":
// "$ref": "#/responses/error"
workflowID := ctx.PathParam("workflow_id")
workflow, err := actions_service.GetActionWorkflow(ctx, workflowID)
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.Error(http.StatusNotFound, "GetActionWorkflow", err)
} else {
ctx.Error(http.StatusInternalServerError, "GetActionWorkflow", err)
}
return
}
ctx.JSON(http.StatusOK, workflow)
}
func ActionsDisableWorkflow(ctx *context.APIContext) {
// swagger:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable repository ActionsDisableWorkflow
// ---
// summary: Disable a workflow
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: workflow_id
// in: path
// description: id of the workflow
// type: string
// required: true
// responses:
// "204":
// description: No Content
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/notFound"
// "422":
// "$ref": "#/responses/validationError"
workflowID := ctx.PathParam("workflow_id")
err := actions_service.EnableOrDisableWorkflow(ctx, workflowID, false)
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.Error(http.StatusNotFound, "DisableActionWorkflow", err)
} else {
ctx.Error(http.StatusInternalServerError, "DisableActionWorkflow", err)
}
return
}
ctx.Status(http.StatusNoContent)
}
func ActionsDispatchWorkflow(ctx *context.APIContext) {
// swagger:operation POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches repository ActionsDispatchWorkflow
// ---
// summary: Create a workflow dispatch event
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: workflow_id
// in: path
// description: id of the workflow
// type: string
// required: true
// - name: body
// in: body
// schema:
// "$ref": "#/definitions/CreateActionWorkflowDispatch"
// responses:
// "204":
// description: No Content
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/notFound"
// "422":
// "$ref": "#/responses/validationError"
workflowID := ctx.PathParam("workflow_id")
opt := web.GetForm(ctx).(*api.CreateActionWorkflowDispatch)
if opt.Ref == "" {
ctx.Error(http.StatusUnprocessableEntity, "MissingWorkflowParameter", util.NewInvalidArgumentErrorf("ref is required parameter"))
return
}
err := actions_service.DispatchActionWorkflow(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, workflowID, opt.Ref, func(workflowDispatch *model.WorkflowDispatch, inputs map[string]any) error {
if strings.Contains(ctx.Req.Header.Get("Content-Type"), "form-urlencoded") {
// The chi framework's "Binding" doesn't support to bind the form map values into a map[string]string
// So we have to manually read the `inputs[key]` from the form
for name, config := range workflowDispatch.Inputs {
value := ctx.FormString("inputs["+name+"]", config.Default)
inputs[name] = value
}
} else {
for name, config := range workflowDispatch.Inputs {
value, ok := opt.Inputs[name]
if ok {
inputs[name] = value
} else {
inputs[name] = config.Default
}
}
}
return nil
})
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.Error(http.StatusNotFound, "DispatchActionWorkflow", err)
} else if errors.Is(err, util.ErrPermissionDenied) {
ctx.Error(http.StatusForbidden, "DispatchActionWorkflow", err)
} else {
ctx.Error(http.StatusInternalServerError, "DispatchActionWorkflow", err)
}
return
}
ctx.Status(http.StatusNoContent)
}
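Because of the form-binding limitation noted in the handler, workflow inputs can also be sent form-encoded, with each input read back as inputs[<name>]. A sketch of that request shape (dispatchURL, the input name, and the values are placeholders):

	form := url.Values{
		"ref":                 {"refs/heads/main"},
		"inputs[environment]": {"staging"},
	}
	req, _ := http.NewRequest(http.MethodPost, dispatchURL, strings.NewReader(form.Encode()))
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")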
func ActionsEnableWorkflow(ctx *context.APIContext) {
// swagger:operation PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable repository ActionsEnableWorkflow
// ---
// summary: Enable a workflow
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: owner of the repo
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repo
// type: string
// required: true
// - name: workflow_id
// in: path
// description: id of the workflow
// type: string
// required: true
// responses:
// "204":
// description: No Content
// "400":
// "$ref": "#/responses/error"
// "403":
// "$ref": "#/responses/forbidden"
// "404":
// "$ref": "#/responses/notFound"
// "409":
// "$ref": "#/responses/conflict"
// "422":
// "$ref": "#/responses/validationError"
workflowID := ctx.PathParam("workflow_id")
err := actions_service.EnableOrDisableWorkflow(ctx, workflowID, true)
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.Error(http.StatusNotFound, "EnableActionWorkflow", err)
} else {
ctx.Error(http.StatusInternalServerError, "EnableActionWorkflow", err)
}
return
}
ctx.Status(http.StatusNoContent)
}
// GetArtifactsOfRun Lists all artifacts for a workflow run.
func GetArtifactsOfRun(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/actions/runs/{run}/artifacts repository getArtifactsOfRun
// ---
// summary: Lists all artifacts for a repository run
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: name of the owner
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repository
// type: string
// required: true
// - name: run
// in: path
// description: runid of the workflow run
// type: integer
// required: true
// - name: name
// in: query
// description: name of the artifact
// type: string
// required: false
// responses:
// "200":
// "$ref": "#/responses/ArtifactsList"
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
repoID := ctx.Repo.Repository.ID
artifactName := ctx.Req.URL.Query().Get("name")
runID := ctx.PathParamInt64("run")
artifacts, total, err := db.FindAndCount[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
RepoID: repoID,
RunID: runID,
ArtifactName: artifactName,
FinalizedArtifactsV4: true,
ListOptions: utils.GetListOptions(ctx),
})
if err != nil {
ctx.Error(http.StatusInternalServerError, err.Error(), err)
return
}
res := new(api.ActionArtifactsResponse)
res.TotalCount = total
res.Entries = make([]*api.ActionArtifact, len(artifacts))
for i := range artifacts {
convertedArtifact, err := convert.ToActionArtifact(ctx.Repo.Repository, artifacts[i])
if err != nil {
ctx.Error(http.StatusInternalServerError, "ToActionArtifact", err)
return
}
res.Entries[i] = convertedArtifact
}
ctx.JSON(http.StatusOK, &res)
}
// GetArtifacts Lists all artifacts for a repository.
func GetArtifacts(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/actions/artifacts repository getArtifacts
// ---
// summary: Lists all artifacts for a repository
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: name of the owner
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repository
// type: string
// required: true
// - name: name
// in: query
// description: name of the artifact
// type: string
// required: false
// responses:
// "200":
// "$ref": "#/responses/ArtifactsList"
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
repoID := ctx.Repo.Repository.ID
artifactName := ctx.Req.URL.Query().Get("name")
artifacts, total, err := db.FindAndCount[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
RepoID: repoID,
ArtifactName: artifactName,
FinalizedArtifactsV4: true,
ListOptions: utils.GetListOptions(ctx),
})
if err != nil {
ctx.Error(http.StatusInternalServerError, err.Error(), err)
return
}
res := new(api.ActionArtifactsResponse)
res.TotalCount = total
res.Entries = make([]*api.ActionArtifact, len(artifacts))
for i := range artifacts {
convertedArtifact, err := convert.ToActionArtifact(ctx.Repo.Repository, artifacts[i])
if err != nil {
ctx.Error(http.StatusInternalServerError, "ToActionArtifact", err)
return
}
res.Entries[i] = convertedArtifact
}
ctx.JSON(http.StatusOK, &res)
}
// GetArtifact Gets a specific artifact for a workflow run.
func GetArtifact(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id} repository getArtifact
// ---
// summary: Gets a specific artifact for a workflow run
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: name of the owner
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repository
// type: string
// required: true
// - name: artifact_id
// in: path
// description: id of the artifact
// type: string
// required: true
// responses:
// "200":
// "$ref": "#/responses/Artifact"
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
art := getArtifactByPathParam(ctx, ctx.Repo.Repository)
if ctx.Written() {
return
}
if actions.IsArtifactV4(art) {
convertedArtifact, err := convert.ToActionArtifact(ctx.Repo.Repository, art)
if err != nil {
ctx.Error(http.StatusInternalServerError, "ToActionArtifact", err)
return
}
ctx.JSON(http.StatusOK, convertedArtifact)
return
}
// v3 not supported due to not having one unique id
ctx.Error(http.StatusNotFound, "GetArtifact", "Artifact not found")
}
// DeleteArtifact Deletes a specific artifact for a workflow run.
func DeleteArtifact(ctx *context.APIContext) {
// swagger:operation DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id} repository deleteArtifact
// ---
// summary: Deletes a specific artifact for a workflow run
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: name of the owner
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repository
// type: string
// required: true
// - name: artifact_id
// in: path
// description: id of the artifact
// type: string
// required: true
// responses:
// "204":
// description: "No Content"
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
art := getArtifactByPathParam(ctx, ctx.Repo.Repository)
if ctx.Written() {
return
}
if actions.IsArtifactV4(art) {
if err := actions_model.SetArtifactNeedDelete(ctx, art.RunID, art.ArtifactName); err != nil {
ctx.Error(http.StatusInternalServerError, "DeleteArtifact", err)
return
}
ctx.Status(http.StatusNoContent)
return
}
// v3 not supported due to not having one unique id
ctx.Error(http.StatusNotFound, "DeleteArtifact", "Artifact not found")
}
func buildSignature(endp string, expires, artifactID int64) []byte {
mac := hmac.New(sha256.New, setting.GetGeneralTokenSigningSecret())
mac.Write([]byte(endp))
mac.Write([]byte(fmt.Sprint(expires)))
mac.Write([]byte(fmt.Sprint(artifactID)))
return mac.Sum(nil)
}
func buildDownloadRawEndpoint(repo *repo_model.Repository, artifactID int64) string {
return fmt.Sprintf("api/v1/repos/%s/%s/actions/artifacts/%d/zip/raw", url.PathEscape(repo.OwnerName), url.PathEscape(repo.Name), artifactID)
}
func buildSigURL(ctx go_context.Context, endPoint string, artifactID int64) string {
// endPoint is a path like "api/v1/repos/owner/repo/actions/artifacts/1/zip/raw"
expires := time.Now().Add(60 * time.Minute).Unix()
signedURL := httplib.GuessCurrentAppURL(ctx) + endPoint + "?sig=" + base64.URLEncoding.EncodeToString(buildSignature(endPoint, expires, artifactID)) + "&expires=" + strconv.FormatInt(expires, 10)
return signedURL
}
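buildSignature and buildSigURL implement a small HMAC scheme: the raw-download endpoint path, an expiry timestamp, and the artifact ID are MACed with the instance signing secret, and DownloadArtifactRaw below recomputes the MAC, compares it with hmac.Equal, and rejects expired links. A minimal, self-contained sketch of that sign-then-verify round trip is shown here; the secret and values are hypothetical stand-ins for setting.GetGeneralTokenSigningSecret() and the real endpoint.

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"time"
)

// sign mirrors the buildSignature logic, with the secret supplied by the caller.
func sign(secret []byte, endpoint string, expires, artifactID int64) []byte {
	mac := hmac.New(sha256.New, secret)
	mac.Write([]byte(endpoint))
	mac.Write([]byte(fmt.Sprint(expires)))
	mac.Write([]byte(fmt.Sprint(artifactID)))
	return mac.Sum(nil)
}

func main() {
	secret := []byte("hypothetical-signing-secret")
	endpoint := "api/v1/repos/owner/repo/actions/artifacts/42/zip/raw"
	expires := time.Now().Add(60 * time.Minute).Unix()

	// Issue the signed link, as buildSigURL does.
	sig := base64.URLEncoding.EncodeToString(sign(secret, endpoint, expires, 42))
	fmt.Printf("%s?sig=%s&expires=%d\n", endpoint, sig, expires)

	// Verify it later, as DownloadArtifactRaw does: recompute and compare in constant time,
	// then make sure the expiry timestamp is still in the future.
	got, _ := base64.URLEncoding.DecodeString(sig)
	want := sign(secret, endpoint, expires, 42)
	fmt.Println("signature valid:", hmac.Equal(got, want) && time.Unix(expires, 0).After(time.Now()))
}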
// DownloadArtifact Downloads a specific artifact for a workflow run and redirects to the blob URL.
func DownloadArtifact(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/zip repository downloadArtifact
// ---
// summary: Downloads a specific artifact for a workflow run and redirects to the blob URL
// produces:
// - application/json
// parameters:
// - name: owner
// in: path
// description: name of the owner
// type: string
// required: true
// - name: repo
// in: path
// description: name of the repository
// type: string
// required: true
// - name: artifact_id
// in: path
// description: id of the artifact
// type: string
// required: true
// responses:
// "302":
// description: redirect to the blob download
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
art := getArtifactByPathParam(ctx, ctx.Repo.Repository)
if ctx.Written() {
return
}
// expired artifacts cannot be downloaded anymore, so treat them as not found
if art.Status == actions_model.ArtifactStatusExpired {
ctx.Error(http.StatusNotFound, "DownloadArtifact", "Artifact has expired")
return
}
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(art.ArtifactName), art.ArtifactName))
if actions.IsArtifactV4(art) {
ok, err := actions.DownloadArtifactV4ServeDirectOnly(ctx.Base, art)
if ok {
return
}
if err != nil {
ctx.Error(http.StatusInternalServerError, "DownloadArtifactV4ServeDirectOnly", err)
return
}
redirectURL := buildSigURL(ctx, buildDownloadRawEndpoint(ctx.Repo.Repository, art.ID), art.ID)
ctx.Redirect(redirectURL, http.StatusFound)
return
}
// v3 not supported due to not having one unique id
ctx.Error(http.StatusNotFound, "DownloadArtifact", "Artifact not found")
}
// DownloadArtifactRaw Downloads a specific artifact for a workflow run directly.
func DownloadArtifactRaw(ctx *context.APIContext) {
// it doesn't use repoAssignment middleware, so it needs to prepare the repo and check permission (sig) by itself
repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, ctx.PathParam("username"), ctx.PathParam("reponame"))
if err != nil {
if errors.Is(err, util.ErrNotExist) {
ctx.NotFound()
} else {
ctx.InternalServerError(err)
}
return
}
art := getArtifactByPathParam(ctx, repo)
if ctx.Written() {
return
}
sigStr := ctx.Req.URL.Query().Get("sig")
expiresStr := ctx.Req.URL.Query().Get("expires")
sigBytes, _ := base64.URLEncoding.DecodeString(sigStr)
expires, _ := strconv.ParseInt(expiresStr, 10, 64)
expectedSig := buildSignature(buildDownloadRawEndpoint(repo, art.ID), expires, art.ID)
if !hmac.Equal(sigBytes, expectedSig) {
ctx.Error(http.StatusUnauthorized, "DownloadArtifactRaw", "Error unauthorized")
return
}
t := time.Unix(expires, 0)
if t.Before(time.Now()) {
ctx.Error(http.StatusUnauthorized, "DownloadArtifactRaw", "Error link expired")
return
}
// expired artifacts cannot be downloaded anymore, so treat them as not found
if art.Status == actions_model.ArtifactStatusExpired {
ctx.Error(http.StatusNotFound, "DownloadArtifactRaw", "Artifact has expired")
return
}
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(art.ArtifactName), art.ArtifactName))
if actions.IsArtifactV4(art) {
err := actions.DownloadArtifactV4(ctx.Base, art)
if err != nil {
ctx.Error(http.StatusInternalServerError, "DownloadArtifactV4", err)
return
}
return
}
// v3 not supported due to not having one unique id
ctx.Error(http.StatusNotFound, "DownloadArtifactRaw", "artifact not found")
}
// Try to get the artifact by ID and check access
func getArtifactByPathParam(ctx *context.APIContext, repo *repo_model.Repository) *actions_model.ActionArtifact {
artifactID := ctx.PathParamInt64("artifact_id")
art, ok, err := db.GetByID[actions_model.ActionArtifact](ctx, artifactID)
if err != nil {
ctx.Error(http.StatusInternalServerError, "getArtifactByPathParam", err)
return nil
}
// if the artifact's status is neither uploaded-confirmed nor expired, treat it as not found
// only check RepoID here, because the repository owner may change over time
if !ok ||
art.RepoID != repo.ID ||
art.Status != actions_model.ArtifactStatusUploadConfirmed && art.Status != actions_model.ArtifactStatusExpired {
ctx.Error(http.StatusNotFound, "getArtifactByPathParam", "artifact not found")
return nil
}
return art
}

View File

@ -32,3 +32,17 @@ type swaggerResponseVariableList struct {
// in:body
Body []api.ActionVariable `json:"body"`
}
// ActionWorkflow
// swagger:response ActionWorkflow
type swaggerResponseActionWorkflow struct {
// in:body
Body api.ActionWorkflow `json:"body"`
}
// ActionWorkflowList
// swagger:response ActionWorkflowList
type swaggerResponseActionWorkflowList struct {
// in:body
Body []api.ActionWorkflow `json:"body"`
}

View File

@ -211,6 +211,9 @@ type swaggerParameterBodies struct {
// in:body
RenameOrgOption api.RenameOrgOption
// in:body
CreateActionWorkflowDispatch api.CreateActionWorkflowDispatch
// in:body
UpdateVariableOption api.UpdateVariableOption
}

View File

@ -443,6 +443,20 @@ type swaggerRepoTasksList struct {
Body api.ActionTaskResponse `json:"body"`
}
// ArtifactsList
// swagger:response ArtifactsList
type swaggerRepoArtifactsList struct {
// in:body
Body api.ActionArtifactsResponse `json:"body"`
}
// Artifact
// swagger:response Artifact
type swaggerRepoArtifact struct {
// in:body
Body api.ActionArtifact `json:"body"`
}
// swagger:response Compare
type swaggerCompare struct {
// in:body

View File

@ -212,7 +212,11 @@ func UpdateVariable(ctx *context.APIContext) {
if opt.Name == "" {
opt.Name = ctx.PathParam("variablename")
}
if _, err := actions_service.UpdateVariable(ctx, v.ID, opt.Name, opt.Value); err != nil {
v.Name = opt.Name
v.Data = opt.Value
if _, err := actions_service.UpdateVariableNameData(ctx, v); err != nil {
if errors.Is(err, util.ErrInvalidArgument) {
ctx.Error(http.StatusBadRequest, "UpdateVariable", err)
} else {

View File

@ -25,7 +25,7 @@ func PrepareCodeSearch(ctx *context.Context) (ret struct {
}
isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(fuzzyDefault)
if isFuzzy && !fuzzyAllow {
ctx.Flash.Info("Fuzzy search is disabled by default due to performance reasons")
ctx.Flash.Info("Fuzzy search is disabled by default due to performance reasons", true)
isFuzzy = false
}

View File

@ -64,7 +64,6 @@ func Contexter() func(next http.Handler) http.Handler {
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
base := context.NewBaseContext(resp, req)
ctx := context.NewWebContext(base, rnd, session.GetSession(req))
ctx.SetContextValue(context.WebContextKey, ctx)
ctx.Data.MergeFrom(middleware.CommonTemplateContextData())
ctx.Data.MergeFrom(reqctx.ContextData{
"Title": ctx.Locale.Tr("install.install"),

View File

@ -10,9 +10,9 @@ import (
"io"
"os"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
asymkey_service "code.gitea.io/gitea/services/asymkey"
)
// This file contains commit verification functions for refs passed across in hooks
@ -96,7 +96,7 @@ func readAndVerifyCommit(sha string, repo *git.Repository, env []string) error {
if err != nil {
return err
}
verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
verification := asymkey_service.ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
cancel()
return &errUnverifiedCommit{

View File

@ -87,8 +87,8 @@ func Routes() *web.Router {
// FIXME: it is not right to use context.Contexter here because all routes here should use PrivateContext
// Fortunately, the LFS handlers are able to handle requests without a complete web context
common.AddOwnerRepoGitLFSRoutes(r, func(ctx *context.PrivateContext) {
webContext := &context.Context{Base: ctx.Base}
ctx.SetContextValue(context.WebContextKey, webContext)
webContext := &context.Context{Base: ctx.Base} // see above, it shouldn't manually construct the web context
ctx.SetContextValue(context.WebContextKey, webContext) // FIXME: this is not ideal but no other way at the moment
})
})

View File

@ -20,26 +20,20 @@ import (
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
"code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
actions_service "code.gitea.io/gitea/services/actions"
context_module "code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
"github.com/nektos/act/pkg/jobparser"
"github.com/nektos/act/pkg/model"
"xorm.io/builder"
)
@ -674,7 +668,7 @@ func ArtifactsDownloadView(ctx *context_module.Context) {
// if artifacts status is not uploaded-confirmed, treat it as not found
for _, art := range artifacts {
if art.Status != int64(actions_model.ArtifactStatusUploadConfirmed) {
if art.Status != actions_model.ArtifactStatusUploadConfirmed {
ctx.Error(http.StatusNotFound, "artifact not found")
return
}
@ -682,23 +676,12 @@ func ArtifactsDownloadView(ctx *context_module.Context) {
ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(artifactName), artifactName))
// Artifacts using the v4 backend are stored as a single combined zip file per artifact on the backend
// The v4 backend ensures ContentEncoding is set to "application/zip", which is not the case for the old backend
if len(artifacts) == 1 && artifacts[0].ArtifactName+".zip" == artifacts[0].ArtifactPath && artifacts[0].ContentEncoding == "application/zip" {
art := artifacts[0]
if setting.Actions.ArtifactStorage.ServeDirect() {
u, err := storage.ActionsArtifacts.URL(art.StoragePath, art.ArtifactPath, nil)
if u != nil && err == nil {
ctx.Redirect(u.String())
return
}
}
f, err := storage.ActionsArtifacts.Open(art.StoragePath)
if len(artifacts) == 1 && actions.IsArtifactV4(artifacts[0]) {
err := actions.DownloadArtifactV4(ctx.Base, artifacts[0])
if err != nil {
ctx.Error(http.StatusInternalServerError, err.Error())
return
}
_, _ = io.Copy(ctx.Resp, f)
return
}
@ -792,142 +775,28 @@ func Run(ctx *context_module.Context) {
ctx.ServerError("ref", nil)
return
}
// can not rerun job when workflow is disabled
cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
cfg := cfgUnit.ActionsConfig()
if cfg.IsWorkflowDisabled(workflowID) {
ctx.Flash.Error(ctx.Tr("actions.workflow.disabled"))
ctx.Redirect(redirectURL)
return
}
// get target commit of run from specified ref
refName := git.RefName(ref)
var runTargetCommit *git.Commit
var err error
if refName.IsTag() {
runTargetCommit, err = ctx.Repo.GitRepo.GetTagCommit(refName.TagName())
} else if refName.IsBranch() {
runTargetCommit, err = ctx.Repo.GitRepo.GetBranchCommit(refName.BranchName())
} else {
ctx.Flash.Error(ctx.Tr("form.git_ref_name_error", ref))
ctx.Redirect(redirectURL)
return
}
if err != nil {
ctx.Flash.Error(ctx.Tr("form.target_ref_not_exist", ref))
ctx.Redirect(redirectURL)
return
}
// get workflow entry from runTargetCommit
entries, err := actions.ListWorkflows(runTargetCommit)
if err != nil {
ctx.Error(http.StatusInternalServerError, err.Error())
return
}
// find workflow from commit
var workflows []*jobparser.SingleWorkflow
for _, entry := range entries {
if entry.Name() == workflowID {
content, err := actions.GetContentFromEntry(entry)
if err != nil {
ctx.Error(http.StatusInternalServerError, err.Error())
return
}
workflows, err = jobparser.Parse(content)
if err != nil {
ctx.ServerError("workflow", err)
return
}
break
}
}
if len(workflows) == 0 {
ctx.Flash.Error(ctx.Tr("actions.workflow.not_found", workflowID))
ctx.Redirect(redirectURL)
return
}
// get inputs from post
workflow := &model.Workflow{
RawOn: workflows[0].RawOn,
}
inputs := make(map[string]any)
if workflowDispatch := workflow.WorkflowDispatchConfig(); workflowDispatch != nil {
err := actions_service.DispatchActionWorkflow(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, workflowID, ref, func(workflowDispatch *model.WorkflowDispatch, inputs map[string]any) error {
for name, config := range workflowDispatch.Inputs {
value := ctx.Req.PostFormValue(name)
if config.Type == "boolean" {
// https://www.w3.org/TR/html401/interact/forms.html
// https://stackoverflow.com/questions/11424037/do-checkbox-inputs-only-post-data-if-theyre-checked
// Checkboxes (and radio buttons) are on/off switches that may be toggled by the user.
// A switch is "on" when the control element's checked attribute is set.
// When a form is submitted, only "on" checkbox controls can become successful.
inputs[name] = strconv.FormatBool(value == "on")
inputs[name] = strconv.FormatBool(ctx.FormBool(name))
} else if value != "" {
inputs[name] = value
} else {
inputs[name] = config.Default
}
}
}
// ctx.Req.PostForm -> WorkflowDispatchPayload.Inputs -> ActionRun.EventPayload -> runner: ghc.Event
// https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
// https://docs.github.com/en/webhooks/webhook-events-and-payloads#workflow_dispatch
workflowDispatchPayload := &api.WorkflowDispatchPayload{
Workflow: workflowID,
Ref: ref,
Repository: convert.ToRepo(ctx, ctx.Repo.Repository, access_model.Permission{AccessMode: perm.AccessModeNone}),
Inputs: inputs,
Sender: convert.ToUserWithAccessMode(ctx, ctx.Doer, perm.AccessModeNone),
}
var eventPayload []byte
if eventPayload, err = workflowDispatchPayload.JSONPayload(); err != nil {
ctx.ServerError("JSONPayload", err)
return
}
run := &actions_model.ActionRun{
Title: strings.SplitN(runTargetCommit.CommitMessage, "\n", 2)[0],
RepoID: ctx.Repo.Repository.ID,
OwnerID: ctx.Repo.Repository.OwnerID,
WorkflowID: workflowID,
TriggerUserID: ctx.Doer.ID,
Ref: ref,
CommitSHA: runTargetCommit.ID.String(),
IsForkPullRequest: false,
Event: "workflow_dispatch",
TriggerEvent: "workflow_dispatch",
EventPayload: string(eventPayload),
Status: actions_model.StatusWaiting,
}
// cancel running jobs of the same workflow
if err := actions_model.CancelPreviousJobs(
ctx,
run.RepoID,
run.Ref,
run.WorkflowID,
run.Event,
); err != nil {
log.Error("CancelRunningJobs: %v", err)
}
// Insert the action run and its associated jobs into the database
if err := actions_model.InsertRun(ctx, run, workflows); err != nil {
ctx.ServerError("workflow", err)
return
}
alljobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
return nil
})
if err != nil {
log.Error("FindRunJobs: %v", err)
if errLocale := util.ErrAsLocale(err); errLocale != nil {
ctx.Flash.Error(ctx.Tr(errLocale.TrKey, errLocale.TrArgs...))
ctx.Redirect(redirectURL)
} else {
ctx.ServerError("DispatchActionWorkflow", err)
}
return
}
actions_service.CreateCommitStatus(ctx, alljobs...)
ctx.Flash.Success(ctx.Tr("actions.workflow.run_success", workflowID))
ctx.Redirect(redirectURL)
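The hunk above moves the workflow_dispatch plumbing into actions_service.DispatchActionWorkflow and swaps the manual `value == "on"` comparison for ctx.FormBool(name). The sketch below is a rough, self-contained stand-in for that checkbox handling; it assumes FormBool accepts "true"/"1" as well as the HTML checkbox value "on", and relies on the fact that unchecked boxes are simply absent from the submitted form.

package main

import (
	"fmt"
	"net/url"
	"strconv"
)

// formBool is a rough stand-in for ctx.FormBool (its exact behaviour is an assumption here):
// it treats "true"/"1" and the HTML checkbox value "on" as true, everything else as false.
func formBool(form url.Values, name string) bool {
	s := form.Get(name)
	v, _ := strconv.ParseBool(s)
	return v || s == "on"
}

func main() {
	checked := url.Values{"deploy": {"on"}}
	unchecked := url.Values{} // browsers omit unchecked checkboxes from the submission entirely
	fmt.Println(strconv.FormatBool(formBool(checked, "deploy")))   // "true"
	fmt.Println(strconv.FormatBool(formBool(unchecked, "deploy"))) // "false"
}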

View File

@ -234,7 +234,12 @@ func processBlameParts(ctx *context.Context, blameParts []*git.BlamePart) map[st
}
// populate commit email addresses to later look up avatars.
for _, c := range user_model.ValidateCommitsWithEmails(ctx, commits) {
validatedCommits, err := user_model.ValidateCommitsWithEmails(ctx, commits)
if err != nil {
ctx.ServerError("ValidateCommitsWithEmails", err)
return nil
}
for _, c := range validatedCommits {
commitNames[c.ID.String()] = c
}

View File

@ -22,16 +22,18 @@ import (
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitgraph"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/util"
asymkey_service "code.gitea.io/gitea/services/asymkey"
"code.gitea.io/gitea/services/context"
git_service "code.gitea.io/gitea/services/git"
"code.gitea.io/gitea/services/gitdiff"
repo_service "code.gitea.io/gitea/services/repository"
"code.gitea.io/gitea/services/repository/gitgraph"
)
const (
@ -80,7 +82,11 @@ func Commits(ctx *context.Context) {
ctx.ServerError("CommitsByRange", err)
return
}
ctx.Data["Commits"] = processGitCommits(ctx, commits)
ctx.Data["Commits"], err = processGitCommits(ctx, commits)
if err != nil {
ctx.ServerError("processGitCommits", err)
return
}
commitIDs := make([]string, 0, len(commits))
for _, c := range commits {
commitIDs = append(commitIDs, c.ID.String())
@ -192,7 +198,11 @@ func SearchCommits(ctx *context.Context) {
return
}
ctx.Data["CommitCount"] = len(commits)
ctx.Data["Commits"] = processGitCommits(ctx, commits)
ctx.Data["Commits"], err = processGitCommits(ctx, commits)
if err != nil {
ctx.ServerError("processGitCommits", err)
return
}
ctx.Data["Keyword"] = query
if all {
@ -235,7 +245,11 @@ func FileHistory(ctx *context.Context) {
ctx.ServerError("CommitsByFileAndRange", err)
return
}
ctx.Data["Commits"] = processGitCommits(ctx, commits)
ctx.Data["Commits"], err = processGitCommits(ctx, commits)
if err != nil {
ctx.ServerError("processGitCommits", err)
return
}
ctx.Data["Username"] = ctx.Repo.Owner.Name
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
@ -353,7 +367,7 @@ func Diff(ctx *context.Context) {
ctx.Data["CommitStatus"] = git_model.CalcCommitStatus(statuses)
ctx.Data["CommitStatuses"] = statuses
verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
verification := asymkey_service.ParseCommitWithSignature(ctx, commit)
ctx.Data["Verification"] = verification
ctx.Data["Author"] = user_model.ValidateCommitWithEmail(ctx, commit)
ctx.Data["Parents"] = parents
@ -416,13 +430,16 @@ func RawDiff(ctx *context.Context) {
}
}
func processGitCommits(ctx *context.Context, gitCommits []*git.Commit) []*git_model.SignCommitWithStatuses {
commits := git_model.ConvertFromGitCommit(ctx, gitCommits, ctx.Repo.Repository)
func processGitCommits(ctx *context.Context, gitCommits []*git.Commit) ([]*git_model.SignCommitWithStatuses, error) {
commits, err := git_service.ConvertFromGitCommit(ctx, gitCommits, ctx.Repo.Repository)
if err != nil {
return nil, err
}
if !ctx.Repo.CanRead(unit_model.TypeActions) {
for _, commit := range commits {
commit.Status.HideActionsURL(ctx)
git_model.CommitStatusesHideActionsURL(ctx, commit.Statuses)
}
}
return commits
return commits, nil
}

Some files were not shown because too many files have changed in this diff