Initial Commit

2025-06-20 06:50:22 +09:00
commit 3bcbba5aa1
466 changed files with 84887 additions and 0 deletions

actions/auth.go Normal file

@@ -0,0 +1,108 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"errors"
"fmt"
"net/http"
"strings"
"time"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"github.com/golang-jwt/jwt/v5"
)
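// actionsClaims are the JWT claims embedded in Actions authorization tokens.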
type actionsClaims struct {
jwt.RegisteredClaims
Scp string `json:"scp"`
TaskID int64
RunID int64
JobID int64
Ac string `json:"ac"`
}
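// actionsCacheScope pairs a cache scope with the permission granted on it; a list of scopes is serialized into the "ac" claim.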
type actionsCacheScope struct {
Scope string
Permission actionsCachePermission
}
type actionsCachePermission int
const (
actionsCachePermissionRead = 1 << iota
actionsCachePermissionWrite
)
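// CreateAuthorizationToken creates a signed JWT (24h expiry) carrying the task, run and job IDs plus a cache scope, for use as the task's bearer token.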
func CreateAuthorizationToken(taskID, runID, jobID int64) (string, error) {
now := time.Now()
ac, err := json.Marshal(&[]actionsCacheScope{
{
Scope: "",
Permission: actionsCachePermissionWrite,
},
})
if err != nil {
return "", err
}
claims := actionsClaims{
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
NotBefore: jwt.NewNumericDate(now),
},
Scp: fmt.Sprintf("Actions.Results:%d:%d", runID, jobID),
Ac: string(ac),
TaskID: taskID,
RunID: runID,
JobID: jobID,
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
tokenString, err := token.SignedString(setting.GetGeneralTokenSigningSecret())
if err != nil {
return "", err
}
return tokenString, nil
}
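// ParseAuthorizationToken reads the bearer token from the Authorization header and returns the task ID it carries; it returns 0 when no header is present.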
func ParseAuthorizationToken(req *http.Request) (int64, error) {
h := req.Header.Get("Authorization")
if h == "" {
return 0, nil
}
parts := strings.SplitN(h, " ", 2)
if len(parts) != 2 {
log.Error("split token failed: %s", h)
return 0, errors.New("split token failed")
}
return TokenToTaskID(parts[1])
}
// TokenToTaskID returns the TaskID associated with the provided JWT token
func TokenToTaskID(token string) (int64, error) {
parsedToken, err := jwt.ParseWithClaims(token, &actionsClaims{}, func(t *jwt.Token) (any, error) {
if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
}
return setting.GetGeneralTokenSigningSecret(), nil
})
if err != nil {
return 0, err
}
c, ok := parsedToken.Claims.(*actionsClaims)
if !parsedToken.Valid || !ok {
return 0, errors.New("invalid token claim")
}
return c.TaskID, nil
}

actions/auth_test.go Normal file

@@ -0,0 +1,64 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"net/http"
"testing"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
"github.com/golang-jwt/jwt/v5"
"github.com/stretchr/testify/assert"
)
func TestCreateAuthorizationToken(t *testing.T) {
var taskID int64 = 23
token, err := CreateAuthorizationToken(taskID, 1, 2)
assert.NoError(t, err)
assert.NotEmpty(t, token)
claims := jwt.MapClaims{}
_, err = jwt.ParseWithClaims(token, claims, func(t *jwt.Token) (any, error) {
return setting.GetGeneralTokenSigningSecret(), nil
})
assert.NoError(t, err)
scp, ok := claims["scp"]
assert.True(t, ok, "Has scp claim in jwt token")
assert.Contains(t, scp, "Actions.Results:1:2")
taskIDClaim, ok := claims["TaskID"]
assert.True(t, ok, "Has TaskID claim in jwt token")
assert.InDelta(t, float64(taskID), taskIDClaim, 0, "Supplied taskid must match stored one")
acClaim, ok := claims["ac"]
assert.True(t, ok, "Has ac claim in jwt token")
ac, ok := acClaim.(string)
assert.True(t, ok, "ac claim is a string for buildx gha cache")
scopes := []actionsCacheScope{}
err = json.Unmarshal([]byte(ac), &scopes)
assert.NoError(t, err, "ac claim is a json list for buildx gha cache")
assert.GreaterOrEqual(t, len(scopes), 1, "Expected at least one action cache scope for buildx gha cache")
}
func TestParseAuthorizationToken(t *testing.T) {
var taskID int64 = 23
token, err := CreateAuthorizationToken(taskID, 1, 2)
assert.NoError(t, err)
assert.NotEmpty(t, token)
headers := http.Header{}
headers.Set("Authorization", "Bearer "+token)
rTaskID, err := ParseAuthorizationToken(&http.Request{
Header: headers,
})
assert.NoError(t, err)
assert.Equal(t, taskID, rTaskID)
}
func TestParseAuthorizationTokenNoAuthHeader(t *testing.T) {
headers := http.Header{}
rTaskID, err := ParseAuthorizationToken(&http.Request{
Header: headers,
})
assert.NoError(t, err)
assert.Equal(t, int64(0), rTaskID)
}

actions/cleanup.go Normal file

@@ -0,0 +1,260 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"errors"
"fmt"
"time"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
actions_module "code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/builder"
)
// Cleanup removes expired actions logs, data, artifacts and used ephemeral runners
func Cleanup(ctx context.Context) error {
// clean up expired artifacts
if err := CleanupArtifacts(ctx); err != nil {
return fmt.Errorf("cleanup artifacts: %w", err)
}
// clean up old logs
if err := CleanupExpiredLogs(ctx); err != nil {
return fmt.Errorf("cleanup logs: %w", err)
}
// clean up old ephemeral runners
if err := CleanupEphemeralRunners(ctx); err != nil {
return fmt.Errorf("cleanup old ephemeral runners: %w", err)
}
return nil
}
// CleanupArtifacts removes expired and pending-deletion artifacts and sets the corresponding status on their records
func CleanupArtifacts(taskCtx context.Context) error {
if err := cleanExpiredArtifacts(taskCtx); err != nil {
return err
}
return cleanNeedDeleteArtifacts(taskCtx)
}
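// cleanExpiredArtifacts marks expired artifacts as expired and deletes their files from storage.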
func cleanExpiredArtifacts(taskCtx context.Context) error {
artifacts, err := actions_model.ListNeedExpiredArtifacts(taskCtx)
if err != nil {
return err
}
log.Info("Found %d expired artifacts", len(artifacts))
for _, artifact := range artifacts {
if err := actions_model.SetArtifactExpired(taskCtx, artifact.ID); err != nil {
log.Error("Cannot set artifact %d expired: %v", artifact.ID, err)
continue
}
if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
// go on
}
log.Info("Artifact %d is deleted (due to expiration)", artifact.ID)
}
return nil
}
// deleteArtifactBatchSize is the batch size of deleting artifacts
const deleteArtifactBatchSize = 100
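// cleanNeedDeleteArtifacts deletes artifacts that are pending deletion, in batches, and removes their files from storage.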
func cleanNeedDeleteArtifacts(taskCtx context.Context) error {
for {
artifacts, err := actions_model.ListPendingDeleteArtifacts(taskCtx, deleteArtifactBatchSize)
if err != nil {
return err
}
log.Info("Found %d artifacts pending deletion", len(artifacts))
for _, artifact := range artifacts {
if err := actions_model.SetArtifactDeleted(taskCtx, artifact.ID); err != nil {
log.Error("Cannot set artifact %d deleted: %v", artifact.ID, err)
continue
}
if err := storage.ActionsArtifacts.Delete(artifact.StoragePath); err != nil {
log.Error("Cannot delete artifact %d: %v", artifact.ID, err)
// go on
}
log.Info("Artifact %d is deleted (due to pending deletion)", artifact.ID)
}
if len(artifacts) < deleteArtifactBatchSize {
log.Debug("No more artifacts pending deletion")
break
}
}
return nil
}
const deleteLogBatchSize = 100
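// removeTaskLog removes a task's log from the log storage; errors are logged and ignored.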
func removeTaskLog(ctx context.Context, task *actions_model.ActionTask) {
if err := actions_module.RemoveLogs(ctx, task.LogInStorage, task.LogFilename); err != nil {
log.Error("Failed to remove log %s (in storage %v) of task %v: %v", task.LogFilename, task.LogInStorage, task.ID, err)
// do not return error here, go on
}
}
// CleanupExpiredLogs removes logs which are older than the configured retention time
func CleanupExpiredLogs(ctx context.Context) error {
olderThan := timeutil.TimeStampNow().AddDuration(-time.Duration(setting.Actions.LogRetentionDays) * 24 * time.Hour)
count := 0
for {
tasks, err := actions_model.FindOldTasksToExpire(ctx, olderThan, deleteLogBatchSize)
if err != nil {
return fmt.Errorf("find old tasks: %w", err)
}
for _, task := range tasks {
removeTaskLog(ctx, task)
task.LogIndexes = nil // clear log indexes since it's a heavy field
task.LogExpired = true
if err := actions_model.UpdateTask(ctx, task, "log_indexes", "log_expired"); err != nil {
log.Error("Failed to update task %v: %v", task.ID, err)
// do not return error here, continue to next task
continue
}
count++
log.Trace("Removed log %s of task %v", task.LogFilename, task.ID)
}
if len(tasks) < deleteLogBatchSize {
break
}
}
log.Info("Removed %d logs", count)
return nil
}
// CleanupEphemeralRunners removes used ephemeral runners which are no longer able to process jobs
func CleanupEphemeralRunners(ctx context.Context) error {
subQuery := builder.Select("`action_runner`.id").
From(builder.Select("*").From("`action_runner`"), "`action_runner`"). // mysql needs this redundant subquery
Join("INNER", "`action_task`", "`action_task`.`runner_id` = `action_runner`.`id`").
Where(builder.Eq{"`action_runner`.`ephemeral`": true}).
And(builder.NotIn("`action_task`.`status`", actions_model.StatusWaiting, actions_model.StatusRunning, actions_model.StatusBlocked))
b := builder.Delete(builder.In("id", subQuery)).From("`action_runner`")
res, err := db.GetEngine(ctx).Exec(b)
if err != nil {
return fmt.Errorf("find runners: %w", err)
}
affected, _ := res.RowsAffected()
log.Info("Removed %d runners", affected)
return nil
}
// CleanupEphemeralRunnersByPickedTaskOfRepo removes all ephemeral runners that have active/finished tasks on the given repository
func CleanupEphemeralRunnersByPickedTaskOfRepo(ctx context.Context, repoID int64) error {
subQuery := builder.Select("`action_runner`.id").
From(builder.Select("*").From("`action_runner`"), "`action_runner`"). // mysql needs this redundant subquery
Join("INNER", "`action_task`", "`action_task`.`runner_id` = `action_runner`.`id`").
Where(builder.And(builder.Eq{"`action_runner`.`ephemeral`": true}, builder.Eq{"`action_task`.`repo_id`": repoID}))
b := builder.Delete(builder.In("id", subQuery)).From("`action_runner`")
res, err := db.GetEngine(ctx).Exec(b)
if err != nil {
return fmt.Errorf("find runners: %w", err)
}
affected, _ := res.RowsAffected()
log.Info("Removed %d runners", affected)
return nil
}
// DeleteRun deletes a workflow run, including all its logs and artifacts.
func DeleteRun(ctx context.Context, run *actions_model.ActionRun) error {
if !run.Status.IsDone() {
return errors.New("run is not done")
}
repoID := run.RepoID
jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
if err != nil {
return err
}
jobIDs := container.FilterSlice(jobs, func(j *actions_model.ActionRunJob) (int64, bool) {
return j.ID, true
})
tasks := make(actions_model.TaskList, 0)
if len(jobIDs) > 0 {
if err := db.GetEngine(ctx).Where("repo_id = ?", repoID).In("job_id", jobIDs).Find(&tasks); err != nil {
return err
}
}
artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
RepoID: repoID,
RunID: run.ID,
})
if err != nil {
return err
}
var recordsToDelete []any
recordsToDelete = append(recordsToDelete, &actions_model.ActionRun{
RepoID: repoID,
ID: run.ID,
})
recordsToDelete = append(recordsToDelete, &actions_model.ActionRunJob{
RepoID: repoID,
RunID: run.ID,
})
for _, tas := range tasks {
recordsToDelete = append(recordsToDelete, &actions_model.ActionTask{
RepoID: repoID,
ID: tas.ID,
})
recordsToDelete = append(recordsToDelete, &actions_model.ActionTaskStep{
RepoID: repoID,
TaskID: tas.ID,
})
recordsToDelete = append(recordsToDelete, &actions_model.ActionTaskOutput{
TaskID: tas.ID,
})
}
recordsToDelete = append(recordsToDelete, &actions_model.ActionArtifact{
RepoID: repoID,
RunID: run.ID,
})
if err := db.WithTx(ctx, func(ctx context.Context) error {
// TODO: Deleting task records could break the current ephemeral runner implementation. This is a temporary workaround suggested by ChristopherHX.
// Since this potentially deletes the only task an ephemeral act_runner has ever run, the affected runners must be deleted first, either by
// calling the ephemeral runner cleanup first, or
// deleting the affected ephemeral act_runners directly.
// A longer-term fix would be to fully delete ephemeral runners right before their task formally finishes.
//
// See also: https://github.com/go-gitea/gitea/pull/34337#issuecomment-2862222788
if err := CleanupEphemeralRunners(ctx); err != nil {
return err
}
return db.DeleteBeans(ctx, recordsToDelete...)
}); err != nil {
return err
}
// Delete files on storage
for _, tas := range tasks {
removeTaskLog(ctx, tas)
}
for _, art := range artifacts {
if err := storage.ActionsArtifacts.Delete(art.StoragePath); err != nil {
log.Error("remove artifact file %q: %v", art.StoragePath, err)
}
}
return nil
}

actions/clear_tasks.go Normal file

@@ -0,0 +1,132 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"fmt"
"time"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
webhook_module "code.gitea.io/gitea/modules/webhook"
notify_service "code.gitea.io/gitea/services/notify"
)
// StopZombieTasks stops the tasks that have running status but haven't been updated for a long time
func StopZombieTasks(ctx context.Context) error {
return stopTasks(ctx, actions_model.FindTaskOptions{
Status: actions_model.StatusRunning,
UpdatedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.ZombieTaskTimeout).Unix()),
})
}
// StopEndlessTasks stops the tasks that have running status and keep receiving updates, but haven't finished for a long time
func StopEndlessTasks(ctx context.Context) error {
return stopTasks(ctx, actions_model.FindTaskOptions{
Status: actions_model.StatusRunning,
StartedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.EndlessTaskTimeout).Unix()),
})
}
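// notifyWorkflowJobStatusUpdate creates commit statuses for the given jobs and sends workflow job status update notifications.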
func notifyWorkflowJobStatusUpdate(ctx context.Context, jobs []*actions_model.ActionRunJob) {
if len(jobs) > 0 {
CreateCommitStatus(ctx, jobs...)
for _, job := range jobs {
_ = job.LoadAttributes(ctx)
notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
}
}
}
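// CancelPreviousJobs cancels jobs of previous runs of the same workflow, ref and event, then notifies about the updated jobs.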
func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
jobs, err := actions_model.CancelPreviousJobs(ctx, repoID, ref, workflowID, event)
notifyWorkflowJobStatusUpdate(ctx, jobs)
return err
}
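// CleanRepoScheduleTasks cleans up the repository's schedule tasks and notifies about the affected jobs.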
func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository) error {
jobs, err := actions_model.CleanRepoScheduleTasks(ctx, repo)
notifyWorkflowJobStatusUpdate(ctx, jobs)
return err
}
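// stopTasks stops the matching tasks with failure status, moves their logs into storage and notifies about the affected jobs.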
func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
tasks, err := db.Find[actions_model.ActionTask](ctx, opts)
if err != nil {
return fmt.Errorf("find tasks: %w", err)
}
jobs := make([]*actions_model.ActionRunJob, 0, len(tasks))
for _, task := range tasks {
if err := db.WithTx(ctx, func(ctx context.Context) error {
if err := actions_model.StopTask(ctx, task.ID, actions_model.StatusFailure); err != nil {
return err
}
if err := task.LoadJob(ctx); err != nil {
return err
}
jobs = append(jobs, task.Job)
return nil
}); err != nil {
log.Warn("Cannot stop task %v: %v", task.ID, err)
continue
}
remove, err := actions.TransferLogs(ctx, task.LogFilename)
if err != nil {
log.Warn("Cannot transfer logs of task %v: %v", task.ID, err)
continue
}
task.LogInStorage = true
if err := actions_model.UpdateTask(ctx, task, "log_in_storage"); err != nil {
log.Warn("Cannot update task %v: %v", task.ID, err)
continue
}
remove()
}
notifyWorkflowJobStatusUpdate(ctx, jobs)
return nil
}
// CancelAbandonedJobs cancels the jobs that have waiting or blocked status but haven't been picked up by a runner for a long time
func CancelAbandonedJobs(ctx context.Context) error {
jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{
Statuses: []actions_model.Status{actions_model.StatusWaiting, actions_model.StatusBlocked},
UpdatedBefore: timeutil.TimeStamp(time.Now().Add(-setting.Actions.AbandonedJobTimeout).Unix()),
})
if err != nil {
log.Warn("find abandoned tasks: %v", err)
return err
}
now := timeutil.TimeStampNow()
for _, job := range jobs {
job.Status = actions_model.StatusCancelled
job.Stopped = now
updated := false
if err := db.WithTx(ctx, func(ctx context.Context) error {
n, err := actions_model.UpdateRunJob(ctx, job, nil, "status", "stopped")
updated = err == nil && n > 0
return err
}); err != nil {
log.Warn("cancel abandoned job %v: %v", job.ID, err)
// go on
}
CreateCommitStatus(ctx, job)
if updated {
_ = job.LoadAttributes(ctx)
notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
}
}
return nil
}

actions/commit_status.go Normal file

@@ -0,0 +1,177 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"errors"
"fmt"
"path"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
user_model "code.gitea.io/gitea/models/user"
actions_module "code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/commitstatus"
git "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
webhook_module "code.gitea.io/gitea/modules/webhook"
commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
"github.com/nektos/act/pkg/jobparser"
)
// CreateCommitStatus creates a commit status for each of the given jobs.
// It doesn't return an error when creation fails; it only logs it, because it's not critical.
func CreateCommitStatus(ctx context.Context, jobs ...*actions_model.ActionRunJob) {
for _, job := range jobs {
if err := createCommitStatus(ctx, job); err != nil {
log.Error("Failed to create commit status for job %d: %v", job.ID, err)
}
}
}
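// createCommitStatus builds and creates the commit status for a single job, resolving the target commit SHA from the run's trigger event.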
func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) error {
if err := job.LoadAttributes(ctx); err != nil {
return fmt.Errorf("load run: %w", err)
}
run := job.Run
var (
sha string
event string
)
switch run.Event {
case webhook_module.HookEventPush:
event = "push"
payload, err := run.GetPushEventPayload()
if err != nil {
return fmt.Errorf("GetPushEventPayload: %w", err)
}
if payload.HeadCommit == nil {
return errors.New("head commit is missing in event payload")
}
sha = payload.HeadCommit.ID
case // pull_request
webhook_module.HookEventPullRequest,
webhook_module.HookEventPullRequestSync,
webhook_module.HookEventPullRequestAssign,
webhook_module.HookEventPullRequestLabel,
webhook_module.HookEventPullRequestReviewRequest,
webhook_module.HookEventPullRequestMilestone:
if run.TriggerEvent == actions_module.GithubEventPullRequestTarget {
event = "pull_request_target"
} else {
event = "pull_request"
}
payload, err := run.GetPullRequestEventPayload()
if err != nil {
return fmt.Errorf("GetPullRequestEventPayload: %w", err)
}
if payload.PullRequest == nil {
return errors.New("pull request is missing in event payload")
} else if payload.PullRequest.Head == nil {
return errors.New("head of pull request is missing in event payload")
}
sha = payload.PullRequest.Head.Sha
case webhook_module.HookEventRelease:
event = string(run.Event)
sha = run.CommitSHA
default:
return nil
}
repo := run.Repo
// TODO: store workflow name as a field in ActionRun to avoid parsing
runName := path.Base(run.WorkflowID)
if wfs, err := jobparser.Parse(job.WorkflowPayload); err == nil && len(wfs) > 0 {
runName = wfs[0].Name
}
ctxname := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event)
state := toCommitStatus(job.Status)
if statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil {
for _, v := range statuses {
if v.Context == ctxname {
if v.State == state {
// no need to update
return nil
}
break
}
}
} else {
return fmt.Errorf("GetLatestCommitStatus: %w", err)
}
description := ""
switch job.Status {
// TODO: if we want support description in different languages, we need to support i18n placeholders in it
case actions_model.StatusSuccess:
description = fmt.Sprintf("Successful in %s", job.Duration())
case actions_model.StatusFailure:
description = fmt.Sprintf("Failing after %s", job.Duration())
case actions_model.StatusCancelled:
description = "Has been cancelled"
case actions_model.StatusSkipped:
description = "Has been skipped"
case actions_model.StatusRunning:
description = "Has started running"
case actions_model.StatusWaiting:
description = "Waiting to run"
case actions_model.StatusBlocked:
description = "Blocked by required conditions"
}
index, err := getIndexOfJob(ctx, job)
if err != nil {
return fmt.Errorf("getIndexOfJob: %w", err)
}
creator := user_model.NewActionsUser()
commitID, err := git.NewIDFromString(sha)
if err != nil {
return fmt.Errorf("HashTypeInterfaceFromHashString: %w", err)
}
status := git_model.CommitStatus{
SHA: sha,
TargetURL: fmt.Sprintf("%s/jobs/%d", run.Link(), index),
Description: description,
Context: ctxname,
CreatorID: creator.ID,
State: state,
}
return commitstatus_service.CreateCommitStatus(ctx, repo, creator, commitID.String(), &status)
}
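// toCommitStatus maps an Actions job status to a commit status state.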
func toCommitStatus(status actions_model.Status) commitstatus.CommitStatusState {
switch status {
case actions_model.StatusSuccess:
return commitstatus.CommitStatusSuccess
case actions_model.StatusFailure, actions_model.StatusCancelled:
return commitstatus.CommitStatusFailure
case actions_model.StatusWaiting, actions_model.StatusBlocked, actions_model.StatusRunning:
return commitstatus.CommitStatusPending
case actions_model.StatusSkipped:
return commitstatus.CommitStatusSkipped
default:
return commitstatus.CommitStatusError
}
}
func getIndexOfJob(ctx context.Context, job *actions_model.ActionRunJob) (int, error) {
// TODO: store job index as a field in ActionRunJob to avoid this
jobs, err := actions_model.GetRunJobsByRunID(ctx, job.RunID)
if err != nil {
return 0, err
}
for i, v := range jobs {
if v.ID == job.ID {
return i, nil
}
}
return 0, nil
}

actions/context.go Normal file

@@ -0,0 +1,201 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"fmt"
"strconv"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
actions_module "code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"github.com/nektos/act/pkg/model"
)
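// GiteaContext is a map holding the workflow-run context values (modelled on the GitHub `github` context).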
type GiteaContext map[string]any
// GenerateGiteaContext generates the Gitea context, excluding the token and gitea_runtime_token
// job can be nil when generating a context for parsing workflow-level expressions
func GenerateGiteaContext(run *actions_model.ActionRun, job *actions_model.ActionRunJob) GiteaContext {
event := map[string]any{}
_ = json.Unmarshal([]byte(run.EventPayload), &event)
baseRef := ""
headRef := ""
ref := run.Ref
sha := run.CommitSHA
if pullPayload, err := run.GetPullRequestEventPayload(); err == nil && pullPayload.PullRequest != nil && pullPayload.PullRequest.Base != nil && pullPayload.PullRequest.Head != nil {
baseRef = pullPayload.PullRequest.Base.Ref
headRef = pullPayload.PullRequest.Head.Ref
// if the TriggerEvent is pull_request_target, ref and sha need to be set according to the base of pull request
// In GitHub's documentation, ref should be the branch or tag that triggered workflow. But when the TriggerEvent is pull_request_target,
// the ref will be the base branch.
if run.TriggerEvent == actions_module.GithubEventPullRequestTarget {
ref = git.BranchPrefix + pullPayload.PullRequest.Base.Name
sha = pullPayload.PullRequest.Base.Sha
}
}
refName := git.RefName(ref)
gitContext := GiteaContext{
// standard contexts, see https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
"action": "", // string, The name of the action currently running, or the id of a step. GitHub removes special characters, and uses the name __run when the current step runs a script without an id. If you use the same action more than once in the same job, the name will include a suffix with the sequence number with underscore before it. For example, the first script you run will have the name __run, and the second script will be named __run_2. Similarly, the second invocation of actions/checkout will be actionscheckout2.
"action_path": "", // string, The path where an action is located. This property is only supported in composite actions. You can use this path to access files located in the same repository as the action.
"action_ref": "", // string, For a step executing an action, this is the ref of the action being executed. For example, v2.
"action_repository": "", // string, For a step executing an action, this is the owner and repository name of the action. For example, actions/checkout.
"action_status": "", // string, For a composite action, the current result of the composite action.
"actor": run.TriggerUser.Name, // string, The username of the user that triggered the initial workflow run. If the workflow run is a re-run, this value may differ from github.triggering_actor. Any workflow re-runs will use the privileges of github.actor, even if the actor initiating the re-run (github.triggering_actor) has different privileges.
"api_url": setting.AppURL + "api/v1", // string, The URL of the GitHub REST API.
"base_ref": baseRef, // string, The base_ref or target branch of the pull request in a workflow run. This property is only available when the event that triggers a workflow run is either pull_request or pull_request_target.
"env": "", // string, Path on the runner to the file that sets environment variables from workflow commands. This file is unique to the current step and is a different file for each step in a job. For more information, see "Workflow commands for GitHub Actions."
"event": event, // object, The full event webhook payload. You can access individual properties of the event using this context. This object is identical to the webhook payload of the event that triggered the workflow run, and is different for each event. The webhooks for each GitHub Actions event is linked in "Events that trigger workflows." For example, for a workflow run triggered by the push event, this object contains the contents of the push webhook payload.
"event_name": run.TriggerEvent, // string, The name of the event that triggered the workflow run.
"event_path": "", // string, The path to the file on the runner that contains the full event webhook payload.
"graphql_url": "", // string, The URL of the GitHub GraphQL API.
"head_ref": headRef, // string, The head_ref or source branch of the pull request in a workflow run. This property is only available when the event that triggers a workflow run is either pull_request or pull_request_target.
"job": "", // string, The job_id of the current job.
"ref": ref, // string, The fully-formed ref of the branch or tag that triggered the workflow run. For workflows triggered by push, this is the branch or tag ref that was pushed. For workflows triggered by pull_request, this is the pull request merge branch. For workflows triggered by release, this is the release tag created. For other triggers, this is the branch or tag ref that triggered the workflow run. This is only set if a branch or tag is available for the event type. The ref given is fully-formed, meaning that for branches the format is refs/heads/<branch_name>, for pull requests it is refs/pull/<pr_number>/merge, and for tags it is refs/tags/<tag_name>. For example, refs/heads/feature-branch-1.
"ref_name": refName.ShortName(), // string, The short ref name of the branch or tag that triggered the workflow run. This value matches the branch or tag name shown on GitHub. For example, feature-branch-1.
"ref_protected": false, // boolean, true if branch protections are configured for the ref that triggered the workflow run.
"ref_type": string(refName.RefType()), // string, The type of ref that triggered the workflow run. Valid values are branch or tag.
"path": "", // string, Path on the runner to the file that sets system PATH variables from workflow commands. This file is unique to the current step and is a different file for each step in a job. For more information, see "Workflow commands for GitHub Actions."
"repository": run.Repo.OwnerName + "/" + run.Repo.Name, // string, The owner and repository name. For example, Codertocat/Hello-World.
"repository_owner": run.Repo.OwnerName, // string, The repository owner's name. For example, Codertocat.
"repositoryUrl": run.Repo.HTMLURL(), // string, The Git URL to the repository. For example, git://github.com/codertocat/hello-world.git.
"retention_days": "", // string, The number of days that workflow run logs and artifacts are kept.
"run_id": "", // string, A unique number for each workflow run within a repository. This number does not change if you re-run the workflow run.
"run_number": strconv.FormatInt(run.Index, 10), // string, A unique number for each run of a particular workflow in a repository. This number begins at 1 for the workflow's first run, and increments with each new run. This number does not change if you re-run the workflow run.
"run_attempt": "", // string, A unique number for each attempt of a particular workflow run in a repository. This number begins at 1 for the workflow run's first attempt, and increments with each re-run.
"secret_source": "Actions", // string, The source of a secret used in a workflow. Possible values are None, Actions, Dependabot, or Codespaces.
"server_url": setting.AppURL, // string, The URL of the GitHub server. For example: https://github.com.
"sha": sha, // string, The commit SHA that triggered the workflow. The value of this commit SHA depends on the event that triggered the workflow. For more information, see "Events that trigger workflows." For example, ffac537e6cbbf934b08745a378932722df287a53.
"triggering_actor": "", // string, The username of the user that initiated the workflow run. If the workflow run is a re-run, this value may differ from github.actor. Any workflow re-runs will use the privileges of github.actor, even if the actor initiating the re-run (github.triggering_actor) has different privileges.
"workflow": run.WorkflowID, // string, The name of the workflow. If the workflow file doesn't specify a name, the value of this property is the full path of the workflow file in the repository.
"workspace": "", // string, The default working directory on the runner for steps, and the default location of your repository when using the checkout action.
// additional contexts
"gitea_default_actions_url": setting.Actions.DefaultActionsURL.URL(),
}
if job != nil {
gitContext["job"] = job.JobID
gitContext["run_id"] = strconv.FormatInt(job.RunID, 10)
gitContext["run_attempt"] = strconv.FormatInt(job.Attempt, 10)
}
return gitContext
}
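// TaskNeed holds the aggregated result and outputs of a job listed in `needs`.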
type TaskNeed struct {
Result actions_model.Status
Outputs map[string]string
}
// FindTaskNeeds finds the `needs` for the given job, i.e. the results and outputs of the jobs it depends on
func FindTaskNeeds(ctx context.Context, job *actions_model.ActionRunJob) (map[string]*TaskNeed, error) {
if len(job.Needs) == 0 {
return nil, nil
}
needs := container.SetOf(job.Needs...)
jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: job.RunID})
if err != nil {
return nil, fmt.Errorf("FindRunJobs: %w", err)
}
jobIDJobs := make(map[string][]*actions_model.ActionRunJob)
for _, job := range jobs {
jobIDJobs[job.JobID] = append(jobIDJobs[job.JobID], job)
}
ret := make(map[string]*TaskNeed, len(needs))
for jobID, jobsWithSameID := range jobIDJobs {
if !needs.Contains(jobID) {
continue
}
var jobOutputs map[string]string
for _, job := range jobsWithSameID {
if job.TaskID == 0 || !job.Status.IsDone() {
// this shouldn't happen, unless the job has been rerun
continue
}
got, err := actions_model.FindTaskOutputByTaskID(ctx, job.TaskID)
if err != nil {
return nil, fmt.Errorf("FindTaskOutputByTaskID: %w", err)
}
outputs := make(map[string]string, len(got))
for _, v := range got {
outputs[v.OutputKey] = v.OutputValue
}
if len(jobOutputs) == 0 {
jobOutputs = outputs
} else {
jobOutputs = mergeTwoOutputs(outputs, jobOutputs)
}
}
ret[jobID] = &TaskNeed{
Outputs: jobOutputs,
Result: actions_model.AggregateJobStatus(jobsWithSameID),
}
}
return ret, nil
}
// mergeTwoOutputs merges two outputs from two different ActionRunJobs
// Values with the same output name may be overridden. The user should ensure the output names are unique.
// See https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#using-job-outputs-in-a-matrix-job
func mergeTwoOutputs(o1, o2 map[string]string) map[string]string {
ret := make(map[string]string, len(o1))
for k1, v1 := range o1 {
if len(v1) > 0 {
ret[k1] = v1
} else {
ret[k1] = o2[k1]
}
}
return ret
}
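// ToGitHubContext converts the GiteaContext into act's model.GithubContext; fields without a Gitea counterpart are left empty.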
func (g *GiteaContext) ToGitHubContext() *model.GithubContext {
return &model.GithubContext{
Event: util.GetMapValueOrDefault(*g, "event", map[string]any(nil)),
EventPath: util.GetMapValueOrDefault(*g, "event_path", ""),
Workflow: util.GetMapValueOrDefault(*g, "workflow", ""),
RunID: util.GetMapValueOrDefault(*g, "run_id", ""),
RunNumber: util.GetMapValueOrDefault(*g, "run_number", ""),
Actor: util.GetMapValueOrDefault(*g, "actor", ""),
Repository: util.GetMapValueOrDefault(*g, "repository", ""),
EventName: util.GetMapValueOrDefault(*g, "event_name", ""),
Sha: util.GetMapValueOrDefault(*g, "sha", ""),
Ref: util.GetMapValueOrDefault(*g, "ref", ""),
RefName: util.GetMapValueOrDefault(*g, "ref_name", ""),
RefType: util.GetMapValueOrDefault(*g, "ref_type", ""),
HeadRef: util.GetMapValueOrDefault(*g, "head_ref", ""),
BaseRef: util.GetMapValueOrDefault(*g, "base_ref", ""),
Token: "", // deliberately omitted for security
Workspace: util.GetMapValueOrDefault(*g, "workspace", ""),
Action: util.GetMapValueOrDefault(*g, "action", ""),
ActionPath: util.GetMapValueOrDefault(*g, "action_path", ""),
ActionRef: util.GetMapValueOrDefault(*g, "action_ref", ""),
ActionRepository: util.GetMapValueOrDefault(*g, "action_repository", ""),
Job: util.GetMapValueOrDefault(*g, "job", ""),
JobName: "", // not present in GiteaContext
RepositoryOwner: util.GetMapValueOrDefault(*g, "repository_owner", ""),
RetentionDays: util.GetMapValueOrDefault(*g, "retention_days", ""),
RunnerPerflog: "", // not present in GiteaContext
RunnerTrackingID: "", // not present in GiteaContext
ServerURL: util.GetMapValueOrDefault(*g, "server_url", ""),
APIURL: util.GetMapValueOrDefault(*g, "api_url", ""),
GraphQLURL: util.GetMapValueOrDefault(*g, "graphql_url", ""),
}
}

actions/context_test.go Normal file

@@ -0,0 +1,28 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"testing"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/unittest"
"github.com/stretchr/testify/assert"
)
func TestFindTaskNeeds(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
task := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: 51})
job := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRunJob{ID: task.JobID})
ret, err := FindTaskNeeds(t.Context(), job)
assert.NoError(t, err)
assert.Len(t, ret, 1)
assert.Contains(t, ret, "job1")
assert.Len(t, ret["job1"].Outputs, 2)
assert.Equal(t, "abc", ret["job1"].Outputs["output_a"])
assert.Equal(t, "bbb", ret["job1"].Outputs["output_b"])
}

actions/init.go Normal file

@@ -0,0 +1,71 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"errors"
"fmt"
"os"
"strings"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/queue"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
notify_service "code.gitea.io/gitea/services/notify"
)
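// initGlobalRunnerToken registers a global runner registration token taken from GITEA_RUNNER_REGISTRATION_TOKEN or GITEA_RUNNER_REGISTRATION_TOKEN_FILE, if one is configured.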
func initGlobalRunnerToken(ctx context.Context) error {
// use the same env name as the runner, for consistency
token := os.Getenv("GITEA_RUNNER_REGISTRATION_TOKEN")
tokenFile := os.Getenv("GITEA_RUNNER_REGISTRATION_TOKEN_FILE")
if token != "" && tokenFile != "" {
return errors.New("both GITEA_RUNNER_REGISTRATION_TOKEN and GITEA_RUNNER_REGISTRATION_TOKEN_FILE are set, only one can be used")
}
if tokenFile != "" {
file, err := os.ReadFile(tokenFile)
if err != nil {
return fmt.Errorf("unable to read GITEA_RUNNER_REGISTRATION_TOKEN_FILE: %w", err)
}
token = strings.TrimSpace(string(file))
}
if token == "" {
return nil
}
if len(token) < 32 {
return errors.New("GITEA_RUNNER_REGISTRATION_TOKEN must be at least 32 random characters")
}
existing, err := actions_model.GetRunnerToken(ctx, token)
if err != nil && !errors.Is(err, util.ErrNotExist) {
return fmt.Errorf("unable to check existing token: %w", err)
}
if existing != nil {
if !existing.IsActive {
log.Warn("The token defined by GITEA_RUNNER_REGISTRATION_TOKEN is already invalidated, please use the latest one from web UI")
}
return nil
}
_, err = actions_model.NewRunnerTokenWithValue(ctx, 0, 0, token)
return err
}
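// Init initializes the Actions services: it creates the ready-job queue, registers the notifier and sets up the global runner token.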
func Init(ctx context.Context) error {
if !setting.Actions.Enabled {
return nil
}
jobEmitterQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "actions_ready_job", jobEmitterQueueHandler)
if jobEmitterQueue == nil {
return errors.New("unable to create actions_ready_job queue")
}
go graceful.GetManager().RunWithCancel(jobEmitterQueue)
notify_service.RegisterNotifier(NewNotifier())
return initGlobalRunnerToken(ctx)
}

actions/init_test.go Normal file

@@ -0,0 +1,78 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"os"
"testing"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/util"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestMain(m *testing.M) {
unittest.MainTest(m)
os.Exit(m.Run())
}
func TestInitToken(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
t.Run("NoToken", func(t *testing.T) {
_, _ = db.Exec(db.DefaultContext, "DELETE FROM action_runner_token")
t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN", "")
t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN_FILE", "")
err := initGlobalRunnerToken(db.DefaultContext)
require.NoError(t, err)
notEmpty, err := db.IsTableNotEmpty(&actions_model.ActionRunnerToken{})
require.NoError(t, err)
assert.False(t, notEmpty)
})
t.Run("EnvToken", func(t *testing.T) {
tokenValue, _ := util.CryptoRandomString(32)
t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN", tokenValue)
t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN_FILE", "")
err := initGlobalRunnerToken(db.DefaultContext)
require.NoError(t, err)
token := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRunnerToken{Token: tokenValue})
assert.True(t, token.IsActive)
// init with the same token again, should not create a new token
err = initGlobalRunnerToken(db.DefaultContext)
require.NoError(t, err)
token2 := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRunnerToken{Token: tokenValue})
assert.Equal(t, token.ID, token2.ID)
assert.True(t, token.IsActive)
})
t.Run("EnvFileToken", func(t *testing.T) {
tokenValue, _ := util.CryptoRandomString(32)
f := t.TempDir() + "/token"
_ = os.WriteFile(f, []byte(tokenValue), 0o644)
t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN", "")
t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN_FILE", f)
err := initGlobalRunnerToken(db.DefaultContext)
require.NoError(t, err)
token := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRunnerToken{Token: tokenValue})
assert.True(t, token.IsActive)
// if the env token is invalidated by another new token, then it shouldn't be active anymore
_, err = actions_model.NewRunnerToken(db.DefaultContext, 0, 0)
require.NoError(t, err)
token = unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRunnerToken{Token: tokenValue})
assert.False(t, token.IsActive)
})
t.Run("InvalidToken", func(t *testing.T) {
t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN", "abc")
err := initGlobalRunnerToken(db.DefaultContext)
assert.ErrorContains(t, err, "must be at least")
})
}

actions/interface.go Normal file

@@ -0,0 +1,36 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import "code.gitea.io/gitea/services/context"
// API for actions of a repository or organization
type API interface {
// ListActionsSecrets list secrets
ListActionsSecrets(*context.APIContext)
// CreateOrUpdateSecret create or update a secret
CreateOrUpdateSecret(*context.APIContext)
// DeleteSecret delete a secret
DeleteSecret(*context.APIContext)
// ListVariables list variables
ListVariables(*context.APIContext)
// GetVariable get a variable
GetVariable(*context.APIContext)
// DeleteVariable delete a variable
DeleteVariable(*context.APIContext)
// CreateVariable create a variable
CreateVariable(*context.APIContext)
// UpdateVariable update a variable
UpdateVariable(*context.APIContext)
// GetRegistrationToken get registration token
GetRegistrationToken(*context.APIContext)
// CreateRegistrationToken create a registration token
CreateRegistrationToken(*context.APIContext)
// ListRunners list runners
ListRunners(*context.APIContext)
// GetRunner get a runner
GetRunner(*context.APIContext)
// DeleteRunner delete runner
DeleteRunner(*context.APIContext)
}

actions/job_emitter.go Normal file

@@ -0,0 +1,169 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"errors"
"fmt"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/queue"
notify_service "code.gitea.io/gitea/services/notify"
"github.com/nektos/act/pkg/jobparser"
"xorm.io/builder"
)
var jobEmitterQueue *queue.WorkerPoolQueue[*jobUpdate]
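// jobUpdate is a queue item telling the emitter to re-check the jobs of the given run.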
type jobUpdate struct {
RunID int64
}
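// EmitJobsIfReady queues a check of the run so that blocked jobs whose dependencies have finished can be promoted.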
func EmitJobsIfReady(runID int64) error {
err := jobEmitterQueue.Push(&jobUpdate{
RunID: runID,
})
if errors.Is(err, queue.ErrAlreadyInQueue) {
return nil
}
return err
}
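// jobEmitterQueueHandler handles queued job updates; items that fail are returned to the queue for retry.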
func jobEmitterQueueHandler(items ...*jobUpdate) []*jobUpdate {
ctx := graceful.GetManager().ShutdownContext()
var ret []*jobUpdate
for _, update := range items {
if err := checkJobsOfRun(ctx, update.RunID); err != nil {
ret = append(ret, update)
}
}
return ret
}
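// checkJobsOfRun resolves blocked jobs of the run, updates those whose dependencies are satisfied, then refreshes commit statuses and sends notifications.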
func checkJobsOfRun(ctx context.Context, runID int64) error {
jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: runID})
if err != nil {
return err
}
var updatedjobs []*actions_model.ActionRunJob
if err := db.WithTx(ctx, func(ctx context.Context) error {
idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
for _, job := range jobs {
idToJobs[job.JobID] = append(idToJobs[job.JobID], job)
}
updates := newJobStatusResolver(jobs).Resolve()
for _, job := range jobs {
if status, ok := updates[job.ID]; ok {
job.Status = status
if n, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"status": actions_model.StatusBlocked}, "status"); err != nil {
return err
} else if n != 1 {
return fmt.Errorf("no affected for updating blocked job %v", job.ID)
}
updatedjobs = append(updatedjobs, job)
}
}
return nil
}); err != nil {
return err
}
CreateCommitStatus(ctx, jobs...)
for _, job := range updatedjobs {
_ = job.LoadAttributes(ctx)
notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)
}
return nil
}
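// jobStatusResolver computes which blocked jobs can be unblocked (or skipped) based on the statuses of the jobs they need.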
type jobStatusResolver struct {
statuses map[int64]actions_model.Status
needs map[int64][]int64
jobMap map[int64]*actions_model.ActionRunJob
}
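// newJobStatusResolver builds a resolver for the given jobs, mapping each job to the concrete IDs of its needed jobs.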
func newJobStatusResolver(jobs actions_model.ActionJobList) *jobStatusResolver {
idToJobs := make(map[string][]*actions_model.ActionRunJob, len(jobs))
jobMap := make(map[int64]*actions_model.ActionRunJob)
for _, job := range jobs {
idToJobs[job.JobID] = append(idToJobs[job.JobID], job)
jobMap[job.ID] = job
}
statuses := make(map[int64]actions_model.Status, len(jobs))
needs := make(map[int64][]int64, len(jobs))
for _, job := range jobs {
statuses[job.ID] = job.Status
for _, need := range job.Needs {
for _, v := range idToJobs[need] {
needs[job.ID] = append(needs[job.ID], v.ID)
}
}
}
return &jobStatusResolver{
statuses: statuses,
needs: needs,
jobMap: jobMap,
}
}
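// Resolve repeatedly applies resolution passes until no further status changes are found and returns all of them.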
func (r *jobStatusResolver) Resolve() map[int64]actions_model.Status {
ret := map[int64]actions_model.Status{}
for i := 0; i < len(r.statuses); i++ {
updated := r.resolve()
if len(updated) == 0 {
return ret
}
for k, v := range updated {
ret[k] = v
r.statuses[k] = v
}
}
return ret
}
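// resolve runs a single pass: a blocked job whose needs are all done becomes waiting if they all succeeded (or if it has an `if` condition), otherwise it is skipped.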
func (r *jobStatusResolver) resolve() map[int64]actions_model.Status {
ret := map[int64]actions_model.Status{}
for id, status := range r.statuses {
if status != actions_model.StatusBlocked {
continue
}
allDone, allSucceed := true, true
for _, need := range r.needs[id] {
needStatus := r.statuses[need]
if !needStatus.IsDone() {
allDone = false
}
if needStatus.In(actions_model.StatusFailure, actions_model.StatusCancelled, actions_model.StatusSkipped) {
allSucceed = false
}
}
if allDone {
if allSucceed {
ret[id] = actions_model.StatusWaiting
} else {
// Check if the job has an "if" condition
hasIf := false
if wfJobs, _ := jobparser.Parse(r.jobMap[id].WorkflowPayload); len(wfJobs) == 1 {
_, wfJob := wfJobs[0].Job()
hasIf = len(wfJob.If.Value) > 0
}
if hasIf {
// act_runner will check the "if" condition
ret[id] = actions_model.StatusWaiting
} else {
// If the "if" condition is empty and not all dependent jobs completed successfully,
// the job should be skipped.
ret[id] = actions_model.StatusSkipped
}
}
}
}
return ret
}

actions/job_emitter_test.go Normal file

@@ -0,0 +1,136 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"testing"
actions_model "code.gitea.io/gitea/models/actions"
"github.com/stretchr/testify/assert"
)
func Test_jobStatusResolver_Resolve(t *testing.T) {
tests := []struct {
name string
jobs actions_model.ActionJobList
want map[int64]actions_model.Status
}{
{
name: "no blocked",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "1", Status: actions_model.StatusWaiting, Needs: []string{}},
{ID: 2, JobID: "2", Status: actions_model.StatusWaiting, Needs: []string{}},
{ID: 3, JobID: "3", Status: actions_model.StatusWaiting, Needs: []string{}},
},
want: map[int64]actions_model.Status{},
},
{
name: "single blocked",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "1", Status: actions_model.StatusSuccess, Needs: []string{}},
{ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
{ID: 3, JobID: "3", Status: actions_model.StatusWaiting, Needs: []string{}},
},
want: map[int64]actions_model.Status{
2: actions_model.StatusWaiting,
},
},
{
name: "multiple blocked",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "1", Status: actions_model.StatusSuccess, Needs: []string{}},
{ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
{ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
},
want: map[int64]actions_model.Status{
2: actions_model.StatusWaiting,
3: actions_model.StatusWaiting,
},
},
{
name: "chain blocked",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "1", Status: actions_model.StatusFailure, Needs: []string{}},
{ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
{ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"2"}},
},
want: map[int64]actions_model.Status{
2: actions_model.StatusSkipped,
3: actions_model.StatusSkipped,
},
},
{
name: "loop need",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "1", Status: actions_model.StatusBlocked, Needs: []string{"3"}},
{ID: 2, JobID: "2", Status: actions_model.StatusBlocked, Needs: []string{"1"}},
{ID: 3, JobID: "3", Status: actions_model.StatusBlocked, Needs: []string{"2"}},
},
want: map[int64]actions_model.Status{},
},
{
name: "`if` is not empty and all jobs in `needs` completed successfully",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "job1", Status: actions_model.StatusSuccess, Needs: []string{}},
{ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
`
name: test
on: push
jobs:
job2:
runs-on: ubuntu-latest
needs: job1
if: ${{ always() && needs.job1.result == 'success' }}
steps:
- run: echo "will be checked by act_runner"
`)},
},
want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
},
{
name: "`if` is not empty and not all jobs in `needs` completed successfully",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
{ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
`
name: test
on: push
jobs:
job2:
runs-on: ubuntu-latest
needs: job1
if: ${{ always() && needs.job1.result == 'failure' }}
steps:
- run: echo "will be checked by act_runner"
`)},
},
want: map[int64]actions_model.Status{2: actions_model.StatusWaiting},
},
{
name: "`if` is empty and not all jobs in `needs` completed successfully",
jobs: actions_model.ActionJobList{
{ID: 1, JobID: "job1", Status: actions_model.StatusFailure, Needs: []string{}},
{ID: 2, JobID: "job2", Status: actions_model.StatusBlocked, Needs: []string{"job1"}, WorkflowPayload: []byte(
`
name: test
on: push
jobs:
job2:
runs-on: ubuntu-latest
needs: job1
steps:
- run: echo "should be skipped"
`)},
},
want: map[int64]actions_model.Status{2: actions_model.StatusSkipped},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := newJobStatusResolver(tt.jobs)
assert.Equal(t, tt.want, r.Resolve())
})
}
}

actions/notifier.go Normal file

@@ -0,0 +1,764 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
issues_model "code.gitea.io/gitea/models/issues"
packages_model "code.gitea.io/gitea/models/packages"
perm_model "code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
webhook_module "code.gitea.io/gitea/modules/webhook"
"code.gitea.io/gitea/services/convert"
notify_service "code.gitea.io/gitea/services/notify"
)
type actionsNotifier struct {
notify_service.NullNotifier
}
var _ notify_service.Notifier = &actionsNotifier{}
// NewNotifier creates a new actionsNotifier
func NewNotifier() notify_service.Notifier {
return &actionsNotifier{}
}
// NewIssue notifies that a new issue has been created
func (n *actionsNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, _ []*user_model.User) {
ctx = withMethod(ctx, "NewIssue")
if err := issue.LoadRepo(ctx); err != nil {
log.Error("issue.LoadRepo: %v", err)
return
}
if err := issue.LoadPoster(ctx); err != nil {
log.Error("issue.LoadPoster: %v", err)
return
}
permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).WithPayload(&api.IssuePayload{
Action: api.HookIssueOpened,
Index: issue.Index,
Issue: convert.ToAPIIssue(ctx, issue.Poster, issue),
Repository: convert.ToRepo(ctx, issue.Repo, permission),
Sender: convert.ToUser(ctx, issue.Poster, nil),
}).Notify(ctx)
}
// IssueChangeContent notifies that the content of an issue has changed
func (n *actionsNotifier) IssueChangeContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldContent string) {
ctx = withMethod(ctx, "IssueChangeContent")
n.notifyIssueChangeWithTitleOrContent(ctx, doer, issue)
}
func (n *actionsNotifier) IssueChangeTitle(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldTitle string) {
ctx = withMethod(ctx, "IssueChangeTitle")
n.notifyIssueChangeWithTitleOrContent(ctx, doer, issue)
}
func (n *actionsNotifier) notifyIssueChangeWithTitleOrContent(ctx context.Context, doer *user_model.User, issue *issues_model.Issue) {
var err error
if err = issue.LoadRepo(ctx); err != nil {
log.Error("LoadRepo: %v", err)
return
}
permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
if issue.IsPull {
if err = issue.LoadPullRequest(ctx); err != nil {
log.Error("loadPullRequest: %v", err)
return
}
newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequest).
WithDoer(doer).
WithPayload(&api.PullRequestPayload{
Action: api.HookIssueEdited,
Index: issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
Repository: convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
Sender: convert.ToUser(ctx, doer, nil),
}).
WithPullRequest(issue.PullRequest).
Notify(ctx)
return
}
newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).
WithDoer(doer).
WithPayload(&api.IssuePayload{
Action: api.HookIssueEdited,
Index: issue.Index,
Issue: convert.ToAPIIssue(ctx, doer, issue),
Repository: convert.ToRepo(ctx, issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
}).
Notify(ctx)
}
// IssueChangeStatus notifies that an issue has been closed or reopened
func (n *actionsNotifier) IssueChangeStatus(ctx context.Context, doer *user_model.User, commitID string, issue *issues_model.Issue, _ *issues_model.Comment, isClosed bool) {
ctx = withMethod(ctx, "IssueChangeStatus")
permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
if issue.IsPull {
if err := issue.LoadPullRequest(ctx); err != nil {
log.Error("LoadPullRequest: %v", err)
return
}
// Merging a pull request calls issue.changeStatus, so we need to handle it separately.
apiPullRequest := &api.PullRequestPayload{
Index: issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
Repository: convert.ToRepo(ctx, issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
CommitID: commitID,
}
if isClosed {
apiPullRequest.Action = api.HookIssueClosed
} else {
apiPullRequest.Action = api.HookIssueReOpened
}
newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequest).
WithDoer(doer).
WithPayload(apiPullRequest).
WithPullRequest(issue.PullRequest).
Notify(ctx)
return
}
apiIssue := &api.IssuePayload{
Index: issue.Index,
Issue: convert.ToAPIIssue(ctx, doer, issue),
Repository: convert.ToRepo(ctx, issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
}
if isClosed {
apiIssue.Action = api.HookIssueClosed
} else {
apiIssue.Action = api.HookIssueReOpened
}
newNotifyInputFromIssue(issue, webhook_module.HookEventIssues).
WithDoer(doer).
WithPayload(apiIssue).
Notify(ctx)
}
// IssueChangeAssignee notifies that an issue has been assigned or unassigned
func (n *actionsNotifier) IssueChangeAssignee(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, assignee *user_model.User, removed bool, comment *issues_model.Comment) {
ctx = withMethod(ctx, "IssueChangeAssignee")
var action api.HookIssueAction
if removed {
action = api.HookIssueUnassigned
} else {
action = api.HookIssueAssigned
}
hookEvent := webhook_module.HookEventIssueAssign
if issue.IsPull {
hookEvent = webhook_module.HookEventPullRequestAssign
}
notifyIssueChange(ctx, doer, issue, hookEvent, action)
}
// IssueChangeMilestone notifies that an issue's milestone has been changed
func (n *actionsNotifier) IssueChangeMilestone(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, oldMilestoneID int64) {
ctx = withMethod(ctx, "IssueChangeMilestone")
var action api.HookIssueAction
if issue.MilestoneID > 0 {
action = api.HookIssueMilestoned
} else {
action = api.HookIssueDemilestoned
}
hookEvent := webhook_module.HookEventIssueMilestone
if issue.IsPull {
hookEvent = webhook_module.HookEventPullRequestMilestone
}
notifyIssueChange(ctx, doer, issue, hookEvent, action)
}
func (n *actionsNotifier) IssueChangeLabels(ctx context.Context, doer *user_model.User, issue *issues_model.Issue,
_, _ []*issues_model.Label,
) {
ctx = withMethod(ctx, "IssueChangeLabels")
hookEvent := webhook_module.HookEventIssueLabel
if issue.IsPull {
hookEvent = webhook_module.HookEventPullRequestLabel
}
notifyIssueChange(ctx, doer, issue, hookEvent, api.HookIssueLabelUpdated)
}
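// notifyIssueChange builds the issue or pull request payload for the given event and action and dispatches it.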
func notifyIssueChange(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, event webhook_module.HookEventType, action api.HookIssueAction) {
var err error
if err = issue.LoadRepo(ctx); err != nil {
log.Error("LoadRepo: %v", err)
return
}
if err = issue.LoadPoster(ctx); err != nil {
log.Error("LoadPoster: %v", err)
return
}
if issue.IsPull {
if err = issue.LoadPullRequest(ctx); err != nil {
log.Error("loadPullRequest: %v", err)
return
}
newNotifyInputFromIssue(issue, event).
WithDoer(doer).
WithPayload(&api.PullRequestPayload{
Action: action,
Index: issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
Repository: convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
Sender: convert.ToUser(ctx, doer, nil),
}).
WithPullRequest(issue.PullRequest).
Notify(ctx)
return
}
permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, issue.Poster)
newNotifyInputFromIssue(issue, event).
WithDoer(doer).
WithPayload(&api.IssuePayload{
Action: action,
Index: issue.Index,
Issue: convert.ToAPIIssue(ctx, doer, issue),
Repository: convert.ToRepo(ctx, issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
}).
Notify(ctx)
}
// CreateIssueComment notifies notifiers of a new comment on an issue
func (n *actionsNotifier) CreateIssueComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository,
issue *issues_model.Issue, comment *issues_model.Comment, _ []*user_model.User,
) {
ctx = withMethod(ctx, "CreateIssueComment")
if issue.IsPull {
notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentCreated)
return
}
notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventIssueComment, api.HookIssueCommentCreated)
}
func (n *actionsNotifier) UpdateComment(ctx context.Context, doer *user_model.User, c *issues_model.Comment, oldContent string) {
ctx = withMethod(ctx, "UpdateComment")
if err := c.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return
}
if c.Issue.IsPull {
notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventPullRequestComment, api.HookIssueCommentEdited)
return
}
notifyIssueCommentChange(ctx, doer, c, oldContent, webhook_module.HookEventIssueComment, api.HookIssueCommentEdited)
}
func (n *actionsNotifier) DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) {
ctx = withMethod(ctx, "DeleteComment")
if err := comment.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return
}
if comment.Issue.IsPull {
notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventPullRequestComment, api.HookIssueCommentDeleted)
return
}
notifyIssueCommentChange(ctx, doer, comment, "", webhook_module.HookEventIssueComment, api.HookIssueCommentDeleted)
}
func notifyIssueCommentChange(ctx context.Context, doer *user_model.User, comment *issues_model.Comment, oldContent string, event webhook_module.HookEventType, action api.HookIssueCommentAction) {
if err := comment.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return
}
if err := comment.Issue.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return
}
permission, _ := access_model.GetUserRepoPermission(ctx, comment.Issue.Repo, doer)
payload := &api.IssueCommentPayload{
Action: action,
Issue: convert.ToAPIIssue(ctx, doer, comment.Issue),
Comment: convert.ToAPIComment(ctx, comment.Issue.Repo, comment),
Repository: convert.ToRepo(ctx, comment.Issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
IsPull: comment.Issue.IsPull,
}
if action == api.HookIssueCommentEdited {
payload.Changes = &api.ChangesPayload{
Body: &api.ChangesFromPayload{
From: oldContent,
},
}
}
if comment.Issue.IsPull {
if err := comment.Issue.LoadPullRequest(ctx); err != nil {
log.Error("LoadPullRequest: %v", err)
return
}
newNotifyInputFromIssue(comment.Issue, event).
WithDoer(doer).
WithPayload(payload).
WithPullRequest(comment.Issue.PullRequest).
Notify(ctx)
return
}
newNotifyInputFromIssue(comment.Issue, event).
WithDoer(doer).
WithPayload(payload).
Notify(ctx)
}
func (n *actionsNotifier) NewPullRequest(ctx context.Context, pull *issues_model.PullRequest, _ []*user_model.User) {
ctx = withMethod(ctx, "NewPullRequest")
if err := pull.LoadIssue(ctx); err != nil {
log.Error("pull.LoadIssue: %v", err)
return
}
if err := pull.Issue.LoadRepo(ctx); err != nil {
log.Error("pull.Issue.LoadRepo: %v", err)
return
}
if err := pull.Issue.LoadPoster(ctx); err != nil {
log.Error("pull.Issue.LoadPoster: %v", err)
return
}
permission, _ := access_model.GetUserRepoPermission(ctx, pull.Issue.Repo, pull.Issue.Poster)
newNotifyInputFromIssue(pull.Issue, webhook_module.HookEventPullRequest).
WithPayload(&api.PullRequestPayload{
Action: api.HookIssueOpened,
Index: pull.Issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, pull, nil),
Repository: convert.ToRepo(ctx, pull.Issue.Repo, permission),
Sender: convert.ToUser(ctx, pull.Issue.Poster, nil),
}).
WithPullRequest(pull).
Notify(ctx)
}
func (n *actionsNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
ctx = withMethod(ctx, "CreateRepository")
newNotifyInput(repo, doer, webhook_module.HookEventRepository).WithPayload(&api.RepositoryPayload{
Action: api.HookRepoCreated,
Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Organization: convert.ToUser(ctx, u, nil),
Sender: convert.ToUser(ctx, doer, nil),
}).Notify(ctx)
}
func (n *actionsNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
ctx = withMethod(ctx, "ForkRepository")
oldPermission, _ := access_model.GetUserRepoPermission(ctx, oldRepo, doer)
permission, _ := access_model.GetUserRepoPermission(ctx, repo, doer)
// forked webhook
newNotifyInput(oldRepo, doer, webhook_module.HookEventFork).WithPayload(&api.ForkPayload{
Forkee: convert.ToRepo(ctx, oldRepo, oldPermission),
Repo: convert.ToRepo(ctx, repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
}).Notify(ctx)
u := repo.MustOwner(ctx)
// Add to hook queue for created repo after session commit.
if u.IsOrganization() {
newNotifyInput(repo, doer, webhook_module.HookEventRepository).
WithRef(git.RefNameFromBranch(oldRepo.DefaultBranch).String()).
WithPayload(&api.RepositoryPayload{
Action: api.HookRepoCreated,
Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Organization: convert.ToUser(ctx, u, nil),
Sender: convert.ToUser(ctx, doer, nil),
}).Notify(ctx)
}
}
func (n *actionsNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, _ *issues_model.Comment, _ []*user_model.User) {
ctx = withMethod(ctx, "PullRequestReview")
var reviewHookType webhook_module.HookEventType
switch review.Type {
case issues_model.ReviewTypeApprove:
reviewHookType = webhook_module.HookEventPullRequestReviewApproved
case issues_model.ReviewTypeComment:
reviewHookType = webhook_module.HookEventPullRequestReviewComment
case issues_model.ReviewTypeReject:
reviewHookType = webhook_module.HookEventPullRequestReviewRejected
default:
// unsupported review webhook type here
log.Error("Unsupported review webhook type")
return
}
if err := pr.LoadIssue(ctx); err != nil {
log.Error("pr.LoadIssue: %v", err)
return
}
permission, err := access_model.GetUserRepoPermission(ctx, review.Issue.Repo, review.Issue.Poster)
if err != nil {
log.Error("models.GetUserRepoPermission: %v", err)
return
}
newNotifyInput(review.Issue.Repo, review.Reviewer, reviewHookType).
WithRef(review.CommitID).
WithPayload(&api.PullRequestPayload{
Action: api.HookIssueReviewed,
Index: review.Issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
Repository: convert.ToRepo(ctx, review.Issue.Repo, permission),
Sender: convert.ToUser(ctx, review.Reviewer, nil),
Review: &api.ReviewPayload{
Type: string(reviewHookType),
Content: review.Content,
},
}).Notify(ctx)
}
func (n *actionsNotifier) PullRequestReviewRequest(ctx context.Context, doer *user_model.User, issue *issues_model.Issue, reviewer *user_model.User, isRequest bool, comment *issues_model.Comment) {
if !issue.IsPull {
log.Warn("PullRequestReviewRequest: issue is not a pull request: %v", issue.ID)
return
}
ctx = withMethod(ctx, "PullRequestReviewRequest")
permission, _ := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
if err := issue.LoadPullRequest(ctx); err != nil {
log.Error("LoadPullRequest failed: %v", err)
return
}
var action api.HookIssueAction
if isRequest {
action = api.HookIssueReviewRequested
} else {
action = api.HookIssueReviewRequestRemoved
}
newNotifyInputFromIssue(issue, webhook_module.HookEventPullRequestReviewRequest).
WithDoer(doer).
WithPayload(&api.PullRequestPayload{
Action: action,
Index: issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
RequestedReviewer: convert.ToUser(ctx, reviewer, nil),
Repository: convert.ToRepo(ctx, issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
}).
WithPullRequest(issue.PullRequest).
Notify(ctx)
}
func (*actionsNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
ctx = withMethod(ctx, "MergePullRequest")
// Reload pull request information.
if err := pr.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return
}
if err := pr.LoadIssue(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return
}
if err := pr.Issue.LoadRepo(ctx); err != nil {
log.Error("pr.Issue.LoadRepo: %v", err)
return
}
permission, err := access_model.GetUserRepoPermission(ctx, pr.Issue.Repo, doer)
if err != nil {
log.Error("models.GetUserRepoPermission: %v", err)
return
}
// Merging a pull request calls issue.changeStatus, so it needs to be handled separately.
apiPullRequest := &api.PullRequestPayload{
Index: pr.Issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
Repository: convert.ToRepo(ctx, pr.Issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
Action: api.HookIssueClosed,
}
newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequest).
WithRef(pr.MergedCommitID).
WithPayload(apiPullRequest).
WithPullRequest(pr).
Notify(ctx)
}
func (n *actionsNotifier) PushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
commitID, _ := git.NewIDFromString(opts.NewCommitID)
if commitID.IsZero() {
log.Trace("new commitID is empty")
return
}
ctx = withMethod(ctx, "PushCommits")
apiPusher := convert.ToUser(ctx, pusher, nil)
apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo)
if err != nil {
log.Error("commits.ToAPIPayloadCommits failed: %v", err)
return
}
newNotifyInput(repo, pusher, webhook_module.HookEventPush).
WithRef(opts.RefFullName.String()).
WithPayload(&api.PushPayload{
Ref: opts.RefFullName.String(),
Before: opts.OldCommitID,
After: opts.NewCommitID,
CompareURL: setting.AppURL + commits.CompareURL,
Commits: apiCommits,
HeadCommit: apiHeadCommit,
Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Pusher: apiPusher,
Sender: apiPusher,
}).
Notify(ctx)
}
func (n *actionsNotifier) CreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
ctx = withMethod(ctx, "CreateRef")
apiPusher := convert.ToUser(ctx, pusher, nil)
apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeNone})
newNotifyInput(repo, pusher, webhook_module.HookEventCreate).
WithRef(refFullName.String()).
WithPayload(&api.CreatePayload{
Ref: refFullName.String(), // HINT: this is inconsistent with the webhook payload, which uses the ShortName
Sha: refID,
RefType: string(refFullName.RefType()),
Repo: apiRepo,
Sender: apiPusher,
}).
Notify(ctx)
}
func (n *actionsNotifier) DeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
ctx = withMethod(ctx, "DeleteRef")
apiPusher := convert.ToUser(ctx, pusher, nil)
apiRepo := convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeNone})
newNotifyInput(repo, pusher, webhook_module.HookEventDelete).
WithPayload(&api.DeletePayload{
Ref: refFullName.String(), // HINT: this is inconsistent with the webhook payload, which uses the ShortName
RefType: string(refFullName.RefType()),
PusherType: api.PusherTypeUser,
Repo: apiRepo,
Sender: apiPusher,
}).
Notify(ctx)
}
func (n *actionsNotifier) SyncPushCommits(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, opts *repository.PushUpdateOptions, commits *repository.PushCommits) {
ctx = withMethod(ctx, "SyncPushCommits")
apiPusher := convert.ToUser(ctx, pusher, nil)
apiCommits, apiHeadCommit, err := commits.ToAPIPayloadCommits(ctx, repo)
if err != nil {
log.Error("commits.ToAPIPayloadCommits failed: %v", err)
return
}
newNotifyInput(repo, pusher, webhook_module.HookEventPush).
WithRef(opts.RefFullName.String()).
WithPayload(&api.PushPayload{
Ref: opts.RefFullName.String(),
Before: opts.OldCommitID,
After: opts.NewCommitID,
CompareURL: setting.AppURL + commits.CompareURL,
Commits: apiCommits,
TotalCommits: commits.Len,
HeadCommit: apiHeadCommit,
Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Pusher: apiPusher,
Sender: apiPusher,
}).
Notify(ctx)
}
func (n *actionsNotifier) SyncCreateRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
ctx = withMethod(ctx, "SyncCreateRef")
n.CreateRef(ctx, pusher, repo, refFullName, refID)
}
func (n *actionsNotifier) SyncDeleteRef(ctx context.Context, pusher *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
ctx = withMethod(ctx, "SyncDeleteRef")
n.DeleteRef(ctx, pusher, repo, refFullName)
}
func (n *actionsNotifier) NewRelease(ctx context.Context, rel *repo_model.Release) {
ctx = withMethod(ctx, "NewRelease")
notifyRelease(ctx, rel.Publisher, rel, api.HookReleasePublished)
}
func (n *actionsNotifier) UpdateRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
ctx = withMethod(ctx, "UpdateRelease")
notifyRelease(ctx, doer, rel, api.HookReleaseUpdated)
}
func (n *actionsNotifier) DeleteRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release) {
if rel.IsTag {
// the same action has already been sent in `PushCommits`, so skip it.
return
}
ctx = withMethod(ctx, "DeleteRelease")
notifyRelease(ctx, doer, rel, api.HookReleaseDeleted)
}
func (n *actionsNotifier) PackageCreate(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
ctx = withMethod(ctx, "PackageCreate")
notifyPackage(ctx, doer, pd, api.HookPackageCreated)
}
func (n *actionsNotifier) PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor) {
ctx = withMethod(ctx, "PackageDelete")
notifyPackage(ctx, doer, pd, api.HookPackageDeleted)
}
func (n *actionsNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
ctx = withMethod(ctx, "AutoMergePullRequest")
n.MergePullRequest(ctx, doer, pr)
}
func (n *actionsNotifier) PullRequestSynchronized(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
ctx = withMethod(ctx, "PullRequestSynchronized")
if err := pr.LoadIssue(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return
}
if err := pr.Issue.LoadRepo(ctx); err != nil {
log.Error("pr.Issue.LoadRepo: %v", err)
return
}
newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequestSync).
WithPayload(&api.PullRequestPayload{
Action: api.HookIssueSynchronized,
Index: pr.Issue.Index,
PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
Repository: convert.ToRepo(ctx, pr.Issue.Repo, access_model.Permission{AccessMode: perm_model.AccessModeNone}),
Sender: convert.ToUser(ctx, doer, nil),
}).
WithPullRequest(pr).
Notify(ctx)
}
func (n *actionsNotifier) PullRequestChangeTargetBranch(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, oldBranch string) {
ctx = withMethod(ctx, "PullRequestChangeTargetBranch")
if err := pr.LoadIssue(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return
}
if err := pr.Issue.LoadRepo(ctx); err != nil {
log.Error("pr.Issue.LoadRepo: %v", err)
return
}
permission, _ := access_model.GetUserRepoPermission(ctx, pr.Issue.Repo, pr.Issue.Poster)
newNotifyInput(pr.Issue.Repo, doer, webhook_module.HookEventPullRequest).
WithPayload(&api.PullRequestPayload{
Action: api.HookIssueEdited,
Index: pr.Issue.Index,
Changes: &api.ChangesPayload{
Ref: &api.ChangesFromPayload{
From: oldBranch,
},
},
PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
Repository: convert.ToRepo(ctx, pr.Issue.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
}).
WithPullRequest(pr).
Notify(ctx)
}
func (n *actionsNotifier) NewWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
ctx = withMethod(ctx, "NewWikiPage")
newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
Action: api.HookWikiCreated,
Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Sender: convert.ToUser(ctx, doer, nil),
Page: page,
Comment: comment,
}).Notify(ctx)
}
func (n *actionsNotifier) EditWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page, comment string) {
ctx = withMethod(ctx, "EditWikiPage")
newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
Action: api.HookWikiEdited,
Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Sender: convert.ToUser(ctx, doer, nil),
Page: page,
Comment: comment,
}).Notify(ctx)
}
func (n *actionsNotifier) DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, page string) {
ctx = withMethod(ctx, "DeleteWikiPage")
newNotifyInput(repo, doer, webhook_module.HookEventWiki).WithPayload(&api.WikiPayload{
Action: api.HookWikiDeleted,
Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Sender: convert.ToUser(ctx, doer, nil),
Page: page,
}).Notify(ctx)
}
// MigrateRepository is used to detect workflows after a repository has been migrated
func (n *actionsNotifier) MigrateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
ctx = withMethod(ctx, "MigrateRepository")
newNotifyInput(repo, doer, webhook_module.HookEventRepository).WithPayload(&api.RepositoryPayload{
Action: api.HookRepoCreated,
Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}),
Organization: convert.ToUser(ctx, u, nil),
Sender: convert.ToUser(ctx, doer, nil),
}).Notify(ctx)
}
587
actions/notifier_helper.go Normal file
View File
@ -0,0 +1,587 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"bytes"
"context"
"fmt"
"slices"
"strings"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
packages_model "code.gitea.io/gitea/models/packages"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
unit_model "code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
actions_module "code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
webhook_module "code.gitea.io/gitea/modules/webhook"
"code.gitea.io/gitea/services/convert"
notify_service "code.gitea.io/gitea/services/notify"
"github.com/nektos/act/pkg/jobparser"
"github.com/nektos/act/pkg/model"
)
var methodCtxKey struct{}
// withMethod sets the notification method that this context currently executes.
// Used for debugging/troubleshooting purposes.
func withMethod(ctx context.Context, method string) context.Context {
// don't overwrite
if v := ctx.Value(methodCtxKey); v != nil {
if _, ok := v.(string); ok {
return ctx
}
}
// FIXME: review the use of this nolint directive
return context.WithValue(ctx, methodCtxKey, method) //nolint:staticcheck
}
// getMethod gets the notification method that this context currently executes.
// Default: "notify"
// Used for debugging/troubleshooting purposes.
func getMethod(ctx context.Context) string {
if v := ctx.Value(methodCtxKey); v != nil {
if s, ok := v.(string); ok {
return s
}
}
return "notify"
}
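// A minimal usage sketch, assuming only the two helpers above: the method marker is
// attached once and later reads return it; a second withMethod call is a no-op.
func exampleWithMethod() {
	ctx := withMethod(context.Background(), "NewIssue")
	_ = getMethod(ctx) // "NewIssue"
	ctx = withMethod(ctx, "IssueChangeStatus")
	_ = getMethod(ctx) // still "NewIssue": the existing value wins
}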
type notifyInput struct {
// required
Repo *repo_model.Repository
Doer *user_model.User
Event webhook_module.HookEventType
// optional
Ref git.RefName
Payload api.Payloader
PullRequest *issues_model.PullRequest
}
func newNotifyInput(repo *repo_model.Repository, doer *user_model.User, event webhook_module.HookEventType) *notifyInput {
return &notifyInput{
Repo: repo,
Doer: doer,
Event: event,
}
}
func newNotifyInputForSchedules(repo *repo_model.Repository) *notifyInput {
// the doer here will be ignored because we force the actions user to be used when handling schedules
return newNotifyInput(repo, user_model.NewActionsUser(), webhook_module.HookEventSchedule)
}
func (input *notifyInput) WithDoer(doer *user_model.User) *notifyInput {
input.Doer = doer
return input
}
func (input *notifyInput) WithRef(ref string) *notifyInput {
input.Ref = git.RefName(ref)
return input
}
func (input *notifyInput) WithPayload(payload api.Payloader) *notifyInput {
input.Payload = payload
return input
}
func (input *notifyInput) WithPullRequest(pr *issues_model.PullRequest) *notifyInput {
input.PullRequest = pr
if input.Ref == "" {
input.Ref = git.RefName(pr.GetGitRefName())
}
return input
}
func (input *notifyInput) Notify(ctx context.Context) {
log.Trace("execute %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
if err := notify(ctx, input); err != nil {
log.Error("an error occurred while executing the %s actions method: %v", getMethod(ctx), err)
}
}
func notify(ctx context.Context, input *notifyInput) error {
shouldDetectSchedules := input.Event == webhook_module.HookEventPush && input.Ref.BranchName() == input.Repo.DefaultBranch
if input.Doer.IsGiteaActions() {
// avoid triggering cyclically, for example:
// a comment on an issue triggers the runner to add a new comment as a reply,
// and the new comment would trigger the runner again.
log.Debug("ignore executing %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
// we should update schedule tasks in this case, because
// 1. schedule tasks cannot be triggered by other events, so cyclic triggering will not occur
// 2. some schedule tasks may update the repo periodically, so the refs of schedule tasks need to be updated
if shouldDetectSchedules {
return DetectAndHandleSchedules(ctx, input.Repo)
}
return nil
}
if input.Repo.IsEmpty || input.Repo.IsArchived {
return nil
}
if unit_model.TypeActions.UnitGlobalDisabled() {
if err := CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
return nil
}
if err := input.Repo.LoadUnits(ctx); err != nil {
return fmt.Errorf("repo.LoadUnits: %w", err)
} else if !input.Repo.UnitEnabled(ctx, unit_model.TypeActions) {
return nil
}
gitRepo, err := gitrepo.OpenRepository(context.Background(), input.Repo)
if err != nil {
return fmt.Errorf("git.OpenRepository: %w", err)
}
defer gitRepo.Close()
ref := input.Ref
if ref.BranchName() != input.Repo.DefaultBranch && actions_module.IsDefaultBranchWorkflow(input.Event) {
if ref != "" {
log.Warn("Event %q should only trigger workflows on the default branch, but its ref is %q. Will fall back to the default branch",
input.Event, ref)
}
ref = git.RefNameFromBranch(input.Repo.DefaultBranch)
}
if ref == "" {
log.Warn("Ref of event %q is empty, will fall back to the default branch", input.Event)
ref = git.RefNameFromBranch(input.Repo.DefaultBranch)
}
commitID, err := gitRepo.GetRefCommitID(ref.String())
if err != nil {
return fmt.Errorf("gitRepo.GetRefCommitID: %w", err)
}
// Get the commit object for the ref
commit, err := gitRepo.GetCommit(commitID)
if err != nil {
return fmt.Errorf("gitRepo.GetCommit: %w", err)
}
if skipWorkflows(input, commit) {
return nil
}
var detectedWorkflows []*actions_module.DetectedWorkflow
actionsConfig := input.Repo.MustGetUnit(ctx, unit_model.TypeActions).ActionsConfig()
workflows, schedules, err := actions_module.DetectWorkflows(gitRepo, commit,
input.Event,
input.Payload,
shouldDetectSchedules,
)
if err != nil {
return fmt.Errorf("DetectWorkflows: %w", err)
}
log.Trace("repo %s with commit %s event %s find %d workflows and %d schedules",
input.Repo.RepoPath(),
commit.ID,
input.Event,
len(workflows),
len(schedules),
)
for _, wf := range workflows {
if actionsConfig.IsWorkflowDisabled(wf.EntryName) {
log.Trace("repo %s has disable workflows %s", input.Repo.RepoPath(), wf.EntryName)
continue
}
if wf.TriggerEvent.Name != actions_module.GithubEventPullRequestTarget {
detectedWorkflows = append(detectedWorkflows, wf)
}
}
if input.PullRequest != nil {
// detect pull_request_target workflows
baseRef := git.BranchPrefix + input.PullRequest.BaseBranch
baseCommit, err := gitRepo.GetCommit(baseRef)
if err != nil {
return fmt.Errorf("gitRepo.GetCommit: %w", err)
}
baseWorkflows, _, err := actions_module.DetectWorkflows(gitRepo, baseCommit, input.Event, input.Payload, false)
if err != nil {
return fmt.Errorf("DetectWorkflows: %w", err)
}
if len(baseWorkflows) == 0 {
log.Trace("repo %s with commit %s couldn't find pull_request_target workflows", input.Repo.RepoPath(), baseCommit.ID)
} else {
for _, wf := range baseWorkflows {
if wf.TriggerEvent.Name == actions_module.GithubEventPullRequestTarget {
detectedWorkflows = append(detectedWorkflows, wf)
}
}
}
}
if shouldDetectSchedules {
if err := handleSchedules(ctx, schedules, commit, input, ref.String()); err != nil {
return err
}
}
return handleWorkflows(ctx, detectedWorkflows, commit, input, ref.String())
}
func skipWorkflows(input *notifyInput, commit *git.Commit) bool {
// skip workflow runs with a configured skip-ci string in commit message or pr title if the event is push or pull_request(_sync)
// https://docs.github.com/en/actions/managing-workflow-runs/skipping-workflow-runs
skipWorkflowEvents := []webhook_module.HookEventType{
webhook_module.HookEventPush,
webhook_module.HookEventPullRequest,
webhook_module.HookEventPullRequestSync,
}
if slices.Contains(skipWorkflowEvents, input.Event) {
for _, s := range setting.Actions.SkipWorkflowStrings {
if input.PullRequest != nil && strings.Contains(input.PullRequest.Issue.Title, s) {
log.Debug("repo %s: skipped run for pr %v because of %s string", input.Repo.RepoPath(), input.PullRequest.Issue.ID, s)
return true
}
if strings.Contains(commit.CommitMessage, s) {
log.Debug("repo %s with commit %s: skipped run because of %s string", input.Repo.RepoPath(), commit.ID, s)
return true
}
}
}
return false
}
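// A minimal sketch of the check above, assuming setting.Actions.SkipWorkflowStrings
// contains markers such as "[skip ci]"; the commit message here is hypothetical.
func exampleSkipCI() bool {
	commitMessage := "docs: fix typo [skip ci]"
	for _, s := range setting.Actions.SkipWorkflowStrings {
		if strings.Contains(commitMessage, s) {
			return true // a push with this message would not trigger workflows
		}
	}
	return false
}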
func handleWorkflows(
ctx context.Context,
detectedWorkflows []*actions_module.DetectedWorkflow,
commit *git.Commit,
input *notifyInput,
ref string,
) error {
if len(detectedWorkflows) == 0 {
log.Trace("repo %s with commit %s couldn't find workflows", input.Repo.RepoPath(), commit.ID)
return nil
}
p, err := json.Marshal(input.Payload)
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
isForkPullRequest := false
if pr := input.PullRequest; pr != nil {
switch pr.Flow {
case issues_model.PullRequestFlowGithub:
isForkPullRequest = pr.IsFromFork()
case issues_model.PullRequestFlowAGit:
// There is no fork concept in the AGit flow; anyone with read permission can push refs/for/<target-branch>/<topic-branch> to the repo,
// so we treat it as a fork pull request because it may come from an untrusted user
isForkPullRequest = true
default:
// unknown flow, assume it's a fork pull request to be safe
isForkPullRequest = true
}
}
for _, dwf := range detectedWorkflows {
run := &actions_model.ActionRun{
Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0],
RepoID: input.Repo.ID,
Repo: input.Repo,
OwnerID: input.Repo.OwnerID,
WorkflowID: dwf.EntryName,
TriggerUserID: input.Doer.ID,
TriggerUser: input.Doer,
Ref: ref,
CommitSHA: commit.ID.String(),
IsForkPullRequest: isForkPullRequest,
Event: input.Event,
EventPayload: string(p),
TriggerEvent: dwf.TriggerEvent.Name,
Status: actions_model.StatusWaiting,
}
need, err := ifNeedApproval(ctx, run, input.Repo, input.Doer)
if err != nil {
log.Error("check if need approval for repo %d with user %d: %v", input.Repo.ID, input.Doer.ID, err)
continue
}
run.NeedApproval = need
if err := run.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
continue
}
vars, err := actions_model.GetVariablesOfRun(ctx, run)
if err != nil {
log.Error("GetVariablesOfRun: %v", err)
continue
}
giteaCtx := GenerateGiteaContext(run, nil)
jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars), jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
if err != nil {
log.Error("jobparser.Parse: %v", err)
continue
}
if len(jobs) > 0 && jobs[0].RunName != "" {
run.Title = jobs[0].RunName
}
// cancel running jobs if the event is push or pull_request_sync
if run.Event == webhook_module.HookEventPush ||
run.Event == webhook_module.HookEventPullRequestSync {
if err := CancelPreviousJobs(
ctx,
run.RepoID,
run.Ref,
run.WorkflowID,
run.Event,
); err != nil {
log.Error("CancelPreviousJobs: %v", err)
}
}
if err := actions_model.InsertRun(ctx, run, jobs); err != nil {
log.Error("InsertRun: %v", err)
continue
}
alljobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
if err != nil {
log.Error("FindRunJobs: %v", err)
continue
}
CreateCommitStatus(ctx, alljobs...)
for _, job := range alljobs {
notify_service.WorkflowJobStatusUpdate(ctx, input.Repo, input.Doer, job, nil)
}
}
return nil
}
func newNotifyInputFromIssue(issue *issues_model.Issue, event webhook_module.HookEventType) *notifyInput {
return newNotifyInput(issue.Repo, issue.Poster, event)
}
func notifyRelease(ctx context.Context, doer *user_model.User, rel *repo_model.Release, action api.HookReleaseAction) {
if err := rel.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return
}
permission, _ := access_model.GetUserRepoPermission(ctx, rel.Repo, doer)
newNotifyInput(rel.Repo, doer, webhook_module.HookEventRelease).
WithRef(git.RefNameFromTag(rel.TagName).String()).
WithPayload(&api.ReleasePayload{
Action: action,
Release: convert.ToAPIRelease(ctx, rel.Repo, rel),
Repository: convert.ToRepo(ctx, rel.Repo, permission),
Sender: convert.ToUser(ctx, doer, nil),
}).
Notify(ctx)
}
func notifyPackage(ctx context.Context, sender *user_model.User, pd *packages_model.PackageDescriptor, action api.HookPackageAction) {
if pd.Repository == nil {
// A package uploaded to an organization can trigger a notification event without a repository,
// so the repository could be nil; however, Actions can't support that yet.
// See https://github.com/go-gitea/gitea/pull/17940
return
}
apiPackage, err := convert.ToPackage(ctx, pd, sender)
if err != nil {
log.Error("Error converting package: %v", err)
return
}
newNotifyInput(pd.Repository, sender, webhook_module.HookEventPackage).
WithPayload(&api.PackagePayload{
Action: action,
Package: apiPackage,
Sender: convert.ToUser(ctx, sender, nil),
}).
Notify(ctx)
}
func ifNeedApproval(ctx context.Context, run *actions_model.ActionRun, repo *repo_model.Repository, user *user_model.User) (bool, error) {
// 1. don't need approval if it's not a fork PR
// 2. don't need approval if the event is `pull_request_target` since the workflow will run in the context of base branch
// see https://docs.github.com/en/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks#about-workflow-runs-from-public-forks
if !run.IsForkPullRequest || run.TriggerEvent == actions_module.GithubEventPullRequestTarget {
return false, nil
}
// always need approval if the user is restricted
if user.IsRestricted {
log.Trace("need approval because user %d is restricted", user.ID)
return true, nil
}
// don't need approval if the user can write
if perm, err := access_model.GetUserRepoPermission(ctx, repo, user); err != nil {
return false, fmt.Errorf("GetUserRepoPermission: %w", err)
} else if perm.CanWrite(unit_model.TypeActions) {
log.Trace("do not need approval because user %d can write", user.ID)
return false, nil
}
// don't need approval if the user has been approved before
if count, err := db.Count[actions_model.ActionRun](ctx, actions_model.FindRunOptions{
RepoID: repo.ID,
TriggerUserID: user.ID,
Approved: true,
}); err != nil {
return false, fmt.Errorf("CountRuns: %w", err)
} else if count > 0 {
log.Trace("do not need approval because user %d has been approved before", user.ID)
return false, nil
}
// otherwise, need approval
log.Trace("need approval because it's the first time user %d triggered actions", user.ID)
return true, nil
}
func handleSchedules(
ctx context.Context,
detectedWorkflows []*actions_module.DetectedWorkflow,
commit *git.Commit,
input *notifyInput,
ref string,
) error {
branch, err := commit.GetBranchName()
if err != nil {
return err
}
if branch != input.Repo.DefaultBranch {
log.Trace("commit branch is not default branch in repo")
return nil
}
if count, err := db.Count[actions_model.ActionSchedule](ctx, actions_model.FindScheduleOptions{RepoID: input.Repo.ID}); err != nil {
log.Error("CountSchedules: %v", err)
return err
} else if count > 0 {
if err := CleanRepoScheduleTasks(ctx, input.Repo); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
}
if len(detectedWorkflows) == 0 {
log.Trace("repo %s with commit %s couldn't find schedules", input.Repo.RepoPath(), commit.ID)
return nil
}
p, err := json.Marshal(input.Payload)
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
crons := make([]*actions_model.ActionSchedule, 0, len(detectedWorkflows))
for _, dwf := range detectedWorkflows {
// Check the cron job condition. Schedules only work on the default branch.
workflow, err := model.ReadWorkflow(bytes.NewReader(dwf.Content))
if err != nil {
log.Error("ReadWorkflow: %v", err)
continue
}
schedules := workflow.OnSchedule()
if len(schedules) == 0 {
log.Warn("no schedule event")
continue
}
run := &actions_model.ActionSchedule{
Title: strings.SplitN(commit.CommitMessage, "\n", 2)[0],
RepoID: input.Repo.ID,
Repo: input.Repo,
OwnerID: input.Repo.OwnerID,
WorkflowID: dwf.EntryName,
TriggerUserID: user_model.ActionsUserID,
TriggerUser: user_model.NewActionsUser(),
Ref: ref,
CommitSHA: commit.ID.String(),
Event: input.Event,
EventPayload: string(p),
Specs: schedules,
Content: dwf.Content,
}
vars, err := actions_model.GetVariablesOfRun(ctx, run.ToActionRun())
if err != nil {
log.Error("GetVariablesOfRun: %v", err)
continue
}
giteaCtx := GenerateGiteaContext(run.ToActionRun(), nil)
jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars), jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
if err != nil {
log.Error("jobparser.Parse: %v", err)
continue
}
if len(jobs) > 0 && jobs[0].RunName != "" {
run.Title = jobs[0].RunName
}
crons = append(crons, run)
}
return actions_model.CreateScheduleTask(ctx, crons)
}
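// A minimal sketch of how cron specs are read from a workflow, assuming OnSchedule
// returns the raw cron strings as used above; the workflow content is hypothetical.
func exampleOnSchedule() []string {
	content := []byte("on:\n  schedule:\n    - cron: '30 5 * * 1,3'\njobs: {}\n")
	workflow, err := model.ReadWorkflow(bytes.NewReader(content))
	if err != nil {
		return nil
	}
	return workflow.OnSchedule() // e.g. ["30 5 * * 1,3"]
}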
// DetectAndHandleSchedules detects the schedule workflows on the default branch and create schedule tasks
func DetectAndHandleSchedules(ctx context.Context, repo *repo_model.Repository) error {
if repo.IsEmpty || repo.IsArchived {
return nil
}
gitRepo, err := gitrepo.OpenRepository(context.Background(), repo)
if err != nil {
return fmt.Errorf("git.OpenRepository: %w", err)
}
defer gitRepo.Close()
// Only detect schedule workflows on the default branch
commit, err := gitRepo.GetCommit(repo.DefaultBranch)
if err != nil {
return fmt.Errorf("gitRepo.GetCommit: %w", err)
}
scheduleWorkflows, err := actions_module.DetectScheduledWorkflows(gitRepo, commit)
if err != nil {
return fmt.Errorf("detect schedule workflows: %w", err)
}
if len(scheduleWorkflows) == 0 {
return nil
}
// We need a notifyInput to call handleSchedules.
// If the repo is a mirror, the commit author may be an external user,
// so we use the actions user as the Doer of the notifyInput.
notifyInput := newNotifyInputForSchedules(repo)
return handleSchedules(ctx, scheduleWorkflows, commit, notifyInput, repo.DefaultBranch)
}
38
actions/rerun.go Normal file
View File
@ -0,0 +1,38 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/modules/container"
)
// GetAllRerunJobs returns all jobs that need to be rerun when the given job is rerun
func GetAllRerunJobs(job *actions_model.ActionRunJob, allJobs []*actions_model.ActionRunJob) []*actions_model.ActionRunJob {
rerunJobs := []*actions_model.ActionRunJob{job}
rerunJobsIDSet := make(container.Set[string])
rerunJobsIDSet.Add(job.JobID)
for {
found := false
for _, j := range allJobs {
if rerunJobsIDSet.Contains(j.JobID) {
continue
}
for _, need := range j.Needs {
if rerunJobsIDSet.Contains(need) {
found = true
rerunJobs = append(rerunJobs, j)
rerunJobsIDSet.Add(j.JobID)
break
}
}
}
if !found {
break
}
}
return rerunJobs
}
48
actions/rerun_test.go Normal file
View File
@ -0,0 +1,48 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"testing"
actions_model "code.gitea.io/gitea/models/actions"
"github.com/stretchr/testify/assert"
)
func TestGetAllRerunJobs(t *testing.T) {
job1 := &actions_model.ActionRunJob{JobID: "job1"}
job2 := &actions_model.ActionRunJob{JobID: "job2", Needs: []string{"job1"}}
job3 := &actions_model.ActionRunJob{JobID: "job3", Needs: []string{"job2"}}
job4 := &actions_model.ActionRunJob{JobID: "job4", Needs: []string{"job2", "job3"}}
jobs := []*actions_model.ActionRunJob{job1, job2, job3, job4}
testCases := []struct {
job *actions_model.ActionRunJob
rerunJobs []*actions_model.ActionRunJob
}{
{
job1,
[]*actions_model.ActionRunJob{job1, job2, job3, job4},
},
{
job2,
[]*actions_model.ActionRunJob{job2, job3, job4},
},
{
job3,
[]*actions_model.ActionRunJob{job3, job4},
},
{
job4,
[]*actions_model.ActionRunJob{job4},
},
}
for _, tc := range testCases {
rerunJobs := GetAllRerunJobs(tc.job, jobs)
assert.ElementsMatch(t, tc.rerunJobs, rerunJobs)
}
}
166
actions/schedule_tasks.go Normal file
View File
@ -0,0 +1,166 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"fmt"
"time"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
webhook_module "code.gitea.io/gitea/modules/webhook"
notify_service "code.gitea.io/gitea/services/notify"
"github.com/nektos/act/pkg/jobparser"
)
// StartScheduleTasks starts the schedule tasks
func StartScheduleTasks(ctx context.Context) error {
return startTasks(ctx)
}
// startTasks retrieves specifications in pages, creates a schedule task for each specification,
// and updates the specification's next run time and previous run time.
// The function returns an error if there's an issue with finding or updating the specifications.
func startTasks(ctx context.Context) error {
// Set the page size
pageSize := 50
// Retrieve specs in pages until all specs have been retrieved
now := time.Now()
for page := 1; ; page++ {
// Retrieve the specs for the current page
specs, _, err := actions_model.FindSpecs(ctx, actions_model.FindSpecOptions{
ListOptions: db.ListOptions{
Page: page,
PageSize: pageSize,
},
Next: now.Unix(),
})
if err != nil {
return fmt.Errorf("find specs: %w", err)
}
if err := specs.LoadRepos(ctx); err != nil {
return fmt.Errorf("LoadRepos: %w", err)
}
// Loop through each spec and create a schedule task for it
for _, row := range specs {
// cancel running jobs if the event is push
if row.Schedule.Event == webhook_module.HookEventPush {
// cancel running jobs of the same workflow
if err := CancelPreviousJobs(
ctx,
row.RepoID,
row.Schedule.Ref,
row.Schedule.WorkflowID,
webhook_module.HookEventSchedule,
); err != nil {
log.Error("CancelPreviousJobs: %v", err)
}
}
if row.Repo.IsArchived {
// Skip if the repo is archived
continue
}
cfg, err := row.Repo.GetUnit(ctx, unit.TypeActions)
if err != nil {
if repo_model.IsErrUnitTypeNotExist(err) {
// Skip if the Actions unit of this repo is disabled.
continue
}
return fmt.Errorf("GetUnit: %w", err)
}
if cfg.ActionsConfig().IsWorkflowDisabled(row.Schedule.WorkflowID) {
continue
}
if err := CreateScheduleTask(ctx, row.Schedule); err != nil {
log.Error("CreateScheduleTask: %v", err)
return err
}
// Parse the spec
schedule, err := row.Parse()
if err != nil {
log.Error("Parse: %v", err)
return err
}
// Update the spec's next run time and previous run time
row.Prev = row.Next
row.Next = timeutil.TimeStamp(schedule.Next(now.Add(1 * time.Minute)).Unix())
if err := actions_model.UpdateScheduleSpec(ctx, row, "prev", "next"); err != nil {
log.Error("UpdateScheduleSpec: %v", err)
return err
}
}
// Stop if all specs have been retrieved
if len(specs) < pageSize {
break
}
}
return nil
}
// CreateScheduleTask creates a scheduled task from a cron action schedule.
// It creates an action run based on the schedule, inserts it into the database, and creates commit statuses for each job.
func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule) error {
// Create a new action run based on the schedule
run := &actions_model.ActionRun{
Title: cron.Title,
RepoID: cron.RepoID,
OwnerID: cron.OwnerID,
WorkflowID: cron.WorkflowID,
TriggerUserID: cron.TriggerUserID,
Ref: cron.Ref,
CommitSHA: cron.CommitSHA,
Event: cron.Event,
EventPayload: cron.EventPayload,
TriggerEvent: string(webhook_module.HookEventSchedule),
ScheduleID: cron.ID,
Status: actions_model.StatusWaiting,
}
vars, err := actions_model.GetVariablesOfRun(ctx, run)
if err != nil {
log.Error("GetVariablesOfRun: %v", err)
return err
}
// Parse the workflow specification from the cron schedule
workflows, err := jobparser.Parse(cron.Content, jobparser.WithVars(vars))
if err != nil {
return err
}
// Insert the action run and its associated jobs into the database
if err := actions_model.InsertRun(ctx, run, workflows); err != nil {
return err
}
allJobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
if err != nil {
log.Error("FindRunJobs: %v", err)
}
err = run.LoadAttributes(ctx)
if err != nil {
log.Error("LoadAttributes: %v", err)
}
for _, job := range allJobs {
notify_service.WorkflowJobStatusUpdate(ctx, run.Repo, run.TriggerUser, job, nil)
}
// Return nil if no errors occurred
return nil
}
132
actions/task.go Normal file
View File
@ -0,0 +1,132 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"errors"
"fmt"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
secret_model "code.gitea.io/gitea/models/secret"
notify_service "code.gitea.io/gitea/services/notify"
runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
"google.golang.org/protobuf/types/known/structpb"
)
func PickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv1.Task, bool, error) {
var (
task *runnerv1.Task
job *actions_model.ActionRunJob
actionTask *actions_model.ActionTask
)
if runner.Ephemeral {
var task actions_model.ActionTask
has, err := db.GetEngine(ctx).Where("runner_id = ?", runner.ID).Get(&task)
// Let the runner retry the request; do not allow it to proceed
if err != nil {
return nil, false, err
}
if has {
if task.Status == actions_model.StatusWaiting || task.Status == actions_model.StatusRunning || task.Status == actions_model.StatusBlocked {
return nil, false, nil
}
// the task has finished, so remove the ephemeral runner
_, err = db.DeleteByID[actions_model.ActionRunner](ctx, runner.ID)
if err != nil {
return nil, false, err
}
return nil, false, errors.New("runner has been removed")
}
}
if err := db.WithTx(ctx, func(ctx context.Context) error {
t, ok, err := actions_model.CreateTaskForRunner(ctx, runner)
if err != nil {
return fmt.Errorf("CreateTaskForRunner: %w", err)
}
if !ok {
return nil
}
if err := t.LoadAttributes(ctx); err != nil {
return fmt.Errorf("task LoadAttributes: %w", err)
}
job = t.Job
actionTask = t
secrets, err := secret_model.GetSecretsOfTask(ctx, t)
if err != nil {
return fmt.Errorf("GetSecretsOfTask: %w", err)
}
vars, err := actions_model.GetVariablesOfRun(ctx, t.Job.Run)
if err != nil {
return fmt.Errorf("GetVariablesOfRun: %w", err)
}
needs, err := findTaskNeeds(ctx, job)
if err != nil {
return fmt.Errorf("findTaskNeeds: %w", err)
}
taskContext, err := generateTaskContext(t)
if err != nil {
return fmt.Errorf("generateTaskContext: %w", err)
}
task = &runnerv1.Task{
Id: t.ID,
WorkflowPayload: t.Job.WorkflowPayload,
Context: taskContext,
Secrets: secrets,
Vars: vars,
Needs: needs,
}
return nil
}); err != nil {
return nil, false, err
}
if task == nil {
return nil, false, nil
}
CreateCommitStatus(ctx, job)
notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, actionTask)
return task, true, nil
}
func generateTaskContext(t *actions_model.ActionTask) (*structpb.Struct, error) {
giteaRuntimeToken, err := CreateAuthorizationToken(t.ID, t.Job.RunID, t.JobID)
if err != nil {
return nil, err
}
gitCtx := GenerateGiteaContext(t.Job.Run, t.Job)
gitCtx["token"] = t.Token
gitCtx["gitea_runtime_token"] = giteaRuntimeToken
return structpb.NewStruct(gitCtx)
}
func findTaskNeeds(ctx context.Context, taskJob *actions_model.ActionRunJob) (map[string]*runnerv1.TaskNeed, error) {
taskNeeds, err := FindTaskNeeds(ctx, taskJob)
if err != nil {
return nil, err
}
ret := make(map[string]*runnerv1.TaskNeed, len(taskNeeds))
for jobID, taskNeed := range taskNeeds {
ret[jobID] = &runnerv1.TaskNeed{
Outputs: taskNeed.Outputs,
Result: runnerv1.Result(taskNeed.Result),
}
}
return ret, nil
}
97
actions/variables.go Normal file
View File
@ -0,0 +1,97 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"context"
"regexp"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
secret_service "code.gitea.io/gitea/services/secrets"
)
func CreateVariable(ctx context.Context, ownerID, repoID int64, name, data, description string) (*actions_model.ActionVariable, error) {
if err := secret_service.ValidateName(name); err != nil {
return nil, err
}
if err := envNameCIRegexMatch(name); err != nil {
return nil, err
}
v, err := actions_model.InsertVariable(ctx, ownerID, repoID, name, util.ReserveLineBreakForTextarea(data), description)
if err != nil {
return nil, err
}
return v, nil
}
func UpdateVariableNameData(ctx context.Context, variable *actions_model.ActionVariable) (bool, error) {
if err := secret_service.ValidateName(variable.Name); err != nil {
return false, err
}
if err := envNameCIRegexMatch(variable.Name); err != nil {
return false, err
}
variable.Data = util.ReserveLineBreakForTextarea(variable.Data)
return actions_model.UpdateVariableCols(ctx, variable, "name", "data", "description")
}
func DeleteVariableByID(ctx context.Context, variableID int64) error {
return actions_model.DeleteVariable(ctx, variableID)
}
func DeleteVariableByName(ctx context.Context, ownerID, repoID int64, name string) error {
if err := secret_service.ValidateName(name); err != nil {
return err
}
if err := envNameCIRegexMatch(name); err != nil {
return err
}
v, err := GetVariable(ctx, actions_model.FindVariablesOpts{
OwnerID: ownerID,
RepoID: repoID,
Name: name,
})
if err != nil {
return err
}
return actions_model.DeleteVariable(ctx, v.ID)
}
func GetVariable(ctx context.Context, opts actions_model.FindVariablesOpts) (*actions_model.ActionVariable, error) {
vars, err := actions_model.FindVariables(ctx, opts)
if err != nil {
return nil, err
}
if len(vars) != 1 {
return nil, util.NewNotExistErrorf("variable not found")
}
return vars[0], nil
}
// Naming rules for `variables` and `secrets`, with reference to:
// https://docs.github.com/en/actions/learn-github-actions/variables#naming-conventions-for-configuration-variables
// https://docs.github.com/en/actions/security-guides/encrypted-secrets#naming-your-secrets
var (
forbiddenEnvNameCIRx = regexp.MustCompile("(?i)^CI")
)
func envNameCIRegexMatch(name string) error {
if forbiddenEnvNameCIRx.MatchString(name) {
log.Error("Env Name cannot be ci")
return util.NewInvalidArgumentErrorf("env name cannot be ci")
}
return nil
}
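// A minimal sketch of the naming rule above: only names starting with "CI" (in any
// casing) are rejected; the names here are hypothetical.
func exampleEnvNameCheck() {
	_ = envNameCIRegexMatch("CI_TOKEN")   // error: starts with "CI"
	_ = envNameCIRegexMatch("ci_build")   // error: the prefix match is case-insensitive
	_ = envNameCIRegexMatch("MY_CI_FLAG") // nil: "CI" is only forbidden as a prefix
}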
292
actions/workflow.go Normal file
View File
@ -0,0 +1,292 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package actions
import (
"fmt"
"net/http"
"net/url"
"path"
"strings"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/actions"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/reqctx"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
notify_service "code.gitea.io/gitea/services/notify"
"github.com/nektos/act/pkg/jobparser"
"github.com/nektos/act/pkg/model"
)
func getActionWorkflowEntry(ctx *context.APIContext, commit *git.Commit, folder string, entry *git.TreeEntry) *api.ActionWorkflow {
cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
cfg := cfgUnit.ActionsConfig()
defaultBranch, _ := commit.GetBranchName()
workflowURL := fmt.Sprintf("%s/actions/workflows/%s", ctx.Repo.Repository.APIURL(), url.PathEscape(entry.Name()))
workflowRepoURL := fmt.Sprintf("%s/src/branch/%s/%s/%s", ctx.Repo.Repository.HTMLURL(ctx), util.PathEscapeSegments(defaultBranch), util.PathEscapeSegments(folder), url.PathEscape(entry.Name()))
badgeURL := fmt.Sprintf("%s/actions/workflows/%s/badge.svg?branch=%s", ctx.Repo.Repository.HTMLURL(ctx), url.PathEscape(entry.Name()), url.QueryEscape(ctx.Repo.Repository.DefaultBranch))
// See https://docs.github.com/en/rest/actions/workflows?apiVersion=2022-11-28#get-a-workflow
// State types:
// - active
// - deleted
// - disabled_fork
// - disabled_inactivity
// - disabled_manually
state := "active"
if cfg.IsWorkflowDisabled(entry.Name()) {
state = "disabled_manually"
}
// The CreatedAt and UpdatedAt fields currently reflect the timestamp of the latest commit, which can later be refined
// by retrieving the first and last commits for the file history. The first commit would indicate the creation date,
// while the last commit would represent the modification date. The DeletedAt could be determined by identifying
// the last commit where the file existed. However, this implementation has not been done here yet, as it would likely
// cause a significant performance degradation.
createdAt := commit.Author.When
updatedAt := commit.Author.When
return &api.ActionWorkflow{
ID: entry.Name(),
Name: entry.Name(),
Path: path.Join(folder, entry.Name()),
State: state,
CreatedAt: createdAt,
UpdatedAt: updatedAt,
URL: workflowURL,
HTMLURL: workflowRepoURL,
BadgeURL: badgeURL,
}
}
func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnable bool) error {
workflow, err := GetActionWorkflow(ctx, workflowID)
if err != nil {
return err
}
cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
cfg := cfgUnit.ActionsConfig()
if isEnable {
cfg.EnableWorkflow(workflow.ID)
} else {
cfg.DisableWorkflow(workflow.ID)
}
return repo_model.UpdateRepoUnit(ctx, cfgUnit)
}
func ListActionWorkflows(ctx *context.APIContext) ([]*api.ActionWorkflow, error) {
defaultBranchCommit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
if err != nil {
ctx.APIErrorInternal(err)
return nil, err
}
folder, entries, err := actions.ListWorkflows(defaultBranchCommit)
if err != nil {
ctx.APIError(http.StatusNotFound, err.Error())
return nil, err
}
workflows := make([]*api.ActionWorkflow, len(entries))
for i, entry := range entries {
workflows[i] = getActionWorkflowEntry(ctx, defaultBranchCommit, folder, entry)
}
return workflows, nil
}
func GetActionWorkflow(ctx *context.APIContext, workflowID string) (*api.ActionWorkflow, error) {
entries, err := ListActionWorkflows(ctx)
if err != nil {
return nil, err
}
for _, entry := range entries {
if entry.Name == workflowID {
return entry, nil
}
}
return nil, util.NewNotExistErrorf("workflow %q not found", workflowID)
}
func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, repo *repo_model.Repository, gitRepo *git.Repository, workflowID, ref string, processInputs func(model *model.WorkflowDispatch, inputs map[string]any) error) error {
if workflowID == "" {
return util.ErrorWrapLocale(
util.NewNotExistErrorf("workflowID is empty"),
"actions.workflow.not_found", workflowID,
)
}
if ref == "" {
return util.ErrorWrapLocale(
util.NewNotExistErrorf("ref is empty"),
"form.target_ref_not_exist", ref,
)
}
// cannot dispatch a run when the workflow is disabled
cfgUnit := repo.MustGetUnit(ctx, unit.TypeActions)
cfg := cfgUnit.ActionsConfig()
if cfg.IsWorkflowDisabled(workflowID) {
return util.ErrorWrapLocale(
util.NewPermissionDeniedErrorf("workflow is disabled"),
"actions.workflow.disabled",
)
}
// get target commit of run from specified ref
refName := git.RefName(ref)
var runTargetCommit *git.Commit
var err error
if refName.IsTag() {
runTargetCommit, err = gitRepo.GetTagCommit(refName.TagName())
} else if refName.IsBranch() {
runTargetCommit, err = gitRepo.GetBranchCommit(refName.BranchName())
} else {
refName = git.RefNameFromBranch(ref)
runTargetCommit, err = gitRepo.GetBranchCommit(ref)
}
if err != nil {
return util.ErrorWrapLocale(
util.NewNotExistErrorf("ref %q doesn't exist", ref),
"form.target_ref_not_exist", ref,
)
}
// get workflow entry from runTargetCommit
_, entries, err := actions.ListWorkflows(runTargetCommit)
if err != nil {
return err
}
// find workflow from commit
var workflows []*jobparser.SingleWorkflow
var entry *git.TreeEntry
run := &actions_model.ActionRun{
Title: strings.SplitN(runTargetCommit.CommitMessage, "\n", 2)[0],
RepoID: repo.ID,
Repo: repo,
OwnerID: repo.OwnerID,
WorkflowID: workflowID,
TriggerUserID: doer.ID,
TriggerUser: doer,
Ref: string(refName),
CommitSHA: runTargetCommit.ID.String(),
IsForkPullRequest: false,
Event: "workflow_dispatch",
TriggerEvent: "workflow_dispatch",
Status: actions_model.StatusWaiting,
}
for _, e := range entries {
if e.Name() != workflowID {
continue
}
entry = e
break
}
if entry == nil {
return util.ErrorWrapLocale(
util.NewNotExistErrorf("workflow %q doesn't exist", workflowID),
"actions.workflow.not_found", workflowID,
)
}
content, err := actions.GetContentFromEntry(entry)
if err != nil {
return err
}
giteaCtx := GenerateGiteaContext(run, nil)
workflows, err = jobparser.Parse(content, jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
if err != nil {
return err
}
if len(workflows) > 0 && workflows[0].RunName != "" {
run.Title = workflows[0].RunName
}
if len(workflows) == 0 {
return util.ErrorWrapLocale(
util.NewNotExistErrorf("workflow %q doesn't exist", workflowID),
"actions.workflow.not_found", workflowID,
)
}
// get inputs from post
workflow := &model.Workflow{
RawOn: workflows[0].RawOn,
}
inputsWithDefaults := make(map[string]any)
if workflowDispatch := workflow.WorkflowDispatchConfig(); workflowDispatch != nil {
if err = processInputs(workflowDispatch, inputsWithDefaults); err != nil {
return err
}
}
// ctx.Req.PostForm -> WorkflowDispatchPayload.Inputs -> ActionRun.EventPayload -> runner: ghc.Event
// https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
// https://docs.github.com/en/webhooks/webhook-events-and-payloads#workflow_dispatch
workflowDispatchPayload := &api.WorkflowDispatchPayload{
Workflow: workflowID,
Ref: ref,
Repository: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeNone}),
Inputs: inputsWithDefaults,
Sender: convert.ToUserWithAccessMode(ctx, doer, perm.AccessModeNone),
}
var eventPayload []byte
if eventPayload, err = workflowDispatchPayload.JSONPayload(); err != nil {
return fmt.Errorf("JSONPayload: %w", err)
}
run.EventPayload = string(eventPayload)
// cancel running jobs of the same workflow
if err := CancelPreviousJobs(
ctx,
run.RepoID,
run.Ref,
run.WorkflowID,
run.Event,
); err != nil {
log.Error("CancelRunningJobs: %v", err)
}
// Insert the action run and its associated jobs into the database
if err := actions_model.InsertRun(ctx, run, workflows); err != nil {
return fmt.Errorf("InsertRun: %w", err)
}
allJobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{RunID: run.ID})
if err != nil {
log.Error("FindRunJobs: %v", err)
}
CreateCommitStatus(ctx, allJobs...)
for _, job := range allJobs {
notify_service.WorkflowJobStatusUpdate(ctx, repo, doer, job, nil)
}
return nil
}

252
agit/agit.go Normal file
View File

@ -0,0 +1,252 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package agit
import (
"context"
"fmt"
"os"
"strings"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/private"
"code.gitea.io/gitea/modules/setting"
notify_service "code.gitea.io/gitea/services/notify"
pull_service "code.gitea.io/gitea/services/pull"
)
// ProcReceive handles the AGit proc-receive hook work
func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, opts *private.HookOptions) ([]private.HookProcReceiveRefResult, error) {
results := make([]private.HookProcReceiveRefResult, 0, len(opts.OldCommitIDs))
forcePush := opts.GitPushOptions.Bool(private.GitPushOptionForcePush)
topicBranch := opts.GitPushOptions["topic"]
title := strings.TrimSpace(opts.GitPushOptions["title"])
description := strings.TrimSpace(opts.GitPushOptions["description"])
objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
userName := strings.ToLower(opts.UserName)
pusher, err := user_model.GetUserByID(ctx, opts.UserID)
if err != nil {
return nil, fmt.Errorf("failed to get user. Error: %w", err)
}
for i := range opts.OldCommitIDs {
if opts.NewCommitIDs[i] == objectFormat.EmptyObjectID().String() {
results = append(results, private.HookProcReceiveRefResult{
OriginalRef: opts.RefFullNames[i],
OldOID: opts.OldCommitIDs[i],
NewOID: opts.NewCommitIDs[i],
Err: "Can't delete not exist branch",
})
continue
}
if !opts.RefFullNames[i].IsFor() {
results = append(results, private.HookProcReceiveRefResult{
IsNotMatched: true,
OriginalRef: opts.RefFullNames[i],
})
continue
}
baseBranchName := opts.RefFullNames[i].ForBranchName()
currentTopicBranch := ""
if !gitrepo.IsBranchExist(ctx, repo, baseBranchName) {
// try match refs/for/<target-branch>/<topic-branch>
for p, v := range baseBranchName {
if v == '/' && gitrepo.IsBranchExist(ctx, repo, baseBranchName[:p]) && p != len(baseBranchName)-1 {
currentTopicBranch = baseBranchName[p+1:]
baseBranchName = baseBranchName[:p]
break
}
}
}
if len(topicBranch) == 0 && len(currentTopicBranch) == 0 {
results = append(results, private.HookProcReceiveRefResult{
OriginalRef: opts.RefFullNames[i],
OldOID: opts.OldCommitIDs[i],
NewOID: opts.NewCommitIDs[i],
Err: "topic-branch is not set",
})
continue
}
if len(currentTopicBranch) == 0 {
currentTopicBranch = topicBranch
}
// Because different users may want to use the same topic,
// it's better to make sure the topic branch name
// has a username prefix.
var headBranch string
if !strings.HasPrefix(currentTopicBranch, userName+"/") {
headBranch = userName + "/" + currentTopicBranch
} else {
headBranch = currentTopicBranch
}
pr, err := issues_model.GetUnmergedPullRequest(ctx, repo.ID, repo.ID, headBranch, baseBranchName, issues_model.PullRequestFlowAGit)
if err != nil {
if !issues_model.IsErrPullRequestNotExist(err) {
return nil, fmt.Errorf("failed to get unmerged agit flow pull request in repository: %s Error: %w", repo.FullName(), err)
}
var commit *git.Commit
if title == "" || description == "" {
commit, err = gitRepo.GetCommit(opts.NewCommitIDs[i])
if err != nil {
return nil, fmt.Errorf("failed to get commit %s in repository: %s Error: %w", opts.NewCommitIDs[i], repo.FullName(), err)
}
}
// create a new pull request
if title == "" {
title = strings.Split(commit.CommitMessage, "\n")[0]
}
if description == "" {
_, description, _ = strings.Cut(commit.CommitMessage, "\n\n")
}
if description == "" {
description = title
}
prIssue := &issues_model.Issue{
RepoID: repo.ID,
Title: title,
PosterID: pusher.ID,
Poster: pusher,
IsPull: true,
Content: description,
}
pr := &issues_model.PullRequest{
HeadRepoID: repo.ID,
BaseRepoID: repo.ID,
HeadBranch: headBranch,
HeadCommitID: opts.NewCommitIDs[i],
BaseBranch: baseBranchName,
HeadRepo: repo,
BaseRepo: repo,
MergeBase: "",
Type: issues_model.PullRequestGitea,
Flow: issues_model.PullRequestFlowAGit,
}
prOpts := &pull_service.NewPullRequestOptions{
Repo: repo,
Issue: prIssue,
PullRequest: pr,
}
if err := pull_service.NewPullRequest(ctx, prOpts); err != nil {
return nil, err
}
log.Trace("Pull request created: %d/%d", repo.ID, prIssue.ID)
results = append(results, private.HookProcReceiveRefResult{
Ref: pr.GetGitRefName(),
OriginalRef: opts.RefFullNames[i],
OldOID: objectFormat.EmptyObjectID().String(),
NewOID: opts.NewCommitIDs[i],
IsCreatePR: false, // AGit always creates a pull request so there is no point in prompting the user to create one
URL: fmt.Sprintf("%s/pulls/%d", repo.HTMLURL(), pr.Index),
ShouldShowMessage: setting.Git.PullRequestPushMessage && repo.AllowsPulls(ctx),
HeadBranch: headBranch,
})
continue
}
// update the existing pull request
if err := pr.LoadBaseRepo(ctx); err != nil {
return nil, fmt.Errorf("unable to load base repository for PR[%d] Error: %w", pr.ID, err)
}
oldCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName())
if err != nil {
return nil, fmt.Errorf("unable to get ref commit id in base repository for PR[%d] Error: %w", pr.ID, err)
}
if oldCommitID == opts.NewCommitIDs[i] {
results = append(results, private.HookProcReceiveRefResult{
OriginalRef: opts.RefFullNames[i],
OldOID: opts.OldCommitIDs[i],
NewOID: opts.NewCommitIDs[i],
Err: "new commit is same with old commit",
})
continue
}
if !forcePush.Value() {
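// The push is a force push if the old head is not an ancestor of the new commit,
// i.e. rev-list finds a commit reachable from the old head but not from the new one.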
output, _, err := git.NewCommand("rev-list", "--max-count=1").
AddDynamicArguments(oldCommitID, "^"+opts.NewCommitIDs[i]).
RunStdString(ctx, &git.RunOpts{Dir: repo.RepoPath(), Env: os.Environ()})
if err != nil {
return nil, fmt.Errorf("failed to detect force push: %w", err)
} else if len(output) > 0 {
results = append(results, private.HookProcReceiveRefResult{
OriginalRef: opts.RefFullNames[i],
OldOID: opts.OldCommitIDs[i],
NewOID: opts.NewCommitIDs[i],
Err: "request `force-push` push option",
})
continue
}
}
pr.HeadCommitID = opts.NewCommitIDs[i]
if err = pull_service.UpdateRef(ctx, pr); err != nil {
return nil, fmt.Errorf("failed to update pull ref. Error: %w", err)
}
pull_service.StartPullRequestCheckImmediately(ctx, pr)
err = pr.LoadIssue(ctx)
if err != nil {
return nil, fmt.Errorf("failed to load pull issue. Error: %w", err)
}
comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i])
if err == nil && comment != nil {
notify_service.PullRequestPushCommits(ctx, pusher, pr, comment)
}
notify_service.PullRequestSynchronized(ctx, pusher, pr)
isForcePush := comment != nil && comment.IsForcePush
results = append(results, private.HookProcReceiveRefResult{
OldOID: oldCommitID,
NewOID: opts.NewCommitIDs[i],
Ref: pr.GetGitRefName(),
OriginalRef: opts.RefFullNames[i],
IsForcePush: isForcePush,
IsCreatePR: false,
URL: fmt.Sprintf("%s/pulls/%d", repo.HTMLURL(), pr.Index),
ShouldShowMessage: setting.Git.PullRequestPushMessage && repo.AllowsPulls(ctx),
})
}
return results, nil
}
// UserNameChanged handles a user name change for AGit flow pull requests
func UserNameChanged(ctx context.Context, user *user_model.User, newName string) error {
pulls, err := issues_model.GetAllUnmergedAgitPullRequestByPoster(ctx, user.ID)
if err != nil {
return err
}
newName = strings.ToLower(newName)
for _, pull := range pulls {
pull.HeadBranch = strings.TrimPrefix(pull.HeadBranch, user.LowerName+"/")
pull.HeadBranch = newName + "/" + pull.HeadBranch
if err = pull.UpdateCols(ctx, "head_branch"); err != nil {
return err
}
}
return nil
}

475
asymkey/commit.go Normal file
View File

@ -0,0 +1,475 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"context"
"fmt"
"slices"
"strings"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/cache"
"code.gitea.io/gitea/modules/cachegroup"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"github.com/42wim/sshsig"
"github.com/ProtonMail/go-crypto/openpgp/packet"
)
// ParseCommitWithSignature checks whether the commit signature is valid against the keystore.
func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *asymkey_model.CommitVerification {
var committer *user_model.User
if c.Committer != nil {
var err error
// Find Committer account
committer, err = user_model.GetUserByEmail(ctx, c.Committer.Email) // This finds the user by primary or activated email, so the commit will not be valid if the email is not activated
if err != nil { // The committer email does not map to a user account
committer = &user_model.User{
Name: c.Committer.Name,
Email: c.Committer.Email,
}
// We can expect this to often be an ErrUserNotExist. In the case
// it is not, however, it is important to log it.
if !user_model.IsErrUserNotExist(err) {
log.Error("GetUserByEmail: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.no_committer_account",
}
}
}
}
return ParseCommitWithSignatureCommitter(ctx, c, committer)
}
func ParseCommitWithSignatureCommitter(ctx context.Context, c *git.Commit, committer *user_model.User) *asymkey_model.CommitVerification {
// If there is no signature, just report the committer
if c.Signature == nil {
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false, // Default value
Reason: "gpg.error.not_signed_commit", // Default value
}
}
// If this is an SSH signature, handle it differently
if strings.HasPrefix(c.Signature.Signature, "-----BEGIN SSH SIGNATURE-----") {
return ParseCommitWithSSHSignature(ctx, c, committer)
}
// Parse the signature
sig, err := asymkey_model.ExtractSignature(c.Signature.Signature)
if err != nil { // Skip: failed to extract the signature
log.Error("SignatureRead err: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.extract_sign",
}
}
keyID := asymkey_model.TryGetKeyIDFromSignature(sig)
defaultReason := asymkey_model.NoKeyFound
// First check if the sig has a keyID and if so just look at that
if commitVerification := HashAndVerifyForKeyID(
ctx,
sig,
c.Signature.Payload,
committer,
keyID,
setting.AppName,
""); commitVerification != nil {
if commitVerification.Reason == asymkey_model.BadSignature {
defaultReason = asymkey_model.BadSignature
} else {
return commitVerification
}
}
// Now try to associate the signature with the committer, if present
if committer.ID != 0 {
keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
OwnerID: committer.ID,
})
if err != nil { // Skip: failed to get the user's GPG keys
log.Error("ListGPGKeys: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
if err := asymkey_model.GPGKeyList(keys).LoadSubKeys(ctx); err != nil {
log.Error("LoadSubKeys: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
committerEmailAddresses, _ := cache.GetWithContextCache(ctx, cachegroup.UserEmailAddresses, committer.ID, user_model.GetEmailAddresses)
activated := false
for _, e := range committerEmailAddresses {
if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
activated = true
break
}
}
for _, k := range keys {
// Pre-check (and optimization): only use this key if one of its emails can be matched to the committer email and used for validation
canValidate := false
email := ""
if k.Verified && activated {
canValidate = true
email = c.Committer.Email
}
if !canValidate {
for _, e := range k.Emails {
if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
canValidate = true
email = e.Email
break
}
}
}
if !canValidate {
continue // Skip this key
}
commitVerification := asymkey_model.HashAndVerifyWithSubKeysCommitVerification(sig, c.Signature.Payload, k, committer, committer, email)
if commitVerification != nil {
return commitVerification
}
}
}
if setting.Repository.Signing.SigningKey != "" && setting.Repository.Signing.SigningKey != "default" && setting.Repository.Signing.SigningKey != "none" {
// OK we should try the default key
gpgSettings := git.GPGSettings{
Sign: true,
KeyID: setting.Repository.Signing.SigningKey,
Name: setting.Repository.Signing.SigningName,
Email: setting.Repository.Signing.SigningEmail,
}
if err := gpgSettings.LoadPublicKeyContent(); err != nil {
log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err)
} else if commitVerification := VerifyWithGPGSettings(ctx, &gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
if commitVerification.Reason == asymkey_model.BadSignature {
defaultReason = asymkey_model.BadSignature
} else {
return commitVerification
}
}
}
defaultGPGSettings, err := c.GetRepositoryDefaultPublicGPGKey(false)
if err != nil {
log.Error("Error getting default public gpg key: %v", err)
} else if defaultGPGSettings == nil {
log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String())
} else if defaultGPGSettings.Sign {
if commitVerification := VerifyWithGPGSettings(ctx, defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
if commitVerification.Reason == asymkey_model.BadSignature {
defaultReason = asymkey_model.BadSignature
} else {
return commitVerification
}
}
}
return &asymkey_model.CommitVerification{ // Default at this stage
CommittingUser: committer,
Verified: false,
Warning: defaultReason != asymkey_model.NoKeyFound,
Reason: defaultReason,
SigningKey: &asymkey_model.GPGKey{
KeyID: keyID,
},
}
}
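// checkKeyEmails reports whether the given email (or, if empty, any email) matches an activated email
// of the provided GPG keys or of their verified owners, and returns the matched address.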
func checkKeyEmails(ctx context.Context, email string, keys ...*asymkey_model.GPGKey) (bool, string) {
uid := int64(0)
var userEmails []*user_model.EmailAddress
var user *user_model.User
for _, key := range keys {
for _, e := range key.Emails {
if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
return true, e.Email
}
}
if key.Verified && key.OwnerID != 0 {
if uid != key.OwnerID {
userEmails, _ = cache.GetWithContextCache(ctx, cachegroup.UserEmailAddresses, key.OwnerID, user_model.GetEmailAddresses)
uid = key.OwnerID
user, _ = cache.GetWithContextCache(ctx, cachegroup.User, uid, user_model.GetUserByID)
}
for _, e := range userEmails {
if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
return true, e.Email
}
}
if user.KeepEmailPrivate && strings.EqualFold(email, user.GetEmail()) {
return true, user.GetEmail()
}
}
}
return false, email
}
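// HashAndVerifyForKeyID loads the GPG keys matching keyID (and their primary keys) and tries to verify
// the signature with them. It returns nil when keyID is empty or no key with that ID is known.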
func HashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload string, committer *user_model.User, keyID, name, email string) *asymkey_model.CommitVerification {
if keyID == "" {
return nil
}
keys, err := cache.GetWithContextCache(ctx, cachegroup.GPGKeyWithSubKeys, keyID, asymkey_model.FindGPGKeyWithSubKeys)
if err != nil {
log.Error("GetGPGKeysByKeyID: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
if len(keys) == 0 {
return nil
}
for _, key := range keys {
var primaryKeys []*asymkey_model.GPGKey
if key.PrimaryKeyID != "" {
primaryKeys, err = cache.GetWithContextCache(ctx, cachegroup.GPGKeyWithSubKeys, key.PrimaryKeyID, asymkey_model.FindGPGKeyWithSubKeys)
if err != nil {
log.Error("GetGPGKeysByKeyID: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
}
activated, email := checkKeyEmails(ctx, email, append([]*asymkey_model.GPGKey{key}, primaryKeys...)...)
if !activated {
continue
}
signer := &user_model.User{
Name: name,
Email: email,
}
if key.OwnerID > 0 {
owner, err := cache.GetWithContextCache(ctx, cachegroup.User, key.OwnerID, user_model.GetUserByID)
if err == nil {
signer = owner
} else if !user_model.IsErrUserNotExist(err) {
log.Error("Failed to user_model.GetUserByID: %d for key ID: %d (%s) %v", key.OwnerID, key.ID, key.KeyID, err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.no_committer_account",
}
}
}
commitVerification := asymkey_model.HashAndVerifyWithSubKeysCommitVerification(sig, payload, key, committer, signer, email)
if commitVerification != nil {
return commitVerification
}
}
// This is a bad situation ... We have a key id that is in our database but the signature doesn't match.
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Warning: true,
Reason: asymkey_model.BadSignature,
}
}
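// VerifyWithGPGSettings tries to verify the signature with the key described by the GPG settings:
// first by key ID via the database, then by parsing the armored public key (and its subkeys) from the settings.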
func VerifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *user_model.User, keyID string) *asymkey_model.CommitVerification {
// First try to find the key in the db
if commitVerification := HashAndVerifyForKeyID(ctx, sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil {
return commitVerification
}
// Otherwise we have to parse the key
ekeys, err := asymkey_model.CheckArmoredGPGKeyString(gpgSettings.PublicKeyContent)
if err != nil {
log.Error("Unable to get default signing key: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.generate_hash",
}
}
for _, ekey := range ekeys {
pubkey := ekey.PrimaryKey
content, err := asymkey_model.Base64EncPubKey(pubkey)
if err != nil {
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.generate_hash",
}
}
k := &asymkey_model.GPGKey{
Content: content,
CanSign: pubkey.CanSign(),
KeyID: pubkey.KeyIdString(),
}
for _, subKey := range ekey.Subkeys {
content, err := asymkey_model.Base64EncPubKey(subKey.PublicKey)
if err != nil {
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Reason: "gpg.error.generate_hash",
}
}
k.SubsKey = append(k.SubsKey, &asymkey_model.GPGKey{
Content: content,
CanSign: subKey.PublicKey.CanSign(),
KeyID: subKey.PublicKey.KeyIdString(),
})
}
if commitVerification := asymkey_model.HashAndVerifyWithSubKeysCommitVerification(sig, payload, k, committer, &user_model.User{
Name: gpgSettings.Name,
Email: gpgSettings.Email,
}, gpgSettings.Email); commitVerification != nil {
return commitVerification
}
if keyID == k.KeyID {
// This is a bad situation ... We have a key id that matches our default key but the signature doesn't match.
return &asymkey_model.CommitVerification{
CommittingUser: committer,
Verified: false,
Warning: true,
Reason: asymkey_model.BadSignature,
}
}
}
return nil
}
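// verifySSHCommitVerificationByInstanceKey verifies the commit's SSH signature against a raw public key
// configured on the instance (a trusted key or the instance-wide signing key).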
func verifySSHCommitVerificationByInstanceKey(c *git.Commit, committerUser, signerUser *user_model.User, committerGitEmail, publicKeyContent string) *asymkey_model.CommitVerification {
fingerprint, err := asymkey_model.CalcFingerprint(publicKeyContent)
if err != nil {
log.Error("Error calculating the fingerprint public key %q, err: %v", publicKeyContent, err)
return nil
}
sshPubKey := &asymkey_model.PublicKey{
Verified: true,
Content: publicKeyContent,
Fingerprint: fingerprint,
HasUsed: true,
}
return verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, sshPubKey, committerUser, signerUser, committerGitEmail)
}
// ParseCommitWithSSHSignature checks whether the SSH signature is valid against the keystore.
func ParseCommitWithSSHSignature(ctx context.Context, c *git.Commit, committerUser *user_model.User) *asymkey_model.CommitVerification {
// Now try to associate the signature with the committer, if present
if committerUser.ID != 0 {
keys, err := db.Find[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
OwnerID: committerUser.ID,
NotKeytype: asymkey_model.KeyTypePrincipal,
})
if err != nil { // Skip: failed to get the user's SSH keys
log.Error("ListPublicKeys: %v", err)
return &asymkey_model.CommitVerification{
CommittingUser: committerUser,
Verified: false,
Reason: "gpg.error.failed_retrieval_gpg_keys",
}
}
committerEmailAddresses, err := cache.GetWithContextCache(ctx, cachegroup.UserEmailAddresses, committerUser.ID, user_model.GetEmailAddresses)
if err != nil {
log.Error("GetEmailAddresses: %v", err)
}
activated := false
for _, e := range committerEmailAddresses {
if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
activated = true
break
}
}
for _, k := range keys {
if k.Verified && activated {
commitVerification := verifySSHCommitVerification(c.Signature.Signature, c.Signature.Payload, k, committerUser, committerUser, c.Committer.Email)
if commitVerification != nil {
return commitVerification
}
}
}
}
// Try the pre-set trusted keys (for key-rotation purposes).
// At the moment, we still use the SigningName & SigningEmail for the rotated keys.
// Maybe in the future we can extend the key format to "ssh-xxx .... old-user@example.com" to support different signer emails.
for _, k := range setting.Repository.Signing.TrustedSSHKeys {
signerUser := &user_model.User{
Name: setting.Repository.Signing.SigningName,
Email: setting.Repository.Signing.SigningEmail,
}
commitVerification := verifySSHCommitVerificationByInstanceKey(c, committerUser, signerUser, c.Committer.Email, k)
if commitVerification != nil && commitVerification.Verified {
return commitVerification
}
}
// Try the configured instance-wide SSH public key
if setting.Repository.Signing.SigningFormat == git.SigningKeyFormatSSH && !slices.Contains([]string{"", "default", "none"}, setting.Repository.Signing.SigningKey) {
gpgSettings := git.GPGSettings{
Sign: true,
KeyID: setting.Repository.Signing.SigningKey,
Name: setting.Repository.Signing.SigningName,
Email: setting.Repository.Signing.SigningEmail,
Format: setting.Repository.Signing.SigningFormat,
}
signerUser := &user_model.User{
Name: gpgSettings.Name,
Email: gpgSettings.Email,
}
if err := gpgSettings.LoadPublicKeyContent(); err != nil {
log.Error("Error getting instance-wide SSH signing key %q, err: %v", gpgSettings.KeyID, err)
} else {
commitVerification := verifySSHCommitVerificationByInstanceKey(c, committerUser, signerUser, gpgSettings.Email, gpgSettings.PublicKeyContent)
if commitVerification != nil && commitVerification.Verified {
return commitVerification
}
}
}
return &asymkey_model.CommitVerification{
CommittingUser: committerUser,
Verified: false,
Reason: asymkey_model.NoKeyFound,
}
}
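// verifySSHCommitVerification checks the SSH signature over the payload against the given public key
// in the "git" namespace and returns a verified CommitVerification on success, or nil otherwise.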
func verifySSHCommitVerification(sig, payload string, k *asymkey_model.PublicKey, committer, signer *user_model.User, email string) *asymkey_model.CommitVerification {
if err := sshsig.Verify(strings.NewReader(payload), []byte(sig), []byte(k.Content), "git"); err != nil {
return nil
}
return &asymkey_model.CommitVerification{ // Everything is ok
CommittingUser: committer,
Verified: true,
Reason: fmt.Sprintf("%s / %s", signer.Name, k.Fingerprint),
SigningUser: signer,
SigningSSHKey: k,
SigningEmail: email,
}
}

54
asymkey/commit_test.go Normal file
View File

@ -0,0 +1,54 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"strings"
"testing"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestParseCommitWithSSHSignature(t *testing.T) {
// Here we only test the TrustedSSHKeys. The complete signing test is in tests/integration/gpg_ssh_git_test.go
t.Run("TrustedSSHKey", func(t *testing.T) {
defer test.MockVariableValue(&setting.Repository.Signing.SigningName, "gitea")()
defer test.MockVariableValue(&setting.Repository.Signing.SigningEmail, "gitea@fake.local")()
defer test.MockVariableValue(&setting.Repository.Signing.TrustedSSHKeys, []string{"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIH6Y4idVaW3E+bLw1uqoAfJD7o5Siu+HqS51E9oQLPE9"})()
commit, err := git.CommitFromReader(nil, git.Sha1ObjectFormat.EmptyObjectID(), strings.NewReader(`tree 9a93ffa76e8b72bdb6431910b3a506fa2b39f42e
author User Two <user2@example.com> 1749230009 +0200
committer User Two <user2@example.com> 1749230009 +0200
gpgsig -----BEGIN SSH SIGNATURE-----
U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgfpjiJ1VpbcT5svDW6qgB8kPujl
KK74epLnUT2hAs8T0AAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5
AAAAQDX2t2iHuuLxEWHLJetYXKsgayv3c43r0pJNfAzdLN55Q65pC5M7rG6++gT2bxcpOu
Y6EXbpLqia9sunEF3+LQY=
-----END SSH SIGNATURE-----
Initial commit with signed file
`))
require.NoError(t, err)
committingUser := &user_model.User{
ID: 2,
Name: "User Two",
Email: "user2@example.com",
}
ret := ParseCommitWithSSHSignature(t.Context(), commit, committingUser)
require.NotNil(t, ret)
assert.True(t, ret.Verified)
assert.False(t, ret.Warning)
assert.Equal(t, committingUser, ret.CommittingUser)
if assert.NotNil(t, ret.SigningUser) {
assert.Equal(t, "gitea", ret.SigningUser.Name)
assert.Equal(t, "gitea@fake.local", ret.SigningUser.Email)
}
})
}

78
asymkey/deploy_key.go Normal file
View File

@ -0,0 +1,78 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"context"
"fmt"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
)
// DeleteRepoDeployKeys deletes all deploy keys of a repository. Permission checks should be done by the caller.
func DeleteRepoDeployKeys(ctx context.Context, repoID int64) (int, error) {
deployKeys, err := db.Find[asymkey_model.DeployKey](ctx, asymkey_model.ListDeployKeysOptions{RepoID: repoID})
if err != nil {
return 0, fmt.Errorf("listDeployKeys: %w", err)
}
for _, dKey := range deployKeys {
if err := deleteDeployKeyFromDB(ctx, dKey); err != nil {
return 0, fmt.Errorf("deleteDeployKeys: %w", err)
}
}
return len(deployKeys), nil
}
// deleteDeployKeyFromDB deletes a deploy key from the database
func deleteDeployKeyFromDB(ctx context.Context, key *asymkey_model.DeployKey) error {
if _, err := db.DeleteByID[asymkey_model.DeployKey](ctx, key.ID); err != nil {
return fmt.Errorf("delete deploy key [%d]: %w", key.ID, err)
}
// Check if this is the last reference to the same key content.
has, err := asymkey_model.IsDeployKeyExistByKeyID(ctx, key.KeyID)
if err != nil {
return err
} else if !has {
if _, err = db.DeleteByID[asymkey_model.PublicKey](ctx, key.KeyID); err != nil {
return err
}
}
return nil
}
// DeleteDeployKey deletes a deploy key and removes it from the repository's authorized_keys file if needed.
// Permission checks should be done by the caller.
func DeleteDeployKey(ctx context.Context, repo *repo_model.Repository, id int64) error {
dbCtx, committer, err := db.TxContext(ctx)
if err != nil {
return err
}
defer committer.Close()
key, err := asymkey_model.GetDeployKeyByID(ctx, id)
if err != nil {
if asymkey_model.IsErrDeployKeyNotExist(err) {
return nil
}
return fmt.Errorf("GetDeployKeyByID: %w", err)
}
if key.RepoID != repo.ID {
return fmt.Errorf("deploy key %d does not belong to repository %d", id, repo.ID)
}
if err := deleteDeployKeyFromDB(dbCtx, key); err != nil {
return err
}
if err := committer.Commit(); err != nil {
return err
}
return RewriteAllPublicKeys(ctx)
}

18
asymkey/main_test.go Normal file
View File

@ -0,0 +1,18 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"testing"
"code.gitea.io/gitea/models/unittest"
_ "code.gitea.io/gitea/models"
_ "code.gitea.io/gitea/models/actions"
_ "code.gitea.io/gitea/models/activities"
)
func TestMain(m *testing.M) {
unittest.MainTest(m)
}

429
asymkey/sign.go Normal file
View File

@ -0,0 +1,429 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"context"
"fmt"
"os"
"strings"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"
)
type signingMode string
const (
never signingMode = "never"
always signingMode = "always"
pubkey signingMode = "pubkey"
twofa signingMode = "twofa"
parentSigned signingMode = "parentsigned"
baseSigned signingMode = "basesigned"
headSigned signingMode = "headsigned"
commitsSigned signingMode = "commitssigned"
approved signingMode = "approved"
noKey signingMode = "nokey"
)
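// signingModeFromStrings normalizes the configured rule strings into signing modes.
// "never" and "always" short-circuit, unknown values are dropped, and an empty result defaults to never.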
func signingModeFromStrings(modeStrings []string) []signingMode {
returnable := make([]signingMode, 0, len(modeStrings))
for _, mode := range modeStrings {
signMode := signingMode(strings.ToLower(strings.TrimSpace(mode)))
switch signMode {
case never:
return []signingMode{never}
case always:
return []signingMode{always}
case pubkey:
fallthrough
case twofa:
fallthrough
case parentSigned:
fallthrough
case baseSigned:
fallthrough
case headSigned:
fallthrough
case approved:
fallthrough
case commitsSigned:
returnable = append(returnable, signMode)
}
}
if len(returnable) == 0 {
return []signingMode{never}
}
return returnable
}
// ErrWontSign explains the first reason why a commit would not be signed
// There may be other reasons - this is just the first reason found
type ErrWontSign struct {
Reason signingMode
}
func (e *ErrWontSign) Error() string {
return fmt.Sprintf("wont sign: %s", e.Reason)
}
// IsErrWontSign checks if an error is a ErrWontSign
func IsErrWontSign(err error) bool {
_, ok := err.(*ErrWontSign)
return ok
}
// SigningKey returns the KeyID and git Signature for the repo
func SigningKey(ctx context.Context, repoPath string) (*git.SigningKey, *git.Signature) {
if setting.Repository.Signing.SigningKey == "none" {
return nil, nil
}
if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" {
// Can ignore the error here as it means that commit.gpgsign is not set
value, _, _ := git.NewCommand("config", "--get", "commit.gpgsign").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
sign, valid := git.ParseBool(strings.TrimSpace(value))
if !sign || !valid {
return nil, nil
}
format, _, _ := git.NewCommand("config", "--default", git.SigningKeyFormatOpenPGP, "--get", "gpg.format").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
signingKey, _, _ := git.NewCommand("config", "--get", "user.signingkey").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
signingName, _, _ := git.NewCommand("config", "--get", "user.name").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
signingEmail, _, _ := git.NewCommand("config", "--get", "user.email").RunStdString(ctx, &git.RunOpts{Dir: repoPath})
if strings.TrimSpace(signingKey) == "" {
return nil, nil
}
return &git.SigningKey{
KeyID: strings.TrimSpace(signingKey),
Format: strings.TrimSpace(format),
}, &git.Signature{
Name: strings.TrimSpace(signingName),
Email: strings.TrimSpace(signingEmail),
}
}
if setting.Repository.Signing.SigningKey == "" {
return nil, nil
}
return &git.SigningKey{
KeyID: setting.Repository.Signing.SigningKey,
Format: setting.Repository.Signing.SigningFormat,
}, &git.Signature{
Name: setting.Repository.Signing.SigningName,
Email: setting.Repository.Signing.SigningEmail,
}
}
// PublicSigningKey gets the public signing key within a provided repository directory
func PublicSigningKey(ctx context.Context, repoPath string) (content, format string, err error) {
signingKey, _ := SigningKey(ctx, repoPath)
if signingKey == nil {
return "", "", nil
}
if signingKey.Format == git.SigningKeyFormatSSH {
content, err := os.ReadFile(signingKey.KeyID)
if err != nil {
log.Error("Unable to read SSH public key file in %s: %s, %v", repoPath, signingKey, err)
return "", signingKey.Format, err
}
return string(content), signingKey.Format, nil
}
content, stderr, err := process.GetManager().ExecDir(ctx, -1, repoPath,
"gpg --export -a", "gpg", "--export", "-a", signingKey.KeyID)
if err != nil {
log.Error("Unable to get default signing key in %s: %s, %s, %v", repoPath, signingKey, stderr, err)
return "", signingKey.Format, err
}
return content, signingKey.Format, nil
}
// SignInitialCommit determines if we should sign the initial commit to this repository
func SignInitialCommit(ctx context.Context, repoPath string, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) {
rules := signingModeFromStrings(setting.Repository.Signing.InitialCommit)
signingKey, sig := SigningKey(ctx, repoPath)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
Loop:
for _, rule := range rules {
switch rule {
case never:
return false, nil, nil, &ErrWontSign{never}
case always:
break Loop
case pubkey:
keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
OwnerID: u.ID,
IncludeSubKeys: true,
})
if err != nil {
return false, nil, nil, err
}
if len(keys) == 0 {
return false, nil, nil, &ErrWontSign{pubkey}
}
case twofa:
twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
return false, nil, nil, err
}
if twofaModel == nil {
return false, nil, nil, &ErrWontSign{twofa}
}
}
}
return true, signingKey, sig, nil
}
// SignWikiCommit determines if we should sign the commits to this repository wiki
func SignWikiCommit(ctx context.Context, repo *repo_model.Repository, u *user_model.User) (bool, *git.SigningKey, *git.Signature, error) {
repoWikiPath := repo.WikiPath()
rules := signingModeFromStrings(setting.Repository.Signing.Wiki)
signingKey, sig := SigningKey(ctx, repoWikiPath)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
Loop:
for _, rule := range rules {
switch rule {
case never:
return false, nil, nil, &ErrWontSign{never}
case always:
break Loop
case pubkey:
keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
OwnerID: u.ID,
IncludeSubKeys: true,
})
if err != nil {
return false, nil, nil, err
}
if len(keys) == 0 {
return false, nil, nil, &ErrWontSign{pubkey}
}
case twofa:
twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
return false, nil, nil, err
}
if twofaModel == nil {
return false, nil, nil, &ErrWontSign{twofa}
}
case parentSigned:
gitRepo, err := gitrepo.OpenRepository(ctx, repo.WikiStorageRepo())
if err != nil {
return false, nil, nil, err
}
defer gitRepo.Close()
commit, err := gitRepo.GetCommit("HEAD")
if err != nil {
return false, nil, nil, err
}
if commit.Signature == nil {
return false, nil, nil, &ErrWontSign{parentSigned}
}
verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{parentSigned}
}
}
}
return true, signingKey, sig, nil
}
// SignCRUDAction determines if we should sign a CRUD commit to this repository
func SignCRUDAction(ctx context.Context, repoPath string, u *user_model.User, tmpBasePath, parentCommit string) (bool, *git.SigningKey, *git.Signature, error) {
rules := signingModeFromStrings(setting.Repository.Signing.CRUDActions)
signingKey, sig := SigningKey(ctx, repoPath)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
Loop:
for _, rule := range rules {
switch rule {
case never:
return false, nil, nil, &ErrWontSign{never}
case always:
break Loop
case pubkey:
keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
OwnerID: u.ID,
IncludeSubKeys: true,
})
if err != nil {
return false, nil, nil, err
}
if len(keys) == 0 {
return false, nil, nil, &ErrWontSign{pubkey}
}
case twofa:
twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
return false, nil, nil, err
}
if twofaModel == nil {
return false, nil, nil, &ErrWontSign{twofa}
}
case parentSigned:
gitRepo, err := git.OpenRepository(ctx, tmpBasePath)
if err != nil {
return false, nil, nil, err
}
defer gitRepo.Close()
commit, err := gitRepo.GetCommit(parentCommit)
if err != nil {
return false, nil, nil, err
}
if commit.Signature == nil {
return false, nil, nil, &ErrWontSign{parentSigned}
}
verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{parentSigned}
}
}
}
return true, signingKey, sig, nil
}
// SignMerge determines if we should sign a PR merge commit to the base repository
func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, tmpBasePath, baseCommit, headCommit string) (bool, *git.SigningKey, *git.Signature, error) {
if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("Unable to get Base Repo for pull request")
return false, nil, nil, err
}
repo := pr.BaseRepo
signingKey, signer := SigningKey(ctx, repo.RepoPath())
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
}
rules := signingModeFromStrings(setting.Repository.Signing.Merges)
var gitRepo *git.Repository
var err error
Loop:
for _, rule := range rules {
switch rule {
case never:
return false, nil, nil, &ErrWontSign{never}
case always:
break Loop
case pubkey:
keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
OwnerID: u.ID,
IncludeSubKeys: true,
})
if err != nil {
return false, nil, nil, err
}
if len(keys) == 0 {
return false, nil, nil, &ErrWontSign{pubkey}
}
case twofa:
twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
return false, nil, nil, err
}
if twofaModel == nil {
return false, nil, nil, &ErrWontSign{twofa}
}
case approved:
protectedBranch, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pr.BaseBranch)
if err != nil {
return false, nil, nil, err
}
if protectedBranch == nil {
return false, nil, nil, &ErrWontSign{approved}
}
if issues_model.GetGrantedApprovalsCount(ctx, protectedBranch, pr) < 1 {
return false, nil, nil, &ErrWontSign{approved}
}
case baseSigned:
if gitRepo == nil {
gitRepo, err = git.OpenRepository(ctx, tmpBasePath)
if err != nil {
return false, nil, nil, err
}
defer gitRepo.Close()
}
commit, err := gitRepo.GetCommit(baseCommit)
if err != nil {
return false, nil, nil, err
}
verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{baseSigned}
}
case headSigned:
if gitRepo == nil {
gitRepo, err = git.OpenRepository(ctx, tmpBasePath)
if err != nil {
return false, nil, nil, err
}
defer gitRepo.Close()
}
commit, err := gitRepo.GetCommit(headCommit)
if err != nil {
return false, nil, nil, err
}
verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{headSigned}
}
case commitsSigned:
if gitRepo == nil {
gitRepo, err = git.OpenRepository(ctx, tmpBasePath)
if err != nil {
return false, nil, nil, err
}
defer gitRepo.Close()
}
commit, err := gitRepo.GetCommit(headCommit)
if err != nil {
return false, nil, nil, err
}
verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{commitsSigned}
}
// need to work out merge-base
mergeBaseCommit, _, err := gitRepo.GetMergeBase("", baseCommit, headCommit)
if err != nil {
return false, nil, nil, err
}
commitList, err := commit.CommitsBeforeUntil(mergeBaseCommit)
if err != nil {
return false, nil, nil, err
}
for _, commit := range commitList {
verification := ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{commitsSigned}
}
}
}
}
return true, signingKey, signer, nil
}

50
asymkey/ssh_key.go Normal file
View File

@ -0,0 +1,50 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"context"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
)
// DeletePublicKey deletes SSH key information both from the database and the authorized_keys file.
func DeletePublicKey(ctx context.Context, doer *user_model.User, id int64) (err error) {
key, err := asymkey_model.GetPublicKeyByID(ctx, id)
if err != nil {
return err
}
// Check if user has access to delete this key.
if !doer.IsAdmin && doer.ID != key.OwnerID {
return asymkey_model.ErrKeyAccessDenied{
UserID: doer.ID,
KeyID: key.ID,
Note: "public",
}
}
dbCtx, committer, err := db.TxContext(ctx)
if err != nil {
return err
}
defer committer.Close()
if _, err = db.DeleteByID[asymkey_model.PublicKey](dbCtx, id); err != nil {
return err
}
if err = committer.Commit(); err != nil {
return err
}
committer.Close()
if key.Type == asymkey_model.KeyTypePrincipal {
return RewriteAllPrincipalKeys(ctx)
}
return RewriteAllPublicKeys(ctx)
}

View File

@ -0,0 +1,79 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"context"
"fmt"
"os"
"path/filepath"
"time"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// RewriteAllPublicKeys removes any existing authorized_keys file and rewrites all keys from the database.
// Note: db.GetEngine(ctx).Iterate does not get latest data after insert/delete, so we have to call this function
// outside any session scope independently.
func RewriteAllPublicKeys(ctx context.Context) error {
// Don't rewrite key if internal server
if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
return nil
}
return asymkey_model.WithSSHOpLocker(func() error {
return rewriteAllPublicKeys(ctx)
})
}
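// rewriteAllPublicKeys writes all public keys from the database into a temporary authorized_keys file
// (optionally backing up the existing one) and then atomically replaces the current file.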
func rewriteAllPublicKeys(ctx context.Context) error {
if setting.SSH.RootPath != "" {
// First, ensure that the RootPath is present, and if not, create it with 0700 permissions.
// This of course doesn't guarantee that this is the right directory for authorized_keys,
// but at least if it's supposed to be this directory and it doesn't exist and we're the
// right user, it will be created properly.
err := os.MkdirAll(setting.SSH.RootPath, 0o700)
if err != nil {
log.Error("Unable to MkdirAll(%s): %v", setting.SSH.RootPath, err)
return err
}
}
fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
tmpPath := fPath + ".tmp"
t, err := os.OpenFile(tmpPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o600)
if err != nil {
return err
}
defer func() {
t.Close()
if err := util.Remove(tmpPath); err != nil {
log.Warn("Unable to remove temporary authorized keys file: %s: Error: %v", tmpPath, err)
}
}()
if setting.SSH.AuthorizedKeysBackup {
isExist, err := util.IsExist(fPath)
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", fPath, err)
return err
}
if isExist {
bakPath := fmt.Sprintf("%s_%d.gitea_bak", fPath, time.Now().Unix())
if err = util.CopyFile(fPath, bakPath); err != nil {
return err
}
}
}
if err := asymkey_model.RegeneratePublicKeys(ctx, t); err != nil {
return err
}
t.Close()
return util.Rename(tmpPath, fPath)
}

View File

@ -0,0 +1,131 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"bufio"
"context"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// This file contains functions for creating authorized_principals files
//
// There is a dependence on the database within RewriteAllPrincipalKeys & RegeneratePrincipalKeys
// The sshOpLocker is used from ssh_key_authorized_keys.go
const (
authorizedPrincipalsFile = "authorized_principals"
tplCommentPrefix = `# gitea public key`
)
// RewriteAllPrincipalKeys removes any existing authorized_principals file and rewrites all principal keys from the database.
// Note: db.GetEngine(ctx).Iterate does not get latest data after insert/delete, so we have to call this function
// outside any session scope independently.
func RewriteAllPrincipalKeys(ctx context.Context) error {
// Don't rewrite key if internal server
if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedPrincipalsFile {
return nil
}
return asymkey_model.WithSSHOpLocker(func() error {
return rewriteAllPrincipalKeys(ctx)
})
}
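// rewriteAllPrincipalKeys writes all principal keys from the database into a temporary authorized_principals file
// (optionally backing up the existing one) and then atomically replaces the current file.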
func rewriteAllPrincipalKeys(ctx context.Context) error {
if setting.SSH.RootPath != "" {
// First, ensure that the RootPath is present, and if not, create it with 0700 permissions.
// This of course doesn't guarantee that this is the right directory for authorized_principals,
// but at least if it's supposed to be this directory and it doesn't exist and we're the
// right user, it will be created properly.
err := os.MkdirAll(setting.SSH.RootPath, 0o700)
if err != nil {
log.Error("Unable to MkdirAll(%s): %v", setting.SSH.RootPath, err)
return err
}
}
fPath := filepath.Join(setting.SSH.RootPath, authorizedPrincipalsFile)
tmpPath := fPath + ".tmp"
t, err := os.OpenFile(tmpPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o600)
if err != nil {
return err
}
defer func() {
t.Close()
os.Remove(tmpPath)
}()
if setting.SSH.AuthorizedPrincipalsBackup {
isExist, err := util.IsExist(fPath)
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", fPath, err)
return err
}
if isExist {
bakPath := fmt.Sprintf("%s_%d.gitea_bak", fPath, time.Now().Unix())
if err = util.CopyFile(fPath, bakPath); err != nil {
return err
}
}
}
if err := regeneratePrincipalKeys(ctx, t); err != nil {
return err
}
t.Close()
return util.Rename(tmpPath, fPath)
}
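// regeneratePrincipalKeys writes every principal key from the database to the writer, then copies over
// non-Gitea lines from the existing authorized_principals file, skipping Gitea-managed comment lines
// and the key line that follows each of them.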
func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
if err := db.GetEngine(ctx).Where("type = ?", asymkey_model.KeyTypePrincipal).Iterate(new(asymkey_model.PublicKey), func(idx int, bean any) (err error) {
_, err = t.WriteString((bean.(*asymkey_model.PublicKey)).AuthorizedString())
return err
}); err != nil {
return err
}
fPath := filepath.Join(setting.SSH.RootPath, authorizedPrincipalsFile)
isExist, err := util.IsExist(fPath)
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", fPath, err)
return err
}
if isExist {
f, err := os.Open(fPath)
if err != nil {
return err
}
defer f.Close()
scanner := bufio.NewScanner(f)
for scanner.Scan() {
line := scanner.Text()
if strings.HasPrefix(line, tplCommentPrefix) {
scanner.Scan()
continue
}
_, err = t.WriteString(line + "\n")
if err != nil {
return err
}
}
if err = scanner.Err(); err != nil {
return fmt.Errorf("regeneratePrincipalKeys scan: %w", err)
}
}
return nil
}

View File

@ -0,0 +1,54 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"context"
"fmt"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/perm"
)
// AddPrincipalKey adds a new principal to the database and the authorized_principals file.
func AddPrincipalKey(ctx context.Context, ownerID int64, content string, authSourceID int64) (*asymkey_model.PublicKey, error) {
dbCtx, committer, err := db.TxContext(ctx)
if err != nil {
return nil, err
}
defer committer.Close()
// Principals cannot be duplicated.
has, err := db.GetEngine(dbCtx).
Where("content = ? AND type = ?", content, asymkey_model.KeyTypePrincipal).
Get(new(asymkey_model.PublicKey))
if err != nil {
return nil, err
} else if has {
return nil, asymkey_model.ErrKeyAlreadyExist{
Content: content,
}
}
key := &asymkey_model.PublicKey{
OwnerID: ownerID,
Name: content,
Content: content,
Mode: perm.AccessModeWrite,
Type: asymkey_model.KeyTypePrincipal,
LoginSourceID: authSourceID,
}
if err = db.Insert(dbCtx, key); err != nil {
return nil, fmt.Errorf("addKey: %w", err)
}
if err = committer.Commit(); err != nil {
return nil, err
}
committer.Close()
return key, RewriteAllPrincipalKeys(ctx)
}

87
asymkey/ssh_key_test.go Normal file
View File

@ -0,0 +1,87 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package asymkey
import (
"testing"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"github.com/stretchr/testify/assert"
)
func TestAddLdapSSHPublicKeys(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
s := &auth.Source{ID: 1}
testCases := []struct {
keyString string
number int
keyContents []string
}{
{
keyString: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment\n",
number: 1,
keyContents: []string{
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
},
},
{
keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
number: 2,
keyContents: []string{
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
"ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
},
},
{
keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
# comment asmdna,ndp
ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
number: 2,
keyContents: []string{
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
"ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
},
},
{
keyString: `ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment
382488320jasdj1lasmva/vasodifipi4193-fksma.cm
ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag= nocomment`,
number: 2,
keyContents: []string{
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM=",
"ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ibZ2OkQ3S0SqDIa0HXSEJ1zaExQdmbO+Ux/wsytWZmCczWOVsaszBZSl90q8UnWlSH6P+/YA+RWJm5SFtuV9PtGIhyZgoNuz5kBQ7K139wuQsecdKktISwTakzAAAAFQCzKsO2JhNKlL+wwwLGOcLffoAmkwAAAIBpK7/3xvduajLBD/9vASqBQIHrgK2J+wiQnIb/Wzy0UsVmvfn8A+udRbBo+csM8xrSnlnlJnjkJS3qiM5g+eTwsLIV1IdKPEwmwB+VcP53Cw6lSyWyJcvhFb0N6s08NZysLzvj0N+ZC/FnhKTLzIyMtkHf/IrPCwlM+pV/M/96YgAAAIEAqQcGn9CKgzgPaguIZooTAOQdvBLMI5y0bQjOW6734XOpqQGf/Kra90wpoasLKZjSYKNPjE+FRUOrStLrxcNs4BeVKhy2PYTRnybfYVk1/dmKgH6P1YSRONsGKvTsH6c5IyCRG0ncCgYeF8tXppyd642982daopE7zQ/NPAnJfag=",
},
},
}
for i, kase := range testCases {
s.ID = int64(i) + 20
asymkey_model.AddPublicKeysBySource(db.DefaultContext, user, s, []string{kase.keyString})
keys, err := db.Find[asymkey_model.PublicKey](db.DefaultContext, asymkey_model.FindPublicKeyOptions{
OwnerID: user.ID,
LoginSourceID: s.ID,
})
assert.NoError(t, err)
if err != nil {
continue
}
assert.Len(t, keys, kase.number)
for _, key := range keys {
assert.Contains(t, kase.keyContents, key.Content)
}
for _, key := range keys {
DeletePublicKey(db.DefaultContext, user, key.ID)
}
}
}

61
attachment/attachment.go Normal file
View File

@ -0,0 +1,61 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package attachment
import (
"bytes"
"context"
"fmt"
"io"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context/upload"
"github.com/google/uuid"
)
// NewAttachment creates a new attachment object, but does not verify it.
func NewAttachment(ctx context.Context, attach *repo_model.Attachment, file io.Reader, size int64) (*repo_model.Attachment, error) {
if attach.RepoID == 0 {
return nil, fmt.Errorf("attachment %s should belong to a repository", attach.Name)
}
err := db.WithTx(ctx, func(ctx context.Context) error {
attach.UUID = uuid.New().String()
size, err := storage.Attachments.Save(attach.RelativePath(), file, size)
if err != nil {
return fmt.Errorf("Create: %w", err)
}
attach.Size = size
return db.Insert(ctx, attach)
})
return attach, err
}
// UploadAttachment uploads a new attachment into storage and updates the database
func UploadAttachment(ctx context.Context, file io.Reader, allowedTypes string, fileSize int64, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
buf := make([]byte, 1024)
n, _ := util.ReadAtMost(file, buf)
buf = buf[:n]
if err := upload.Verify(buf, attach.Name, allowedTypes); err != nil {
return nil, err
}
return NewAttachment(ctx, attach, io.MultiReader(bytes.NewReader(buf), file), fileSize)
}
// UpdateAttachment updates an attachment, verifying that its name is among the allowed types.
func UpdateAttachment(ctx context.Context, allowedTypes string, attach *repo_model.Attachment) error {
if err := upload.Verify(nil, attach.Name, allowedTypes); err != nil {
return err
}
return repo_model.UpdateAttachment(ctx, attach)
}
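As a usage sketch (not part of the file above): UploadAttachment sniffs up to the first 1KB of the reader to verify the type and then rejoins it with the rest, so a caller can pass any io.Reader once. The allowed-types string, IDs and file name below are illustrative assumptions.

// Hypothetical caller; ctx, f (an io.Reader), fileSize, repoID and doerID
// are assumed to be in scope. "*/*" allows every type here.
attach, err := UploadAttachment(ctx, f, "*/*", fileSize, &repo_model.Attachment{
	RepoID:     repoID,
	UploaderID: doerID,
	Name:       "screenshot.png",
})
if err != nil {
	return nil, err
}
// attach.UUID and attach.Size are filled in by NewAttachment.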

46
attachment/attachment_test.go Normal file
View File

@ -0,0 +1,46 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package attachment
import (
"os"
"path/filepath"
"testing"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
_ "code.gitea.io/gitea/models/actions"
"github.com/stretchr/testify/assert"
)
func TestMain(m *testing.M) {
unittest.MainTest(m)
}
func TestUploadAttachment(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
fPath := "./attachment_test.go"
f, err := os.Open(fPath)
assert.NoError(t, err)
defer f.Close()
attach, err := NewAttachment(db.DefaultContext, &repo_model.Attachment{
RepoID: 1,
UploaderID: user.ID,
Name: filepath.Base(fPath),
}, f, -1)
assert.NoError(t, err)
attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attach.UUID)
assert.NoError(t, err)
assert.Equal(t, user.ID, attachment.UploaderID)
assert.Equal(t, int64(0), attachment.DownloadCount)
}

155
auth/auth.go Normal file
View File

@ -0,0 +1,155 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"fmt"
"net/http"
"regexp"
"strings"
"sync"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/auth/webauthn"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/session"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/web/middleware"
gitea_context "code.gitea.io/gitea/services/context"
user_service "code.gitea.io/gitea/services/user"
)
type globalVarsStruct struct {
gitRawOrAttachPathRe *regexp.Regexp
lfsPathRe *regexp.Regexp
archivePathRe *regexp.Regexp
feedPathRe *regexp.Regexp
feedRefPathRe *regexp.Regexp
}
var globalVars = sync.OnceValue(func() *globalVarsStruct {
return &globalVarsStruct{
gitRawOrAttachPathRe: regexp.MustCompile(`^/[-.\w]+/[-.\w]+/(?:(?:git-(?:(?:upload)|(?:receive))-pack$)|(?:info/refs$)|(?:HEAD$)|(?:objects/)|(?:raw/)|(?:releases/download/)|(?:attachments/))`),
lfsPathRe: regexp.MustCompile(`^/[-.\w]+/[-.\w]+/info/lfs/`),
archivePathRe: regexp.MustCompile(`^/[-.\w]+/[-.\w]+/archive/`),
feedPathRe: regexp.MustCompile(`^/[-.\w]+(/[-.\w]+)?\.(rss|atom)$`), // "/owner.rss" or "/owner/repo.atom"
feedRefPathRe: regexp.MustCompile(`^/[-.\w]+/[-.\w]+/(rss|atom)/`), // "/owner/repo/rss/branch/..."
}
})
// Init should be called exactly once when the application starts to allow plugins
// to allocate necessary resources
func Init() {
webauthn.Init()
}
type authPathDetector struct {
req *http.Request
vars *globalVarsStruct
}
func newAuthPathDetector(req *http.Request) *authPathDetector {
return &authPathDetector{req: req, vars: globalVars()}
}
// isAPIPath returns true if the specified URL is an API path
func (a *authPathDetector) isAPIPath() bool {
return strings.HasPrefix(a.req.URL.Path, "/api/")
}
// isAttachmentDownload checks if the request is a file download (GET) with a URL to an attachment
func (a *authPathDetector) isAttachmentDownload() bool {
return strings.HasPrefix(a.req.URL.Path, "/attachments/") && a.req.Method == http.MethodGet
}
func (a *authPathDetector) isFeedRequest(req *http.Request) bool {
if !setting.Other.EnableFeed {
return false
}
if req.Method != http.MethodGet {
return false
}
return a.vars.feedPathRe.MatchString(req.URL.Path) || a.vars.feedRefPathRe.MatchString(req.URL.Path)
}
// isContainerPath checks if the request targets the container endpoint
func (a *authPathDetector) isContainerPath() bool {
return strings.HasPrefix(a.req.URL.Path, "/v2/")
}
func (a *authPathDetector) isGitRawOrAttachPath() bool {
return a.vars.gitRawOrAttachPathRe.MatchString(a.req.URL.Path)
}
func (a *authPathDetector) isGitRawOrAttachOrLFSPath() bool {
if a.isGitRawOrAttachPath() {
return true
}
if setting.LFS.StartServer {
return a.vars.lfsPathRe.MatchString(a.req.URL.Path)
}
return false
}
func (a *authPathDetector) isArchivePath() bool {
return a.vars.archivePathRe.MatchString(a.req.URL.Path)
}
func (a *authPathDetector) isAuthenticatedTokenRequest() bool {
switch a.req.URL.Path {
case "/login/oauth/userinfo", "/login/oauth/introspect":
return true
}
return false
}
// handleSignIn clears existing session variables and stores new ones for the specified user object
func handleSignIn(resp http.ResponseWriter, req *http.Request, sess SessionStore, user *user_model.User) {
// We need to regenerate the session...
newSess, err := session.RegenerateSession(resp, req)
if err != nil {
log.Error(fmt.Sprintf("Error regenerating session: %v", err))
} else {
sess = newSess
}
_ = sess.Delete("openid_verified_uri")
_ = sess.Delete("openid_signin_remember")
_ = sess.Delete("openid_determined_email")
_ = sess.Delete("openid_determined_username")
_ = sess.Delete("twofaUid")
_ = sess.Delete("twofaRemember")
_ = sess.Delete("webauthnAssertion")
_ = sess.Delete("linkAccount")
err = sess.Set("uid", user.ID)
if err != nil {
log.Error(fmt.Sprintf("Error setting session: %v", err))
}
err = sess.Set("uname", user.Name)
if err != nil {
log.Error(fmt.Sprintf("Error setting session: %v", err))
}
// Language setting of the user overwrites the one previously set
// If the user does not have a locale set, we save the current one.
if len(user.Language) == 0 {
lc := middleware.Locale(resp, req)
opts := &user_service.UpdateOptions{
Language: optional.Some(lc.Language()),
}
if err := user_service.UpdateUser(req.Context(), user, opts); err != nil {
log.Error(fmt.Sprintf("Error updating user language [user: %d, locale: %s]", user.ID, user.Language))
return
}
}
middleware.SetLocaleCookie(resp, user.Language, 0)
// force to generate a new CSRF token
if ctx := gitea_context.GetWebContext(req.Context()); ctx != nil {
ctx.Csrf.PrepareForSessionUser(ctx)
}
}

155
auth/auth_test.go Normal file
View File

@ -0,0 +1,155 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"net/http"
"testing"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
)
func Test_isGitRawOrLFSPath(t *testing.T) {
tests := []struct {
path string
want bool
}{
{
"/owner/repo/git-upload-pack",
true,
},
{
"/owner/repo/git-receive-pack",
true,
},
{
"/owner/repo/info/refs",
true,
},
{
"/owner/repo/HEAD",
true,
},
{
"/owner/repo/objects/info/alternates",
true,
},
{
"/owner/repo/objects/info/http-alternates",
true,
},
{
"/owner/repo/objects/info/packs",
true,
},
{
"/owner/repo/objects/info/blahahsdhsdkla",
true,
},
{
"/owner/repo/objects/01/23456789abcdef0123456789abcdef01234567",
true,
},
{
"/owner/repo/objects/pack/pack-123456789012345678921234567893124567894.pack",
true,
},
{
"/owner/repo/objects/pack/pack-0123456789abcdef0123456789abcdef0123456.idx",
true,
},
{
"/owner/repo/raw/branch/foo/fanaso",
true,
},
{
"/owner/repo/stars",
false,
},
{
"/notowner",
false,
},
{
"/owner/repo",
false,
},
{
"/owner/repo/commit/123456789012345678921234567893124567894",
false,
},
{
"/owner/repo/releases/download/tag/repo.tar.gz",
true,
},
{
"/owner/repo/attachments/6d92a9ee-5d8b-4993-97c9-6181bdaa8955",
true,
},
}
defer test.MockVariableValue(&setting.LFS.StartServer)()
for _, tt := range tests {
t.Run(tt.path, func(t *testing.T) {
req, _ := http.NewRequest(http.MethodPost, "http://localhost"+tt.path, nil)
setting.LFS.StartServer = false
assert.Equal(t, tt.want, newAuthPathDetector(req).isGitRawOrAttachOrLFSPath())
setting.LFS.StartServer = true
assert.Equal(t, tt.want, newAuthPathDetector(req).isGitRawOrAttachOrLFSPath())
})
}
lfsTests := []string{
"/owner/repo/info/lfs/",
"/owner/repo/info/lfs/objects/batch",
"/owner/repo/info/lfs/objects/oid/filename",
"/owner/repo/info/lfs/objects/oid",
"/owner/repo/info/lfs/objects",
"/owner/repo/info/lfs/verify",
"/owner/repo/info/lfs/locks",
"/owner/repo/info/lfs/locks/verify",
"/owner/repo/info/lfs/locks/123/unlock",
}
for _, tt := range lfsTests {
t.Run(tt, func(t *testing.T) {
req, _ := http.NewRequest(http.MethodPost, tt, nil)
setting.LFS.StartServer = false
got := newAuthPathDetector(req).isGitRawOrAttachOrLFSPath()
assert.Equalf(t, setting.LFS.StartServer, got, "isGitOrLFSPath(%q) = %v, want %v, %v", tt, got, setting.LFS.StartServer, globalVars().gitRawOrAttachPathRe.MatchString(tt))
setting.LFS.StartServer = true
got = newAuthPathDetector(req).isGitRawOrAttachOrLFSPath()
assert.Equalf(t, setting.LFS.StartServer, got, "isGitOrLFSPath(%q) = %v, want %v", tt, got, setting.LFS.StartServer)
})
}
}
func Test_isFeedRequest(t *testing.T) {
tests := []struct {
want bool
path string
}{
{true, "/user.rss"},
{true, "/user/repo.atom"},
{false, "/user/repo"},
{false, "/use/repo/file.rss"},
{true, "/org/repo/rss/branch/xxx"},
{true, "/org/repo/atom/tag/xxx"},
{false, "/org/repo/branch/main/rss/any"},
{false, "/org/atom/any"},
}
for _, tt := range tests {
t.Run(tt.path, func(t *testing.T) {
req, _ := http.NewRequest(http.MethodGet, "http://localhost"+tt.path, nil)
assert.Equal(t, tt.want, newAuthPathDetector(req).isFeedRequest(req))
})
}
}

123
auth/auth_token.go Normal file
View File

@ -0,0 +1,123 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"context"
"crypto/sha256"
"crypto/subtle"
"encoding/hex"
"errors"
"strings"
"time"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
)
// Based on https://paragonie.com/blog/2015/04/secure-authentication-php-with-long-term-persistence#secure-remember-me-cookies
// The auth token consists of two parts: ID and token hash
// Every device login creates a new auth token with an individual id and hash.
// If a device uses the token to log in to the instance, a fresh token is generated which has the same id but a new hash.
var (
ErrAuthTokenInvalidFormat = util.NewInvalidArgumentErrorf("auth token has an invalid format")
ErrAuthTokenExpired = util.NewInvalidArgumentErrorf("auth token has expired")
ErrAuthTokenInvalidHash = util.NewInvalidArgumentErrorf("auth token is invalid")
)
func CheckAuthToken(ctx context.Context, value string) (*auth_model.AuthToken, error) {
if len(value) == 0 {
return nil, nil
}
parts := strings.SplitN(value, ":", 2)
if len(parts) != 2 {
return nil, ErrAuthTokenInvalidFormat
}
t, err := auth_model.GetAuthTokenByID(ctx, parts[0])
if err != nil {
if errors.Is(err, util.ErrNotExist) {
return nil, ErrAuthTokenExpired
}
return nil, err
}
if t.ExpiresUnix < timeutil.TimeStampNow() {
return nil, ErrAuthTokenExpired
}
hashedToken := sha256.Sum256([]byte(parts[1]))
if subtle.ConstantTimeCompare([]byte(t.TokenHash), []byte(hex.EncodeToString(hashedToken[:]))) == 0 {
// If an attacker steals a token and uses the token to create a new session the hash gets updated.
// When the victim uses the old token the hashes don't match anymore and the victim should be notified about the compromised token.
return nil, ErrAuthTokenInvalidHash
}
return t, nil
}
func RegenerateAuthToken(ctx context.Context, t *auth_model.AuthToken) (*auth_model.AuthToken, string, error) {
token, hash, err := generateTokenAndHash()
if err != nil {
return nil, "", err
}
newToken := &auth_model.AuthToken{
ID: t.ID,
TokenHash: hash,
UserID: t.UserID,
ExpiresUnix: timeutil.TimeStampNow().AddDuration(time.Duration(setting.LogInRememberDays*24) * time.Hour),
}
if err := auth_model.UpdateAuthTokenByID(ctx, newToken); err != nil {
return nil, "", err
}
return newToken, token, nil
}
func CreateAuthTokenForUserID(ctx context.Context, userID int64) (*auth_model.AuthToken, string, error) {
t := &auth_model.AuthToken{
UserID: userID,
ExpiresUnix: timeutil.TimeStampNow().AddDuration(time.Duration(setting.LogInRememberDays*24) * time.Hour),
}
var err error
t.ID, err = util.CryptoRandomString(10)
if err != nil {
return nil, "", err
}
token, hash, err := generateTokenAndHash()
if err != nil {
return nil, "", err
}
t.TokenHash = hash
if err := auth_model.InsertAuthToken(ctx, t); err != nil {
return nil, "", err
}
return t, token, nil
}
func generateTokenAndHash() (string, string, error) {
buf, err := util.CryptoRandomBytes(32)
if err != nil {
return "", "", err
}
token := hex.EncodeToString(buf)
hashedToken := sha256.Sum256([]byte(token))
return token, hex.EncodeToString(hashedToken[:]), nil
}
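A minimal sketch of how the pieces above fit together (cookie handling and variable names are assumptions): the client-side value is the token ID and the plaintext secret joined by a colon, which is exactly what CheckAuthToken splits, re-hashes and compares.

// Issue a remember-me token and build the value stored on the client.
at, secret, err := CreateAuthTokenForUserID(ctx, user.ID)
if err != nil {
	return err
}
cookieValue := at.ID + ":" + secret // "<id>:<plaintext secret>"

// Later, on an incoming request, validate the presented value and rotate it.
stored, err := CheckAuthToken(ctx, cookieValue)
if err != nil || stored == nil {
	return err // expired, malformed or hash mismatch: drop the cookie
}
rotated, freshSecret, err := RegenerateAuthToken(ctx, stored)
if err != nil {
	return err
}
cookieValue = rotated.ID + ":" + freshSecret // same ID, new secret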

107
auth/auth_token_test.go Normal file
View File

@ -0,0 +1,107 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"testing"
"time"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
)
func TestCheckAuthToken(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
t.Run("Empty", func(t *testing.T) {
token, err := CheckAuthToken(db.DefaultContext, "")
assert.NoError(t, err)
assert.Nil(t, token)
})
t.Run("InvalidFormat", func(t *testing.T) {
token, err := CheckAuthToken(db.DefaultContext, "dummy")
assert.ErrorIs(t, err, ErrAuthTokenInvalidFormat)
assert.Nil(t, token)
})
t.Run("NotFound", func(t *testing.T) {
token, err := CheckAuthToken(db.DefaultContext, "notexists:dummy")
assert.ErrorIs(t, err, ErrAuthTokenExpired)
assert.Nil(t, token)
})
t.Run("Expired", func(t *testing.T) {
timeutil.MockSet(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC))
at, token, err := CreateAuthTokenForUserID(db.DefaultContext, 2)
assert.NoError(t, err)
assert.NotNil(t, at)
assert.NotEmpty(t, token)
timeutil.MockUnset()
at2, err := CheckAuthToken(db.DefaultContext, at.ID+":"+token)
assert.ErrorIs(t, err, ErrAuthTokenExpired)
assert.Nil(t, at2)
assert.NoError(t, auth_model.DeleteAuthTokenByID(db.DefaultContext, at.ID))
})
t.Run("InvalidHash", func(t *testing.T) {
at, token, err := CreateAuthTokenForUserID(db.DefaultContext, 2)
assert.NoError(t, err)
assert.NotNil(t, at)
assert.NotEmpty(t, token)
at2, err := CheckAuthToken(db.DefaultContext, at.ID+":"+token+"dummy")
assert.ErrorIs(t, err, ErrAuthTokenInvalidHash)
assert.Nil(t, at2)
assert.NoError(t, auth_model.DeleteAuthTokenByID(db.DefaultContext, at.ID))
})
t.Run("Valid", func(t *testing.T) {
at, token, err := CreateAuthTokenForUserID(db.DefaultContext, 2)
assert.NoError(t, err)
assert.NotNil(t, at)
assert.NotEmpty(t, token)
at2, err := CheckAuthToken(db.DefaultContext, at.ID+":"+token)
assert.NoError(t, err)
assert.NotNil(t, at2)
assert.NoError(t, auth_model.DeleteAuthTokenByID(db.DefaultContext, at.ID))
})
}
func TestRegenerateAuthToken(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
timeutil.MockSet(time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC))
defer timeutil.MockUnset()
at, token, err := CreateAuthTokenForUserID(db.DefaultContext, 2)
assert.NoError(t, err)
assert.NotNil(t, at)
assert.NotEmpty(t, token)
timeutil.MockSet(time.Date(2023, 1, 1, 0, 0, 1, 0, time.UTC))
at2, token2, err := RegenerateAuthToken(db.DefaultContext, at)
assert.NoError(t, err)
assert.NotNil(t, at2)
assert.NotEmpty(t, token2)
assert.Equal(t, at.ID, at2.ID)
assert.Equal(t, at.UserID, at2.UserID)
assert.NotEqual(t, token, token2)
assert.NotEqual(t, at.ExpiresUnix, at2.ExpiresUnix)
assert.NoError(t, auth_model.DeleteAuthTokenByID(db.DefaultContext, at.ID))
}

197
auth/basic.go Normal file
View File

@ -0,0 +1,197 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"errors"
"net/http"
"strings"
actions_model "code.gitea.io/gitea/models/actions"
auth_model "code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
)
// Ensure the struct implements the interface.
var (
_ Method = &Basic{}
)
// BasicMethodName is the constant name of the basic authentication method
const (
BasicMethodName = "basic"
AccessTokenMethodName = "access_token"
OAuth2TokenMethodName = "oauth2_token"
ActionTokenMethodName = "action_token"
)
// Basic implements the Auth interface and authenticates requests (API requests
// only) by looking for Basic authentication data or "x-oauth-basic" token in the "Authorization"
// header.
type Basic struct{}
// Name represents the name of auth method
func (b *Basic) Name() string {
return BasicMethodName
}
// Verify extracts and validates Basic data (username and password/token) from the
// "Authorization" header of the request and returns the corresponding user object for that
// name/token on successful validation.
// Returns nil if header is empty or validation fails.
func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
// Basic authentication should only fire on API, Feed, Download or on Git or LFSPaths
// Not all feed (rss/atom) clients feature the ability to add cookies or headers, so we need to allow basic auth for feeds
detector := newAuthPathDetector(req)
if !detector.isAPIPath() && !detector.isFeedRequest(req) && !detector.isContainerPath() && !detector.isAttachmentDownload() && !detector.isGitRawOrAttachOrLFSPath() {
return nil, nil
}
baHead := req.Header.Get("Authorization")
if len(baHead) == 0 {
return nil, nil
}
auths := strings.SplitN(baHead, " ", 2)
if len(auths) != 2 || (strings.ToLower(auths[0]) != "basic") {
return nil, nil
}
uname, passwd, _ := base.BasicAuthDecode(auths[1])
// Check if username or password is a token
isUsernameToken := len(passwd) == 0 || passwd == "x-oauth-basic"
// Assume username is token
authToken := uname
if !isUsernameToken {
log.Trace("Basic Authorization: Attempting login for: %s", uname)
// Assume password is token
authToken = passwd
} else {
log.Trace("Basic Authorization: Attempting login with username as token")
}
// get oauth2 token's user's ID
_, uid := GetOAuthAccessTokenScopeAndUserID(req.Context(), authToken)
if uid != 0 {
log.Trace("Basic Authorization: Valid OAuthAccessToken for user[%d]", uid)
u, err := user_model.GetUserByID(req.Context(), uid)
if err != nil {
log.Error("GetUserByID: %v", err)
return nil, err
}
store.GetData()["LoginMethod"] = OAuth2TokenMethodName
store.GetData()["IsApiToken"] = true
return u, nil
}
// check personal access token
token, err := auth_model.GetAccessTokenBySHA(req.Context(), authToken)
if err == nil {
log.Trace("Basic Authorization: Valid AccessToken for user[%d]", uid)
u, err := user_model.GetUserByID(req.Context(), token.UID)
if err != nil {
log.Error("GetUserByID: %v", err)
return nil, err
}
token.UpdatedUnix = timeutil.TimeStampNow()
if err = auth_model.UpdateAccessToken(req.Context(), token); err != nil {
log.Error("UpdateAccessToken: %v", err)
}
store.GetData()["LoginMethod"] = AccessTokenMethodName
store.GetData()["IsApiToken"] = true
store.GetData()["ApiTokenScope"] = token.Scope
return u, nil
} else if !auth_model.IsErrAccessTokenNotExist(err) && !auth_model.IsErrAccessTokenEmpty(err) {
log.Error("GetAccessTokenBySha: %v", err)
}
// check task token
task, err := actions_model.GetRunningTaskByToken(req.Context(), authToken)
if err == nil && task != nil {
log.Trace("Basic Authorization: Valid AccessToken for task[%d]", task.ID)
store.GetData()["LoginMethod"] = ActionTokenMethodName
store.GetData()["IsActionsToken"] = true
store.GetData()["ActionsTaskID"] = task.ID
return user_model.NewActionsUser(), nil
}
if !setting.Service.EnableBasicAuth {
return nil, nil
}
log.Trace("Basic Authorization: Attempting SignIn for %s", uname)
u, source, err := UserSignIn(req.Context(), uname, passwd)
if err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("UserSignIn: %v", err)
}
return nil, err
}
if !source.TwoFactorShouldSkip() {
// Check if the user has WebAuthn registration
hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(req.Context(), u.ID)
if err != nil {
return nil, err
}
if hasWebAuthn {
return nil, errors.New("basic authorization is not allowed while WebAuthn enrolled")
}
if err := validateTOTP(req, u); err != nil {
return nil, err
}
}
store.GetData()["LoginMethod"] = BasicMethodName
log.Trace("Basic Authorization: Logged in user %-v", u)
return u, nil
}
func validateTOTP(req *http.Request, u *user_model.User) error {
twofa, err := auth_model.GetTwoFactorByUID(req.Context(), u.ID)
if err != nil {
if auth_model.IsErrTwoFactorNotEnrolled(err) {
// No 2FA enrollment for this user
return nil
}
return err
}
if ok, err := twofa.ValidateTOTP(req.Header.Get("X-Gitea-OTP")); err != nil {
return err
} else if !ok {
return util.NewInvalidArgumentErrorf("invalid provided OTP")
}
return nil
}
func GetAccessScope(store DataStore) auth_model.AccessTokenScope {
if v, ok := store.GetData()["ApiTokenScope"]; ok {
return v.(auth_model.AccessTokenScope)
}
switch store.GetData()["LoginMethod"] {
case OAuth2TokenMethodName:
fallthrough
case BasicMethodName, AccessTokenMethodName:
return auth_model.AccessTokenScopeAll
case ActionTokenMethodName:
fallthrough
default:
return ""
}
}
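As an illustration of the token handling in Verify above (URL and variable names are assumptions): an API token can be sent as the Basic username with the literal password "x-oauth-basic", or with an empty password, since base.BasicAuthDecode yields the standard "user:password" pair.

// Hypothetical client-side request; apiToken is assumed to hold a personal
// access token or OAuth2 access token.
req, _ := http.NewRequest(http.MethodGet, "https://gitea.example.com/api/v1/user", nil)
cred := base64.StdEncoding.EncodeToString([]byte(apiToken + ":x-oauth-basic"))
req.Header.Set("Authorization", "Basic "+cred)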

72
auth/group.go Normal file
View File

@ -0,0 +1,72 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"net/http"
"strings"
user_model "code.gitea.io/gitea/models/user"
)
// Ensure the struct implements the interface.
var (
_ Method = &Group{}
)
// Group implements the Auth interface with several Auth methods.
type Group struct {
methods []Method
}
// NewGroup creates a new auth group
func NewGroup(methods ...Method) *Group {
return &Group{
methods: methods,
}
}
// Add adds a new method to group
func (b *Group) Add(method Method) {
b.methods = append(b.methods, method)
}
// Name returns group's methods name
func (b *Group) Name() string {
names := make([]string, 0, len(b.methods))
for _, m := range b.methods {
names = append(names, m.Name())
}
return strings.Join(names, ",")
}
func (b *Group) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
// Try to sign in with each of the enabled plugins
var retErr error
for _, m := range b.methods {
user, err := m.Verify(req, w, store, sess)
if err != nil {
if retErr == nil {
retErr = err
}
// Try other methods if this one failed.
// Some methods may share the same protocol to detect whether they match.
// For example, OAuth2 and conan.Auth both read the token from the "Authorization: Bearer <token>" header;
// if OAuth2 returns an error, we should give conan.Auth a chance to try.
continue
}
// If any method returns a user, we can stop trying.
// Return the user and ignore any error returned by previous methods.
if user != nil {
if store.GetData()["AuthedMethod"] == nil {
store.GetData()["AuthedMethod"] = m.Name()
}
return user, nil
}
}
// If no method returns a user, return the error returned by the first method.
return nil, retErr
}
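To illustrate the Group API above (the concrete set and order of methods Gitea registers at startup is configured elsewhere; this composition is only an example):

group := NewGroup(&OAuth2{}, &Basic{}, &Session{})
group.Add(&ReverseProxy{}) // methods can also be appended later
// group.Verify(req, w, store, sess) then tries each method in order and
// records the name of the matching method under the "AuthedMethod" data key.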

218
auth/httpsign.go Normal file
View File

@ -0,0 +1,218 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"bytes"
"encoding/base64"
"errors"
"fmt"
"net/http"
"strings"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"github.com/42wim/httpsig"
"golang.org/x/crypto/ssh"
)
// Ensure the struct implements the interface.
var (
_ Method = &HTTPSign{}
)
// HTTPSign implements the Auth interface and authenticates requests (API requests
// only) by looking for http signature data in the "Signature" header.
// more information can be found on https://github.com/go-fed/httpsig
type HTTPSign struct{}
// Name represents the name of auth method
func (h *HTTPSign) Name() string {
return "httpsign"
}
// Verify extracts and validates HTTPsign from the Signature header of the request and returns
// the corresponding user object on successful validation.
// Returns nil if header is empty or validation fails.
func (h *HTTPSign) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
sigHead := req.Header.Get("Signature")
if len(sigHead) == 0 {
return nil, nil
}
var (
publicKey *asymkey_model.PublicKey
err error
)
if len(req.Header.Get("X-Ssh-Certificate")) != 0 {
// Handle Signature signed by SSH certificates
if len(setting.SSH.TrustedUserCAKeys) == 0 {
return nil, nil
}
publicKey, err = VerifyCert(req)
if err != nil {
log.Debug("VerifyCert on request from %s: failed: %v", req.RemoteAddr, err)
log.Warn("Failed authentication attempt from %s", req.RemoteAddr)
return nil, nil
}
} else {
// Handle Signature signed by Public Key
publicKey, err = VerifyPubKey(req)
if err != nil {
log.Debug("VerifyPubKey on request from %s: failed: %v", req.RemoteAddr, err)
log.Warn("Failed authentication attempt from %s", req.RemoteAddr)
return nil, nil
}
}
u, err := user_model.GetUserByID(req.Context(), publicKey.OwnerID)
if err != nil {
log.Error("GetUserByID: %v", err)
return nil, err
}
store.GetData()["IsApiToken"] = true
log.Trace("HTTP Sign: Logged in user %-v", u)
return u, nil
}
func VerifyPubKey(r *http.Request) (*asymkey_model.PublicKey, error) {
verifier, err := httpsig.NewVerifier(r)
if err != nil {
return nil, fmt.Errorf("httpsig.NewVerifier failed: %s", err)
}
keyID := verifier.KeyId()
publicKeys, err := db.Find[asymkey_model.PublicKey](r.Context(), asymkey_model.FindPublicKeyOptions{
Fingerprint: keyID,
})
if err != nil {
return nil, err
}
if len(publicKeys) == 0 {
return nil, fmt.Errorf("no public key found for keyid %s", keyID)
}
sshPublicKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(publicKeys[0].Content))
if err != nil {
return nil, err
}
if err := doVerify(verifier, []ssh.PublicKey{sshPublicKey}); err != nil {
return nil, err
}
return publicKeys[0], nil
}
// VerifyCert verifies the validity of the ssh certificate and returns the publickey of the signer
// We verify that the certificate is signed with the correct CA
// We verify that the http request is signed with the private key (of the public key mentioned in the certificate)
func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) {
// Get our certificate from the header
bcert, err := base64.RawStdEncoding.DecodeString(r.Header.Get("x-ssh-certificate"))
if err != nil {
return nil, err
}
pk, err := ssh.ParsePublicKey(bcert)
if err != nil {
return nil, err
}
// Check if it's really a ssh certificate
cert, ok := pk.(*ssh.Certificate)
if !ok {
return nil, errors.New("no certificate found")
}
c := &ssh.CertChecker{
IsUserAuthority: func(auth ssh.PublicKey) bool {
marshaled := auth.Marshal()
for _, k := range setting.SSH.TrustedUserCAKeysParsed {
if bytes.Equal(marshaled, k.Marshal()) {
return true
}
}
return false
},
}
// check the CA of the cert
if !c.IsUserAuthority(cert.SignatureKey) {
return nil, errors.New("CA check failed")
}
// Create a verifier
verifier, err := httpsig.NewVerifier(r)
if err != nil {
return nil, fmt.Errorf("httpsig.NewVerifier failed: %s", err)
}
// now verify that this request was signed with the private key that matches the certificate public key
if err := doVerify(verifier, []ssh.PublicKey{cert.Key}); err != nil {
return nil, err
}
// Now for each of the certificate valid principals
for _, principal := range cert.ValidPrincipals {
// Look in the db for the public key
publicKey, err := asymkey_model.SearchPublicKeyByContentExact(r.Context(), principal)
if asymkey_model.IsErrKeyNotExist(err) {
// No public key matches this principal - try the next principal
continue
} else if err != nil {
// this error will be a db error therefore we can't solve this and we should abort
log.Error("SearchPublicKeyByContentExact: %v", err)
return nil, err
}
// Validate the cert for this principal
if err := c.CheckCert(principal, cert); err != nil {
// however, because principal is a member of ValidPrincipals - if this fails then the certificate itself is invalid
return nil, err
}
// OK we have a public key for a principal matching a valid certificate whose key has signed this request.
return publicKey, nil
}
// No public key matching a principal in the certificate is registered in gitea
return nil, errors.New("no valid principal found")
}
// doVerify iterates across the provided public keys, attempting to verify the current request against each key in turn
func doVerify(verifier httpsig.Verifier, sshPublicKeys []ssh.PublicKey) error {
for _, publicKey := range sshPublicKeys {
cryptoPubkey := publicKey.(ssh.CryptoPublicKey).CryptoPublicKey()
var algos []httpsig.Algorithm
switch {
case strings.HasPrefix(publicKey.Type(), "ssh-ed25519"):
algos = []httpsig.Algorithm{httpsig.ED25519}
case strings.HasPrefix(publicKey.Type(), "ssh-rsa"):
algos = []httpsig.Algorithm{httpsig.RSA_SHA256, httpsig.RSA_SHA512}
}
for _, algo := range algos {
if err := verifier.Verify(cryptoPubkey, algo); err == nil {
return nil
}
}
}
return errors.New("verification failed")
}

41
auth/interface.go Normal file
View File

@ -0,0 +1,41 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"context"
"net/http"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/reqctx"
"code.gitea.io/gitea/modules/session"
)
type DataStore = reqctx.ContextDataProvider
// SessionStore represents a session store
type SessionStore session.Store
// Method represents an authentication method (plugin) for HTTP requests.
type Method interface {
// Verify tries to verify the authentication data contained in the request.
// If verification is successful, it returns either an existing user object (with id > 0)
// or a new user object (with id = 0) populated with the information that was found
// in the authentication data (username or email).
// The second return value is non-nil if verification fails; the first return value
// is nil if no verification condition matched.
Verify(http *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error)
Name() string
}
// PasswordAuthenticator represents a source of authentication
type PasswordAuthenticator interface {
Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error)
}
// SynchronizableSource represents a source that can synchronize users
type SynchronizableSource interface {
Sync(ctx context.Context, updateExisting bool) error
}
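A minimal sketch of a custom Method implementing the contract above; headerStub and its header name are hypothetical and exist only to show the expected return values.

// headerStub trusts a single static header; illustration only.
type headerStub struct{}

func (headerStub) Name() string { return "header-stub" }

func (headerStub) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
	name := req.Header.Get("X-Stub-User")
	if name == "" {
		return nil, nil // no matching authentication data: let other methods try
	}
	return user_model.GetUserByName(req.Context(), name) // user on success, error on failure
}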

14
auth/main_test.go Normal file
View File

@ -0,0 +1,14 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"testing"
"code.gitea.io/gitea/models/unittest"
)
func TestMain(m *testing.M) {
unittest.MainTest(m)
}

192
auth/oauth2.go Normal file
View File

@ -0,0 +1,192 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"context"
"net/http"
"strings"
"time"
actions_model "code.gitea.io/gitea/models/actions"
auth_model "code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/services/actions"
"code.gitea.io/gitea/services/oauth2_provider"
)
// Ensure the struct implements the interface.
var (
_ Method = &OAuth2{}
)
// GetOAuthAccessTokenScopeAndUserID returns access token scope and user id
func GetOAuthAccessTokenScopeAndUserID(ctx context.Context, accessToken string) (auth_model.AccessTokenScope, int64) {
var accessTokenScope auth_model.AccessTokenScope
if !setting.OAuth2.Enabled {
return accessTokenScope, 0
}
// JWT tokens require a ".", if the token isn't like that, return early
if !strings.Contains(accessToken, ".") {
return accessTokenScope, 0
}
token, err := oauth2_provider.ParseToken(accessToken, oauth2_provider.DefaultSigningKey)
if err != nil {
log.Trace("oauth2.ParseToken: %v", err)
return accessTokenScope, 0
}
var grant *auth_model.OAuth2Grant
if grant, err = auth_model.GetOAuth2GrantByID(ctx, token.GrantID); err != nil || grant == nil {
return accessTokenScope, 0
}
if token.Kind != oauth2_provider.KindAccessToken {
return accessTokenScope, 0
}
if token.ExpiresAt.Before(time.Now()) || token.IssuedAt.After(time.Now()) {
return accessTokenScope, 0
}
accessTokenScope = oauth2_provider.GrantAdditionalScopes(grant.Scope)
return accessTokenScope, grant.UserID
}
// CheckTaskIsRunning verifies that the TaskID corresponds to a running task
func CheckTaskIsRunning(ctx context.Context, taskID int64) bool {
// Verify the task exists
task, err := actions_model.GetTaskByID(ctx, taskID)
if err != nil {
return false
}
// Verify that it's running
return task.Status == actions_model.StatusRunning
}
// OAuth2 implements the Auth interface and authenticates requests
// (API requests only) by looking for an OAuth token in query parameters or the
// "Authorization" header.
type OAuth2 struct{}
// Name represents the name of auth method
func (o *OAuth2) Name() string {
return "oauth2"
}
// parseToken returns the token from request, and a boolean value
// representing whether the token exists or not
func parseToken(req *http.Request) (string, bool) {
_ = req.ParseForm()
if !setting.DisableQueryAuthToken {
// Check token.
if token := req.Form.Get("token"); token != "" {
return token, true
}
// Check access token.
if token := req.Form.Get("access_token"); token != "" {
return token, true
}
} else if req.Form.Get("token") != "" || req.Form.Get("access_token") != "" {
log.Warn("API token sent in query string but DISABLE_QUERY_AUTH_TOKEN=true")
}
// check header token
if auHead := req.Header.Get("Authorization"); auHead != "" {
auths := strings.Fields(auHead)
if len(auths) == 2 && (auths[0] == "token" || strings.ToLower(auths[0]) == "bearer") {
return auths[1], true
}
}
return "", false
}
// userIDFromToken returns the user id corresponding to the OAuth token.
// It will set 'IsApiToken' to true if the token is an API token and
// set 'ApiTokenScope' to the scope of the access token
func (o *OAuth2) userIDFromToken(ctx context.Context, tokenSHA string, store DataStore) int64 {
// Let's see if token is valid.
if strings.Contains(tokenSHA, ".") {
// First attempt to decode an actions JWT, returning the actions user
if taskID, err := actions.TokenToTaskID(tokenSHA); err == nil {
if CheckTaskIsRunning(ctx, taskID) {
store.GetData()["IsActionsToken"] = true
store.GetData()["ActionsTaskID"] = taskID
return user_model.ActionsUserID
}
}
// Otherwise, check if this is an OAuth access token
accessTokenScope, uid := GetOAuthAccessTokenScopeAndUserID(ctx, tokenSHA)
if uid != 0 {
store.GetData()["IsApiToken"] = true
store.GetData()["ApiTokenScope"] = accessTokenScope
}
return uid
}
t, err := auth_model.GetAccessTokenBySHA(ctx, tokenSHA)
if err != nil {
if auth_model.IsErrAccessTokenNotExist(err) {
// check task token
task, err := actions_model.GetRunningTaskByToken(ctx, tokenSHA)
if err == nil && task != nil {
log.Trace("Basic Authorization: Valid AccessToken for task[%d]", task.ID)
store.GetData()["IsActionsToken"] = true
store.GetData()["ActionsTaskID"] = task.ID
return user_model.ActionsUserID
}
} else if !auth_model.IsErrAccessTokenNotExist(err) && !auth_model.IsErrAccessTokenEmpty(err) {
log.Error("GetAccessTokenBySHA: %v", err)
}
return 0
}
t.UpdatedUnix = timeutil.TimeStampNow()
if err = auth_model.UpdateAccessToken(ctx, t); err != nil {
log.Error("UpdateAccessToken: %v", err)
}
store.GetData()["IsApiToken"] = true
store.GetData()["ApiTokenScope"] = t.Scope
return t.UID
}
// Verify extracts the user ID from the OAuth token in the query parameters
// or the "Authorization" header and returns the corresponding user object for that ID.
// If verification is successful returns an existing user object.
// Returns nil if verification fails.
func (o *OAuth2) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
// These paths are not API paths, but we still want to check for tokens because they may be in URLs returned by the API
detector := newAuthPathDetector(req)
if !detector.isAPIPath() && !detector.isAttachmentDownload() && !detector.isAuthenticatedTokenRequest() &&
!detector.isGitRawOrAttachPath() && !detector.isArchivePath() {
return nil, nil
}
token, ok := parseToken(req)
if !ok {
return nil, nil
}
id := o.userIDFromToken(req.Context(), token, store)
if id <= 0 && id != -2 { // -2 means actions, so we need to allow it.
return nil, user_model.ErrUserNotExist{}
}
log.Trace("OAuth2 Authorization: Found token for user[%d]", id)
user, err := user_model.GetPossibleUserByID(req.Context(), id)
if err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("GetUserByName: %v", err)
}
return nil, err
}
log.Trace("OAuth2 Authorization: Logged in user %-v", user)
return user, nil
}
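Two equivalent ways a client can present a token to parseToken above, assuming DISABLE_QUERY_AUTH_TOKEN is not set (the host, path and tokenSHA value are assumptions):

// As a query parameter ...
req1, _ := http.NewRequest(http.MethodGet, "http://localhost/api/v1/repos/owner/repo?token="+tokenSHA, nil)

// ... or in the Authorization header ("token <sha>" and "Bearer <sha>" are both accepted).
req2, _ := http.NewRequest(http.MethodGet, "http://localhost/api/v1/repos/owner/repo", nil)
req2.Header.Set("Authorization", "Bearer "+tokenSHA)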

54
auth/oauth2_test.go Normal file
View File

@ -0,0 +1,54 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"testing"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/reqctx"
"code.gitea.io/gitea/services/actions"
"github.com/stretchr/testify/assert"
)
func TestUserIDFromToken(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
t.Run("Actions JWT", func(t *testing.T) {
const RunningTaskID = 47
token, err := actions.CreateAuthorizationToken(RunningTaskID, 1, 2)
assert.NoError(t, err)
ds := make(reqctx.ContextData)
o := OAuth2{}
uid := o.userIDFromToken(t.Context(), token, ds)
assert.Equal(t, user_model.ActionsUserID, uid)
assert.Equal(t, true, ds["IsActionsToken"])
assert.Equal(t, ds["ActionsTaskID"], int64(RunningTaskID))
})
}
func TestCheckTaskIsRunning(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
cases := map[string]struct {
TaskID int64
Expected bool
}{
"Running": {TaskID: 47, Expected: true},
"Missing": {TaskID: 1, Expected: false},
"Cancelled": {TaskID: 46, Expected: false},
}
for name := range cases {
c := cases[name]
t.Run(name, func(t *testing.T) {
actual := CheckTaskIsRunning(t.Context(), c.TaskID)
assert.Equal(t, c.Expected, actual)
})
}
}

174
auth/reverseproxy.go Normal file
View File

@ -0,0 +1,174 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"net/http"
"strings"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
gouuid "github.com/google/uuid"
)
// Ensure the struct implements the interface.
var (
_ Method = &ReverseProxy{}
)
// ReverseProxyMethodName is the constant name of the ReverseProxy authentication method
const ReverseProxyMethodName = "reverse_proxy"
// ReverseProxy implements the Auth interface, but actually relies on
// a reverse proxy for authentication of users.
// On successful authentication the proxy is expected to populate the username in the
// "setting.ReverseProxyAuthUser" header. Optionally it can also populate the email of the
// user in the "setting.ReverseProxyAuthEmail" header.
type ReverseProxy struct{}
// getUserName extracts the username from the "setting.ReverseProxyAuthUser" header
func (r *ReverseProxy) getUserName(req *http.Request) string {
return strings.TrimSpace(req.Header.Get(setting.ReverseProxyAuthUser))
}
// Name represents the name of auth method
func (r *ReverseProxy) Name() string {
return ReverseProxyMethodName
}
// getUserFromAuthUser extracts the username from the "setting.ReverseProxyAuthUser" header
// of the request and returns the corresponding user object for that name.
// Verification of header data is not performed as it should have already been done by
// the reverse proxy.
// If a username is available in the "setting.ReverseProxyAuthUser" header an existing
// user object is returned (populated with username or email found in header).
// Returns nil if header is empty.
func (r *ReverseProxy) getUserFromAuthUser(req *http.Request) (*user_model.User, error) {
username := r.getUserName(req)
if len(username) == 0 {
return nil, nil
}
log.Trace("ReverseProxy Authorization: Found username: %s", username)
user, err := user_model.GetUserByName(req.Context(), username)
if err != nil {
if !user_model.IsErrUserNotExist(err) || !r.isAutoRegisterAllowed() {
log.Error("GetUserByName: %v", err)
return nil, err
}
user = r.newUser(req)
}
return user, nil
}
// getEmail extracts the email from the "setting.ReverseProxyAuthEmail" header
func (r *ReverseProxy) getEmail(req *http.Request) string {
return strings.TrimSpace(req.Header.Get(setting.ReverseProxyAuthEmail))
}
// getUserFromAuthEmail extracts the username from the "setting.ReverseProxyAuthEmail" header
// of the request and returns the corresponding user object for that email.
// Verification of header data is not performed as it should have already been done by
// the reverse proxy.
// If an email is available in the "setting.ReverseProxyAuthEmail" header an existing
// user object is returned (populated with the email found in header).
// Returns nil if header is empty or if "setting.EnableReverseProxyEmail" is disabled.
func (r *ReverseProxy) getUserFromAuthEmail(req *http.Request) *user_model.User {
if !setting.Service.EnableReverseProxyEmail {
return nil
}
email := r.getEmail(req)
if len(email) == 0 {
return nil
}
log.Trace("ReverseProxy Authorization: Found email: %s", email)
user, err := user_model.GetUserByEmail(req.Context(), email)
if err != nil {
// Do not allow auto-registration, we don't have a username here
if !user_model.IsErrUserNotExist(err) {
log.Error("GetUserByEmail: %v", err)
}
return nil
}
return user
}
// Verify attempts to load a user object based on headers sent by the reverse proxy.
// First it will attempt to load it based on the username (see docs for getUserFromAuthUser),
// and failing that it will attempt to load it based on the email (see docs for getUserFromAuthEmail).
// Returns nil if the headers are empty or the user is not found.
func (r *ReverseProxy) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
user, err := r.getUserFromAuthUser(req)
if err != nil {
return nil, err
}
if user == nil {
user = r.getUserFromAuthEmail(req)
if user == nil {
return nil, nil
}
}
// Make sure requests to API paths, attachment downloads, git and LFS do not create a new session
detector := newAuthPathDetector(req)
if !detector.isAPIPath() && !detector.isAttachmentDownload() && !detector.isGitRawOrAttachOrLFSPath() {
if sess != nil && (sess.Get("uid") == nil || sess.Get("uid").(int64) != user.ID) {
handleSignIn(w, req, sess, user)
}
}
store.GetData()["IsReverseProxy"] = true
log.Trace("ReverseProxy Authorization: Logged in user %-v", user)
return user, nil
}
// isAutoRegisterAllowed checks if EnableReverseProxyAutoRegister setting is true
func (r *ReverseProxy) isAutoRegisterAllowed() bool {
return setting.Service.EnableReverseProxyAutoRegister
}
// newUser creates a new user object for the purpose of automatic registration
// and populates its name and email with the information present in request headers.
func (r *ReverseProxy) newUser(req *http.Request) *user_model.User {
username := r.getUserName(req)
if len(username) == 0 {
return nil
}
email := gouuid.New().String() + "@localhost"
if setting.Service.EnableReverseProxyEmail {
webAuthEmail := req.Header.Get(setting.ReverseProxyAuthEmail)
if len(webAuthEmail) > 0 {
email = webAuthEmail
}
}
var fullname string
if setting.Service.EnableReverseProxyFullName {
fullname = req.Header.Get(setting.ReverseProxyAuthFullName)
}
user := &user_model.User{
Name: username,
Email: email,
FullName: fullname,
}
overwriteDefault := user_model.CreateUserOverwriteOptions{
IsActive: optional.Some(true),
}
if err := user_model.CreateUser(req.Context(), user, &user_model.Meta{}, &overwriteDefault); err != nil {
// FIXME: should I create a system notice?
log.Error("CreateUser: %v", err)
return nil
}
return user
}
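A sketch of what the trusting reverse proxy is expected to send; the header names come from the setting keys referenced above, and the user values are assumptions.

// Headers the proxy sets before forwarding to Gitea (illustrative values).
req, _ := http.NewRequest(http.MethodGet, "http://gitea.internal/", nil)
req.Header.Set(setting.ReverseProxyAuthUser, "alice")
req.Header.Set(setting.ReverseProxyAuthEmail, "alice@example.com")
req.Header.Set(setting.ReverseProxyAuthFullName, "Alice Liddell")
// ReverseProxy.Verify reads these headers and, with auto-registration
// enabled, creates the "alice" account on first sight.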

60
auth/session.go Normal file
View File

@ -0,0 +1,60 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"net/http"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
)
// Ensure the struct implements the interface.
var (
_ Method = &Session{}
)
// Session checks if there is a user uid stored in the session and returns the user
// object for that uid.
type Session struct{}
// Name represents the name of auth method
func (s *Session) Name() string {
return "session"
}
// Verify checks if there is a user uid stored in the session and returns the user
// object for that uid.
// Returns nil if there is no user uid stored in the session.
func (s *Session) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
if sess == nil {
return nil, nil
}
// Get user ID
uid := sess.Get("uid")
if uid == nil {
return nil, nil
}
log.Trace("Session Authorization: Found user[%d]", uid)
id, ok := uid.(int64)
if !ok {
return nil, nil
}
// Get user object
user, err := user_model.GetUserByID(req.Context(), id)
if err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("GetUserByID: %v", err)
// Return the err as-is to keep current signed-in session, in case the err is something like context.Canceled. Otherwise non-existing user (nil, nil) will make the caller clear the signed-in session.
return nil, err
}
return nil, nil
}
log.Trace("Session Authorization: Logged in user %-v", user)
return user, nil
}

128
auth/signin.go Normal file
View File

@ -0,0 +1,128 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"context"
"strings"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/services/auth/source/oauth2"
"code.gitea.io/gitea/services/auth/source/smtp"
_ "code.gitea.io/gitea/services/auth/source/db" // register the sources (and below)
_ "code.gitea.io/gitea/services/auth/source/ldap" // register the ldap source
_ "code.gitea.io/gitea/services/auth/source/pam" // register the pam source
_ "code.gitea.io/gitea/services/auth/source/sspi" // register the sspi source
)
// UserSignIn validates user name and password.
func UserSignIn(ctx context.Context, username, password string) (*user_model.User, *auth.Source, error) {
var user *user_model.User
isEmail := false
if strings.Contains(username, "@") {
isEmail = true
emailAddress := user_model.EmailAddress{LowerEmail: strings.ToLower(strings.TrimSpace(username))}
// check same email
has, err := db.GetEngine(ctx).Get(&emailAddress)
if err != nil {
return nil, nil, err
}
if has {
if !emailAddress.IsActivated {
return nil, nil, user_model.ErrEmailAddressNotExist{
Email: username,
}
}
user = &user_model.User{ID: emailAddress.UID}
}
} else {
trimmedUsername := strings.TrimSpace(username)
if len(trimmedUsername) == 0 {
return nil, nil, user_model.ErrUserNotExist{Name: username}
}
user = &user_model.User{LowerName: strings.ToLower(trimmedUsername)}
}
if user != nil {
hasUser, err := user_model.GetUser(ctx, user)
if err != nil {
return nil, nil, err
}
if hasUser {
source, err := auth.GetSourceByID(ctx, user.LoginSource)
if err != nil {
return nil, nil, err
}
if !source.IsActive {
return nil, nil, oauth2.ErrAuthSourceNotActivated
}
authenticator, ok := source.Cfg.(PasswordAuthenticator)
if !ok {
return nil, nil, smtp.ErrUnsupportedLoginType
}
user, err := authenticator.Authenticate(ctx, user, user.LoginName, password)
if err != nil {
return nil, nil, err
}
// WARN: DON'T check user.IsActive, that will be checked on reqSign so that
// user could be hinted to resend the confirmation email.
if user.ProhibitLogin {
return nil, nil, user_model.ErrUserProhibitLogin{UID: user.ID, Name: user.Name}
}
return user, source, nil
}
}
sources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
IsActive: optional.Some(true),
})
if err != nil {
return nil, nil, err
}
for _, source := range sources {
if !source.IsActive {
// don't try to authenticate non-active sources
continue
}
authenticator, ok := source.Cfg.(PasswordAuthenticator)
if !ok {
continue
}
authUser, err := authenticator.Authenticate(ctx, nil, username, password)
if err == nil {
if !authUser.ProhibitLogin {
return authUser, source, nil
}
err = user_model.ErrUserProhibitLogin{UID: authUser.ID, Name: authUser.Name}
}
if user_model.IsErrUserNotExist(err) {
log.Debug("Failed to login '%s' via '%s': %v", username, source.Name, err)
} else {
log.Warn("Failed to login '%s' via '%s': %v", username, source.Name, err)
}
}
if isEmail {
return nil, nil, user_model.ErrEmailAddressNotExist{Email: username}
}
return nil, nil, user_model.ErrUserNotExist{Name: username}
}

42
auth/source.go Normal file
View File

@ -0,0 +1,42 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"context"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
)
// DeleteSource deletes a AuthSource record in DB.
func DeleteSource(ctx context.Context, source *auth.Source) error {
count, err := db.GetEngine(ctx).Count(&user_model.User{LoginSource: source.ID})
if err != nil {
return err
} else if count > 0 {
return auth.ErrSourceInUse{
ID: source.ID,
}
}
count, err = db.GetEngine(ctx).Count(&user_model.ExternalLoginUser{LoginSourceID: source.ID})
if err != nil {
return err
} else if count > 0 {
return auth.ErrSourceInUse{
ID: source.ID,
}
}
if registerableSource, ok := source.Cfg.(auth.RegisterableSource); ok {
if err := registerableSource.UnregisterSource(); err != nil {
return err
}
}
_, err = db.GetEngine(ctx).ID(source.ID).Delete(new(auth.Source))
return err
}

View File

@ -0,0 +1,20 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package db_test
import (
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth"
"code.gitea.io/gitea/services/auth/source/db"
)
// This test file exists to assert that our Source exposes the interfaces that we expect
// It tightly binds the interfaces and implementation without breaking go import cycles
type sourceInterface interface {
auth.PasswordAuthenticator
auth_model.Config
}
var _ (sourceInterface) = &db.Source{}

87
auth/source/db/authenticate.go Normal file
View File

@ -0,0 +1,87 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package db
import (
"context"
"fmt"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// ErrUserPasswordNotSet represents a "ErrUserPasswordNotSet" kind of error.
type ErrUserPasswordNotSet struct {
UID int64
Name string
}
func (err ErrUserPasswordNotSet) Error() string {
return fmt.Sprintf("user's password isn't set [uid: %d, name: %s]", err.UID, err.Name)
}
// Unwrap unwraps this error as a ErrInvalidArgument error
func (err ErrUserPasswordNotSet) Unwrap() error {
return util.ErrInvalidArgument
}
// ErrUserPasswordInvalid represents a "ErrUserPasswordInvalid" kind of error.
type ErrUserPasswordInvalid struct {
UID int64
Name string
}
func (err ErrUserPasswordInvalid) Error() string {
return fmt.Sprintf("user's password is invalid [uid: %d, name: %s]", err.UID, err.Name)
}
// Unwrap unwraps this error as a ErrInvalidArgument error
func (err ErrUserPasswordInvalid) Unwrap() error {
return util.ErrInvalidArgument
}
// Authenticate authenticates the provided user against the DB
func Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) {
if user == nil {
return nil, user_model.ErrUserNotExist{Name: login}
}
if !user.IsPasswordSet() {
return nil, ErrUserPasswordNotSet{UID: user.ID, Name: user.Name}
} else if !user.ValidatePassword(password) {
return nil, ErrUserPasswordInvalid{UID: user.ID, Name: user.Name}
}
// Update the password hash if the server's password hash algorithm has changed,
// or when the salt length doesn't match the currently recommended salt length,
// in order to migrate the user's salt to a more secure one.
if user.PasswdHashAlgo != setting.PasswordHashAlgo || len(user.Salt) != user_model.SaltByteLength*2 {
if err := user.SetPassword(password); err != nil {
return nil, err
}
if err := user_model.UpdateUserCols(ctx, user, "passwd", "passwd_hash_algo", "salt"); err != nil {
return nil, err
}
}
// WARN: DON'T check user.IsActive, that will be checked on reqSign so that
// user could be hinted to resend confirm email.
if user.ProhibitLogin {
return nil, user_model.ErrUserProhibitLogin{
UID: user.ID,
Name: user.Name,
}
}
// attempting to login as a non-user account
if user.Type != user_model.UserTypeIndividual {
return nil, user_model.ErrUserProhibitLogin{
UID: user.ID,
Name: user.Name,
}
}
return user, nil
}

37
auth/source/db/source.go Normal file
View File

@ -0,0 +1,37 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package db
import (
"context"
"code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
)
// Source is a password authentication service
type Source struct {
auth.ConfigBase `json:"-"`
}
// FromDB fills up the source config from serialized format (dummy, does nothing for the DB source).
func (source *Source) FromDB(bs []byte) error {
return nil
}
// ToDB exports the config to a byte slice to be saved into database (this method is just dummy and does nothing for DB source)
func (source *Source) ToDB() ([]byte, error) {
return nil, nil
}
// Authenticate queries if the login/password is valid against the DB.
func (source *Source) Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) {
return Authenticate(ctx, user, login, password)
}
func init() {
auth.RegisterTypeConfig(auth.NoType, &Source{})
auth.RegisterTypeConfig(auth.Plain, &Source{})
}

131
auth/source/ldap/README.md Normal file
View File

@ -0,0 +1,131 @@
# Gitea LDAP Authentication Module
## About
This authentication module attempts to authorize and authenticate a user
against an LDAP server. It provides two methods of authentication: LDAP via
BindDN, and LDAP simple authentication.
LDAP via BindDN functions like most LDAP authentication systems. First, it
queries the LDAP server using a Bind DN and searches for the user that is
attempting to sign in. If the user is found, the module attempts to bind to the
server using the user's supplied credentials. If this succeeds, the user has
been authenticated, and their account information is retrieved and passed to the
Gitea login infrastructure.
LDAP simple authentication does not utilize a Bind DN. Instead, it binds
directly with the LDAP server using the user's supplied credentials. If the bind
succeeds and no filter rules out the user, the user is authenticated.
LDAP via BindDN is recommended for most users. By using a Bind DN, the server
can perform authorization by restricting which entries the Bind DN account can
read. Further, using a Bind DN with reduced permissions can reduce security risk
in the face of application bugs.
## Usage
To use this module, add an LDAP authentication source via the Authentications
section in the admin panel. Both the LDAP via BindDN and the simple auth LDAP
share the following fields:
* Authorization Name **(required)**
* A name to assign to the new method of authorization.
* Host **(required)**
* The address where the LDAP server can be reached.
* Example: mydomain.com
* Port **(required)**
* The port to use when connecting to the server.
* Example: 636
* Enable TLS Encryption (optional)
* Whether to use TLS when connecting to the LDAP server.
* Admin Filter (optional)
* An LDAP filter specifying if a user should be given administrator
privileges. If a user account passes the filter, the user will be
granted administrator privileges.
* Example: (objectClass=adminAccount)
* First name attribute (optional)
* The attribute of the user's LDAP record containing the user's first name.
This will be used to populate their account information.
* Example: givenName
* Surname attribute (optional)
* The attribute of the user's LDAP record containing the user's surname. This
will be used to populate their account information.
* Example: sn
* E-mail attribute **(required)**
* The attribute of the user's LDAP record containing the user's email
address. This will be used to populate their account information.
* Example: mail
**LDAP via BindDN** adds the following fields:
* Bind DN (optional)
* The DN to bind to the LDAP server with when searching for the user. This
may be left blank to perform an anonymous search.
* Example: cn=Search,dc=mydomain,dc=com
* Bind Password (optional)
* The password for the Bind DN specified above, if any. _Note: The password
is stored in plaintext at the server. As such, ensure that your Bind DN
has as few privileges as possible._
* User Search Base **(required)**
* The LDAP base at which user accounts will be searched for.
* Example: ou=Users,dc=mydomain,dc=com
* User Filter **(required)**
* An LDAP filter declaring how to find the user record that is attempting to
authenticate. The '%[1]s' matching parameter will be substituted with the
user's username.
* Example: (&(objectClass=posixAccount)(|(uid=%[1]s)(mail=%[1]s)))
**LDAP using simple auth** adds the following fields:
* User DN **(required)**
* A template to use as the user's DN. The `%s` matching parameter will be
substituted with the user's username.
* Example: cn=%s,ou=Users,dc=mydomain,dc=com
* Example: uid=%s,ou=Users,dc=mydomain,dc=com
* User Search Base (optional)
* The LDAP base at which user accounts will be searched for.
* Example: ou=Users,dc=mydomain,dc=com
* User Filter **(required)**
* An LDAP filter declaring when a user should be allowed to log in. The `%[1]s`
matching parameter will be substituted with the user's username.
* Example: (&(objectClass=posixAccount)(|(cn=%[1]s)(mail=%[1]s)))
* Example: (&(objectClass=posixAccount)(|(uid=%[1]s)(mail=%[1]s)))
**Verify group membership in LDAP** uses the following fields:
* Group Search Base (optional)
* The LDAP DN used for groups.
* Example: ou=group,dc=mydomain,dc=com
* Group Name Filter (optional)
* An LDAP filter declaring how to find valid groups in the above DN.
* Example: (|(cn=gitea_users)(cn=admins))
* User Attribute in Group (optional)
* The user attribute that is used to reference a user in the group object.
* Example: uid if the group object contains member: bender and the user object contains uid: bender.
* Example: dn if the group object contains a member: uid=bender,ou=users,dc=planetexpress,dc=com.
* Group Attribute for User (optional)
* The attribute of the group object that lists/contains the group members.
* Example: memberUid or member
* Team group map (optional)
* Automatically add users to Organization teams, depending on LDAP group memberships.
* Note: this function only adds users to teams, it never removes users.
* Example: {"cn=MyGroup,cn=groups,dc=example,dc=org": {"MyGiteaOrganization": ["MyGiteaTeam1", "MyGiteaTeam2", ...], ...}, ...}
* Team group map removal (optional)
* If set to true, users will be removed from teams if they are not members of the corresponding group.
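
The `%[1]s` placeholder in the filters above is an indexed Go format verb: every occurrence is replaced by the first argument, i.e. the login name. A minimal standalone sketch of the expansion (the username value is illustrative):

	package main

	import "fmt"

	func main() {
		filter := "(&(objectClass=posixAccount)(|(uid=%[1]s)(mail=%[1]s)))"
		// Both %[1]s occurrences receive the same first argument.
		fmt.Println(fmt.Sprintf(filter, "bender"))
		// Output: (&(objectClass=posixAccount)(|(uid=bender)(mail=bender)))
	}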

View File

@ -0,0 +1,25 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package ldap_test
import (
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth"
"code.gitea.io/gitea/services/auth/source/ldap"
)
// This test file exists to assert that our Source exposes the interfaces that we expect
// It tightly binds the interfaces and implementation without breaking go import cycles
type sourceInterface interface {
auth.PasswordAuthenticator
auth.SynchronizableSource
auth_model.SSHKeyProvider
auth_model.Config
auth_model.SkipVerifiable
auth_model.HasTLSer
auth_model.UseTLSer
}
var _ (sourceInterface) = &ldap.Source{}

View File

@ -0,0 +1,31 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package ldap
// SecurityProtocol protocol type
type SecurityProtocol int
// Note: new type must be added at the end of list to maintain compatibility.
const (
SecurityProtocolUnencrypted SecurityProtocol = iota
SecurityProtocolLDAPS
SecurityProtocolStartTLS
)
// String returns the name of the SecurityProtocol
func (s SecurityProtocol) String() string {
return SecurityProtocolNames[s]
}
// Int returns the int value of the SecurityProtocol
func (s SecurityProtocol) Int() int {
return int(s)
}
// SecurityProtocolNames contains the name of SecurityProtocol values.
var SecurityProtocolNames = map[SecurityProtocol]string{
SecurityProtocolUnencrypted: "Unencrypted",
SecurityProtocolLDAPS: "LDAPS",
SecurityProtocolStartTLS: "StartTLS",
}
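For illustration, the constants above map to the following names and integer values (derived directly from the iota ordering and SecurityProtocolNames):

	// SecurityProtocolUnencrypted.String() == "Unencrypted", .Int() == 0
	// SecurityProtocolLDAPS.String()       == "LDAPS",       .Int() == 1
	// SecurityProtocolStartTLS.String()    == "StartTLS",    .Int() == 2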

114
auth/source/ldap/source.go Normal file
View File

@ -0,0 +1,114 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package ldap
import (
"strings"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/secret"
"code.gitea.io/gitea/modules/setting"
)
// .____ ________ _____ __________
// | | \______ \ / _ \\______ \
// | | | | \ / /_\ \| ___/
// | |___ | ` \/ | \ |
// |_______ \/_______ /\____|__ /____|
// \/ \/ \/
// Package ldap provides functions and structures to query an LDAP directory.
// For now, it is mainly tested against an MS Active Directory service; see README.md for more information.
// Source Basic LDAP authentication service
type Source struct {
auth.ConfigBase `json:"-"`
Name string // canonical name (ie. corporate.ad)
Host string // LDAP host
Port int // port number
SecurityProtocol SecurityProtocol
SkipVerify bool
BindDN string // DN to bind with
BindPasswordEncrypt string // Encrypted Bind DN password
BindPassword string // Bind DN password
UserBase string // Base search path for users
UserDN string // Template for the DN of the user for simple auth
AttributeUsername string // Username attribute
AttributeName string // First name attribute
AttributeSurname string // Surname attribute
AttributeMail string // E-mail attribute
AttributesInBind bool // fetch attributes in bind context (not user)
AttributeSSHPublicKey string // LDAP SSH Public Key attribute
AttributeAvatar string
SearchPageSize uint32 // Search with paging page size
Filter string // Query filter to validate entry
AdminFilter string // Query filter to check if user is admin
RestrictedFilter string // Query filter to check if user is restricted
Enabled bool // if this source is enabled
AllowDeactivateAll bool // Allow an empty search response to deactivate all users from this source
GroupsEnabled bool // if the group checking is enabled
GroupDN string // Group Search Base
GroupFilter string // Group Name Filter
GroupMemberUID string // Group Attribute containing array of UserUID
GroupTeamMap string // Map LDAP groups to teams
GroupTeamMapRemoval bool // Remove users from synchronized teams when they are not members of the corresponding LDAP group
UserUID string // User Attribute listed in Group
}
// FromDB fills up a LDAPConfig from serialized format.
func (source *Source) FromDB(bs []byte) error {
err := json.UnmarshalHandleDoubleEncode(bs, &source)
if err != nil {
return err
}
if source.BindPasswordEncrypt != "" {
source.BindPassword, err = secret.DecryptSecret(setting.SecretKey, source.BindPasswordEncrypt)
source.BindPasswordEncrypt = ""
}
return err
}
// ToDB exports a LDAPConfig to a serialized format.
func (source *Source) ToDB() ([]byte, error) {
var err error
source.BindPasswordEncrypt, err = secret.EncryptSecret(setting.SecretKey, source.BindPassword)
if err != nil {
return nil, err
}
source.BindPassword = ""
return json.Marshal(source)
}
// SecurityProtocolName returns the name of configured security
// protocol.
func (source *Source) SecurityProtocolName() string {
return SecurityProtocolNames[source.SecurityProtocol]
}
// IsSkipVerify returns whether certificate verification is skipped
func (source *Source) IsSkipVerify() bool {
return source.SkipVerify
}
// HasTLS returns whether this source uses a TLS-capable protocol (LDAPS or StartTLS)
func (source *Source) HasTLS() bool {
return source.SecurityProtocol > SecurityProtocolUnencrypted
}
// UseTLS returns whether connections to this source use TLS
func (source *Source) UseTLS() bool {
return source.SecurityProtocol != SecurityProtocolUnencrypted
}
// ProvidesSSHKeys returns if this source provides SSH Keys
func (source *Source) ProvidesSSHKeys() bool {
return strings.TrimSpace(source.AttributeSSHPublicKey) != ""
}
func init() {
auth.RegisterTypeConfig(auth.LDAP, &Source{})
auth.RegisterTypeConfig(auth.DLDAP, &Source{})
}
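As a rough sketch of the BindPassword handling in the ToDB/FromDB methods above (assuming setting.SecretKey is configured), ToDB persists only the encrypted form while FromDB restores the plaintext and drops the encrypted copy:

	// Illustrative round trip using the same helpers called by ToDB/FromDB above.
	enc, _ := secret.EncryptSecret(setting.SecretKey, "bind-password") // what ends up in BindPasswordEncrypt
	plain, _ := secret.DecryptSecret(setting.SecretKey, enc)           // what FromDB puts back into BindPassword
	_ = plain // "bind-password"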

View File

@ -0,0 +1,125 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package ldap
import (
"context"
"strings"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
auth_module "code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
asymkey_service "code.gitea.io/gitea/services/asymkey"
source_service "code.gitea.io/gitea/services/auth/source"
user_service "code.gitea.io/gitea/services/user"
)
// Authenticate queries whether the login/password pair is valid against the LDAP directory,
// and creates a local user on the first successful authentication.
func (source *Source) Authenticate(ctx context.Context, user *user_model.User, userName, password string) (*user_model.User, error) {
loginName := userName
if user != nil {
loginName = user.LoginName
}
sr := source.SearchEntry(loginName, password, source.AuthSource.Type == auth.DLDAP)
if sr == nil {
// User not in LDAP, do nothing
return nil, user_model.ErrUserNotExist{Name: loginName}
}
// Fallback.
// FIXME: this fallback would cause problems when the "Username" attribute is not set and a user inputs their email.
// In this case, the email would be used as the username, causing "CreateUser" to fail on the first login.
if sr.Username == "" {
if strings.Contains(userName, "@") {
log.Error("No username in search result (Username Attribute is not set properly?), using email as username might cause problems")
}
sr.Username = userName
}
if sr.Mail == "" {
sr.Mail = sr.Username + "@localhost.local"
}
isAttributeSSHPublicKeySet := strings.TrimSpace(source.AttributeSSHPublicKey) != ""
// Update the user's admin flag if the user already exists
if isExist, err := user_model.IsUserExist(ctx, 0, sr.Username); err != nil {
return nil, err
} else if isExist {
if user == nil {
user, err = user_model.GetUserByName(ctx, sr.Username)
if err != nil {
return nil, err
}
}
if user != nil && !user.ProhibitLogin {
opts := &user_service.UpdateOptions{}
if source.AdminFilter != "" && user.IsAdmin != sr.IsAdmin {
// Change existing admin flag only if AdminFilter option is set
opts.IsAdmin = user_service.UpdateOptionFieldFromSync(sr.IsAdmin)
}
if !sr.IsAdmin && source.RestrictedFilter != "" && user.IsRestricted != sr.IsRestricted {
// Change existing restricted flag only if RestrictedFilter option is set
opts.IsRestricted = optional.Some(sr.IsRestricted)
}
if opts.IsAdmin.Has() || opts.IsRestricted.Has() {
if err := user_service.UpdateUser(ctx, user, opts); err != nil {
return nil, err
}
}
}
}
if user != nil {
if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.AuthSource, sr.SSHPublicKey) {
if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil {
return user, err
}
}
} else {
user = &user_model.User{
LowerName: strings.ToLower(sr.Username),
Name: sr.Username,
FullName: composeFullName(sr.Name, sr.Surname, sr.Username),
Email: sr.Mail,
LoginType: source.AuthSource.Type,
LoginSource: source.AuthSource.ID,
LoginName: userName,
IsAdmin: sr.IsAdmin,
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsRestricted: optional.Some(sr.IsRestricted),
IsActive: optional.Some(true),
}
err := user_model.CreateUser(ctx, user, &user_model.Meta{}, overwriteDefault)
if err != nil {
return user, err
}
if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.AuthSource, sr.SSHPublicKey) {
if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil {
return user, err
}
}
if source.AttributeAvatar != "" {
if err := user_service.UploadAvatar(ctx, user, sr.Avatar); err != nil {
return user, err
}
}
}
if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) {
groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap)
if err != nil {
return user, err
}
if err := source_service.SyncGroupsToTeams(ctx, user, sr.Groups, groupTeamMapping, source.GroupTeamMapRemoval); err != nil {
return user, err
}
}
return user, nil
}

View File

@ -0,0 +1,525 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package ldap
import (
"crypto/tls"
"fmt"
"net"
"strconv"
"strings"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"github.com/go-ldap/ldap/v3"
)
// SearchResult : user data
type SearchResult struct {
Username string // Username
Name string // Name
Surname string // Surname
Mail string // E-mail address
SSHPublicKey []string // SSH Public Key
IsAdmin bool // if user is administrator
IsRestricted bool // if user is restricted
LowerName string // LowerName
Avatar []byte
Groups container.Set[string]
}
func (source *Source) sanitizedUserQuery(username string) (string, bool) {
// See http://tools.ietf.org/search/rfc4515
badCharacters := "\x00()*\\"
if strings.ContainsAny(username, badCharacters) {
log.Debug("'%s' contains invalid query characters. Aborting.", username)
return "", false
}
return fmt.Sprintf(source.Filter, username), true
}
func (source *Source) sanitizedUserDN(username string) (string, bool) {
// See http://tools.ietf.org/search/rfc4514: "special characters"
badCharacters := "\x00()*\\,='\"#+;<>"
if strings.ContainsAny(username, badCharacters) {
log.Debug("'%s' contains invalid DN characters. Aborting.", username)
return "", false
}
return fmt.Sprintf(source.UserDN, username), true
}
func (source *Source) sanitizedGroupFilter(group string) (string, bool) {
// See http://tools.ietf.org/search/rfc4515
badCharacters := "\x00*\\"
if strings.ContainsAny(group, badCharacters) {
log.Trace("Group filter invalid query characters: %s", group)
return "", false
}
return group, true
}
func (source *Source) sanitizedGroupDN(groupDn string) (string, bool) {
// See http://tools.ietf.org/search/rfc4514: "special characters"
badCharacters := "\x00()*\\'\"#+;<>"
if strings.ContainsAny(groupDn, badCharacters) || strings.HasPrefix(groupDn, " ") || strings.HasSuffix(groupDn, " ") {
log.Trace("Group DN contains invalid query characters: %s", groupDn)
return "", false
}
return groupDn, true
}
func (source *Source) findUserDN(l *ldap.Conn, name string) (string, bool) {
log.Trace("Search for LDAP user: %s", name)
// A search for the user.
userFilter, ok := source.sanitizedUserQuery(name)
if !ok {
return "", false
}
log.Trace("Searching for DN using filter %s and base %s", userFilter, source.UserBase)
search := ldap.NewSearchRequest(
source.UserBase, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0,
false, userFilter, []string{}, nil)
// Ensure we found a user
sr, err := l.Search(search)
if err != nil || len(sr.Entries) < 1 {
log.Debug("Failed search using filter[%s]: %v", userFilter, err)
return "", false
} else if len(sr.Entries) > 1 {
log.Debug("Filter '%s' returned more than one user.", userFilter)
return "", false
}
userDN := sr.Entries[0].DN
if userDN == "" {
log.Error("LDAP search was successful, but found no DN!")
return "", false
}
return userDN, true
}
func dial(source *Source) (*ldap.Conn, error) {
log.Trace("Dialing LDAP with security protocol (%v) without verifying: %v", source.SecurityProtocol, source.SkipVerify)
tlsConfig := &tls.Config{
ServerName: source.Host,
InsecureSkipVerify: source.SkipVerify,
}
if source.SecurityProtocol == SecurityProtocolLDAPS {
return ldap.DialTLS("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)), tlsConfig) //nolint:staticcheck
}
conn, err := ldap.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port))) //nolint:staticcheck
if err != nil {
return nil, fmt.Errorf("error during Dial: %w", err)
}
if source.SecurityProtocol == SecurityProtocolStartTLS {
if err = conn.StartTLS(tlsConfig); err != nil {
conn.Close()
return nil, fmt.Errorf("error during StartTLS: %w", err)
}
}
return conn, nil
}
func bindUser(l *ldap.Conn, userDN, passwd string) error {
log.Trace("Binding with userDN: %s", userDN)
err := l.Bind(userDN, passwd)
if err != nil {
log.Debug("LDAP auth. failed for %s, reason: %v", userDN, err)
return err
}
log.Trace("Bound successfully with userDN: %s", userDN)
return err
}
func checkAdmin(l *ldap.Conn, ls *Source, userDN string) bool {
if ls.AdminFilter == "" {
return false
}
log.Trace("Checking admin with filter %s and base %s", ls.AdminFilter, userDN)
search := ldap.NewSearchRequest(
userDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, ls.AdminFilter,
[]string{ls.AttributeName},
nil)
sr, err := l.Search(search)
if err != nil {
log.Error("LDAP Admin Search with filter %s for %s failed unexpectedly! (%v)", ls.AdminFilter, userDN, err)
} else if len(sr.Entries) < 1 {
log.Trace("LDAP Admin Search found no matching entries.")
} else {
return true
}
return false
}
func checkRestricted(l *ldap.Conn, ls *Source, userDN string) bool {
if ls.RestrictedFilter == "" {
return false
}
if ls.RestrictedFilter == "*" {
return true
}
log.Trace("Checking restricted with filter %s and base %s", ls.RestrictedFilter, userDN)
search := ldap.NewSearchRequest(
userDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, ls.RestrictedFilter,
[]string{ls.AttributeName},
nil)
sr, err := l.Search(search)
if err != nil {
log.Error("LDAP Restrictred Search with filter %s for %s failed unexpectedly! (%v)", ls.RestrictedFilter, userDN, err)
} else if len(sr.Entries) < 1 {
log.Trace("LDAP Restricted Search found no matching entries.")
} else {
return true
}
return false
}
// List all group memberships of a user
func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGroupFilter bool) container.Set[string] {
ldapGroups := make(container.Set[string])
groupFilter, ok := source.sanitizedGroupFilter(source.GroupFilter)
if !ok {
return ldapGroups
}
groupDN, ok := source.sanitizedGroupDN(source.GroupDN)
if !ok {
return ldapGroups
}
var searchFilter string
if applyGroupFilter && groupFilter != "" {
searchFilter = fmt.Sprintf("(&(%s)(%s=%s))", groupFilter, source.GroupMemberUID, ldap.EscapeFilter(uid))
} else {
searchFilter = fmt.Sprintf("(%s=%s)", source.GroupMemberUID, ldap.EscapeFilter(uid))
}
result, err := l.Search(ldap.NewSearchRequest(
groupDN,
ldap.ScopeWholeSubtree,
ldap.NeverDerefAliases,
0,
0,
false,
searchFilter,
[]string{},
nil,
))
if err != nil {
log.Error("Failed group search in LDAP with filter [%s]: %v", searchFilter, err)
return ldapGroups
}
for _, entry := range result.Entries {
if entry.DN == "" {
log.Error("LDAP search was successful, but found no DN!")
continue
}
ldapGroups.Add(entry.DN)
}
return ldapGroups
}
func (source *Source) getUserAttributeListedInGroup(entry *ldap.Entry) string {
if strings.ToLower(source.UserUID) == "dn" {
return entry.DN
}
return entry.GetAttributeValue(source.UserUID)
}
// SearchEntry searches the LDAP source to check whether an entry (name, passwd) is valid and matches the configured filter
func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchResult {
if MockedSearchEntry != nil {
return MockedSearchEntry(source, name, passwd, directBind)
}
return realSearchEntry(source, name, passwd, directBind)
}
var MockedSearchEntry func(source *Source, name, passwd string, directBind bool) *SearchResult
func realSearchEntry(source *Source, name, passwd string, directBind bool) *SearchResult {
// See https://tools.ietf.org/search/rfc4513#section-5.1.2
if passwd == "" {
log.Debug("Auth. failed for %s, password cannot be empty", name)
return nil
}
l, err := dial(source)
if err != nil {
log.Error("LDAP Connect error, %s:%v", source.Host, err)
source.Enabled = false
return nil
}
defer l.Close()
var userDN string
if directBind {
log.Trace("LDAP will bind directly via UserDN template: %s", source.UserDN)
var ok bool
userDN, ok = source.sanitizedUserDN(name)
if !ok {
return nil
}
err = bindUser(l, userDN, passwd)
if err != nil {
return nil
}
if source.UserBase != "" {
// not everyone has a CN compatible with input name so we need to find
// the real userDN in that case
userDN, ok = source.findUserDN(l, name)
if !ok {
return nil
}
}
} else {
log.Trace("LDAP will use BindDN.")
var found bool
if source.BindDN != "" && source.BindPassword != "" {
err := l.Bind(source.BindDN, source.BindPassword)
if err != nil {
log.Debug("Failed to bind as BindDN[%s]: %v", source.BindDN, err)
return nil
}
log.Trace("Bound as BindDN %s", source.BindDN)
} else {
log.Trace("Proceeding with anonymous LDAP search.")
}
userDN, found = source.findUserDN(l, name)
if !found {
return nil
}
}
if !source.AttributesInBind {
// binds user (checking password) before looking-up attributes in user context
err = bindUser(l, userDN, passwd)
if err != nil {
return nil
}
}
userFilter, ok := source.sanitizedUserQuery(name)
if !ok {
return nil
}
isAttributeSSHPublicKeySet := strings.TrimSpace(source.AttributeSSHPublicKey) != ""
isAttributeAvatarSet := strings.TrimSpace(source.AttributeAvatar) != ""
attribs := []string{source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail}
if strings.TrimSpace(source.UserUID) != "" {
attribs = append(attribs, source.UserUID)
}
if isAttributeSSHPublicKeySet {
attribs = append(attribs, source.AttributeSSHPublicKey)
}
if isAttributeAvatarSet {
attribs = append(attribs, source.AttributeAvatar)
}
log.Trace("Fetching attributes '%v', '%v', '%v', '%v', '%v', '%v', '%v' with filter '%s' and base '%s'", source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail, source.AttributeSSHPublicKey, source.AttributeAvatar, source.UserUID, userFilter, userDN)
search := ldap.NewSearchRequest(
userDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, userFilter,
attribs, nil)
sr, err := l.Search(search)
if err != nil {
log.Error("LDAP Search failed unexpectedly! (%v)", err)
return nil
} else if len(sr.Entries) < 1 {
if directBind {
log.Trace("User filter inhibited user login.")
} else {
log.Trace("LDAP Search found no matching entries.")
}
return nil
}
var sshPublicKey []string
var Avatar []byte
username := sr.Entries[0].GetAttributeValue(source.AttributeUsername)
firstname := sr.Entries[0].GetAttributeValue(source.AttributeName)
surname := sr.Entries[0].GetAttributeValue(source.AttributeSurname)
mail := sr.Entries[0].GetAttributeValue(source.AttributeMail)
if isAttributeSSHPublicKeySet {
sshPublicKey = sr.Entries[0].GetAttributeValues(source.AttributeSSHPublicKey)
}
isAdmin := checkAdmin(l, source, userDN)
var isRestricted bool
if !isAdmin {
isRestricted = checkRestricted(l, source, userDN)
}
if isAttributeAvatarSet {
Avatar = sr.Entries[0].GetRawAttributeValue(source.AttributeAvatar)
}
// Check group membership
var usersLdapGroups container.Set[string]
if source.GroupsEnabled {
userAttributeListedInGroup := source.getUserAttributeListedInGroup(sr.Entries[0])
usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, true)
if source.GroupFilter != "" && len(usersLdapGroups) == 0 {
return nil
}
}
if !directBind && source.AttributesInBind {
// binds user (checking password) after looking-up attributes in BindDN context
err = bindUser(l, userDN, passwd)
if err != nil {
return nil
}
}
return &SearchResult{
LowerName: strings.ToLower(username),
Username: username,
Name: firstname,
Surname: surname,
Mail: mail,
SSHPublicKey: sshPublicKey,
IsAdmin: isAdmin,
IsRestricted: isRestricted,
Avatar: Avatar,
Groups: usersLdapGroups,
}
}
// UsePagedSearch returns whether paged search should be used
func (source *Source) UsePagedSearch() bool {
return source.SearchPageSize > 0
}
// SearchEntries : search an LDAP source for all users matching userFilter
func (source *Source) SearchEntries() ([]*SearchResult, error) {
l, err := dial(source)
if err != nil {
log.Error("LDAP Connect error, %s:%v", source.Host, err)
source.Enabled = false
return nil, err
}
defer l.Close()
if source.BindDN != "" && source.BindPassword != "" {
err := l.Bind(source.BindDN, source.BindPassword)
if err != nil {
log.Debug("Failed to bind as BindDN[%s]: %v", source.BindDN, err)
return nil, err
}
log.Trace("Bound as BindDN %s", source.BindDN)
} else {
log.Trace("Proceeding with anonymous LDAP search.")
}
userFilter := fmt.Sprintf(source.Filter, "*")
isAttributeSSHPublicKeySet := strings.TrimSpace(source.AttributeSSHPublicKey) != ""
isAttributeAvatarSet := strings.TrimSpace(source.AttributeAvatar) != ""
attribs := []string{source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail, source.UserUID}
if isAttributeSSHPublicKeySet {
attribs = append(attribs, source.AttributeSSHPublicKey)
}
if isAttributeAvatarSet {
attribs = append(attribs, source.AttributeAvatar)
}
log.Trace("Fetching attributes '%v', '%v', '%v', '%v', '%v', '%v' with filter %s and base %s", source.AttributeUsername, source.AttributeName, source.AttributeSurname, source.AttributeMail, source.AttributeSSHPublicKey, source.AttributeAvatar, userFilter, source.UserBase)
search := ldap.NewSearchRequest(
source.UserBase, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, userFilter,
attribs, nil)
var sr *ldap.SearchResult
if source.UsePagedSearch() {
sr, err = l.SearchWithPaging(search, source.SearchPageSize)
} else {
sr, err = l.Search(search)
}
if err != nil {
log.Error("LDAP Search failed unexpectedly! (%v)", err)
return nil, err
}
result := make([]*SearchResult, 0, len(sr.Entries))
for _, v := range sr.Entries {
var usersLdapGroups container.Set[string]
if source.GroupsEnabled {
userAttributeListedInGroup := source.getUserAttributeListedInGroup(v)
if source.GroupFilter != "" {
usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, true)
if len(usersLdapGroups) == 0 {
continue
}
}
if source.GroupTeamMap != "" || source.GroupTeamMapRemoval {
usersLdapGroups = source.listLdapGroupMemberships(l, userAttributeListedInGroup, false)
}
}
user := &SearchResult{
Username: v.GetAttributeValue(source.AttributeUsername),
Name: v.GetAttributeValue(source.AttributeName),
Surname: v.GetAttributeValue(source.AttributeSurname),
Mail: v.GetAttributeValue(source.AttributeMail),
IsAdmin: checkAdmin(l, source, v.DN),
Groups: usersLdapGroups,
}
if !user.IsAdmin {
user.IsRestricted = checkRestricted(l, source, v.DN)
}
if isAttributeSSHPublicKeySet {
user.SSHPublicKey = v.GetAttributeValues(source.AttributeSSHPublicKey)
}
if isAttributeAvatarSet {
user.Avatar = v.GetRawAttributeValue(source.AttributeAvatar)
}
user.LowerName = strings.ToLower(user.Username)
result = append(result, user)
}
return result, nil
}
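The MockedSearchEntry hook consulted by SearchEntry above lets tests bypass a real directory. A minimal sketch of how a test inside this package might use it (the returned values are illustrative):

	// Stub the LDAP lookup for the duration of a test, then restore it.
	MockedSearchEntry = func(source *Source, name, passwd string, directBind bool) *SearchResult {
		return &SearchResult{Username: name, LowerName: strings.ToLower(name), Mail: name + "@example.com"}
	}
	defer func() { MockedSearchEntry = nil }()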

View File

@ -0,0 +1,229 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package ldap
import (
"context"
"strings"
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/organization"
user_model "code.gitea.io/gitea/models/user"
auth_module "code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
asymkey_service "code.gitea.io/gitea/services/asymkey"
source_service "code.gitea.io/gitea/services/auth/source"
user_service "code.gitea.io/gitea/services/user"
)
// Sync causes this ldap source to synchronize its users with the db
func (source *Source) Sync(ctx context.Context, updateExisting bool) error {
log.Trace("Doing: SyncExternalUsers[%s]", source.AuthSource.Name)
isAttributeSSHPublicKeySet := strings.TrimSpace(source.AttributeSSHPublicKey) != ""
var sshKeysNeedUpdate bool
// Find all users with this login type - FIXME: Should this be an iterator?
users, err := user_model.GetUsersBySource(ctx, source.AuthSource)
if err != nil {
log.Error("SyncExternalUsers: %v", err)
return err
}
select {
case <-ctx.Done():
log.Warn("SyncExternalUsers: Cancelled before update of %s", source.AuthSource.Name)
return db.ErrCancelledf("Before update of %s", source.AuthSource.Name)
default:
}
usernameUsers := make(map[string]*user_model.User, len(users))
mailUsers := make(map[string]*user_model.User, len(users))
keepActiveUsers := make(container.Set[int64])
for _, u := range users {
usernameUsers[u.LowerName] = u
mailUsers[strings.ToLower(u.Email)] = u
}
sr, err := source.SearchEntries()
if err != nil {
log.Error("SyncExternalUsers LDAP source failure [%s], skipped", source.AuthSource.Name)
return nil
}
if len(sr) == 0 {
if !source.AllowDeactivateAll {
log.Error("LDAP search found no entries but did not report an error. Refusing to deactivate all users")
return nil
}
log.Warn("LDAP search found no entries but did not report an error. All users will be deactivated as per settings")
}
orgCache := make(map[string]*organization.Organization)
teamCache := make(map[string]*organization.Team)
groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap)
if err != nil {
return err
}
for _, su := range sr {
select {
case <-ctx.Done():
log.Warn("SyncExternalUsers: Cancelled at update of %s before completed update of users", source.AuthSource.Name)
// Rewrite authorized_keys file if LDAP Public SSH Key attribute is set and any key was added or removed
if sshKeysNeedUpdate {
err = asymkey_service.RewriteAllPublicKeys(ctx)
if err != nil {
log.Error("RewriteAllPublicKeys: %v", err)
}
}
return db.ErrCancelledf("During update of %s before completed update of users", source.AuthSource.Name)
default:
}
if su.Username == "" && su.Mail == "" {
continue
}
var usr *user_model.User
if su.Username != "" {
usr = usernameUsers[su.LowerName]
}
if usr == nil && su.Mail != "" {
usr = mailUsers[strings.ToLower(su.Mail)]
}
if usr != nil {
keepActiveUsers.Add(usr.ID)
} else if su.Username == "" {
// we cannot create the user if su.Username is empty
continue
}
if su.Mail == "" {
su.Mail = su.Username + "@localhost.local"
}
fullName := composeFullName(su.Name, su.Surname, su.Username)
// If no existing user found, create one
if usr == nil {
log.Trace("SyncExternalUsers[%s]: Creating user %s", source.AuthSource.Name, su.Username)
usr = &user_model.User{
LowerName: su.LowerName,
Name: su.Username,
FullName: fullName,
LoginType: source.AuthSource.Type,
LoginSource: source.AuthSource.ID,
LoginName: su.Username,
Email: su.Mail,
IsAdmin: su.IsAdmin,
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsRestricted: optional.Some(su.IsRestricted),
IsActive: optional.Some(true),
}
err = user_model.CreateUser(ctx, usr, &user_model.Meta{}, overwriteDefault)
if err != nil {
log.Error("SyncExternalUsers[%s]: Error creating user %s: %v", source.AuthSource.Name, su.Username, err)
}
if err == nil && isAttributeSSHPublicKeySet {
log.Trace("SyncExternalUsers[%s]: Adding LDAP Public SSH Keys for user %s", source.AuthSource.Name, usr.Name)
if asymkey_model.AddPublicKeysBySource(ctx, usr, source.AuthSource, su.SSHPublicKey) {
sshKeysNeedUpdate = true
}
}
if err == nil && source.AttributeAvatar != "" {
_ = user_service.UploadAvatar(ctx, usr, su.Avatar)
}
} else if updateExisting {
// Synchronize SSH Public Key if that attribute is set
if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.AuthSource, su.SSHPublicKey) {
sshKeysNeedUpdate = true
}
// Check if user data has changed
if (source.AdminFilter != "" && usr.IsAdmin != su.IsAdmin) ||
(source.RestrictedFilter != "" && usr.IsRestricted != su.IsRestricted) ||
!strings.EqualFold(usr.Email, su.Mail) ||
usr.FullName != fullName ||
!usr.IsActive {
log.Trace("SyncExternalUsers[%s]: Updating user %s", source.AuthSource.Name, usr.Name)
opts := &user_service.UpdateOptions{
FullName: optional.Some(fullName),
IsActive: optional.Some(true),
}
if source.AdminFilter != "" {
opts.IsAdmin = user_service.UpdateOptionFieldFromSync(su.IsAdmin)
}
// Change existing restricted flag only if RestrictedFilter option is set
if !su.IsAdmin && source.RestrictedFilter != "" {
opts.IsRestricted = optional.Some(su.IsRestricted)
}
if err := user_service.UpdateUser(ctx, usr, opts); err != nil {
log.Error("SyncExternalUsers[%s]: Error updating user %s: %v", source.AuthSource.Name, usr.Name, err)
}
if err := user_service.ReplacePrimaryEmailAddress(ctx, usr, su.Mail); err != nil {
log.Error("SyncExternalUsers[%s]: Error updating user %s primary email %s: %v", source.AuthSource.Name, usr.Name, su.Mail, err)
}
}
if source.AttributeAvatar != "" {
if len(su.Avatar) > 0 && usr.IsUploadAvatarChanged(su.Avatar) {
log.Trace("SyncExternalUsers[%s]: Uploading new avatar for %s", source.AuthSource.Name, usr.Name)
_ = user_service.UploadAvatar(ctx, usr, su.Avatar)
}
}
}
// Synchronize LDAP groups with organization and team memberships
if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) {
if err := source_service.SyncGroupsToTeamsCached(ctx, usr, su.Groups, groupTeamMapping, source.GroupTeamMapRemoval, orgCache, teamCache); err != nil {
log.Error("SyncGroupsToTeamsCached: %v", err)
}
}
}
// Rewrite authorized_keys file if LDAP Public SSH Key attribute is set and any key was added or removed
if sshKeysNeedUpdate {
err = asymkey_service.RewriteAllPublicKeys(ctx)
if err != nil {
log.Error("RewriteAllPublicKeys: %v", err)
}
}
select {
case <-ctx.Done():
log.Warn("SyncExternalUsers: Cancelled during update of %s before delete users", source.AuthSource.Name)
return db.ErrCancelledf("During update of %s before delete users", source.AuthSource.Name)
default:
}
// Deactivate users not present in LDAP
if updateExisting {
for _, usr := range users {
if keepActiveUsers.Contains(usr.ID) {
continue
}
log.Trace("SyncExternalUsers[%s]: Deactivating user %s", source.AuthSource.Name, usr.Name)
opts := &user_service.UpdateOptions{
IsActive: optional.Some(false),
}
if err := user_service.UpdateUser(ctx, usr, opts); err != nil {
log.Error("SyncExternalUsers[%s]: Error deactivating user %s: %v", source.AuthSource.Name, usr.Name, err)
}
}
}
return nil
}

18
auth/source/ldap/util.go Normal file
View File

@ -0,0 +1,18 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package ldap
// composeFullName composes a full name from the first name and surname, falling back to the username
func composeFullName(firstname, surname, username string) string {
switch {
case firstname == "" && surname == "":
return username
case firstname == "":
return surname
case surname == "":
return firstname
default:
return firstname + " " + surname
}
}
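A few illustrative inputs and outputs for composeFullName:

	// composeFullName("Philip", "Fry", "pfry") -> "Philip Fry"
	// composeFullName("", "Fry", "pfry")       -> "Fry"
	// composeFullName("Philip", "", "pfry")    -> "Philip"
	// composeFullName("", "", "pfry")          -> "pfry"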

View File

@ -0,0 +1,21 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2_test
import (
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth"
"code.gitea.io/gitea/services/auth/source/oauth2"
)
// This test file exists to assert that our Source exposes the interfaces that we expect
// It tightly binds the interfaces and implementation without breaking go import cycles
type sourceInterface interface {
auth_model.Config
auth_model.RegisterableSource
auth.PasswordAuthenticator
}
var _ (sourceInterface) = &oauth2.Source{}

View File

@ -0,0 +1,82 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"context"
"encoding/gob"
"net/http"
"sync"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"github.com/google/uuid"
"github.com/gorilla/sessions"
"github.com/markbates/goth/gothic"
)
var gothRWMutex = sync.RWMutex{}
// UsersStoreKey is the key for the store
const UsersStoreKey = "gitea-oauth2-sessions"
// ProviderHeaderKey is the HTTP header key
const ProviderHeaderKey = "gitea-oauth2-provider"
// Init initializes the oauth source
func Init(ctx context.Context) error {
// Lock our mutex
gothRWMutex.Lock()
gob.Register(&sessions.Session{})
gothic.Store = &SessionsStore{
maxLength: int64(setting.OAuth2.MaxTokenLength),
}
gothic.SetState = func(req *http.Request) string {
return uuid.New().String()
}
gothic.GetProviderName = func(req *http.Request) (string, error) {
return req.Header.Get(ProviderHeaderKey), nil
}
// Unlock our mutex
gothRWMutex.Unlock()
return initOAuth2Sources(ctx)
}
// ResetOAuth2 clears existing OAuth2 providers and loads them from DB
func ResetOAuth2(ctx context.Context) error {
ClearProviders()
return initOAuth2Sources(ctx)
}
// initOAuth2Sources is used to load and register all active OAuth2 providers
func initOAuth2Sources(ctx context.Context) error {
authSources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
IsActive: optional.Some(true),
LoginType: auth.OAuth2,
})
if err != nil {
return err
}
for _, source := range authSources {
oauth2Source, ok := source.Cfg.(*Source)
if !ok {
continue
}
err := oauth2Source.RegisterSource()
if err != nil {
log.Critical("Unable to register source: %s due to Error: %v.", source.Name, err)
}
}
return nil
}

View File

@ -0,0 +1,14 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"testing"
"code.gitea.io/gitea/models/unittest"
)
func TestMain(m *testing.M) {
unittest.MainTest(m, &unittest.TestOptions{})
}

View File

@ -0,0 +1,190 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"context"
"errors"
"fmt"
"html"
"html/template"
"net/url"
"sort"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"github.com/markbates/goth"
)
// Provider is an interface for describing a single OAuth2 provider
type Provider interface {
Name() string
DisplayName() string
IconHTML(size int) template.HTML
CustomURLSettings() *CustomURLSettings
}
// GothProviderCreator provides a function to create a goth.Provider
type GothProviderCreator interface {
CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error)
}
// GothProvider is a Provider that can also create its corresponding goth.Provider
type GothProvider interface {
Provider
GothProviderCreator
}
// AuthSourceProvider wraps a registered GothProvider for a specific auth source. Multiple auth sources can use the
// same registered GothProvider, so each auth source carries its own DisplayName and IconHTML for display.
// The Name is the GothProvider's name and is used to find the GothProvider to sign in with.
// The DisplayName is the auth source config's name, set by the site admin on the admin page, where the IconURL can also be set.
type AuthSourceProvider struct {
GothProvider
sourceName, iconURL string
}
func (p *AuthSourceProvider) Name() string {
return p.GothProvider.Name()
}
func (p *AuthSourceProvider) DisplayName() string {
return p.sourceName
}
func (p *AuthSourceProvider) IconHTML(size int) template.HTML {
if p.iconURL != "" {
img := fmt.Sprintf(`<img class="tw-object-contain tw-mr-2" width="%d" height="%d" src="%s" alt="%s">`,
size,
size,
html.EscapeString(p.iconURL), html.EscapeString(p.DisplayName()),
)
return template.HTML(img)
}
return p.GothProvider.IconHTML(size)
}
// gothProviders contains the map of registered OAuth2 providers in Gitea (based on goth).
// The key maps the OAuth2 provider to the goth provider type (also stored in AuthSource.OAuth2Config.Provider);
// the value stores display data.
var gothProviders = map[string]GothProvider{}
// RegisterGothProvider registers a GothProvider
func RegisterGothProvider(provider GothProvider) {
if _, has := gothProviders[provider.Name()]; has {
log.Fatal("Duplicate oauth2provider type provided: %s", provider.Name())
}
gothProviders[provider.Name()] = provider
}
// GetSupportedOAuth2Providers returns the sorted list of registered (not yet configured) OAuth2 providers.
// Each provider's Name is the technical name (as used in the callbackURL);
// the remaining fields are used for display.
func GetSupportedOAuth2Providers() []Provider {
providers := make([]Provider, 0, len(gothProviders))
for _, provider := range gothProviders {
providers = append(providers, provider)
}
sort.Slice(providers, func(i, j int) bool {
return providers[i].Name() < providers[j].Name()
})
return providers
}
func CreateProviderFromSource(source *auth.Source) (Provider, error) {
oauth2Cfg, ok := source.Cfg.(*Source)
if !ok {
return nil, fmt.Errorf("invalid OAuth2 source config: %v", oauth2Cfg)
}
gothProv := gothProviders[oauth2Cfg.Provider]
return &AuthSourceProvider{GothProvider: gothProv, sourceName: source.Name, iconURL: oauth2Cfg.IconURL}, nil
}
// GetOAuth2Providers returns the list of configured OAuth2 providers
func GetOAuth2Providers(ctx context.Context, isActive optional.Option[bool]) ([]Provider, error) {
authSources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
IsActive: isActive,
LoginType: auth.OAuth2,
})
if err != nil {
return nil, err
}
providers := make([]Provider, 0, len(authSources))
for _, source := range authSources {
provider, err := CreateProviderFromSource(source)
if err != nil {
return nil, err
}
providers = append(providers, provider)
}
sort.Slice(providers, func(i, j int) bool {
return providers[i].Name() < providers[j].Name()
})
return providers, nil
}
// RegisterProviderWithGothic registers an OAuth2 provider in the goth lib
func RegisterProviderWithGothic(providerName string, source *Source) error {
provider, err := createProvider(providerName, source)
if err == nil && provider != nil {
gothRWMutex.Lock()
defer gothRWMutex.Unlock()
goth.UseProviders(provider)
}
return err
}
// RemoveProviderFromGothic removes the given OAuth2 provider from the goth lib
func RemoveProviderFromGothic(providerName string) {
gothRWMutex.Lock()
defer gothRWMutex.Unlock()
delete(goth.GetProviders(), providerName)
}
// ClearProviders clears all OAuth2 providers from the goth lib
func ClearProviders() {
gothRWMutex.Lock()
defer gothRWMutex.Unlock()
goth.ClearProviders()
}
var ErrAuthSourceNotActivated = errors.New("auth source is not activated")
// used to create different types of goth providers
func createProvider(providerName string, source *Source) (goth.Provider, error) {
callbackURL := setting.AppURL + "user/oauth2/" + url.PathEscape(providerName) + "/callback"
var provider goth.Provider
var err error
p, ok := gothProviders[source.Provider]
if !ok {
return nil, ErrAuthSourceNotActivated
}
provider, err = p.CreateGothProvider(providerName, callbackURL, source)
if err != nil {
return provider, err
}
// always set the name if provider is created so we can support multiple setups of 1 provider
if provider != nil {
provider.SetName(providerName)
}
return provider, err
}
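As a worked example of the callback URL built above (the AppURL and source name are placeholders): with setting.AppURL set to "https://git.example.com/" and an auth source named "company-gitlab", the provider is created with the callback "https://git.example.com/user/oauth2/company-gitlab/callback", and SetName("company-gitlab") lets several auth sources share one registered goth provider type.

	// setting.AppURL = "https://git.example.com/"   (placeholder)
	// providerName   = "company-gitlab"
	// callbackURL    = "https://git.example.com/user/oauth2/company-gitlab/callback"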

View File

@ -0,0 +1,51 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"html/template"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/svg"
)
// BaseProvider represents a common base for Provider
type BaseProvider struct {
name string
displayName string
}
// Name provides the technical name for this provider
func (b *BaseProvider) Name() string {
return b.name
}
// DisplayName returns the friendly name for this provider
func (b *BaseProvider) DisplayName() string {
return b.displayName
}
// IconHTML returns icon HTML for this provider
func (b *BaseProvider) IconHTML(size int) template.HTML {
svgName := "gitea-" + b.name
switch b.name {
case "gplus":
svgName = "gitea-google"
case "github":
svgName = "octicon-mark-github"
}
svgHTML := svg.RenderHTML(svgName, size, "tw-mr-2")
if svgHTML == "" {
log.Error("No SVG icon for oauth2 provider %q", b.name)
svgHTML = svg.RenderHTML("gitea-openid", size, "tw-mr-2")
}
return svgHTML
}
// CustomURLSettings returns the custom url settings for this provider
func (b *BaseProvider) CustomURLSettings() *CustomURLSettings {
return nil
}
var _ Provider = &BaseProvider{}

View File

@ -0,0 +1,123 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"code.gitea.io/gitea/modules/setting"
"github.com/markbates/goth"
"github.com/markbates/goth/providers/azureadv2"
"github.com/markbates/goth/providers/gitea"
"github.com/markbates/goth/providers/github"
"github.com/markbates/goth/providers/gitlab"
"github.com/markbates/goth/providers/mastodon"
"github.com/markbates/goth/providers/nextcloud"
)
// CustomProviderNewFn creates a goth.Provider using a custom url mapping
type CustomProviderNewFn func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error)
// CustomProvider is a GothProvider that has CustomURL features
type CustomProvider struct {
BaseProvider
customURLSettings *CustomURLSettings
newFn CustomProviderNewFn
}
// CustomURLSettings returns the CustomURLSettings for this provider
func (c *CustomProvider) CustomURLSettings() *CustomURLSettings {
return c.customURLSettings
}
// CreateGothProvider creates a GothProvider from this Provider
func (c *CustomProvider) CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error) {
custom := c.customURLSettings.OverrideWith(source.CustomURLMapping)
return c.newFn(source.ClientID, source.ClientSecret, callbackURL, custom, source.Scopes)
}
// NewCustomProvider is a constructor function for custom providers
func NewCustomProvider(name, displayName string, customURLSetting *CustomURLSettings, newFn CustomProviderNewFn) *CustomProvider {
return &CustomProvider{
BaseProvider: BaseProvider{
name: name,
displayName: displayName,
},
customURLSettings: customURLSetting,
newFn: newFn,
}
}
var _ GothProvider = &CustomProvider{}
func init() {
RegisterGothProvider(NewCustomProvider(
"github", "GitHub", &CustomURLSettings{
TokenURL: availableAttribute(github.TokenURL),
AuthURL: availableAttribute(github.AuthURL),
ProfileURL: availableAttribute(github.ProfileURL),
EmailURL: availableAttribute(github.EmailURL),
},
func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
if setting.OAuth2Client.EnableAutoRegistration {
scopes = append(scopes, "user:email")
}
return github.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, custom.EmailURL, scopes...), nil
}))
RegisterGothProvider(NewCustomProvider(
"gitlab", "GitLab", &CustomURLSettings{
AuthURL: availableAttribute(gitlab.AuthURL),
TokenURL: availableAttribute(gitlab.TokenURL),
ProfileURL: availableAttribute(gitlab.ProfileURL),
}, func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
scopes = append(scopes, "read_user")
return gitlab.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, scopes...), nil
}))
RegisterGothProvider(NewCustomProvider(
"gitea", "Gitea", &CustomURLSettings{
TokenURL: requiredAttribute(gitea.TokenURL),
AuthURL: requiredAttribute(gitea.AuthURL),
ProfileURL: requiredAttribute(gitea.ProfileURL),
},
func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
return gitea.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, scopes...), nil
}))
RegisterGothProvider(NewCustomProvider(
"nextcloud", "Nextcloud", &CustomURLSettings{
TokenURL: requiredAttribute(nextcloud.TokenURL),
AuthURL: requiredAttribute(nextcloud.AuthURL),
ProfileURL: requiredAttribute(nextcloud.ProfileURL),
},
func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
return nextcloud.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, custom.TokenURL, custom.ProfileURL, scopes...), nil
}))
RegisterGothProvider(NewCustomProvider(
"mastodon", "Mastodon", &CustomURLSettings{
AuthURL: requiredAttribute(mastodon.InstanceURL),
},
func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
return mastodon.NewCustomisedURL(clientID, secret, callbackURL, custom.AuthURL, scopes...), nil
}))
RegisterGothProvider(NewCustomProvider(
"azureadv2", "Azure AD v2", &CustomURLSettings{
Tenant: requiredAttribute("organizations"),
},
func(clientID, secret, callbackURL string, custom *CustomURLMapping, scopes []string) (goth.Provider, error) {
azureScopes := make([]azureadv2.ScopeType, len(scopes))
for i, scope := range scopes {
azureScopes[i] = azureadv2.ScopeType(scope)
}
return azureadv2.New(clientID, secret, callbackURL, azureadv2.ProviderOptions{
Tenant: azureadv2.TenantType(custom.Tenant),
Scopes: azureScopes,
}), nil
},
))
}

View File

@ -0,0 +1,58 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"html/template"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/svg"
"github.com/markbates/goth"
"github.com/markbates/goth/providers/openidConnect"
)
// OpenIDProvider is a GothProvider for OpenID
type OpenIDProvider struct{}
// Name provides the technical name for this provider
func (o *OpenIDProvider) Name() string {
return "openidConnect"
}
// DisplayName returns the friendly name for this provider
func (o *OpenIDProvider) DisplayName() string {
return "OpenID Connect"
}
// IconHTML returns icon HTML for this provider
func (o *OpenIDProvider) IconHTML(size int) template.HTML {
return svg.RenderHTML("gitea-openid", size, "tw-mr-2")
}
// CreateGothProvider creates a GothProvider from this Provider
func (o *OpenIDProvider) CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error) {
scopes := setting.OAuth2Client.OpenIDConnectScopes
if len(scopes) == 0 {
scopes = append(scopes, source.Scopes...)
}
provider, err := openidConnect.New(source.ClientID, source.ClientSecret, callbackURL, source.OpenIDConnectAutoDiscoveryURL, scopes...)
if err != nil {
log.Warn("Failed to create OpenID Connect Provider with name '%s' with url '%s': %v", providerName, source.OpenIDConnectAutoDiscoveryURL, err)
}
return provider, err
}
// CustomURLSettings returns the custom url settings for this provider
func (o *OpenIDProvider) CustomURLSettings() *CustomURLSettings {
return nil
}
var _ GothProvider = &OpenIDProvider{}
func init() {
RegisterGothProvider(&OpenIDProvider{})
}

View File

@ -0,0 +1,109 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"code.gitea.io/gitea/modules/setting"
"github.com/markbates/goth"
"github.com/markbates/goth/providers/azuread"
"github.com/markbates/goth/providers/bitbucket"
"github.com/markbates/goth/providers/discord"
"github.com/markbates/goth/providers/dropbox"
"github.com/markbates/goth/providers/facebook"
"github.com/markbates/goth/providers/google"
"github.com/markbates/goth/providers/microsoftonline"
"github.com/markbates/goth/providers/twitter"
"github.com/markbates/goth/providers/yandex"
)
// SimpleProviderNewFn creates goth.Providers without custom URL features
type SimpleProviderNewFn func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider
// SimpleProvider is a GothProvider which does not have custom url features
type SimpleProvider struct {
BaseProvider
scopes []string
newFn SimpleProviderNewFn
}
// CreateGothProvider creates a GothProvider from this Provider
func (c *SimpleProvider) CreateGothProvider(providerName, callbackURL string, source *Source) (goth.Provider, error) {
scopes := make([]string, len(c.scopes)+len(source.Scopes))
copy(scopes, c.scopes)
copy(scopes[len(c.scopes):], source.Scopes)
return c.newFn(source.ClientID, source.ClientSecret, callbackURL, scopes...), nil
}
// NewSimpleProvider is a constructor function for simple providers
func NewSimpleProvider(name, displayName string, scopes []string, newFn SimpleProviderNewFn) *SimpleProvider {
return &SimpleProvider{
BaseProvider: BaseProvider{
name: name,
displayName: displayName,
},
scopes: scopes,
newFn: newFn,
}
}
var _ GothProvider = &SimpleProvider{}
func init() {
RegisterGothProvider(
NewSimpleProvider("bitbucket", "Bitbucket", []string{"account"},
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
return bitbucket.New(clientKey, secret, callbackURL, scopes...)
}))
RegisterGothProvider(
NewSimpleProvider("dropbox", "Dropbox", nil,
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
return dropbox.New(clientKey, secret, callbackURL, scopes...)
}))
RegisterGothProvider(NewSimpleProvider("facebook", "Facebook", nil,
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
return facebook.New(clientKey, secret, callbackURL, scopes...)
}))
// named gplus due to legacy gplus -> google migration (Google killed Google+). This ensures old connections still work
RegisterGothProvider(NewSimpleProvider("gplus", "Google", []string{"email"},
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
if setting.OAuth2Client.UpdateAvatar || setting.OAuth2Client.EnableAutoRegistration {
scopes = append(scopes, "profile")
}
return google.New(clientKey, secret, callbackURL, scopes...)
}))
RegisterGothProvider(NewSimpleProvider("twitter", "Twitter", nil,
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
return twitter.New(clientKey, secret, callbackURL)
}))
RegisterGothProvider(NewSimpleProvider("discord", "Discord", []string{discord.ScopeIdentify, discord.ScopeEmail},
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
return discord.New(clientKey, secret, callbackURL, scopes...)
}))
// See https://tech.yandex.com/passport/doc/dg/reference/response-docpage/
RegisterGothProvider(NewSimpleProvider("yandex", "Yandex", []string{"login:email", "login:info", "login:avatar"},
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
return yandex.New(clientKey, secret, callbackURL, scopes...)
}))
RegisterGothProvider(NewSimpleProvider(
"azuread", "Azure AD", nil,
func(clientID, secret, callbackURL string, scopes ...string) goth.Provider {
return azuread.New(clientID, secret, callbackURL, nil, scopes...)
},
))
RegisterGothProvider(NewSimpleProvider(
"microsoftonline", "Microsoft Online", nil,
func(clientID, secret, callbackURL string, scopes ...string) goth.Provider {
return microsoftonline.New(clientID, secret, callbackURL, scopes...)
},
))
}

View File

@ -0,0 +1,62 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"time"
"github.com/markbates/goth"
"golang.org/x/oauth2"
)
type fakeProvider struct{}
func (p *fakeProvider) Name() string {
return "fake"
}
func (p *fakeProvider) SetName(name string) {}
func (p *fakeProvider) BeginAuth(state string) (goth.Session, error) {
return nil, nil
}
func (p *fakeProvider) UnmarshalSession(string) (goth.Session, error) {
return nil, nil
}
func (p *fakeProvider) FetchUser(goth.Session) (goth.User, error) {
return goth.User{}, nil
}
func (p *fakeProvider) Debug(bool) {
}
func (p *fakeProvider) RefreshToken(refreshToken string) (*oauth2.Token, error) {
switch refreshToken {
case "expired":
return nil, &oauth2.RetrieveError{
ErrorCode: "invalid_grant",
}
default:
return &oauth2.Token{
AccessToken: "token",
TokenType: "Bearer",
RefreshToken: "refresh",
Expiry: time.Now().Add(time.Hour),
}, nil
}
}
func (p *fakeProvider) RefreshTokenAvailable() bool {
return true
}
func init() {
RegisterGothProvider(
NewSimpleProvider("fake", "Fake", []string{"account"},
func(clientKey, secret, callbackURL string, scopes ...string) goth.Provider {
return &fakeProvider{}
}))
}

View File

@ -0,0 +1,44 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/json"
)
// Source holds configuration for the OAuth2 login source.
type Source struct {
auth.ConfigBase `json:"-"`
Provider string
ClientID string
ClientSecret string
OpenIDConnectAutoDiscoveryURL string
CustomURLMapping *CustomURLMapping
IconURL string
Scopes []string
RequiredClaimName string
RequiredClaimValue string
GroupClaimName string
AdminGroup string
GroupTeamMap string
GroupTeamMapRemoval bool
RestrictedGroup string
}
// FromDB fills up an OAuth2Config from serialized format.
func (source *Source) FromDB(bs []byte) error {
return json.UnmarshalHandleDoubleEncode(bs, &source)
}
// ToDB exports an OAuth2Config to a serialized format.
func (source *Source) ToDB() ([]byte, error) {
return json.Marshal(source)
}
func init() {
auth.RegisterTypeConfig(auth.OAuth2, &Source{})
}

View File

@ -0,0 +1,19 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"context"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/services/auth/source/db"
)
// Authenticate falls back to the db authenticator
func (source *Source) Authenticate(ctx context.Context, user *user_model.User, login, password string) (*user_model.User, error) {
return db.Authenticate(ctx, user, login, password)
}
// NB: OAuth2 does not implement LocalTwoFASkipper for password authentication,
// as its password authentication falls back to db authentication

View File

@ -0,0 +1,47 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"net/http"
"github.com/markbates/goth"
"github.com/markbates/goth/gothic"
)
// Callout redirects request/response pair to authenticate against the provider
func (source *Source) Callout(request *http.Request, response http.ResponseWriter) error {
// not sure if goth is thread safe (?) when using multiple providers
request.Header.Set(ProviderHeaderKey, source.AuthSource.Name)
// don't use the default gothic begin handler to prevent issues when some error occurs
// normally the gothic library will write some custom stuff to the response instead of our own nice error page
// gothic.BeginAuthHandler(response, request)
gothRWMutex.RLock()
defer gothRWMutex.RUnlock()
url, err := gothic.GetAuthURL(response, request)
if err == nil {
http.Redirect(response, request, url, http.StatusTemporaryRedirect)
}
return err
}
// Callback handles OAuth callback, resolve to a goth user and send back to original url
// this will trigger a new authentication request, but because we save it in the session we can use that
func (source *Source) Callback(request *http.Request, response http.ResponseWriter) (goth.User, error) {
// not sure if goth is thread safe (?) when using multiple providers
request.Header.Set(ProviderHeaderKey, source.AuthSource.Name)
gothRWMutex.RLock()
defer gothRWMutex.RUnlock()
user, err := gothic.CompleteUserAuth(response, request)
if err != nil {
return user, err
}
return user, nil
}

View File

@ -0,0 +1,18 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
// Name returns the provider name of this source
func (source *Source) Name() string {
return source.Provider
}
// DisplayName returns the display name of this source
func (source *Source) DisplayName() string {
provider, has := gothProviders[source.Provider]
if !has {
return source.Provider
}
return provider.DisplayName()
}

View File

@ -0,0 +1,50 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"fmt"
)
// RegisterSource causes an OAuth2 configuration to be registered
func (source *Source) RegisterSource() error {
err := RegisterProviderWithGothic(source.AuthSource.Name, source)
return wrapOpenIDConnectInitializeError(err, source.AuthSource.Name, source)
}
// UnregisterSource causes an OAuth2 configuration to be unregistered
func (source *Source) UnregisterSource() error {
RemoveProviderFromGothic(source.AuthSource.Name)
return nil
}
// ErrOpenIDConnectInitialize represents a "OpenIDConnectInitialize" kind of error.
type ErrOpenIDConnectInitialize struct {
OpenIDConnectAutoDiscoveryURL string
ProviderName string
Cause error
}
// IsErrOpenIDConnectInitialize checks if an error is an ErrOpenIDConnectInitialize.
func IsErrOpenIDConnectInitialize(err error) bool {
_, ok := err.(ErrOpenIDConnectInitialize)
return ok
}
func (err ErrOpenIDConnectInitialize) Error() string {
return fmt.Sprintf("Failed to initialize OpenID Connect Provider with name '%s' with url '%s': %v", err.ProviderName, err.OpenIDConnectAutoDiscoveryURL, err.Cause)
}
func (err ErrOpenIDConnectInitialize) Unwrap() error {
return err.Cause
}
// wrapOpenIDConnectInitializeError is used to wrap the error, but this cannot be done inside modules/auth/oauth2
// because of an import cycle: models -> modules/auth/oauth2 -> models
func wrapOpenIDConnectInitializeError(err error, providerName string, source *Source) error {
if err != nil && source.Provider == "openidConnect" {
err = ErrOpenIDConnectInitialize{ProviderName: providerName, OpenIDConnectAutoDiscoveryURL: source.OpenIDConnectAutoDiscoveryURL, Cause: err}
}
return err
}
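For illustration (not part of this commit): because ErrOpenIDConnectInitialize implements Unwrap, the original cause stays reachable through errors.Is / errors.As in addition to the IsErrOpenIDConnectInitialize helper. A minimal package-internal sketch, assuming the standard errors and fmt packages are imported; the provider name and URL are made up.
func exampleWrap() {
	cause := errors.New("discovery document unreachable")
	err := wrapOpenIDConnectInitializeError(cause, "my-oidc", &Source{
		Provider:                      "openidConnect",
		OpenIDConnectAutoDiscoveryURL: "https://idp.example.com/.well-known/openid-configuration",
	})

	fmt.Println(IsErrOpenIDConnectInitialize(err)) // true
	fmt.Println(errors.Is(err, cause))             // true, reached via Unwrap
}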

View File

@ -0,0 +1,114 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"context"
"time"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"github.com/markbates/goth"
"golang.org/x/oauth2"
)
// Sync causes this OAuth2 source to synchronize its users with the db.
func (source *Source) Sync(ctx context.Context, updateExisting bool) error {
log.Trace("Doing: SyncExternalUsers[%s] %d", source.AuthSource.Name, source.AuthSource.ID)
if !updateExisting {
log.Info("SyncExternalUsers[%s] not running since updateExisting is false", source.AuthSource.Name)
return nil
}
provider, err := createProvider(source.AuthSource.Name, source)
if err != nil {
return err
}
if !provider.RefreshTokenAvailable() {
log.Trace("SyncExternalUsers[%s] provider doesn't support refresh tokens, can't synchronize", source.AuthSource.Name)
return nil
}
opts := user_model.FindExternalUserOptions{
HasRefreshToken: true,
Expired: true,
LoginSourceID: source.AuthSource.ID,
}
return user_model.IterateExternalLogin(ctx, opts, func(ctx context.Context, u *user_model.ExternalLoginUser) error {
return source.refresh(ctx, provider, u)
})
}
func (source *Source) refresh(ctx context.Context, provider goth.Provider, u *user_model.ExternalLoginUser) error {
log.Trace("Syncing login_source_id=%d external_id=%s expiration=%s", u.LoginSourceID, u.ExternalID, u.ExpiresAt)
shouldDisable := false
token, err := provider.RefreshToken(u.RefreshToken)
if err != nil {
if err, ok := err.(*oauth2.RetrieveError); ok && err.ErrorCode == "invalid_grant" {
// this signals that the token is not valid and the user should be disabled
shouldDisable = true
} else {
return err
}
}
user := &user_model.User{
LoginName: u.ExternalID,
LoginType: auth.OAuth2,
LoginSource: u.LoginSourceID,
}
hasUser, err := user_model.GetUser(ctx, user)
if err != nil {
return err
}
// If the grant is no longer valid, disable the user and
// delete local tokens. If the OAuth2 provider still
// recognizes them as a valid user, they will be able to login
// via their provider and reactivate their account.
if shouldDisable {
log.Info("SyncExternalUsers[%s] disabling user %d", source.AuthSource.Name, user.ID)
return db.WithTx(ctx, func(ctx context.Context) error {
if hasUser {
user.IsActive = false
err := user_model.UpdateUserCols(ctx, user, "is_active")
if err != nil {
return err
}
}
// Delete stored tokens, since they are invalid. This
// also prevents us from re-checking them in subsequent runs.
u.AccessToken = ""
u.RefreshToken = ""
u.ExpiresAt = time.Time{}
return user_model.UpdateExternalUserByExternalID(ctx, u)
})
}
// Otherwise, update the tokens
u.AccessToken = token.AccessToken
u.ExpiresAt = token.Expiry
// Some providers only update the access token and don't provide a new
// refresh token, so avoid overwriting it if the new one is empty
if token.RefreshToken != "" {
u.RefreshToken = token.RefreshToken
}
err = user_model.UpdateExternalUserByExternalID(ctx, u)
return err
}

View File

@ -0,0 +1,101 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"testing"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"github.com/stretchr/testify/assert"
)
func TestSource(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
source := &Source{
Provider: "fake",
ConfigBase: auth.ConfigBase{
AuthSource: &auth.Source{
ID: 12,
Type: auth.OAuth2,
Name: "fake",
IsActive: true,
IsSyncEnabled: true,
},
},
}
user := &user_model.User{
LoginName: "external",
LoginType: auth.OAuth2,
LoginSource: source.AuthSource.ID,
Name: "test",
Email: "external@example.com",
}
err := user_model.CreateUser(t.Context(), user, &user_model.Meta{}, &user_model.CreateUserOverwriteOptions{})
assert.NoError(t, err)
e := &user_model.ExternalLoginUser{
ExternalID: "external",
UserID: user.ID,
LoginSourceID: user.LoginSource,
RefreshToken: "valid",
}
err = user_model.LinkExternalToUser(t.Context(), user, e)
assert.NoError(t, err)
provider, err := createProvider(source.AuthSource.Name, source)
assert.NoError(t, err)
t.Run("refresh", func(t *testing.T) {
t.Run("valid", func(t *testing.T) {
err := source.refresh(t.Context(), provider, e)
assert.NoError(t, err)
e := &user_model.ExternalLoginUser{
ExternalID: e.ExternalID,
LoginSourceID: e.LoginSourceID,
}
ok, err := user_model.GetExternalLogin(t.Context(), e)
assert.NoError(t, err)
assert.True(t, ok)
assert.Equal(t, "refresh", e.RefreshToken)
assert.Equal(t, "token", e.AccessToken)
u, err := user_model.GetUserByID(t.Context(), user.ID)
assert.NoError(t, err)
assert.True(t, u.IsActive)
})
t.Run("expired", func(t *testing.T) {
err := source.refresh(t.Context(), provider, &user_model.ExternalLoginUser{
ExternalID: "external",
UserID: user.ID,
LoginSourceID: user.LoginSource,
RefreshToken: "expired",
})
assert.NoError(t, err)
e := &user_model.ExternalLoginUser{
ExternalID: e.ExternalID,
LoginSourceID: e.LoginSourceID,
}
ok, err := user_model.GetExternalLogin(t.Context(), e)
assert.NoError(t, err)
assert.True(t, ok)
assert.Empty(t, e.RefreshToken)
assert.Empty(t, e.AccessToken)
u, err := user_model.GetUserByID(t.Context(), user.ID)
assert.NoError(t, err)
assert.False(t, u.IsActive)
})
})
}

View File

@ -0,0 +1,98 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
import (
"encoding/gob"
"fmt"
"net/http"
"code.gitea.io/gitea/modules/log"
session_module "code.gitea.io/gitea/modules/session"
chiSession "gitea.com/go-chi/session"
"github.com/gorilla/sessions"
)
// SessionsStore creates a gothic store from our session
type SessionsStore struct {
maxLength int64
}
// Get should return a cached session.
func (st *SessionsStore) Get(r *http.Request, name string) (*sessions.Session, error) {
return st.getOrNew(r, name, false)
}
// New should create and return a new session.
//
// Note that New should never return a nil session, even in the case of
// an error if using the Registry infrastructure to cache the session.
func (st *SessionsStore) New(r *http.Request, name string) (*sessions.Session, error) {
return st.getOrNew(r, name, true)
}
// getOrNew gets the session from the chi-session if it exists. Override permits the overriding of an unexpected object.
func (st *SessionsStore) getOrNew(r *http.Request, name string, override bool) (*sessions.Session, error) {
chiStore := chiSession.GetSession(r)
session := sessions.NewSession(st, name)
rawData := chiStore.Get(name)
if rawData != nil {
oldSession, ok := rawData.(*sessions.Session)
if ok {
session.ID = oldSession.ID
session.IsNew = oldSession.IsNew
session.Options = oldSession.Options
session.Values = oldSession.Values
return session, nil
} else if !override {
log.Error("Unexpected object in session at name: %s: %v", name, rawData)
return nil, fmt.Errorf("unexpected object in session at name: %s", name)
}
}
session.IsNew = override
session.ID = chiStore.ID() // Simply copy the session id from the chi store
return session, chiStore.Set(name, session)
}
// Save should persist session to the underlying store implementation.
func (st *SessionsStore) Save(r *http.Request, w http.ResponseWriter, session *sessions.Session) error {
chiStore := chiSession.GetSession(r)
if session.IsNew {
_, _ = session_module.RegenerateSession(w, r)
session.IsNew = false
}
if err := chiStore.Set(session.Name(), session); err != nil {
return err
}
if st.maxLength > 0 {
sizeWriter := &sizeWriter{}
_ = gob.NewEncoder(sizeWriter).Encode(session)
if sizeWriter.size > st.maxLength {
return fmt.Errorf("encode session: Data too long: %d > %d", sizeWriter.size, st.maxLength)
}
}
return chiStore.Release()
}
type sizeWriter struct {
size int64
}
func (s *sizeWriter) Write(data []byte) (int, error) {
s.size += int64(len(data))
return len(data), nil
}
var _ (sessions.Store) = &SessionsStore{}
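An aside (not part of this commit): sizeWriter only counts bytes, so Save can enforce the session size limit without buffering the whole gob-encoded payload. A rough package-internal sketch of that measurement, with a made-up payload type.
type examplePayload struct {
	UserID int64
	Name   string
}

func exampleMeasureSessionSize() {
	sw := &sizeWriter{}
	_ = gob.NewEncoder(sw).Encode(examplePayload{UserID: 1, Name: "user"})
	fmt.Println("encoded size in bytes:", sw.size)
}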

View File

@ -0,0 +1,77 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package oauth2
// CustomURLMapping describes the urls values to use when customizing OAuth2 provider URLs
type CustomURLMapping struct {
AuthURL string `json:",omitempty"`
TokenURL string `json:",omitempty"`
ProfileURL string `json:",omitempty"`
EmailURL string `json:",omitempty"`
Tenant string `json:",omitempty"`
}
// CustomURLSettings describes the urls values and availability to use when customizing OAuth2 provider URLs
type CustomURLSettings struct {
AuthURL Attribute
TokenURL Attribute
ProfileURL Attribute
EmailURL Attribute
Tenant Attribute
}
// Attribute describes the availability, and required status for a custom url configuration
type Attribute struct {
Value string
Available bool
Required bool
}
func availableAttribute(value string) Attribute {
return Attribute{Value: value, Available: true}
}
func requiredAttribute(value string) Attribute {
return Attribute{Value: value, Available: true, Required: true}
}
// Required is true if any attribute is required
func (c *CustomURLSettings) Required() bool {
if c == nil {
return false
}
if c.AuthURL.Required || c.EmailURL.Required || c.ProfileURL.Required || c.TokenURL.Required || c.Tenant.Required {
return true
}
return false
}
// OverrideWith copies the current customURLMapping and overrides it with values from the provided mapping
func (c *CustomURLSettings) OverrideWith(override *CustomURLMapping) *CustomURLMapping {
custom := &CustomURLMapping{
AuthURL: c.AuthURL.Value,
TokenURL: c.TokenURL.Value,
ProfileURL: c.ProfileURL.Value,
EmailURL: c.EmailURL.Value,
Tenant: c.Tenant.Value,
}
if override != nil {
if len(override.AuthURL) > 0 && c.AuthURL.Available {
custom.AuthURL = override.AuthURL
}
if len(override.TokenURL) > 0 && c.TokenURL.Available {
custom.TokenURL = override.TokenURL
}
if len(override.ProfileURL) > 0 && c.ProfileURL.Available {
custom.ProfileURL = override.ProfileURL
}
if len(override.EmailURL) > 0 && c.EmailURL.Available {
custom.EmailURL = override.EmailURL
}
if len(override.Tenant) > 0 && c.Tenant.Available {
custom.Tenant = override.Tenant
}
}
return custom
}
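For illustration (not part of this commit): OverrideWith starts from the provider's default URLs and only substitutes fields that are both set in the override and marked Available in the settings. A small package-internal sketch with made-up URLs.
func exampleOverride() *CustomURLMapping {
	settings := &CustomURLSettings{
		AuthURL:  requiredAttribute("https://idp.example.com/auth"),
		TokenURL: availableAttribute("https://idp.example.com/token"),
		// ProfileURL, EmailURL and Tenant stay zero-valued: not Available,
		// so they cannot be overridden and default to "".
	}
	return settings.OverrideWith(&CustomURLMapping{
		AuthURL:    "https://sso.example.com/auth", // replaces the default
		ProfileURL: "https://sso.example.com/me",   // ignored: ProfileURL is not Available
	})
	// Result: AuthURL is overridden, TokenURL keeps its default, ProfileURL stays "".
}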

View File

@ -0,0 +1,20 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pam_test
import (
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth"
"code.gitea.io/gitea/services/auth/source/pam"
)
// This test file exists to assert that our Source exposes the interfaces that we expect
// It tightly binds the interfaces and implementation without breaking go import cycles
type sourceInterface interface {
auth.PasswordAuthenticator
auth_model.Config
}
var _ (sourceInterface) = &pam.Source{}

38
auth/source/pam/source.go Normal file
View File

@ -0,0 +1,38 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pam
import (
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/json"
)
// __________ _____ _____
// \______ \/ _ \ / \
// | ___/ /_\ \ / \ / \
// | | / | \/ Y \
// |____| \____|__ /\____|__ /
// \/ \/
// Source holds configuration for the PAM login source.
type Source struct {
auth.ConfigBase `json:"-"`
ServiceName string // pam service (e.g. system-auth)
EmailDomain string
}
// FromDB fills up a PAMConfig from serialized format.
func (source *Source) FromDB(bs []byte) error {
return json.UnmarshalHandleDoubleEncode(bs, &source)
}
// ToDB exports a PAMConfig to a serialized format.
func (source *Source) ToDB() ([]byte, error) {
return json.Marshal(source)
}
func init() {
auth.RegisterTypeConfig(auth.PAM, &Source{})
}

View File

@ -0,0 +1,71 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package pam
import (
"context"
"fmt"
"strings"
"code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/auth/pam"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"github.com/google/uuid"
)
// Authenticate queries if login/password is valid against PAM,
// and creates a local user on success when enabled.
func (source *Source) Authenticate(ctx context.Context, user *user_model.User, userName, password string) (*user_model.User, error) {
pamLogin, err := pam.Auth(source.ServiceName, userName, password)
if err != nil {
if strings.Contains(err.Error(), "Authentication failure") {
return nil, user_model.ErrUserNotExist{Name: userName}
}
return nil, err
}
if user != nil {
return user, nil
}
// Allow PAM sources with `@` in their name, like from Active Directory
username := pamLogin
email := pamLogin
idx := strings.Index(pamLogin, "@")
if idx > -1 {
username = pamLogin[:idx]
}
if user_model.ValidateEmail(email) != nil {
if source.EmailDomain != "" {
email = fmt.Sprintf("%s@%s", username, source.EmailDomain)
} else {
email = fmt.Sprintf("%s@%s", username, setting.Service.NoReplyAddress)
}
if user_model.ValidateEmail(email) != nil {
email = uuid.New().String() + "@localhost"
}
}
user = &user_model.User{
LowerName: strings.ToLower(username),
Name: username,
Email: email,
Passwd: password,
LoginType: auth.PAM,
LoginSource: source.AuthSource.ID,
LoginName: userName, // This is what the user typed in
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsActive: optional.Some(true),
}
if err := user_model.CreateUser(ctx, user, &user_model.Meta{}, overwriteDefault); err != nil {
return user, err
}
return user, nil
}

View File

@ -0,0 +1,23 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package smtp_test
import (
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth"
"code.gitea.io/gitea/services/auth/source/smtp"
)
// This test file exists to assert that our Source exposes the interfaces that we expect
// It tightly binds the interfaces and implementation without breaking go import cycles
type sourceInterface interface {
auth.PasswordAuthenticator
auth_model.Config
auth_model.SkipVerifiable
auth_model.HasTLSer
auth_model.UseTLSer
}
var _ (sourceInterface) = &smtp.Source{}

106
auth/source/smtp/auth.go Normal file
View File

@ -0,0 +1,106 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package smtp
import (
"crypto/tls"
"errors"
"fmt"
"net"
"net/smtp"
"os"
"strconv"
)
// _________ __________________________
// / _____/ / \__ ___/\______ \
// \_____ \ / \ / \| | | ___/
// / \/ Y \ | | |
// /_______ /\____|__ /____| |____|
// \/ \/
type loginAuthenticator struct {
username, password string
}
func (auth *loginAuthenticator) Start(server *smtp.ServerInfo) (string, []byte, error) {
return "LOGIN", []byte(auth.username), nil
}
func (auth *loginAuthenticator) Next(fromServer []byte, more bool) ([]byte, error) {
if more {
switch string(fromServer) {
case "Username:":
return []byte(auth.username), nil
case "Password:":
return []byte(auth.password), nil
}
}
return nil, nil
}
// SMTP authentication type names.
const (
PlainAuthentication = "PLAIN"
LoginAuthentication = "LOGIN"
CRAMMD5Authentication = "CRAM-MD5"
)
// Authenticators contains available SMTP authentication type names.
var Authenticators = []string{PlainAuthentication, LoginAuthentication, CRAMMD5Authentication}
// ErrUnsupportedLoginType is returned when the login source is unknown
var ErrUnsupportedLoginType = errors.New("Login source is unknown")
// Authenticate performs an SMTP authentication.
func Authenticate(a smtp.Auth, source *Source) error {
tlsConfig := &tls.Config{
InsecureSkipVerify: source.SkipVerify,
ServerName: source.Host,
}
conn, err := net.Dial("tcp", net.JoinHostPort(source.Host, strconv.Itoa(source.Port)))
if err != nil {
return err
}
defer conn.Close()
if source.UseTLS() {
conn = tls.Client(conn, tlsConfig)
}
client, err := smtp.NewClient(conn, source.Host)
if err != nil {
return fmt.Errorf("failed to create NewClient: %w", err)
}
defer client.Close()
if !source.DisableHelo {
hostname := source.HeloHostname
if len(hostname) == 0 {
hostname, err = os.Hostname()
if err != nil {
return fmt.Errorf("failed to find Hostname: %w", err)
}
}
if err = client.Hello(hostname); err != nil {
return fmt.Errorf("failed to send Helo: %w", err)
}
}
// If not using SMTPS, always use STARTTLS if available
hasStartTLS, _ := client.Extension("STARTTLS")
if !source.UseTLS() && hasStartTLS {
if err = client.StartTLS(tlsConfig); err != nil {
return fmt.Errorf("failed to start StartTLS: %w", err)
}
}
if ok, _ := client.Extension("AUTH"); ok {
return client.Auth(a)
}
return ErrUnsupportedLoginType
}

View File

@ -0,0 +1,59 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package smtp
import (
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/json"
)
// _________ __________________________
// / _____/ / \__ ___/\______ \
// \_____ \ / \ / \| | | ___/
// / \/ Y \ | | |
// /_______ /\____|__ /____| |____|
// \/ \/
// Source holds configuration for the SMTP login source.
type Source struct {
auth.ConfigBase `json:"-"`
Auth string
Host string
Port int
AllowedDomains string `xorm:"TEXT"`
ForceSMTPS bool
SkipVerify bool
HeloHostname string
DisableHelo bool
}
// FromDB fills up an SMTPConfig from serialized format.
func (source *Source) FromDB(bs []byte) error {
return json.UnmarshalHandleDoubleEncode(bs, &source)
}
// ToDB exports an SMTPConfig to a serialized format.
func (source *Source) ToDB() ([]byte, error) {
return json.Marshal(source)
}
// IsSkipVerify returns if SkipVerify is set
func (source *Source) IsSkipVerify() bool {
return source.SkipVerify
}
// HasTLS returns true for SMTP
func (source *Source) HasTLS() bool {
return true
}
// UseTLS returns if TLS is set
func (source *Source) UseTLS() bool {
return source.ForceSMTPS || source.Port == 465
}
func init() {
auth.RegisterTypeConfig(auth.SMTP, &Source{})
}
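An aside (not part of this commit): UseTLS decides whether the Authenticate helper earlier in this commit opens an implicit TLS (SMTPS) connection or a plain connection that may later be upgraded via STARTTLS. A tiny package-internal sketch, assuming fmt is imported.
func exampleUseTLS() {
	implicit := &Source{Port: 465}                 // well-known SMTPS port implies TLS from the start
	forced := &Source{Port: 587, ForceSMTPS: true} // explicitly forced SMTPS
	starttls := &Source{Port: 587}                 // plain connection, upgraded via STARTTLS when offered

	fmt.Println(implicit.UseTLS(), forced.UseTLS(), starttls.UseTLS()) // true true false
}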

View File

@ -0,0 +1,87 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package smtp
import (
"context"
"errors"
"net/smtp"
"net/textproto"
"strings"
auth_model "code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/util"
)
// Authenticate queries if the provided login/password authenticates against the SMTP server.
// Users will be auto-registered as required
func (source *Source) Authenticate(ctx context.Context, user *user_model.User, userName, password string) (*user_model.User, error) {
// Verify allowed domains.
if len(source.AllowedDomains) > 0 {
idx := strings.Index(userName, "@")
if idx == -1 {
return nil, user_model.ErrUserNotExist{Name: userName}
} else if !util.SliceContainsString(strings.Split(source.AllowedDomains, ","), userName[idx+1:], true) {
return nil, user_model.ErrUserNotExist{Name: userName}
}
}
var auth smtp.Auth
switch source.Auth {
case PlainAuthentication:
auth = smtp.PlainAuth("", userName, password, source.Host)
case LoginAuthentication:
auth = &loginAuthenticator{userName, password}
case CRAMMD5Authentication:
auth = smtp.CRAMMD5Auth(userName, password)
default:
return nil, errors.New("unsupported SMTP auth type")
}
if err := Authenticate(auth, source); err != nil {
// Check the standard error format first,
// then fall back to the worst case.
tperr, ok := err.(*textproto.Error)
if (ok && tperr.Code == 535) ||
strings.Contains(err.Error(), "Username and Password not accepted") {
return nil, user_model.ErrUserNotExist{Name: userName}
}
if (ok && tperr.Code == 534) ||
strings.Contains(err.Error(), "Application-specific password required") {
return nil, user_model.ErrUserNotExist{Name: userName}
}
return nil, err
}
if user != nil {
return user, nil
}
username := userName
idx := strings.Index(userName, "@")
if idx > -1 {
username = userName[:idx]
}
user = &user_model.User{
LowerName: strings.ToLower(username),
Name: strings.ToLower(username),
Email: userName,
Passwd: password,
LoginType: auth_model.SMTP,
LoginSource: source.AuthSource.ID,
LoginName: userName,
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsActive: optional.Some(true),
}
if err := user_model.CreateUser(ctx, user, &user_model.Meta{}, overwriteDefault); err != nil {
return user, err
}
return user, nil
}

View File

@ -0,0 +1,116 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package source
import (
"context"
"fmt"
"code.gitea.io/gitea/models/organization"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
org_service "code.gitea.io/gitea/services/org"
)
type syncType int
const (
syncAdd syncType = iota
syncRemove
)
// SyncGroupsToTeams maps authentication source groups to organization and team memberships
func SyncGroupsToTeams(ctx context.Context, user *user_model.User, sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string, performRemoval bool) error {
orgCache := make(map[string]*organization.Organization)
teamCache := make(map[string]*organization.Team)
return SyncGroupsToTeamsCached(ctx, user, sourceUserGroups, sourceGroupTeamMapping, performRemoval, orgCache, teamCache)
}
// SyncGroupsToTeamsCached maps authentication source groups to organization and team memberships
func SyncGroupsToTeamsCached(ctx context.Context, user *user_model.User, sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string, performRemoval bool, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) error {
membershipsToAdd, membershipsToRemove := resolveMappedMemberships(sourceUserGroups, sourceGroupTeamMapping)
if performRemoval {
if err := syncGroupsToTeamsCached(ctx, user, membershipsToRemove, syncRemove, orgCache, teamCache); err != nil {
return fmt.Errorf("could not sync[remove] user groups: %w", err)
}
}
if err := syncGroupsToTeamsCached(ctx, user, membershipsToAdd, syncAdd, orgCache, teamCache); err != nil {
return fmt.Errorf("could not sync[add] user groups: %w", err)
}
return nil
}
func resolveMappedMemberships(sourceUserGroups container.Set[string], sourceGroupTeamMapping map[string]map[string][]string) (map[string][]string, map[string][]string) {
membershipsToAdd := map[string][]string{}
membershipsToRemove := map[string][]string{}
for group, memberships := range sourceGroupTeamMapping {
isUserInGroup := sourceUserGroups.Contains(group)
if isUserInGroup {
for org, teams := range memberships {
membershipsToAdd[org] = append(membershipsToAdd[org], teams...)
}
} else {
for org, teams := range memberships {
membershipsToRemove[org] = append(membershipsToRemove[org], teams...)
}
}
}
return membershipsToAdd, membershipsToRemove
}
func syncGroupsToTeamsCached(ctx context.Context, user *user_model.User, orgTeamMap map[string][]string, action syncType, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) error {
for orgName, teamNames := range orgTeamMap {
var err error
org, ok := orgCache[orgName]
if !ok {
org, err = organization.GetOrgByName(ctx, orgName)
if err != nil {
if organization.IsErrOrgNotExist(err) {
// organization must be created before group sync
log.Warn("group sync: Could not find organisation %s: %v", orgName, err)
continue
}
return err
}
orgCache[orgName] = org
}
for _, teamName := range teamNames {
team, ok := teamCache[orgName+teamName]
if !ok {
team, err = org.GetTeam(ctx, teamName)
if err != nil {
if organization.IsErrTeamNotExist(err) {
// team must be created before group sync
log.Warn("group sync: Could not find team %s: %v", teamName, err)
continue
}
return err
}
teamCache[orgName+teamName] = team
}
isMember, err := organization.IsTeamMember(ctx, org.ID, team.ID, user.ID)
if err != nil {
return err
}
if action == syncAdd && !isMember {
if err := org_service.AddTeamMember(ctx, team, user); err != nil {
log.Error("group sync: Could not add user to team: %v", err)
return err
}
} else if action == syncRemove && isMember {
if err := org_service.RemoveTeamMember(ctx, team, user); err != nil {
log.Error("group sync: Could not remove user from team: %v", err)
return err
}
}
}
}
return nil
}
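For illustration (not part of this commit): the sourceGroupTeamMapping argument maps a source-side group name to organizations and the teams within them. A package-internal sketch with made-up group, organization and team names (the concrete configuration format of each auth source may differ):
var exampleGroupTeamMapping = map[string]map[string][]string{
	// source group   organization -> teams
	"engineering": {"my-org": {"developers", "reviewers"}},
	"ops":         {"my-org": {"sre"}},
}
A user whose sourceUserGroups contains "engineering" would be resolved into membershipsToAdd for my-org/developers and my-org/reviewers; with performRemoval enabled, users outside that group end up in membershipsToRemove for the same teams.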

View File

@ -0,0 +1,18 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package sspi_test
import (
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth/source/sspi"
)
// This test file exists to assert that our Source exposes the interfaces that we expect
// It tightly binds the interfaces and implementation without breaking go import cycles
type sourceInterface interface {
auth.Config
}
var _ (sourceInterface) = &sspi.Source{}

View File

@ -0,0 +1,41 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package sspi
import (
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/json"
)
// _________ ___________________.___
// / _____// _____/\______ \ |
// \_____ \ \_____ \ | ___/ |
// / \/ \ | | | |
// /_______ /_______ / |____| |___|
// \/ \/
// Source holds configuration for SSPI single sign-on.
type Source struct {
auth.ConfigBase `json:"-"`
AutoCreateUsers bool
AutoActivateUsers bool
StripDomainNames bool
SeparatorReplacement string
DefaultLanguage string
}
// FromDB fills up an SSPIConfig from serialized format.
func (cfg *Source) FromDB(bs []byte) error {
return json.UnmarshalHandleDoubleEncode(bs, &cfg)
}
// ToDB exports an SSPIConfig to a serialized format.
func (cfg *Source) ToDB() ([]byte, error) {
return json.Marshal(cfg)
}
func init() {
auth.RegisterTypeConfig(auth.SSPI, &Source{})
}

224
auth/sspi.go Normal file
View File

@ -0,0 +1,224 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"context"
"errors"
"net/http"
"strings"
"sync"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/services/auth/source/sspi"
gitea_context "code.gitea.io/gitea/services/context"
gouuid "github.com/google/uuid"
)
const (
tplSignIn templates.TplName = "user/auth/signin"
)
type SSPIAuth interface {
AppendAuthenticateHeader(w http.ResponseWriter, data string)
Authenticate(r *http.Request, w http.ResponseWriter) (userInfo *SSPIUserInfo, outToken string, err error)
}
var (
sspiAuth SSPIAuth // a global instance of the websspi authenticator to avoid acquiring the server credential handle on every request
sspiAuthOnce sync.Once
sspiAuthErrInit error
// Ensure the struct implements the interface.
_ Method = &SSPI{}
)
// SSPI implements the SingleSignOn interface and authenticates requests
// via the built-in SSPI module in Windows for SPNEGO authentication.
// The SSPI plugin is expected to be executed last, as it returns a 401 status code if negotiation
// fails (or if negotiation should continue), which would prevent other authentication methods
// from executing at all.
type SSPI struct{}
// Name represents the name of auth method
func (s *SSPI) Name() string {
return "sspi"
}
// Verify uses SSPI (Windows implementation of SPNEGO) to authenticate the request.
// If authentication is successful, returns the corresponding user object.
// If negotiation should continue or authentication fails, immediately returns a 401 HTTP
// response code, as required by the SPNEGO protocol.
func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
sspiAuthOnce.Do(func() { sspiAuthErrInit = sspiAuthInit() })
if sspiAuthErrInit != nil {
return nil, sspiAuthErrInit
}
if !s.shouldAuthenticate(req) {
return nil, nil
}
cfg, err := s.getConfig(req.Context())
if err != nil {
log.Error("could not get SSPI config: %v", err)
return nil, err
}
log.Trace("SSPI Authorization: Attempting to authenticate")
userInfo, outToken, err := sspiAuth.Authenticate(req, w)
if err != nil {
log.Warn("Authentication failed with error: %v\n", err)
sspiAuth.AppendAuthenticateHeader(w, outToken)
// Include the user login page in the 401 response to allow the user
// to login with another authentication method if SSPI authentication
// fails
store.GetData()["Flash"] = map[string]string{
"ErrorMsg": err.Error(),
}
store.GetData()["EnableOpenIDSignIn"] = setting.Service.EnableOpenIDSignIn
store.GetData()["EnableSSPI"] = true
// in this case, the Verify function is called in Gitea's web context
// FIXME: it doesn't look good to render the page here, why not redirect?
gitea_context.GetWebContext(req.Context()).HTML(http.StatusUnauthorized, tplSignIn)
return nil, err
}
if outToken != "" {
sspiAuth.AppendAuthenticateHeader(w, outToken)
}
username := sanitizeUsername(userInfo.Username, cfg)
if len(username) == 0 {
return nil, nil
}
log.Info("Authenticated as %s\n", username)
user, err := user_model.GetUserByName(req.Context(), username)
if err != nil {
if !user_model.IsErrUserNotExist(err) {
log.Error("GetUserByName: %v", err)
return nil, err
}
if !cfg.AutoCreateUsers {
log.Error("User '%s' not found", username)
return nil, nil
}
user, err = s.newUser(req.Context(), username, cfg)
if err != nil {
log.Error("CreateUser: %v", err)
return nil, err
}
}
// Make sure requests to API paths and PWA resources do not create a new session
detector := newAuthPathDetector(req)
if !detector.isAPIPath() && !detector.isAttachmentDownload() {
handleSignIn(w, req, sess, user)
}
log.Trace("SSPI Authorization: Logged in user %-v", user)
return user, nil
}
// getConfig retrieves the SSPI configuration from login sources
func (s *SSPI) getConfig(ctx context.Context) (*sspi.Source, error) {
sources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
IsActive: optional.Some(true),
LoginType: auth.SSPI,
})
if err != nil {
return nil, err
}
if len(sources) == 0 {
return nil, errors.New("no active login sources of type SSPI found")
}
if len(sources) > 1 {
return nil, errors.New("more than one active login source of type SSPI found")
}
return sources[0].Cfg.(*sspi.Source), nil
}
func (s *SSPI) shouldAuthenticate(req *http.Request) (shouldAuth bool) {
shouldAuth = false
path := strings.TrimSuffix(req.URL.Path, "/")
if path == "/user/login" {
if req.FormValue("user_name") != "" && req.FormValue("password") != "" {
shouldAuth = false
} else if req.FormValue("auth_with_sspi") == "1" {
shouldAuth = true
}
} else {
detector := newAuthPathDetector(req)
shouldAuth = detector.isAPIPath() || detector.isAttachmentDownload()
}
return shouldAuth
}
// newUser creates a new user object for the purpose of automatic registration
// and populates its name and email with the information present in request headers.
func (s *SSPI) newUser(ctx context.Context, username string, cfg *sspi.Source) (*user_model.User, error) {
email := gouuid.New().String() + "@localhost.localdomain"
user := &user_model.User{
Name: username,
Email: email,
Language: cfg.DefaultLanguage,
}
emailNotificationPreference := user_model.EmailNotificationsDisabled
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsActive: optional.Some(cfg.AutoActivateUsers),
KeepEmailPrivate: optional.Some(true),
EmailNotificationsPreference: &emailNotificationPreference,
}
if err := user_model.CreateUser(ctx, user, &user_model.Meta{}, overwriteDefault); err != nil {
return nil, err
}
return user, nil
}
// stripDomainNames removes NETBIOS domain name and separator from down-level logon names
// (eg. "DOMAIN\user" becomes "user"), and removes the UPN suffix (domain name) and separator
// from UPNs (eg. "user@domain.local" becomes "user")
func stripDomainNames(username string) string {
if strings.Contains(username, "\\") {
parts := strings.SplitN(username, "\\", 2)
if len(parts) > 1 {
username = parts[1]
}
} else if strings.Contains(username, "@") {
parts := strings.Split(username, "@")
if len(parts) > 1 {
username = parts[0]
}
}
return username
}
func replaceSeparators(username string, cfg *sspi.Source) string {
newSep := cfg.SeparatorReplacement
username = strings.ReplaceAll(username, "\\", newSep)
username = strings.ReplaceAll(username, "/", newSep)
username = strings.ReplaceAll(username, "@", newSep)
return username
}
func sanitizeUsername(username string, cfg *sspi.Source) string {
if len(username) == 0 {
return ""
}
if cfg.StripDomainNames {
username = stripDomainNames(username)
}
// Replace separators even if we have already stripped the domain name part,
// as the username can contain several separators: eg. "MICROSOFT\useremail@live.com"
username = replaceSeparators(username, cfg)
return username
}
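For illustration (not part of this commit): how sanitizeUsername behaves for a few representative inputs, as a package-internal sketch assuming fmt is available and a config with StripDomainNames enabled and "_" as the separator replacement.
func exampleSanitizeUsername() {
	cfg := &sspi.Source{StripDomainNames: true, SeparatorReplacement: "_"}

	fmt.Println(sanitizeUsername(`DOMAIN\user`, cfg))                  // "user"
	fmt.Println(sanitizeUsername("user@domain.local", cfg))            // "user"
	fmt.Println(sanitizeUsername(`MICROSOFT\useremail@live.com`, cfg)) // "useremail_live.com"
}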

30
auth/sspiauth_posix.go Normal file
View File

@ -0,0 +1,30 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build !windows
package auth
import (
"errors"
"net/http"
)
type SSPIUserInfo struct {
Username string // Name of user, usually in the form DOMAIN\User
Groups []string // The global groups the user is a member of
}
type sspiAuthMock struct{}
func (s sspiAuthMock) AppendAuthenticateHeader(w http.ResponseWriter, data string) {
}
func (s sspiAuthMock) Authenticate(r *http.Request, w http.ResponseWriter) (userInfo *SSPIUserInfo, outToken string, err error) {
return nil, "", errors.New("not implemented")
}
func sspiAuthInit() error {
sspiAuth = &sspiAuthMock{} // TODO: we can mock the SSPI auth in tests
return nil
}

19
auth/sspiauth_windows.go Normal file
View File

@ -0,0 +1,19 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build windows
package auth
import (
"github.com/quasoft/websspi"
)
type SSPIUserInfo = websspi.UserInfo
func sspiAuthInit() error {
var err error
config := websspi.NewConfig()
sspiAuth, err = websspi.New(config)
return err
}

43
auth/sync.go Normal file
View File

@ -0,0 +1,43 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package auth
import (
"context"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
)
// SyncExternalUsers is used to synchronize users with external authorization source
func SyncExternalUsers(ctx context.Context, updateExisting bool) error {
log.Trace("Doing: SyncExternalUsers")
ls, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{})
if err != nil {
log.Error("SyncExternalUsers: %v", err)
return err
}
for _, s := range ls {
if !s.IsActive || !s.IsSyncEnabled {
continue
}
select {
case <-ctx.Done():
log.Warn("SyncExternalUsers: Cancelled before update of %s", s.Name)
return db.ErrCancelledf("Before update of %s", s.Name)
default:
}
if syncable, ok := s.Cfg.(SynchronizableSource); ok {
err := syncable.Sync(ctx, updateExisting)
if err != nil {
return err
}
}
}
return nil
}

313
automerge/automerge.go Normal file
View File

@ -0,0 +1,313 @@
// Copyright 2021 Gitea. All rights reserved.
// SPDX-License-Identifier: MIT
package automerge
import (
"context"
"errors"
"fmt"
"strconv"
"strings"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
pull_model "code.gitea.io/gitea/models/pull"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/queue"
notify_service "code.gitea.io/gitea/services/notify"
pull_service "code.gitea.io/gitea/services/pull"
repo_service "code.gitea.io/gitea/services/repository"
)
// prAutoMergeQueue represents a queue that handles auto-merge checks for pull requests
var prAutoMergeQueue *queue.WorkerPoolQueue[string]
// Init runs the task queue that handles auto merges
func Init() error {
notify_service.RegisterNotifier(NewNotifier())
prAutoMergeQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_auto_merge", handler)
if prAutoMergeQueue == nil {
return errors.New("unable to create pr_auto_merge queue")
}
go graceful.GetManager().RunWithCancel(prAutoMergeQueue)
return nil
}
// handle passed PR IDs and test the PRs
func handler(items ...string) []string {
for _, s := range items {
var id int64
var sha string
if _, err := fmt.Sscanf(s, "%d_%s", &id, &sha); err != nil {
log.Error("could not parse data from pr_auto_merge queue (%v): %v", s, err)
continue
}
handlePullRequestAutoMerge(id, sha)
}
return nil
}
func addToQueue(pr *issues_model.PullRequest, sha string) {
log.Trace("Adding pullID: %d to the pull requests patch checking queue with sha %s", pr.ID, sha)
if err := prAutoMergeQueue.Push(fmt.Sprintf("%d_%s", pr.ID, sha)); err != nil {
log.Error("Error adding pullID: %d to the pull requests patch checking queue %v", pr.ID, err)
}
}
// ScheduleAutoMerge if schedule is false and no error, pull can be merged directly
func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pull *issues_model.PullRequest, style repo_model.MergeStyle, message string, deleteBranchAfterMerge bool) (scheduled bool, err error) {
err = db.WithTx(ctx, func(ctx context.Context) error {
if err := pull_model.ScheduleAutoMerge(ctx, doer, pull.ID, style, message, deleteBranchAfterMerge); err != nil {
return err
}
scheduled = true
_, err = issues_model.CreateAutoMergeComment(ctx, issues_model.CommentTypePRScheduledToAutoMerge, pull, doer)
return err
})
return scheduled, err
}
// RemoveScheduledAutoMerge cancels a previously scheduled pull request
func RemoveScheduledAutoMerge(ctx context.Context, doer *user_model.User, pull *issues_model.PullRequest) error {
return db.WithTx(ctx, func(ctx context.Context) error {
if err := pull_model.DeleteScheduledAutoMerge(ctx, pull.ID); err != nil {
return err
}
_, err := issues_model.CreateAutoMergeComment(ctx, issues_model.CommentTypePRUnScheduledToAutoMerge, pull, doer)
return err
})
}
// StartPRCheckAndAutoMergeBySHA starts an automerge check and auto-merge task for all pull requests of the repository that match the given SHA
func StartPRCheckAndAutoMergeBySHA(ctx context.Context, sha string, repo *repo_model.Repository) error {
pulls, err := getPullRequestsByHeadSHA(ctx, sha, repo, func(pr *issues_model.PullRequest) bool {
return !pr.HasMerged && pr.CanAutoMerge()
})
if err != nil {
return err
}
for _, pr := range pulls {
addToQueue(pr, sha)
}
return nil
}
// StartPRCheckAndAutoMerge starts an automerge check and auto-merge task for a pull request
func StartPRCheckAndAutoMerge(ctx context.Context, pull *issues_model.PullRequest) {
if pull == nil || pull.HasMerged || !pull.CanAutoMerge() {
return
}
if err := pull.LoadBaseRepo(ctx); err != nil {
log.Error("LoadBaseRepo: %v", err)
return
}
gitRepo, err := gitrepo.OpenRepository(ctx, pull.BaseRepo)
if err != nil {
log.Error("OpenRepository: %v", err)
return
}
defer gitRepo.Close()
commitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
if err != nil {
log.Error("GetRefCommitID: %v", err)
return
}
addToQueue(pull, commitID)
}
func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.Repository, filter func(*issues_model.PullRequest) bool) (map[int64]*issues_model.PullRequest, error) {
gitRepo, err := gitrepo.OpenRepository(ctx, repo)
if err != nil {
return nil, err
}
defer gitRepo.Close()
refs, err := gitRepo.GetRefsBySha(sha, "")
if err != nil {
return nil, err
}
pulls := make(map[int64]*issues_model.PullRequest)
for _, ref := range refs {
// Each pull branch starts with refs/pull/; we go from there to find the index of the PR and then
// use that to get the PR.
if strings.HasPrefix(ref, git.PullPrefix) {
parts := strings.Split(ref[len(git.PullPrefix):], "/")
// e.g. 'refs/pull/1/head' would be []string{"1", "head"}
if len(parts) != 2 {
log.Error("getPullRequestsByHeadSHA found broken pull ref [%s] on repo [%-v]", ref, repo)
continue
}
prIndex, err := strconv.ParseInt(parts[0], 10, 64)
if err != nil {
log.Error("getPullRequestsByHeadSHA found broken pull ref [%s] on repo [%-v]", ref, repo)
continue
}
p, err := issues_model.GetPullRequestByIndex(ctx, repo.ID, prIndex)
if err != nil {
// If there is no pull request for this branch, we don't try to merge it.
if issues_model.IsErrPullRequestNotExist(err) {
continue
}
return nil, err
}
if filter(p) {
pulls[p.ID] = p
}
}
}
return pulls, nil
}
// handlePullRequestAutoMerge merges the pull request if all checks are successful
func handlePullRequestAutoMerge(pullID int64, sha string) {
ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(),
fmt.Sprintf("Handle AutoMerge of PR[%d] with sha[%s]", pullID, sha))
defer finished()
pr, err := issues_model.GetPullRequestByID(ctx, pullID)
if err != nil {
log.Error("GetPullRequestByID[%d]: %v", pullID, err)
return
}
// Check if there is a scheduled pr in the db
exists, scheduledPRM, err := pull_model.GetScheduledMergeByPullID(ctx, pr.ID)
if err != nil {
log.Error("%-v GetScheduledMergeByPullID: %v", pr, err)
return
}
if !exists {
return
}
if err = pr.LoadBaseRepo(ctx); err != nil {
log.Error("%-v LoadBaseRepo: %v", pr, err)
return
}
// check the sha is the same as pull request head commit id
baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
if err != nil {
log.Error("OpenRepository: %v", err)
return
}
defer baseGitRepo.Close()
headCommitID, err := baseGitRepo.GetRefCommitID(pr.GetGitRefName())
if err != nil {
log.Error("GetRefCommitID: %v", err)
return
}
if headCommitID != sha {
log.Warn("Head commit id of auto merge %-v does not match sha [%s], it may means the head branch has been updated. Just ignore this request because a new request expected in the queue", pr, sha)
return
}
// Get all checks for this pr
// We get the latest sha commit hash again to handle the case where the check of a previous push
// did not succeed or was not finished yet.
if err = pr.LoadHeadRepo(ctx); err != nil {
log.Error("%-v LoadHeadRepo: %v", pr, err)
return
}
var headGitRepo *git.Repository
if pr.BaseRepoID == pr.HeadRepoID {
headGitRepo = baseGitRepo
} else {
headGitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
if err != nil {
log.Error("OpenRepository %-v: %v", pr.HeadRepo, err)
return
}
defer headGitRepo.Close()
}
switch pr.Flow {
case issues_model.PullRequestFlowGithub:
headBranchExist := pr.HeadRepo != nil && gitrepo.IsBranchExist(ctx, pr.HeadRepo, pr.HeadBranch)
if !headBranchExist {
log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch: %s]", pr, pr.HeadRepoID, pr.HeadBranch)
return
}
case issues_model.PullRequestFlowAGit:
headBranchExist := gitrepo.IsReferenceExist(ctx, pr.BaseRepo, pr.GetGitRefName())
if !headBranchExist {
log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch(Agit): %s]", pr, pr.HeadRepoID, pr.HeadBranch)
return
}
default:
log.Error("wrong flow type %d", pr.Flow)
return
}
// Check if all checks succeeded
pass, err := pull_service.IsPullCommitStatusPass(ctx, pr)
if err != nil {
log.Error("%-v IsPullCommitStatusPass: %v", pr, err)
return
}
if !pass {
log.Info("Scheduled auto merge %-v has unsuccessful status checks", pr)
return
}
// Merge if all checks succeeded
doer, err := user_model.GetUserByID(ctx, scheduledPRM.DoerID)
if err != nil {
log.Error("Unable to get scheduled User[%d]: %v", scheduledPRM.DoerID, err)
return
}
perm, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
if err != nil {
log.Error("GetUserRepoPermission %-v: %v", pr.HeadRepo, err)
return
}
if err := pull_service.CheckPullMergeable(ctx, doer, &perm, pr, pull_service.MergeCheckTypeGeneral, false); err != nil {
if errors.Is(err, pull_service.ErrNotReadyToMerge) {
log.Info("%-v was scheduled to automerge by an unauthorized user", pr)
return
}
log.Error("%-v CheckPullMergeable: %v", pr, err)
return
}
if err := pull_service.Merge(ctx, pr, doer, baseGitRepo, scheduledPRM.MergeStyle, "", scheduledPRM.Message, true); err != nil {
log.Error("pull_service.Merge: %v", err)
// FIXME: if merge failed, we should display some error message to the pull request page.
// The resolution is to add a new column named `error_message` to the automerge table to store the error message and display it
// on the pull request page. But this should not be done in a bug fix PR which will be backported to the release branch.
return
}
if pr.Flow == issues_model.PullRequestFlowGithub && scheduledPRM.DeleteBranchAfterMerge {
if err := repo_service.DeleteBranch(ctx, doer, pr.HeadRepo, headGitRepo, pr.HeadBranch, pr); err != nil {
log.Error("DeletePullRequestHeadBranch: %v", err)
}
}
}

57
automerge/notify.go Normal file
View File

@ -0,0 +1,57 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package automerge
import (
"context"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repository"
notify_service "code.gitea.io/gitea/services/notify"
)
type automergeNotifier struct {
notify_service.NullNotifier
}
var _ notify_service.Notifier = &automergeNotifier{}
// NewNotifier creates a new automergeNotifier notifier
func NewNotifier() notify_service.Notifier {
return &automergeNotifier{}
}
func (n *automergeNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, review *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
// as missing / blocking reviews could have blocked a pending automerge, let's recheck
if review.Type == issues_model.ReviewTypeApprove {
if err := StartPRCheckAndAutoMergeBySHA(ctx, review.CommitID, pr.BaseRepo); err != nil {
log.Error("StartPullRequestAutoMergeCheckBySHA: %v", err)
}
}
}
func (n *automergeNotifier) PullReviewDismiss(ctx context.Context, doer *user_model.User, review *issues_model.Review, comment *issues_model.Comment) {
if err := review.LoadIssue(ctx); err != nil {
log.Error("LoadIssue: %v", err)
return
}
if err := review.Issue.LoadPullRequest(ctx); err != nil {
log.Error("LoadPullRequest: %v", err)
return
}
// as reviews could have blocked a pending automerge, let's recheck
StartPRCheckAndAutoMerge(ctx, review.Issue.PullRequest)
}
func (n *automergeNotifier) CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, commit *repository.PushCommit, sender *user_model.User, status *git_model.CommitStatus) {
if status.State.IsSuccess() {
if err := StartPRCheckAndAutoMergeBySHA(ctx, commit.Sha1, repo); err != nil {
log.Error("MergeScheduledPullRequest[repo_id: %d, user_id: %d, sha: %s]: %w", repo.ID, sender.ID, commit.Sha1, err)
}
}
}

115
context/access_log.go Normal file
View File

@ -0,0 +1,115 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
import (
"bytes"
"net"
"net/http"
"strings"
"text/template"
"time"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/web/middleware"
)
type accessLoggerTmplData struct {
Identity *string
Start *time.Time
ResponseWriter struct {
Status, Size int
}
Ctx map[string]any
RequestID *string
}
const keyOfRequestIDInTemplate = ".RequestID"
// According to common trace/request ID formats:
// TraceId: A valid trace identifier is a 16-byte array with at least one non-zero byte
// MD5 output is 16 or 32 bytes: md5-bytes is 16, md5-hex is 32
// SHA1: similar, SHA1-bytes is 20, SHA1-hex is 40.
// UUID is 128-bit, 32 hex chars, 36 ASCII chars with 4 dashes
// So, we accept a Request ID with a maximum character length of 40
const maxRequestIDByteLength = 40
func parseRequestIDFromRequestHeader(req *http.Request) string {
requestID := "-"
for _, key := range setting.Log.RequestIDHeaders {
if req.Header.Get(key) != "" {
requestID = req.Header.Get(key)
break
}
}
if len(requestID) > maxRequestIDByteLength {
requestID = requestID[:maxRequestIDByteLength] + "..."
}
return requestID
}
type accessLogRecorder struct {
logger log.BaseLogger
logTemplate *template.Template
needRequestID bool
}
func (lr *accessLogRecorder) record(start time.Time, respWriter ResponseWriter, req *http.Request) {
var requestID string
if lr.needRequestID {
requestID = parseRequestIDFromRequestHeader(req)
}
reqHost, _, err := net.SplitHostPort(req.RemoteAddr)
if err != nil {
reqHost = req.RemoteAddr
}
identity := "-"
data := middleware.GetContextData(req.Context())
if signedUser, ok := data[middleware.ContextDataKeySignedUser].(*user_model.User); ok {
identity = signedUser.Name
}
buf := bytes.NewBuffer([]byte{})
tmplData := accessLoggerTmplData{
Identity: &identity,
Start: &start,
Ctx: map[string]any{
"RemoteAddr": req.RemoteAddr,
"RemoteHost": reqHost,
"Req": req,
},
RequestID: &requestID,
}
tmplData.ResponseWriter.Status = respWriter.WrittenStatus()
tmplData.ResponseWriter.Size = respWriter.WrittenSize()
err = lr.logTemplate.Execute(buf, tmplData)
if err != nil {
log.Error("Could not execute access logger template: %v", err.Error())
}
lr.logger.Log(1, &log.Event{Level: log.INFO}, "%s", buf.String())
}
func newAccessLogRecorder() *accessLogRecorder {
return &accessLogRecorder{
logger: log.GetLogger("access"),
logTemplate: template.Must(template.New("log").Parse(setting.Log.AccessLogTemplate)),
needRequestID: len(setting.Log.RequestIDHeaders) > 0 && strings.Contains(setting.Log.AccessLogTemplate, keyOfRequestIDInTemplate),
}
}
// AccessLogger returns a middleware that writes access logs
func AccessLogger() func(http.Handler) http.Handler {
recorder := newAccessLogRecorder()
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
start := time.Now()
next.ServeHTTP(w, req)
recorder.record(start, w.(ResponseWriter), req)
})
}
}

View File

@ -0,0 +1,71 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
import (
"fmt"
"net/http"
"net/url"
"testing"
"time"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
)
type testAccessLoggerMock struct {
logs []string
}
func (t *testAccessLoggerMock) Log(skip int, event *log.Event, format string, v ...any) {
t.logs = append(t.logs, fmt.Sprintf(format, v...))
}
func (t *testAccessLoggerMock) GetLevel() log.Level {
return log.INFO
}
type testAccessLoggerResponseWriterMock struct{}
func (t testAccessLoggerResponseWriterMock) Header() http.Header {
return nil
}
func (t testAccessLoggerResponseWriterMock) Before(f func(ResponseWriter)) {}
func (t testAccessLoggerResponseWriterMock) WriteHeader(statusCode int) {}
func (t testAccessLoggerResponseWriterMock) Write(bytes []byte) (int, error) {
return 0, nil
}
func (t testAccessLoggerResponseWriterMock) Flush() {}
func (t testAccessLoggerResponseWriterMock) WrittenStatus() int {
return http.StatusOK
}
func (t testAccessLoggerResponseWriterMock) WrittenSize() int {
return 123123
}
func TestAccessLogger(t *testing.T) {
setting.Log.AccessLogTemplate = `{{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"`
recorder := newAccessLogRecorder()
mockLogger := &testAccessLoggerMock{}
recorder.logger = mockLogger
req := &http.Request{
RemoteAddr: "remote-addr",
Method: http.MethodGet,
Proto: "https",
URL: &url.URL{Path: "/path"},
}
req.Header = http.Header{}
req.Header.Add("Referer", "referer")
req.Header.Add("User-Agent", "user-agent")
recorder.record(time.Date(2000, 1, 2, 3, 4, 5, 0, time.UTC), &testAccessLoggerResponseWriterMock{}, req)
assert.Equal(t, []string{`remote-addr - - [02/Jan/2000:03:04:05 +0000] "GET /path https" 200 123123 "referer" "user-agent"`}, mockLogger.logs)
}
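// Illustrative sketch of the request-ID path, assuming "X-Request-ID" as the configured
// header name; needRequestID becomes true here because RequestIDHeaders is non-empty and
// the template references .RequestID.
func TestAccessLoggerWithRequestID(t *testing.T) {
	oldHeaders, oldTemplate := setting.Log.RequestIDHeaders, setting.Log.AccessLogTemplate
	defer func() { setting.Log.RequestIDHeaders, setting.Log.AccessLogTemplate = oldHeaders, oldTemplate }()
	setting.Log.RequestIDHeaders = []string{"X-Request-ID"}
	setting.Log.AccessLogTemplate = `{{.RequestID}} {{.ResponseWriter.Status}}`
	recorder := newAccessLogRecorder()
	mockLogger := &testAccessLoggerMock{}
	recorder.logger = mockLogger
	req := &http.Request{RemoteAddr: "remote-addr", Method: http.MethodGet, URL: &url.URL{Path: "/path"}, Header: http.Header{}}
	req.Header.Set("X-Request-ID", "req-42")
	recorder.record(time.Date(2000, 1, 2, 3, 4, 5, 0, time.UTC), &testAccessLoggerResponseWriterMock{}, req)
	assert.Equal(t, []string{"req-42 200"}, mockLogger.logs)
}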

369
context/api.go Normal file

@ -0,0 +1,369 @@
// Copyright 2016 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
import (
"errors"
"fmt"
"net/http"
"net/url"
"slices"
"strconv"
"strings"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/cache"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/httpcache"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
web_types "code.gitea.io/gitea/modules/web/types"
)
// APIContext is a specific context for API service
// ATTENTION: This struct should never be manually constructed in routes/services,
// it has many internal details which should be carefully prepared by the framework.
// If it is abused, it would cause strange bugs like panic/resource-leak.
type APIContext struct {
*Base
Cache cache.StringCache
Doer *user_model.User // current signed-in user
IsSigned bool
IsBasicAuth bool
ContextUser *user_model.User // the user which is being visited, in most cases it differs from Doer
Repo *Repository
Org *APIOrganization
Package *Package
PublicOnly bool // Whether the request is for a public endpoint
}
func init() {
web.RegisterResponseStatusProvider[*APIContext](func(req *http.Request) web_types.ResponseStatusProvider {
return req.Context().Value(apiContextKey).(*APIContext)
})
}
// Currently, we have the following common fields in error responses:
// * message: the message for end users (it shouldn't be used for error-type detection);
//   if we need to indicate a specific error, we should introduce new fields like ErrorCode or ErrorType
// * url: the swagger document URL
// APIError is error format response
// swagger:response error
type APIError struct {
Message string `json:"message"`
URL string `json:"url"`
}
// APIValidationError is error format response related to input validation
// swagger:response validationError
type APIValidationError struct {
Message string `json:"message"`
URL string `json:"url"`
}
// APIInvalidTopicsError is error format response to invalid topics
// swagger:response invalidTopicsError
type APIInvalidTopicsError struct {
Message string `json:"message"`
InvalidTopics []string `json:"invalidTopics"`
}
// APIEmpty is an empty response
// swagger:response empty
type APIEmpty struct{}
// APIForbiddenError is a forbidden error response
// swagger:response forbidden
type APIForbiddenError struct {
APIError
}
// APINotFound is a not found empty response
// swagger:response notFound
type APINotFound struct{}
// APIConflict is a conflict empty response
// swagger:response conflict
type APIConflict struct{}
// APIRedirect is a redirect response
// swagger:response redirect
type APIRedirect struct{}
// APIString is a string response
// swagger:response string
type APIString string
// APIRepoArchivedError is the error raised when an attempt is made to modify an archived repo
// swagger:response repoArchivedError
type APIRepoArchivedError struct {
APIError
}
// APIErrorInternal responds with an error message and a 500 status
func (ctx *APIContext) APIErrorInternal(err error) {
ctx.apiErrorInternal(1, err)
}
func (ctx *APIContext) apiErrorInternal(skip int, err error) {
log.ErrorWithSkip(skip+1, "InternalServerError: %v", err)
var message string
if !setting.IsProd || (ctx.Doer != nil && ctx.Doer.IsAdmin) {
message = err.Error()
}
ctx.JSON(http.StatusInternalServerError, APIError{
Message: message,
URL: setting.API.SwaggerURL,
})
}
// APIError responds to the client with the given obj as the error message.
// If status is 500, the error is also written to the log.
func (ctx *APIContext) APIError(status int, obj any) {
var message string
if err, ok := obj.(error); ok {
message = err.Error()
} else {
message = fmt.Sprintf("%s", obj)
}
if status == http.StatusInternalServerError {
log.ErrorWithSkip(1, "APIError: %s", message)
if setting.IsProd && !(ctx.Doer != nil && ctx.Doer.IsAdmin) {
message = ""
}
}
ctx.JSON(status, APIError{
Message: message,
URL: setting.API.SwaggerURL,
})
}
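// Illustrative sketch of the resulting payloads (the swagger URL value is an assumption):
//
//	ctx.APIError(http.StatusForbidden, "user should be a collaborator")
//	// -> 403 {"message":"user should be a collaborator","url":"https://example.com/api/swagger"}
//
//	ctx.APIErrorInternal(err)
//	// -> 500 {"message":"","url":"https://example.com/api/swagger"} (message is empty in production for non-admin users)
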
type apiContextKeyType struct{}
var apiContextKey = apiContextKeyType{}
// GetAPIContext returns a context for API routes
func GetAPIContext(req *http.Request) *APIContext {
return req.Context().Value(apiContextKey).(*APIContext)
}
func genAPILinks(curURL *url.URL, total, pageSize, curPage int) []string {
page := NewPagination(total, pageSize, curPage, 0)
paginater := page.Paginater
links := make([]string, 0, 4)
if paginater.HasNext() {
u := *curURL
queries := u.Query()
queries.Set("page", strconv.Itoa(paginater.Next()))
u.RawQuery = queries.Encode()
links = append(links, fmt.Sprintf("<%s%s>; rel=\"next\"", setting.AppURL, u.RequestURI()[1:]))
}
if !paginater.IsLast() {
u := *curURL
queries := u.Query()
queries.Set("page", strconv.Itoa(paginater.TotalPages()))
u.RawQuery = queries.Encode()
links = append(links, fmt.Sprintf("<%s%s>; rel=\"last\"", setting.AppURL, u.RequestURI()[1:]))
}
if !paginater.IsFirst() {
u := *curURL
queries := u.Query()
queries.Set("page", "1")
u.RawQuery = queries.Encode()
links = append(links, fmt.Sprintf("<%s%s>; rel=\"first\"", setting.AppURL, u.RequestURI()[1:]))
}
if paginater.HasPrevious() {
u := *curURL
queries := u.Query()
queries.Set("page", strconv.Itoa(paginater.Previous()))
u.RawQuery = queries.Encode()
links = append(links, fmt.Sprintf("<%s%s>; rel=\"prev\"", setting.AppURL, u.RequestURI()[1:]))
}
return links
}
// SetLinkHeader sets the pagination Link header based on the given total item count and page size.
func (ctx *APIContext) SetLinkHeader(total, pageSize int) {
links := genAPILinks(ctx.Req.URL, total, pageSize, ctx.FormInt("page"))
if len(links) > 0 {
ctx.RespHeader().Set("Link", strings.Join(links, ","))
ctx.AppendAccessControlExposeHeaders("Link")
}
}
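// Illustrative sketch: a paginated list endpoint would typically pair this with the
// X-Total-Count header from Base (the variable names below are assumptions):
//
//	ctx.SetTotalCountHeader(total)
//	ctx.SetLinkHeader(int(total), listOptions.PageSize)
//	// -> X-Total-Count: 100
//	//    Link: <https://example.com/api/v1/.../issues?page=2>; rel="next",<...?page=5>; rel="last"
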
// APIContexter returns APIContext middleware
func APIContexter() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
base := NewBaseContext(w, req)
ctx := &APIContext{
Base: base,
Cache: cache.GetCache(),
Repo: &Repository{PullRequest: &PullRequest{}},
Org: &APIOrganization{},
}
ctx.SetContextValue(apiContextKey, ctx)
// If the request sends files, parse them here; otherwise Query() can't be parsed and the CSRF token will be invalid.
if ctx.Req.Method == http.MethodPost && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
if err := ctx.Req.ParseMultipartForm(setting.Attachment.MaxSize << 20); err != nil && !strings.Contains(err.Error(), "EOF") { // Attachment.MaxSize is in MiB, the shift converts it to bytes
ctx.APIErrorInternal(err)
return
}
}
httpcache.SetCacheControlInHeader(ctx.Resp.Header(), &httpcache.CacheControlOptions{NoTransform: true})
ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
next.ServeHTTP(ctx.Resp, ctx.Req)
})
}
}
// APIErrorNotFound handles 404s for APIContext.
// A string argument replaces the message; error arguments are collected into the "errors" slice.
func (ctx *APIContext) APIErrorNotFound(objs ...any) {
var message string
var errs []string
for _, obj := range objs {
// Ignore nil
if obj == nil {
continue
}
if err, ok := obj.(error); ok {
errs = append(errs, err.Error())
} else {
message = obj.(string)
}
}
ctx.JSON(http.StatusNotFound, map[string]any{
"message": util.IfZero(message, "not found"), // do not use locale in API
"url": setting.API.SwaggerURL,
"errors": errs,
})
}
// ReferencesGitRepo injects the GitRepo into the Context.
// The IsEmpty check can optionally be skipped.
func ReferencesGitRepo(allowEmpty ...bool) func(ctx *APIContext) {
return func(ctx *APIContext) {
// Empty repository does not have reference information.
if ctx.Repo.Repository.IsEmpty && !(len(allowEmpty) != 0 && allowEmpty[0]) {
return
}
// For API calls.
if ctx.Repo.GitRepo == nil {
var err error
ctx.Repo.GitRepo, err = gitrepo.RepositoryFromRequestContextOrOpen(ctx, ctx.Repo.Repository)
if err != nil {
ctx.APIErrorInternal(err)
return
}
}
}
}
// RepoRefForAPI handles repository reference names when the ref name is not explicitly given
func RepoRefForAPI(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
ctx := GetAPIContext(req)
if ctx.Repo.Repository.IsEmpty {
ctx.APIErrorNotFound("repository is empty")
return
}
if ctx.Repo.GitRepo == nil {
panic("no GitRepo, forgot to call the middleware?") // it is a programming error
}
refName, refType, _ := getRefNameLegacy(ctx.Base, ctx.Repo, ctx.PathParam("*"), ctx.FormTrim("ref"))
var err error
switch refType {
case git.RefTypeBranch:
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(refName)
case git.RefTypeTag:
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetTagCommit(refName)
case git.RefTypeCommit:
ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetCommit(refName)
}
if ctx.Repo.Commit == nil || errors.Is(err, util.ErrNotExist) {
ctx.APIErrorNotFound("unable to find a git ref")
return
} else if err != nil {
ctx.APIErrorInternal(err)
return
}
ctx.Repo.CommitID = ctx.Repo.Commit.ID.String()
next.ServeHTTP(w, req)
})
}
// HasAPIError returns true if an error occurred during form validation.
func (ctx *APIContext) HasAPIError() bool {
hasErr, ok := ctx.Data["HasError"]
if !ok {
return false
}
return hasErr.(bool)
}
// GetErrMsg returns the form validation error message.
func (ctx *APIContext) GetErrMsg() string {
msg, _ := ctx.Data["ErrorMsg"].(string)
if msg == "" {
msg = "invalid form data"
}
return msg
}
// NotFoundOrServerError responds with a 404 status code if the error is a not-found error,
// otherwise it logs the error and responds with a 500 status code.
func (ctx *APIContext) NotFoundOrServerError(err error) {
if errors.Is(err, util.ErrNotExist) {
ctx.JSON(http.StatusNotFound, nil)
return
}
ctx.APIErrorInternal(err)
}
// IsUserSiteAdmin returns true if current user is a site admin
func (ctx *APIContext) IsUserSiteAdmin() bool {
return ctx.IsSigned && ctx.Doer.IsAdmin
}
// IsUserRepoAdmin returns true if current user is admin in current repo
func (ctx *APIContext) IsUserRepoAdmin() bool {
return ctx.Repo.IsAdmin()
}
// IsUserRepoWriter returns true if current user has "write" privilege in current repo
func (ctx *APIContext) IsUserRepoWriter(unitTypes []unit.Type) bool {
return slices.ContainsFunc(unitTypes, ctx.Repo.CanWrite)
}

12
context/api_org.go Normal file

@ -0,0 +1,12 @@
// Copyright 2016 The Gogs Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
import "code.gitea.io/gitea/models/organization"
// APIOrganization contains organization and team
type APIOrganization struct {
Organization *organization.Organization
Team *organization.Team
}

50
context/api_test.go Normal file

@ -0,0 +1,50 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
import (
"net/url"
"strconv"
"testing"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
)
func TestGenAPILinks(t *testing.T) {
setting.AppURL = "http://localhost:3000/"
kases := map[string][]string{
"api/v1/repos/jerrykan/example-repo/issues?state=all": {
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=2&state=all>; rel="next"`,
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=5&state=all>; rel="last"`,
},
"api/v1/repos/jerrykan/example-repo/issues?state=all&page=1": {
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=2&state=all>; rel="next"`,
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=5&state=all>; rel="last"`,
},
"api/v1/repos/jerrykan/example-repo/issues?state=all&page=2": {
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=3&state=all>; rel="next"`,
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=5&state=all>; rel="last"`,
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=1&state=all>; rel="first"`,
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=1&state=all>; rel="prev"`,
},
"api/v1/repos/jerrykan/example-repo/issues?state=all&page=5": {
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=1&state=all>; rel="first"`,
`<http://localhost:3000/api/v1/repos/jerrykan/example-repo/issues?page=4&state=all>; rel="prev"`,
},
}
for req, response := range kases {
u, err := url.Parse(setting.AppURL + req)
assert.NoError(t, err)
p := u.Query().Get("page")
curPage, _ := strconv.Atoi(p)
links := genAPILinks(u, 100, 20, curPage)
assert.Equal(t, response, links)
}
}

201
context/base.go Normal file

@ -0,0 +1,201 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
import (
"fmt"
"html/template"
"io"
"net/http"
"strconv"
"strings"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/reqctx"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/web/middleware"
)
type BaseContextKeyType struct{}
var BaseContextKey BaseContextKeyType
// Base is the base context for all web handlers
// ATTENTION: This struct should never be manually constructed in routes/services,
// it has many internal details which should be carefully prepared by the framework.
// If it is abused, it would cause strange bugs like panic/resource-leak.
type Base struct {
reqctx.RequestContext
Resp ResponseWriter
Req *http.Request
// Data is prepared by the ContextDataStore middleware; this field only refers to the pre-created/prepared ContextData.
// Although it's mainly used for MVC templates, sometimes it's also used to pass data between middlewares/handlers
Data reqctx.ContextData
// Locale is mainly for Web context, although the API context also uses it in some cases: message response, form validation
Locale translation.Locale
}
// AppendAccessControlExposeHeaders appends the given header names to the "Access-Control-Expose-Headers" header
func (b *Base) AppendAccessControlExposeHeaders(names ...string) {
val := b.RespHeader().Get("Access-Control-Expose-Headers")
if len(val) != 0 {
b.RespHeader().Set("Access-Control-Expose-Headers", fmt.Sprintf("%s, %s", val, strings.Join(names, ", ")))
} else {
b.RespHeader().Set("Access-Control-Expose-Headers", strings.Join(names, ", "))
}
}
// SetTotalCountHeader sets the "X-Total-Count" header
func (b *Base) SetTotalCountHeader(total int64) {
b.RespHeader().Set("X-Total-Count", strconv.FormatInt(total, 10))
b.AppendAccessControlExposeHeaders("X-Total-Count")
}
// Written returns true if something has already been sent to the web browser
func (b *Base) Written() bool {
return b.Resp.WrittenStatus() != 0
}
func (b *Base) WrittenStatus() int {
return b.Resp.WrittenStatus()
}
// Status writes status code
func (b *Base) Status(status int) {
b.Resp.WriteHeader(status)
}
// Write writes data to the web browser
func (b *Base) Write(bs []byte) (int, error) {
return b.Resp.Write(bs)
}
// RespHeader returns the response header
func (b *Base) RespHeader() http.Header {
return b.Resp.Header()
}
// HTTPError returns an error to the web browser
func (b *Base) HTTPError(status int, contents ...string) {
v := http.StatusText(status)
if len(contents) > 0 {
v = contents[0]
}
http.Error(b.Resp, v, status)
}
// JSON renders content as JSON
func (b *Base) JSON(status int, content any) {
b.Resp.Header().Set("Content-Type", "application/json;charset=utf-8")
b.Resp.WriteHeader(status)
if err := json.NewEncoder(b.Resp).Encode(content); err != nil {
log.Error("Render JSON failed: %v", err)
}
}
// RemoteAddr returns the client's network address as reported by the request
func (b *Base) RemoteAddr() string {
return b.Req.RemoteAddr
}
// plainTextInternal renders bytes as plain text; 4xx/5xx responses are additionally logged at trace level
func (b *Base) plainTextInternal(skip, status int, bs []byte) {
statusPrefix := status / 100
if statusPrefix == 4 || statusPrefix == 5 {
log.Log(skip, log.TRACE, "plainTextInternal (status=%d): %s", status, string(bs))
}
b.Resp.Header().Set("Content-Type", "text/plain;charset=utf-8")
b.Resp.Header().Set("X-Content-Type-Options", "nosniff")
b.Resp.WriteHeader(status)
_, _ = b.Resp.Write(bs)
}
// PlainTextBytes renders bytes as plain text
func (b *Base) PlainTextBytes(status int, bs []byte) {
b.plainTextInternal(2, status, bs)
}
// PlainText renders content as plain text
func (b *Base) PlainText(status int, text string) {
b.plainTextInternal(2, status, []byte(text))
}
// Redirect redirects the request
func (b *Base) Redirect(location string, status ...int) {
code := http.StatusSeeOther
if len(status) == 1 {
code = status[0]
}
if !httplib.IsRelativeURL(location) {
// Some browsers (e.g. Safari) have buggy behavior for the combination of Cookie + Cache + external redirection, e.g. /my-path => https://other/path:
// 1. the first request to "/my-path" contains the cookie
// 2. some time later, a request to "/my-path" doesn't contain the cookie (caused by the browser's tracking prevention)
// 3. Gitea's Sessioner doesn't see the session cookie, so it generates a new session id and returns it to the browser
// 4. the browser then accepts the empty session and the user is logged out
// So in this case, we should remove the session cookie from the response header
removeSessionCookieHeader(b.Resp)
}
// in case the request is made by htmx, have it redirect the browser instead of trying to follow the redirect inside htmx
if b.Req.Header.Get("HX-Request") == "true" {
b.Resp.Header().Set("HX-Redirect", location)
// we have to return a non-redirect status code so XMLHttpRequest will not immediately follow the redirect,
// giving the htmx redirect logic a chance to run
b.Status(http.StatusNoContent)
return
}
http.Redirect(b.Resp, b.Req, location, code)
}
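// Illustrative sketch of the two paths above:
//
//	b.Redirect("/user/login")
//	// normal browser request       -> 303 See Other with "Location: /user/login"
//	// request with HX-Request=true -> 204 No Content with "HX-Redirect: /user/login"
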
type ServeHeaderOptions httplib.ServeHeaderOptions
func (b *Base) SetServeHeaders(opt *ServeHeaderOptions) {
httplib.ServeSetHeaders(b.Resp, (*httplib.ServeHeaderOptions)(opt))
}
// ServeContent serves content for the HTTP request
func (b *Base) ServeContent(r io.ReadSeeker, opts *ServeHeaderOptions) {
httplib.ServeSetHeaders(b.Resp, (*httplib.ServeHeaderOptions)(opts))
http.ServeContent(b.Resp, b.Req, opts.Filename, opts.LastModified, r)
}
func (b *Base) Tr(msg string, args ...any) template.HTML {
return b.Locale.Tr(msg, args...)
}
func (b *Base) TrN(cnt any, key1, keyN string, args ...any) template.HTML {
return b.Locale.TrN(cnt, key1, keyN, args...)
}
func NewBaseContext(resp http.ResponseWriter, req *http.Request) *Base {
reqCtx := reqctx.FromContext(req.Context())
b := &Base{
RequestContext: reqCtx,
Req: req,
Resp: WrapResponseWriter(resp),
Locale: middleware.Locale(resp, req),
Data: reqCtx.GetData(),
}
b.Req = b.Req.WithContext(b)
reqCtx.SetContextValue(BaseContextKey, b)
reqCtx.SetContextValue(translation.ContextKey, b.Locale)
reqCtx.SetContextValue(httplib.RequestContextKey, b.Req)
return b
}
func NewBaseContextForTest(resp http.ResponseWriter, req *http.Request) *Base {
if !setting.IsInTesting {
panic("This function is only for testing")
}
ctx := reqctx.NewRequestContextForTest(req.Context())
*req = *req.WithContext(ctx)
return NewBaseContext(resp, req)
}
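// Illustrative sketch of using the test constructor (assumes setting.IsInTesting is true and net/http/httptest is imported):
//
//	req := httptest.NewRequest(http.MethodGet, "/api/v1/version", nil)
//	resp := httptest.NewRecorder()
//	b := NewBaseContextForTest(resp, req)
//	b.JSON(http.StatusOK, map[string]string{"version": "dev"})
//	// resp now holds a 200 response with body {"version":"dev"}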

77
context/base_form.go Normal file

@ -0,0 +1,77 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
import (
"strconv"
"strings"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/util"
)
// FormString returns the first value matching the provided key in the form as a string
func (b *Base) FormString(key string, def ...string) string {
s := b.Req.FormValue(key)
if s == "" {
s = util.OptionalArg(def)
}
return s
}
// FormStrings returns a string slice for the provided key from the form
func (b *Base) FormStrings(key string) []string {
if b.Req.Form == nil {
if err := b.Req.ParseMultipartForm(32 << 20); err != nil {
return nil
}
}
if v, ok := b.Req.Form[key]; ok {
return v
}
return nil
}
// FormTrim returns the first value for the provided key in the form with surrounding whitespace trimmed
func (b *Base) FormTrim(key string) string {
return strings.TrimSpace(b.Req.FormValue(key))
}
// FormInt returns the first value for the provided key in the form as an int
func (b *Base) FormInt(key string) int {
v, _ := strconv.Atoi(b.Req.FormValue(key))
return v
}
// FormInt64 returns the first value for the provided key in the form as an int64
func (b *Base) FormInt64(key string) int64 {
v, _ := strconv.ParseInt(b.Req.FormValue(key), 10, 64)
return v
}
// FormBool returns true if the value for the provided key in the form parses as true via strconv.ParseBool (e.g. "1", "true") or equals "on"
func (b *Base) FormBool(key string) bool {
s := b.Req.FormValue(key)
v, _ := strconv.ParseBool(s)
v = v || strings.EqualFold(s, "on")
return v
}
// FormOptionalBool returns an optional.Some(true) or optional.Some(false) if the value
// for the provided key exists in the form else it returns optional.None[bool]()
func (b *Base) FormOptionalBool(key string) optional.Option[bool] {
value := b.Req.FormValue(key)
if len(value) == 0 {
return optional.None[bool]()
}
v, _ := strconv.ParseBool(value)
v = v || strings.EqualFold(value, "on")
return optional.Some(v)
}
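// Illustrative sketch, for a hypothetical "wiki" form key:
//
//	?wiki=on     -> FormBool: true,  FormOptionalBool: Some(true)
//	?wiki=false  -> FormBool: false, FormOptionalBool: Some(false)
//	(key absent) -> FormBool: false, FormOptionalBool: None
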
func (b *Base) SetFormString(key, value string) {
_ = b.Req.FormValue(key) // force parse form
b.Req.Form.Set(key, value)
}
