
Add logging to long migrations (#11647)

* Add logging to long migrations

Also fix v136 to not use models

Signed-off-by: Andrew Thornton <art27@cantab.net>
tags/v1.13.0-rc1
zeripath committed 3 years ago
commit 495354c4bd
4 changed files with 154 additions and 13 deletions

1. models/migrations/v115.go  (+43 -0)
2. models/migrations/v128.go  (+21 -3)
3. models/migrations/v134.go  (+21 -2)
4. models/migrations/v136.go  (+69 -8)
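
All four migrations below gain the same shape of progress reporting: count the rows up front, then poll a 5-second time.Ticker with a non-blocking select inside the processing loop, so a log line is emitted at most once per tick without ever stalling the loop. A minimal self-contained sketch of that pattern (processAll, migrateOne and the item slice are placeholders, not code from the commit):

package main

import (
	"log"
	"time"
)

// migrateOne is a stand-in for the real per-row work; it is not part of the commit.
func migrateOne(item int) {
	_ = item
}

// processAll shows the progress-logging pattern added by this commit: a
// 5-second ticker plus a non-blocking select, so at most one progress line is
// printed per tick and the loop never blocks waiting for the ticker.
func processAll(items []int) {
	ticker := time.NewTicker(5 * time.Second)
	defer ticker.Stop()

	count := len(items)
	migrated := 0

	for _, item := range items {
		migrateOne(item)
		migrated++

		select {
		case <-ticker.C:
			log.Printf("%d/%d (%2.0f%%) migrated. %d remaining ...",
				migrated, count, float64(migrated)/float64(count)*100, count-migrated)
		default:
			// no tick due yet: continue immediately
		}
	}

	log.Printf("Completed migrating %d item(s)", count)
}

func main() {
	processAll(make([]int, 1000))
}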

models/migrations/v115.go  (+43 -0)

@@ -8,8 +8,10 @@ import (
"crypto/md5"
"fmt"
"io/ioutil"
"math"
"os"
"path/filepath"
"time"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@@ -26,8 +28,19 @@ func renameExistingUserAvatarName(x *xorm.Engine) error {
LowerName string `xorm:"UNIQUE NOT NULL"`
Avatar string
}

ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()

count, err := x.Count(new(User))
if err != nil {
return err
}
log.Info("%d User Avatar(s) to migrate ...", count)

deleteList := make(map[string]struct{})
start := 0
migrated := 0
for {
if err := sess.Begin(); err != nil {
return fmt.Errorf("session.Begin: %v", err)
@@ -73,6 +86,19 @@ func renameExistingUserAvatarName(x *xorm.Engine) error {
}

deleteList[filepath.Join(setting.AvatarUploadPath, oldAvatar)] = struct{}{}
migrated++
select {
case <-ticker.C:
log.Info(
"%d/%d (%2.0f%%) User Avatar(s) migrated (%d old avatars to be deleted) in %d batches. %d Remaining ...",
migrated,
count,
float64(migrated)/float64(count)*100,
len(deleteList),
int(math.Ceil(float64(migrated)/float64(50))),
count-int64(migrated))
default:
}
}
if err := sess.Commit(); err != nil {
_ = sess.Rollback()
@@ -80,11 +106,28 @@ func renameExistingUserAvatarName(x *xorm.Engine) error {
}
}

deleteCount := len(deleteList)
log.Info("Deleting %d old avatars ...", deleteCount)
i := 0
for file := range deleteList {
if err := os.Remove(file); err != nil {
log.Warn("os.Remove: %v", err)
}
i++
select {
case <-ticker.C:
log.Info(
"%d/%d (%2.0f%%) Old User Avatar(s) deleted. %d Remaining ...",
i,
deleteCount,
float64(i)/float64(deleteCount)*100,
deleteCount-i)
default:
}
}

log.Info("Completed migrating %d User Avatar(s) and deleting %d Old Avatars", count, deleteCount)

return nil
}


models/migrations/v128.go  (+21 -3)

@@ -6,8 +6,10 @@ package migrations

import (
"fmt"
"math"
"path/filepath"
"strings"
"time"

"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
@@ -43,17 +45,27 @@ func fixMergeBase(x *xorm.Engine) error {
limit = 50
}

ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()

count, err := x.Count(new(PullRequest))
if err != nil {
return err
}
log.Info("%d Pull Request(s) to migrate ...", count)

i := 0
start := 0
for {
prs := make([]PullRequest, 0, 50)
- if err := x.Limit(limit, i).Asc("id").Find(&prs); err != nil {
if err := x.Limit(limit, start).Asc("id").Find(&prs); err != nil {
return fmt.Errorf("Find: %v", err)
}
if len(prs) == 0 {
break
}

- i += len(prs)
start += 50
for _, pr := range prs {
baseRepo := &Repository{ID: pr.BaseRepoID}
has, err := x.Table("repository").Get(baseRepo)
@@ -102,8 +114,14 @@ func fixMergeBase(x *xorm.Engine) error {
}
pr.MergeBase = strings.TrimSpace(pr.MergeBase)
x.ID(pr.ID).Cols("merge_base").Update(pr)
i++
select {
case <-ticker.C:
log.Info("%d/%d (%2.0f%%) Pull Request(s) migrated in %d batches. %d PRs Remaining ...", i, count, float64(i)/float64(count)*100, int(math.Ceil(float64(i)/float64(limit))), count-int64(i))
default:
}
}
}

log.Info("Completed migrating %d Pull Request(s) in: %d batches", count, int(math.Ceil(float64(i)/float64(limit))))
return nil
}
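
The v128 hunks (and the matching v134 hunks below) also separate the pagination offset from the progress counter: start advances the Limit offset by one whole batch per pass, while i now counts individual rows and feeds the percentage and batch figures in the log line. A rough stand-alone sketch of that loop shape (Row, fetchBatch and the numbers are placeholders; the real code queries xorm with Limit(limit, start) and steps the offset by a fixed 50):

package main

import (
	"log"
	"math"
)

// Row stands in for the migration's PullRequest bean.
type Row struct{ ID int64 }

// fetchBatch stands in for x.Limit(limit, start).Asc("id").Find(&rows).
func fetchBatch(all []Row, limit, start int) []Row {
	if start >= len(all) {
		return nil
	}
	end := start + limit
	if end > len(all) {
		end = len(all)
	}
	return all[start:end]
}

func iterateInBatches(all []Row, limit int) {
	count := len(all)
	i := 0     // rows actually processed; drives the progress figures
	start := 0 // query offset; advances by one batch per pass
	for {
		rows := fetchBatch(all, limit, start)
		if len(rows) == 0 {
			break
		}
		start += limit // the commit itself steps by a fixed 50, the default buffer size
		for range rows {
			i++ // the per-row fix-up (merge_base recalculation) would happen here
		}
	}
	log.Printf("Completed %d/%d row(s) in %d batches", i, count, int(math.Ceil(float64(i)/float64(limit))))
}

func main() {
	iterateInBatches(make([]Row, 137), 50)
}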

models/migrations/v134.go  (+21 -2)

@@ -6,8 +6,10 @@ package migrations

import (
"fmt"
"math"
"path/filepath"
"strings"
"time"

"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
@@ -43,17 +45,26 @@ func refixMergeBase(x *xorm.Engine) error {
limit = 50
}

ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()
count, err := x.Where("has_merged = ?", true).Count(new(PullRequest))
if err != nil {
return err
}
log.Info("%d Merged Pull Request(s) to migrate ...", count)

i := 0
start := 0
for {
prs := make([]PullRequest, 0, 50)
- if err := x.Limit(limit, i).Asc("id").Where("has_merged = ?", true).Find(&prs); err != nil {
if err := x.Limit(limit, start).Asc("id").Where("has_merged = ?", true).Find(&prs); err != nil {
return fmt.Errorf("Find: %v", err)
}
if len(prs) == 0 {
break
}

- i += len(prs)
start += 50
for _, pr := range prs {
baseRepo := &Repository{ID: pr.BaseRepoID}
has, err := x.Table("repository").Get(baseRepo)
@@ -90,7 +101,15 @@ func refixMergeBase(x *xorm.Engine) error {
}
pr.MergeBase = strings.TrimSpace(pr.MergeBase)
x.ID(pr.ID).Cols("merge_base").Update(pr)
i++
select {
case <-ticker.C:
log.Info("%d/%d (%2.0f%%) Pull Request(s) migrated in %d batches. %d PRs Remaining ...", i, count, float64(i)/float64(count)*100, int(math.Ceil(float64(i)/float64(limit))), count-int64(i))
default:
}
}
}

log.Info("Completed migrating %d Pull Request(s) in: %d batches", count, int(math.Ceil(float64(i)/float64(limit))))
return nil
}

models/migrations/v136.go  (+69 -8)

@@ -6,25 +6,60 @@ package migrations

import (
"fmt"
"math"
"path/filepath"
"strings"
"time"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
- pull_service "code.gitea.io/gitea/services/pull"

"xorm.io/xorm"
)

func addCommitDivergenceToPulls(x *xorm.Engine) error {
type Repository struct {
ID int64 `xorm:"pk autoincr"`
OwnerID int64 `xorm:"UNIQUE(s) index"`
OwnerName string
LowerName string `xorm:"UNIQUE(s) INDEX NOT NULL"`
Name string `xorm:"INDEX NOT NULL"`
}

type PullRequest struct {
ID int64 `xorm:"pk autoincr"`

CommitsAhead int
CommitsBehind int

BaseRepoID int64 `xorm:"INDEX"`
BaseBranch string

HasMerged bool `xorm:"INDEX"`
MergedCommitID string `xorm:"VARCHAR(40)"`
}

if err := x.Sync2(new(models.PullRequest)); err != nil {
return fmt.Errorf("Sync2: %v", err)
}

- var last int
last := 0
migrated := 0

batchSize := setting.Database.IterateBufferSize
sess := x.NewSession()
defer sess.Close()

ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()
count, err := sess.Where("has_merged = ?", false).Count(new(PullRequest))
if err != nil {
return err
}
log.Info("%d Unmerged Pull Request(s) to migrate ...", count)

for {
if err := sess.Begin(); err != nil {
return err
@@ -37,27 +72,53 @@ func addCommitDivergenceToPulls(x *xorm.Engine) error {
if len(results) == 0 {
break
}
- last += len(results)
last += batchSize

for _, pr := range results {
- divergence, err := pull_service.GetDiverging(pr)
baseRepo := &Repository{ID: pr.BaseRepoID}
has, err := x.Table("repository").Get(baseRepo)
if err != nil {
return fmt.Errorf("Unable to get base repo %d %v", pr.BaseRepoID, err)
}
if !has {
log.Error("Missing base repo with id %d for PR ID %d", pr.BaseRepoID, pr.ID)
continue
}
userPath := filepath.Join(setting.RepoRootPath, strings.ToLower(baseRepo.OwnerName))
repoPath := filepath.Join(userPath, strings.ToLower(baseRepo.Name)+".git")

gitRefName := fmt.Sprintf("refs/pull/%d/head", pr.Index)

divergence, err := git.GetDivergingCommits(repoPath, pr.BaseBranch, gitRefName)
if err != nil {
log.Warn("Could not recalculate Divergence for pull: %d", pr.ID)
pr.CommitsAhead = 0
pr.CommitsBehind = 0
}
- if divergence != nil {
- pr.CommitsAhead = divergence.Ahead
- pr.CommitsBehind = divergence.Behind
- }
pr.CommitsAhead = divergence.Ahead
pr.CommitsBehind = divergence.Behind

if _, err = sess.ID(pr.ID).Cols("commits_ahead", "commits_behind").Update(pr); err != nil {
return fmt.Errorf("Update Cols: %v", err)
}
migrated++
}

if err := sess.Commit(); err != nil {
return err
}
select {
case <-ticker.C:
log.Info(
"%d/%d (%2.0f%%) Pull Request(s) migrated in %d batches. %d PRs Remaining ...",
migrated,
count,
float64(migrated)/float64(count)*100,
int(math.Ceil(float64(migrated)/float64(batchSize))),
count-int64(migrated))
default:
}
}
log.Info("Completed migrating %d Pull Request(s) in: %d batches", count, int(math.Ceil(float64(migrated)/float64(batchSize))))
return nil
}
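
v136 previously computed divergence through pull_service.GetDiverging, which pulls the services and models layers into a migration; the rewritten loop instead reads a minimal, locally declared Repository row, builds the bare repository path on disk, and asks git for the ahead/behind counts of refs/pull/<index>/head against the base branch. Conceptually that is a single git invocation; below is a rough stand-alone sketch using plain exec rather than Gitea's git module (the paths, refs and helper name are illustrative only, not the migration's actual code):

package main

import (
	"fmt"
	"os/exec"
	"strconv"
	"strings"
)

// divergence returns how many commits ref is ahead of and behind base in the
// repository at repoPath, via `git rev-list --left-right --count base...ref`,
// which prints "<only-in-base> <only-in-ref>". This is a conceptual stand-in
// for the git-module call used in the migration, not Gitea's implementation.
func divergence(repoPath, base, ref string) (ahead, behind int, err error) {
	cmd := exec.Command("git", "rev-list", "--left-right", "--count", base+"..."+ref)
	cmd.Dir = repoPath
	out, err := cmd.Output()
	if err != nil {
		return 0, 0, err
	}
	fields := strings.Fields(strings.TrimSpace(string(out)))
	if len(fields) != 2 {
		return 0, 0, fmt.Errorf("unexpected rev-list output: %q", out)
	}
	if behind, err = strconv.Atoi(fields[0]); err != nil {
		return 0, 0, err
	}
	ahead, err = strconv.Atoi(fields[1])
	return ahead, behind, err
}

func main() {
	// Illustrative values: a bare repository on disk and a PR head reference.
	ahead, behind, err := divergence("/data/repos/owner/repo.git", "master", "refs/pull/1/head")
	if err != nil {
		fmt.Println("git error:", err)
		return
	}
	fmt.Printf("ahead %d, behind %d\n", ahead, behind)
}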
