The migration tests failed, but CI reported success: https://github.com/go-gitea/gitea/actions/runs/7364373807/job/20044685969#step:8:141 This PR fixes the bug in migration v283 and also the hidden CI behaviour. The reason is that, in the Makefile, `GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' $(MIGRATE_TEST_PACKAGES)` will return the error exit code, but `for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \ GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \ done` will not — each loop iteration's exit status is discarded, so test failures are hidden from CI. This also fixes #29602.
@@ -49,7 +49,10 @@ jobs: | |||
- run: make backend | |||
env: | |||
TAGS: bindata | |||
- run: make test-pgsql-migration test-pgsql | |||
- name: run migration tests | |||
run: make test-pgsql-migration | |||
- name: run tests | |||
run: make test-pgsql | |||
timeout-minutes: 50 | |||
env: | |||
TAGS: bindata gogit | |||
@@ -72,7 +75,10 @@ jobs: | |||
- run: make backend | |||
env: | |||
TAGS: bindata gogit sqlite sqlite_unlock_notify | |||
- run: make test-sqlite-migration test-sqlite | |||
- name: run migration tests | |||
run: make test-sqlite-migration | |||
- name: run tests | |||
run: make test-sqlite | |||
timeout-minutes: 50 | |||
env: | |||
TAGS: bindata gogit sqlite sqlite_unlock_notify | |||
@@ -175,8 +181,10 @@ jobs: | |||
- run: make backend | |||
env: | |||
TAGS: bindata | |||
- name: run migration tests | |||
run: make test-mysql-migration | |||
- name: run tests | |||
run: make test-mysql-migration integration-test-coverage | |||
run: make integration-test-coverage | |||
env: | |||
TAGS: bindata | |||
RACE_ENABLED: true | |||
@@ -208,7 +216,9 @@ jobs: | |||
- run: make backend | |||
env: | |||
TAGS: bindata | |||
- run: make test-mssql-migration test-mssql | |||
- run: make test-mssql-migration | |||
- name: run tests | |||
run: make test-mssql | |||
timeout-minutes: 50 | |||
env: | |||
TAGS: bindata |
@@ -115,6 +115,7 @@ LINUX_ARCHS ?= linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64 | |||
GO_PACKAGES ?= $(filter-out code.gitea.io/gitea/tests/integration/migration-test code.gitea.io/gitea/tests code.gitea.io/gitea/tests/integration code.gitea.io/gitea/tests/e2e,$(shell $(GO) list ./... | grep -v /vendor/)) | |||
GO_TEST_PACKAGES ?= $(filter-out $(shell $(GO) list code.gitea.io/gitea/models/migrations/...) code.gitea.io/gitea/tests/integration/migration-test code.gitea.io/gitea/tests code.gitea.io/gitea/tests/integration code.gitea.io/gitea/tests/e2e,$(shell $(GO) list ./... | grep -v /vendor/)) | |||
MIGRATE_TEST_PACKAGES ?= $(shell $(GO) list code.gitea.io/gitea/models/migrations/...) | |||
FOMANTIC_WORK_DIR := web_src/fomantic | |||
@@ -710,9 +711,7 @@ migrations.sqlite.test: $(GO_SOURCES) generate-ini-sqlite | |||
.PHONY: migrations.individual.mysql.test | |||
migrations.individual.mysql.test: $(GO_SOURCES) | |||
for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \ | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \ | |||
done | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) | |||
.PHONY: migrations.individual.sqlite.test\#% | |||
migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite | |||
@@ -720,20 +719,15 @@ migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite | |||
.PHONY: migrations.individual.pgsql.test | |||
migrations.individual.pgsql.test: $(GO_SOURCES) | |||
for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \ | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \ | |||
done | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) | |||
.PHONY: migrations.individual.pgsql.test\#% | |||
migrations.individual.pgsql.test\#%: $(GO_SOURCES) generate-ini-pgsql | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$* | |||
.PHONY: migrations.individual.mssql.test | |||
migrations.individual.mssql.test: $(GO_SOURCES) generate-ini-mssql | |||
for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \ | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg -test.failfast; \ | |||
done | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) | |||
.PHONY: migrations.individual.mssql.test\#% | |||
migrations.individual.mssql.test\#%: $(GO_SOURCES) generate-ini-mssql | |||
@@ -741,9 +735,7 @@ migrations.individual.mssql.test\#%: $(GO_SOURCES) generate-ini-mssql | |||
.PHONY: migrations.individual.sqlite.test | |||
migrations.individual.sqlite.test: $(GO_SOURCES) generate-ini-sqlite | |||
for pkg in $(shell $(GO) list code.gitea.io/gitea/models/migrations/...); do \ | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' $$pkg; \ | |||
done | |||
GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) | |||
.PHONY: migrations.individual.sqlite.test\#% | |||
migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite |
@@ -36,12 +36,14 @@ func Test_DropTableColumns(t *testing.T) { | |||
"updated_unix", | |||
} | |||
x.SetMapper(names.GonicMapper{}) | |||
for i := range columns { | |||
x.SetMapper(names.GonicMapper{}) | |||
if err := x.Sync(new(DropTest)); err != nil { | |||
t.Errorf("unable to create DropTest table: %v", err) | |||
return | |||
} | |||
sess := x.NewSession() | |||
if err := sess.Begin(); err != nil { | |||
sess.Close() | |||
@@ -64,7 +66,6 @@ func Test_DropTableColumns(t *testing.T) { | |||
return | |||
} | |||
for j := range columns[i+1:] { | |||
x.SetMapper(names.GonicMapper{}) | |||
if err := x.Sync(new(DropTest)); err != nil { | |||
t.Errorf("unable to create DropTest table: %v", err) | |||
return |
@@ -0,0 +1,4 @@ | |||
- | |||
id: 1 | |||
repo_id: 1 | |||
index: 1 |
@@ -0,0 +1,11 @@ | |||
- | |||
id: 1 | |||
uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11 | |||
issue_id: 1 | |||
release_id: 0 | |||
- | |||
id: 2 | |||
uuid: a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12 | |||
issue_id: 0 | |||
release_id: 1 |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
repo_id: 1 |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
repo_id: 1 |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
context_hash: 19fe5caf872476db265596eaac1dc35ad1c6422d |
@@ -0,0 +1,5 @@ | |||
- | |||
id: 1 | |||
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d | |||
merge_base: 19fe5caf872476db265596eaac1dc35ad1c6422d | |||
merged_commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
sha1: 19fe5caf872476db265596eaac1dc35ad1c6422d |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
commit_id: 19fe5caf872476db265596eaac1dc35ad1c6422d |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d |
@@ -0,0 +1,3 @@ | |||
- | |||
id: 1 | |||
commit_sha: 19fe5caf872476db265596eaac1dc35ad1c6422d |
@@ -0,0 +1,4 @@ | |||
- | |||
id: 1 | |||
description: the badge | |||
image_url: https://gitea.com/myimage.png |
@@ -15,7 +15,6 @@ func Test_AddRepoIDForAttachment(t *testing.T) { | |||
type Attachment struct { | |||
ID int64 `xorm:"pk autoincr"` | |||
UUID string `xorm:"uuid UNIQUE"` | |||
RepoID int64 `xorm:"INDEX"` // this should not be zero | |||
IssueID int64 `xorm:"INDEX"` // maybe zero when creating | |||
ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating | |||
UploaderID int64 `xorm:"INDEX DEFAULT 0"` | |||
@@ -44,12 +43,21 @@ func Test_AddRepoIDForAttachment(t *testing.T) { | |||
return | |||
} | |||
var issueAttachments []*Attachment | |||
err := x.Where("issue_id > 0").Find(&issueAttachments) | |||
type NewAttachment struct { | |||
ID int64 `xorm:"pk autoincr"` | |||
UUID string `xorm:"uuid UNIQUE"` | |||
RepoID int64 `xorm:"INDEX"` // this should not be zero | |||
IssueID int64 `xorm:"INDEX"` // maybe zero when creating | |||
ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating | |||
UploaderID int64 `xorm:"INDEX DEFAULT 0"` | |||
} | |||
var issueAttachments []*NewAttachment | |||
err := x.Table("attachment").Where("issue_id > 0").Find(&issueAttachments) | |||
assert.NoError(t, err) | |||
for _, attach := range issueAttachments { | |||
assert.Greater(t, attach.RepoID, 0) | |||
assert.Greater(t, attach.IssueID, 0) | |||
assert.Greater(t, attach.RepoID, int64(0)) | |||
assert.Greater(t, attach.IssueID, int64(0)) | |||
var issue Issue | |||
has, err := x.ID(attach.IssueID).Get(&issue) | |||
assert.NoError(t, err) | |||
@@ -57,12 +65,12 @@ func Test_AddRepoIDForAttachment(t *testing.T) { | |||
assert.EqualValues(t, attach.RepoID, issue.RepoID) | |||
} | |||
var releaseAttachments []*Attachment | |||
err = x.Where("release_id > 0").Find(&releaseAttachments) | |||
var releaseAttachments []*NewAttachment | |||
err = x.Table("attachment").Where("release_id > 0").Find(&releaseAttachments) | |||
assert.NoError(t, err) | |||
for _, attach := range releaseAttachments { | |||
assert.Greater(t, attach.RepoID, 0) | |||
assert.Greater(t, attach.IssueID, 0) | |||
assert.Greater(t, attach.RepoID, int64(0)) | |||
assert.Greater(t, attach.ReleaseID, int64(0)) | |||
var release Release | |||
has, err := x.ID(attach.ReleaseID).Get(&release) | |||
assert.NoError(t, err) |
@@ -4,7 +4,10 @@ | |||
package v1_22 //nolint | |||
import ( | |||
"fmt" | |||
"xorm.io/xorm" | |||
"xorm.io/xorm/schemas" | |||
) | |||
func AddCombinedIndexToIssueUser(x *xorm.Engine) error { | |||
@@ -20,8 +23,18 @@ func AddCombinedIndexToIssueUser(x *xorm.Engine) error { | |||
return err | |||
} | |||
for _, issueUser := range duplicatedIssueUsers { | |||
if _, err := x.Exec("delete from issue_user where id in (SELECT id FROM issue_user WHERE issue_id = ? and uid = ? limit ?)", issueUser.IssueID, issueUser.UID, issueUser.Cnt-1); err != nil { | |||
return err | |||
if x.Dialect().URI().DBType == schemas.MSSQL { | |||
if _, err := x.Exec(fmt.Sprintf("delete from issue_user where id in (SELECT top %d id FROM issue_user WHERE issue_id = ? and uid = ?)", issueUser.Cnt-1), issueUser.IssueID, issueUser.UID); err != nil { | |||
return err | |||
} | |||
} else { | |||
var ids []int64 | |||
if err := x.SQL("SELECT id FROM issue_user WHERE issue_id = ? and uid = ? limit ?", issueUser.IssueID, issueUser.UID, issueUser.Cnt-1).Find(&ids); err != nil { | |||
return err | |||
} | |||
if _, err := x.Table("issue_user").In("id", ids).Delete(); err != nil { | |||
return err | |||
} | |||
} | |||
} | |||
@@ -36,9 +36,9 @@ func expandHashReferencesToSha256(x *xorm.Engine) error { | |||
if setting.Database.Type.IsMSSQL() { | |||
// drop indexes that need to be re-created afterwards | |||
droppedIndexes := []string{ | |||
"DROP INDEX commit_status.IDX_commit_status_context_hash", | |||
"DROP INDEX review_state.UQE_review_state_pull_commit_user", | |||
"DROP INDEX repo_archiver.UQE_repo_archiver_s", | |||
"DROP INDEX IF EXISTS [IDX_commit_status_context_hash] ON [commit_status]", | |||
"DROP INDEX IF EXISTS [UQE_review_state_pull_commit_user] ON [review_state]", | |||
"DROP INDEX IF EXISTS [UQE_repo_archiver_s] ON [repo_archiver]", | |||
} | |||
for _, s := range droppedIndexes { | |||
_, err := db.Exec(s) | |||
@@ -53,7 +53,7 @@ func expandHashReferencesToSha256(x *xorm.Engine) error { | |||
if setting.Database.Type.IsMySQL() { | |||
_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` MODIFY COLUMN `%s` VARCHAR(64)", alts[0], alts[1])) | |||
} else if setting.Database.Type.IsMSSQL() { | |||
_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` VARCHAR(64)", alts[0], alts[1])) | |||
_, err = db.Exec(fmt.Sprintf("ALTER TABLE [%s] ALTER COLUMN [%s] VARCHAR(64)", alts[0], alts[1])) | |||
} else { | |||
_, err = db.Exec(fmt.Sprintf("ALTER TABLE `%s` ALTER COLUMN `%s` TYPE VARCHAR(64)", alts[0], alts[1])) | |||
} |
@@ -17,14 +17,72 @@ func PrepareOldRepository(t *testing.T) (*xorm.Engine, func()) { | |||
ID int64 `xorm:"pk autoincr"` | |||
} | |||
type CommitStatus struct { | |||
ID int64 | |||
ContextHash string | |||
} | |||
type RepoArchiver struct { | |||
ID int64 | |||
RepoID int64 | |||
Type int | |||
CommitID string | |||
} | |||
type ReviewState struct { | |||
ID int64 | |||
CommitSHA string | |||
UserID int64 | |||
PullID int64 | |||
} | |||
type Comment struct { | |||
ID int64 | |||
CommitSHA string | |||
} | |||
type PullRequest struct { | |||
ID int64 | |||
CommitSHA string | |||
MergeBase string | |||
MergedCommitID string | |||
} | |||
type Release struct { | |||
ID int64 | |||
Sha1 string | |||
} | |||
type RepoIndexerStatus struct { | |||
ID int64 | |||
CommitSHA string | |||
} | |||
type Review struct { | |||
ID int64 | |||
CommitID string | |||
} | |||
// Prepare and load the testing database | |||
return base.PrepareTestEnv(t, 0, new(Repository)) | |||
return base.PrepareTestEnv(t, 0, | |||
new(Repository), | |||
new(CommitStatus), | |||
new(RepoArchiver), | |||
new(ReviewState), | |||
new(Review), | |||
new(Comment), | |||
new(PullRequest), | |||
new(Release), | |||
new(RepoIndexerStatus), | |||
) | |||
} | |||
func Test_RepositoryFormat(t *testing.T) { | |||
x, deferable := PrepareOldRepository(t) | |||
defer deferable() | |||
assert.NoError(t, AdjustDBForSha256(x)) | |||
type Repository struct { | |||
ID int64 `xorm:"pk autoincr"` | |||
ObjectFormatName string `xorg:"not null default('sha1')"` | |||
@@ -37,12 +95,10 @@ func Test_RepositoryFormat(t *testing.T) { | |||
assert.NoError(t, err) | |||
assert.EqualValues(t, 4, count) | |||
assert.NoError(t, AdjustDBForSha256(x)) | |||
repo.ID = 20 | |||
repo.ObjectFormatName = "sha256" | |||
_, err = x.Insert(repo) | |||
assert.NoError(t, err) | |||
id := repo.ID | |||
count, err = x.Count(new(Repository)) | |||
assert.NoError(t, err) | |||
@@ -55,7 +111,7 @@ func Test_RepositoryFormat(t *testing.T) { | |||
assert.EqualValues(t, "sha1", repo.ObjectFormatName) | |||
repo = new(Repository) | |||
ok, err = x.ID(20).Get(repo) | |||
ok, err = x.ID(id).Get(repo) | |||
assert.NoError(t, err) | |||
assert.EqualValues(t, true, ok) | |||
assert.EqualValues(t, "sha256", repo.ObjectFormatName) |
@@ -20,20 +20,20 @@ func Test_UpdateBadgeColName(t *testing.T) { | |||
} | |||
// Prepare and load the testing database | |||
x, deferable := base.PrepareTestEnv(t, 0, new(BadgeUnique), new(Badge)) | |||
x, deferable := base.PrepareTestEnv(t, 0, new(Badge)) | |||
defer deferable() | |||
if x == nil || t.Failed() { | |||
return | |||
} | |||
oldBadges := []Badge{ | |||
{ID: 1, Description: "Test Badge 1", ImageURL: "https://example.com/badge1.png"}, | |||
{ID: 2, Description: "Test Badge 2", ImageURL: "https://example.com/badge2.png"}, | |||
{ID: 3, Description: "Test Badge 3", ImageURL: "https://example.com/badge3.png"}, | |||
oldBadges := []*Badge{ | |||
{Description: "Test Badge 1", ImageURL: "https://example.com/badge1.png"}, | |||
{Description: "Test Badge 2", ImageURL: "https://example.com/badge2.png"}, | |||
{Description: "Test Badge 3", ImageURL: "https://example.com/badge3.png"}, | |||
} | |||
for _, badge := range oldBadges { | |||
_, err := x.Insert(&badge) | |||
_, err := x.Insert(badge) | |||
assert.NoError(t, err) | |||
} | |||
@@ -48,7 +48,7 @@ func Test_UpdateBadgeColName(t *testing.T) { | |||
} | |||
for i, e := range oldBadges { | |||
got := got[i] | |||
got := got[i+1] // 1 is in the badge.yml | |||
assert.Equal(t, e.ID, got.ID) | |||
assert.Equal(t, fmt.Sprintf("%d", e.ID), got.Slug) | |||
} |