Diffstat:
 Makefile                                                                  |  37
 models/migrations/fixtures/Test_deleteOrphanedIssueLabels/issue_label.yml |  29
 models/migrations/fixtures/Test_deleteOrphanedIssueLabels/label.yml       |  43
 models/migrations/fixtures/Test_removeInvalidLabels/comment.yml           |  52
 models/migrations/fixtures/Test_removeInvalidLabels/issue.yml             |  21
 models/migrations/fixtures/Test_removeInvalidLabels/issue_label.yml       |  35
 models/migrations/fixtures/Test_removeInvalidLabels/label.yml             |  26
 models/migrations/fixtures/Test_removeInvalidLabels/repository.yml        |  17
 models/migrations/migrations.go                                           |  17
 models/migrations/migrations_test.go                                      | 338
 models/migrations/testlogger_test.go                                      | 188
 models/migrations/v176.go                                                 |   3
 models/migrations/v176_test.go                                            | 128
 models/migrations/v177.go                                                 |   1
 models/migrations/v177_test.go                                            |  88
 models/models.go                                                          |   7
 models/test_fixtures.go                                                   |  26
 17 files changed, 1038 insertions(+), 18 deletions(-)
diff --git a/Makefile b/Makefile
index 00bdbab259..b75be16afb 100644
--- a/Makefile
+++ b/Makefile
@@ -89,7 +89,7 @@ LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(G
LINUX_ARCHS ?= linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64
-GO_PACKAGES ?= $(filter-out code.gitea.io/gitea/integrations/migration-test,$(filter-out code.gitea.io/gitea/integrations,$(shell $(GO) list -mod=vendor ./... | grep -v /vendor/)))
+GO_PACKAGES ?= $(filter-out code.gitea.io/gitea/models/migrations code.gitea.io/gitea/integrations/migration-test code.gitea.io/gitea/integrations,$(shell $(GO) list -mod=vendor ./... | grep -v /vendor/))
FOMANTIC_CONFIGS := semantic.json web_src/fomantic/theme.config.less web_src/fomantic/_site/globals/site.variables
FOMANTIC_DEST := web_src/fomantic/build/semantic.js web_src/fomantic/build/semantic.css
@@ -399,8 +399,9 @@ test-sqlite\#%: integrations.sqlite.test generate-ini-sqlite
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/sqlite.ini ./integrations.sqlite.test -test.run $(subst .,/,$*)
.PHONY: test-sqlite-migration
-test-sqlite-migration: migrations.sqlite.test generate-ini-sqlite
+test-sqlite-migration: migrations.sqlite.test migrations.individual.sqlite.test generate-ini-sqlite
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/sqlite.ini ./migrations.sqlite.test
+ GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/sqlite.ini ./migrations.individual.sqlite.test
generate-ini-mysql:
sed -e 's|{{TEST_MYSQL_HOST}}|${TEST_MYSQL_HOST}|g' \
@@ -419,8 +420,9 @@ test-mysql\#%: integrations.mysql.test generate-ini-mysql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mysql.ini ./integrations.mysql.test -test.run $(subst .,/,$*)
.PHONY: test-mysql-migration
-test-mysql-migration: migrations.mysql.test generate-ini-mysql
+test-mysql-migration: migrations.mysql.test migrations.individual.mysql.test generate-ini-mysql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mysql.ini ./migrations.mysql.test
+ GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mysql.ini ./migrations.individual.mysql.test
generate-ini-mysql8:
sed -e 's|{{TEST_MYSQL8_HOST}}|${TEST_MYSQL8_HOST}|g' \
@@ -439,8 +441,9 @@ test-mysql8\#%: integrations.mysql8.test generate-ini-mysql8
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mysql8.ini ./integrations.mysql8.test -test.run $(subst .,/,$*)
.PHONY: test-mysql8-migration
-test-mysql8-migration: migrations.mysql8.test generate-ini-mysql8
+test-mysql8-migration: migrations.mysql8.test migrations.individual.mysql8.test generate-ini-mysql8
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mysql8.ini ./migrations.mysql8.test
+ GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mysql8.ini ./migrations.individual.mysql8.test
generate-ini-pgsql:
sed -e 's|{{TEST_PGSQL_HOST}}|${TEST_PGSQL_HOST}|g' \
@@ -460,8 +463,9 @@ test-pgsql\#%: integrations.pgsql.test generate-ini-pgsql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/pgsql.ini ./integrations.pgsql.test -test.run $(subst .,/,$*)
.PHONY: test-pgsql-migration
-test-pgsql-migration: migrations.pgsql.test generate-ini-pgsql
+test-pgsql-migration: migrations.pgsql.test migrations.individual.pgsql.test generate-ini-pgsql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/pgsql.ini ./migrations.pgsql.test
+ GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/pgsql.ini ./migrations.individual.pgsql.test
generate-ini-mssql:
sed -e 's|{{TEST_MSSQL_HOST}}|${TEST_MSSQL_HOST}|g' \
@@ -480,8 +484,9 @@ test-mssql\#%: integrations.mssql.test generate-ini-mssql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mssql.ini ./integrations.mssql.test -test.run $(subst .,/,$*)
.PHONY: test-mssql-migration
-test-mssql-migration: migrations.mssql.test generate-ini-mssql
+test-mssql-migration: migrations.mssql.test migrations.individual.mssql.test generate-ini-mssql
GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mssql.ini ./migrations.mssql.test -test.failfast
+ GITEA_ROOT="$(CURDIR)" GITEA_CONF=integrations/mssql.ini ./migrations.individual.mssql.test -test.failfast
.PHONY: bench-sqlite
bench-sqlite: integrations.sqlite.test generate-ini-sqlite
@@ -541,6 +546,26 @@ migrations.mssql.test: $(GO_SOURCES)
migrations.sqlite.test: $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/integrations/migration-test -o migrations.sqlite.test -tags '$(TEST_TAGS)'
+.PHONY: migrations.individual.mysql.test
+migrations.individual.mysql.test: $(GO_SOURCES)
+ $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/models/migrations -o migrations.individual.mysql.test
+
+.PHONY: migrations.individual.mysql8.test
+migrations.individual.mysql8.test: $(GO_SOURCES)
+ $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/models/migrations -o migrations.individual.mysql8.test
+
+.PHONY: migrations.individual.pgsql.test
+migrations.individual.pgsql.test: $(GO_SOURCES)
+ $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/models/migrations -o migrations.individual.pgsql.test
+
+.PHONY: migrations.individual.mssql.test
+migrations.individual.mssql.test: $(GO_SOURCES)
+ $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/models/migrations -o migrations.individual.mssql.test
+
+.PHONY: migrations.individual.sqlite.test
+migrations.individual.sqlite.test: $(GO_SOURCES)
+ $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/models/migrations -o migrations.individual.sqlite.test -tags '$(TEST_TAGS)'
+
.PHONY: check
check: test
diff --git a/models/migrations/fixtures/Test_deleteOrphanedIssueLabels/issue_label.yml b/models/migrations/fixtures/Test_deleteOrphanedIssueLabels/issue_label.yml
new file mode 100644
index 0000000000..96104ac04a
--- /dev/null
+++ b/models/migrations/fixtures/Test_deleteOrphanedIssueLabels/issue_label.yml
@@ -0,0 +1,29 @@
+# Issue_Label 1 should not be deleted
+-
+ id: 1
+ issue_id: 1
+ label_id: 1
+
+# Issue_label 2 should be deleted
+-
+ id: 2
+ issue_id: 5
+ label_id: 99
+
+# Issue_Label 3 should not be deleted
+-
+ id: 3
+ issue_id: 2
+ label_id: 1
+
+# Issue_Label 4 should not be deleted
+-
+ id: 4
+ issue_id: 2
+ label_id: 4
+
+-
+ id: 5
+ issue_id: 2
+ label_id: 87
+
diff --git a/models/migrations/fixtures/Test_deleteOrphanedIssueLabels/label.yml b/models/migrations/fixtures/Test_deleteOrphanedIssueLabels/label.yml
new file mode 100644
index 0000000000..d651c87d5b
--- /dev/null
+++ b/models/migrations/fixtures/Test_deleteOrphanedIssueLabels/label.yml
@@ -0,0 +1,43 @@
+-
+ id: 1
+ repo_id: 1
+ org_id: 0
+ name: label1
+ color: '#abcdef'
+ num_issues: 2
+ num_closed_issues: 0
+
+-
+ id: 2
+ repo_id: 1
+ org_id: 0
+ name: label2
+ color: '#000000'
+ num_issues: 1
+ num_closed_issues: 1
+-
+ id: 3
+ repo_id: 0
+ org_id: 3
+ name: orglabel3
+ color: '#abcdef'
+ num_issues: 0
+ num_closed_issues: 0
+
+-
+ id: 4
+ repo_id: 0
+ org_id: 3
+ name: orglabel4
+ color: '#000000'
+ num_issues: 1
+ num_closed_issues: 0
+
+-
+ id: 5
+ repo_id: 10
+ org_id: 0
+ name: pull-test-label
+ color: '#000000'
+ num_issues: 0
+ num_closed_issues: 0
diff --git a/models/migrations/fixtures/Test_removeInvalidLabels/comment.yml b/models/migrations/fixtures/Test_removeInvalidLabels/comment.yml
new file mode 100644
index 0000000000..4f44e29661
--- /dev/null
+++ b/models/migrations/fixtures/Test_removeInvalidLabels/comment.yml
@@ -0,0 +1,52 @@
+# type Comment struct {
+# ID int64 `xorm:"pk autoincr"`
+# Type int `xorm:"INDEX"`
+# IssueID int64 `xorm:"INDEX"`
+# LabelID int64
+# }
+#
+# we are only interested in type 7
+#
+
+-
+ id: 1 # Should remain
+ type: 6
+ issue_id: 1
+ label_id: 0
+ should_remain: true
+-
+ id: 2 # Should remain
+ type: 7
+ issue_id: 1 # repo_id: 1
+ label_id: 1 # repo_id: 1
+ should_remain: true
+-
+ id: 3 # Should remain
+ type: 7
+ issue_id: 2 # repo_id: 2 owner_id: 1
+ label_id: 2 # org_id: 1
+ should_remain: true
+-
+ id: 4 # Should be DELETED
+ type: 7
+ issue_id: 1 # repo_id: 1
+ label_id: 3 # repo_id: 2
+ should_remain: false
+-
+ id: 5 # Should remain
+ type: 7
+ issue_id: 3 # repo_id: 1
+ label_id: 1 # repo_id: 1
+ should_remain: true
+-
+ id: 6 # Should be DELETED
+ type: 7
+ issue_id: 3 # repo_id: 1 owner_id: 2
+ label_id: 2 # org_id: 1
+ should_remain: false
+-
+ id: 7 # Should be DELETED
+ type: 7
+ issue_id: 3 # repo_id: 1 owner_id: 2
+ label_id: 5 # repo_id: 3
+ should_remain: false
diff --git a/models/migrations/fixtures/Test_removeInvalidLabels/issue.yml b/models/migrations/fixtures/Test_removeInvalidLabels/issue.yml
new file mode 100644
index 0000000000..46ad46c174
--- /dev/null
+++ b/models/migrations/fixtures/Test_removeInvalidLabels/issue.yml
@@ -0,0 +1,21 @@
+# type Issue struct {
+# ID int64 `xorm:"pk autoincr"`
+# RepoID int64 `xorm:"INDEX UNIQUE(repo_index)"`
+# Index int64 `xorm:"UNIQUE(repo_index)"` // Index in one repository.
+# }
+-
+ id: 1
+ repo_id: 1
+ index: 1
+-
+ id: 2
+ repo_id: 2
+ index: 1
+-
+ id: 3
+ repo_id: 1
+ index: 2
+-
+ id: 4
+ repo_id: 3
+ index: 1
diff --git a/models/migrations/fixtures/Test_removeInvalidLabels/issue_label.yml b/models/migrations/fixtures/Test_removeInvalidLabels/issue_label.yml
new file mode 100644
index 0000000000..5f5b8cb102
--- /dev/null
+++ b/models/migrations/fixtures/Test_removeInvalidLabels/issue_label.yml
@@ -0,0 +1,35 @@
+# type IssueLabel struct {
+# ID int64 `xorm:"pk autoincr"`
+# IssueID int64 `xorm:"UNIQUE(s)"`
+# LabelID int64 `xorm:"UNIQUE(s)"`
+# }
+-
+ id: 1 # Should remain - matches comment 2
+ issue_id: 1
+ label_id: 1
+ should_remain: true
+-
+ id: 2 # Should remain
+ issue_id: 2
+ label_id: 2
+ should_remain: true
+-
+ id: 3 # Should be deleted
+ issue_id: 1
+ label_id: 3
+ should_remain: false
+-
+ id: 4 # Should remain
+ issue_id: 3
+ label_id: 1
+ should_remain: true
+-
+ id: 5 # Should be deleted
+ issue_id: 3
+ label_id: 2
+ should_remain: false
+-
+ id: 6 # Should be deleted
+ issue_id: 3
+ label_id: 5
+ should_remain: false
diff --git a/models/migrations/fixtures/Test_removeInvalidLabels/label.yml b/models/migrations/fixtures/Test_removeInvalidLabels/label.yml
new file mode 100644
index 0000000000..094b811d91
--- /dev/null
+++ b/models/migrations/fixtures/Test_removeInvalidLabels/label.yml
@@ -0,0 +1,26 @@
+# type Label struct {
+# ID int64 `xorm:"pk autoincr"`
+# RepoID int64 `xorm:"INDEX"`
+# OrgID int64 `xorm:"INDEX"`
+# }
+-
+ id: 1
+ repo_id: 1
+ org_id: 0
+-
+ id: 2
+ repo_id: 0
+ org_id: 1
+-
+ id: 3
+ repo_id: 2
+ org_id: 0
+-
+ id: 4
+ repo_id: 1
+ org_id: 0
+-
+ id: 5
+ repo_id: 3
+ org_id: 0
+
diff --git a/models/migrations/fixtures/Test_removeInvalidLabels/repository.yml b/models/migrations/fixtures/Test_removeInvalidLabels/repository.yml
new file mode 100644
index 0000000000..180f11b37c
--- /dev/null
+++ b/models/migrations/fixtures/Test_removeInvalidLabels/repository.yml
@@ -0,0 +1,17 @@
+# type Repository struct {
+# ID int64 `xorm:"pk autoincr"`
+# OwnerID int64 `xorm:"UNIQUE(s) index"`
+# LowerName string `xorm:"UNIQUE(s) INDEX NOT NULL"`
+# }
+-
+ id: 1
+ owner_id: 2
+ lower_name: "repo1"
+-
+ id: 2
+ owner_id: 1
+ lower_name: "repo2"
+-
+ id: 3
+ owner_id: 2
+ lower_name: "repo3"
diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go
index 2c85ebdfd7..c8e687b382 100644
--- a/models/migrations/migrations.go
+++ b/models/migrations/migrations.go
@@ -819,9 +819,24 @@ func dropTableColumns(sess *xorm.Session, tableName string, columnNames ...strin
}
for _, constraint := range constraints {
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` DROP CONSTRAINT `%s`", tableName, constraint)); err != nil {
- return fmt.Errorf("Drop table `%s` constraint `%s`: %v", tableName, constraint, err)
+ return fmt.Errorf("Drop table `%s` default constraint `%s`: %v", tableName, constraint, err)
}
}
+ sql = fmt.Sprintf("SELECT DISTINCT Name FROM SYS.INDEXES INNER JOIN SYS.INDEX_COLUMNS ON INDEXES.INDEX_ID = INDEX_COLUMNS.INDEX_ID AND INDEXES.OBJECT_ID = INDEX_COLUMNS.OBJECT_ID WHERE INDEXES.OBJECT_ID = OBJECT_ID('%[1]s') AND INDEX_COLUMNS.COLUMN_ID IN (SELECT column_id FROM sys.columns WHERE lower(NAME) IN (%[2]s) AND object_id = OBJECT_ID('%[1]s'))",
+ tableName, strings.ReplaceAll(cols, "`", "'"))
+ constraints = make([]string, 0)
+ if err := sess.SQL(sql).Find(&constraints); err != nil {
+ return fmt.Errorf("Find constraints: %v", err)
+ }
+ for _, constraint := range constraints {
+ if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` DROP CONSTRAINT IF EXISTS `%s`", tableName, constraint)); err != nil {
+ return fmt.Errorf("Drop table `%s` index constraint `%s`: %v", tableName, constraint, err)
+ }
+ if _, err := sess.Exec(fmt.Sprintf("DROP INDEX IF EXISTS `%[2]s` ON `%[1]s`", tableName, constraint)); err != nil {
+ return fmt.Errorf("Drop index `%[2]s` on `%[1]s`: %v", tableName, constraint, err)
+ }
+ }
+
if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `%s` DROP COLUMN %s", tableName, cols)); err != nil {
return fmt.Errorf("Drop table `%s` columns %v: %v", tableName, columnNames, err)
}
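The hunk above extends dropTableColumns so that, on MSSQL, index constraints referencing the columns are dropped (in addition to default constraints) before the columns themselves are removed. As a rough sketch only, a migration could rely on it like this; the migration function, table and column names below are hypothetical and not part of this change:

package migrations

import "xorm.io/xorm"

// removeObsoleteColumn is a hypothetical example migration, not part of this change.
func removeObsoleteColumn(x *xorm.Engine) error {
	sess := x.NewSession()
	defer sess.Close()
	if err := sess.Begin(); err != nil {
		return err
	}
	// dropTableColumns takes care of the per-dialect details, including the
	// MSSQL default and index constraints handled in the hunk above.
	if err := dropTableColumns(sess, "repository", "obsolete_column"); err != nil {
		return err
	}
	return sess.Commit()
}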
diff --git a/models/migrations/migrations_test.go b/models/migrations/migrations_test.go
new file mode 100644
index 0000000000..641d972b8b
--- /dev/null
+++ b/models/migrations/migrations_test.go
@@ -0,0 +1,338 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package migrations
+
+import (
+ "database/sql"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "runtime"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "github.com/stretchr/testify/assert"
+ "github.com/unknwon/com"
+ "xorm.io/xorm"
+ "xorm.io/xorm/names"
+)
+
+func TestMain(m *testing.M) {
+ giteaRoot := base.SetupGiteaRoot()
+ if giteaRoot == "" {
+ fmt.Println("Environment variable $GITEA_ROOT not set")
+ os.Exit(1)
+ }
+ giteaBinary := "gitea"
+ if runtime.GOOS == "windows" {
+ giteaBinary += ".exe"
+ }
+ setting.AppPath = path.Join(giteaRoot, giteaBinary)
+ if _, err := os.Stat(setting.AppPath); err != nil {
+ fmt.Printf("Could not find gitea binary at %s\n", setting.AppPath)
+ os.Exit(1)
+ }
+
+ giteaConf := os.Getenv("GITEA_CONF")
+ if giteaConf == "" {
+ giteaConf = path.Join(filepath.Dir(setting.AppPath), "integrations/sqlite.ini")
+ fmt.Printf("Environment variable $GITEA_CONF not set - defaulting to %s\n", giteaConf)
+ }
+
+ if !path.IsAbs(giteaConf) {
+ setting.CustomConf = path.Join(giteaRoot, giteaConf)
+ } else {
+ setting.CustomConf = giteaConf
+ }
+
+ setting.SetCustomPathAndConf("", "", "")
+ setting.NewContext()
+ setting.CheckLFSVersion()
+ setting.InitDBConfig()
+ setting.NewLogServices(true)
+
+ exitStatus := m.Run()
+
+ if err := removeAllWithRetry(setting.RepoRootPath); err != nil {
+ fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err)
+ }
+ if err := removeAllWithRetry(setting.AppDataPath); err != nil {
+ fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err)
+ }
+ os.Exit(exitStatus)
+}
+
+func removeAllWithRetry(dir string) error {
+ var err error
+ for i := 0; i < 20; i++ {
+ err = os.RemoveAll(dir)
+ if err == nil {
+ break
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ return err
+}
+
+// SetEngine sets the xorm.Engine
+func SetEngine() (*xorm.Engine, error) {
+ x, err := models.GetNewEngine()
+ if err != nil {
+ return x, fmt.Errorf("Failed to connect to database: %v", err)
+ }
+
+ x.SetMapper(names.GonicMapper{})
+ // WARNING: for the serv command we MUST NOT write to os.Stdout,
+ // so route SQL logging to the log file instead of printing to stdout.
+ x.SetLogger(models.NewXORMLogger(setting.Database.LogSQL))
+ x.ShowSQL(setting.Database.LogSQL)
+ x.SetMaxOpenConns(setting.Database.MaxOpenConns)
+ x.SetMaxIdleConns(setting.Database.MaxIdleConns)
+ x.SetConnMaxLifetime(setting.Database.ConnMaxLifetime)
+ return x, nil
+}
+
+func deleteDB() error {
+ switch {
+ case setting.Database.UseSQLite3:
+ if err := util.Remove(setting.Database.Path); err != nil {
+ return err
+ }
+ return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
+
+ case setting.Database.UseMySQL:
+ db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
+ setting.Database.User, setting.Database.Passwd, setting.Database.Host))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", setting.Database.Name)); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", setting.Database.Name)); err != nil {
+ return err
+ }
+ return nil
+ case setting.Database.UsePostgreSQL:
+ db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s",
+ setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", setting.Database.Name)); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE %s", setting.Database.Name)); err != nil {
+ return err
+ }
+ db.Close()
+
+ // Check if we need to setup a specific schema
+ if len(setting.Database.Schema) != 0 {
+ db, err = sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s",
+ setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.Name, setting.Database.SSLMode))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ schrows, err := db.Query(fmt.Sprintf("SELECT 1 FROM information_schema.schemata WHERE schema_name = '%s'", setting.Database.Schema))
+ if err != nil {
+ return err
+ }
+ defer schrows.Close()
+
+ if !schrows.Next() {
+ // Create and setup a DB schema
+ _, err = db.Exec(fmt.Sprintf("CREATE SCHEMA %s", setting.Database.Schema))
+ if err != nil {
+ return err
+ }
+ }
+
+ // Make the user's default search path the created schema; this will affect new connections
+ _, err = db.Exec(fmt.Sprintf(`ALTER USER "%s" SET search_path = %s`, setting.Database.User, setting.Database.Schema))
+ if err != nil {
+ return err
+ }
+ return nil
+ }
+ case setting.Database.UseMSSQL:
+ host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
+ db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
+ host, port, "master", setting.Database.User, setting.Database.Passwd))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS [%s]", setting.Database.Name)); err != nil {
+ return err
+ }
+ if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE [%s]", setting.Database.Name)); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// prepareTestEnv prepares the test environment and resets the database. The skip parameter should usually be 0.
+// Provide models to be synced with the database - in particular any models you expect fixtures to be loaded from.
+//
+// fixtures in `models/migrations/fixtures/<TestName>` will be loaded automatically
+func prepareTestEnv(t *testing.T, skip int, syncModels ...interface{}) (*xorm.Engine, func()) {
+ t.Helper()
+ ourSkip := 2
+ ourSkip += skip
+ deferFn := PrintCurrentTest(t, ourSkip)
+ assert.NoError(t, os.RemoveAll(setting.RepoRootPath))
+
+ assert.NoError(t, com.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"),
+ setting.RepoRootPath))
+
+ if err := deleteDB(); err != nil {
+ t.Errorf("unable to reset database: %v", err)
+ return nil, deferFn
+ }
+
+ x, err := SetEngine()
+ assert.NoError(t, err)
+ if x != nil {
+ oldDefer := deferFn
+ deferFn = func() {
+ oldDefer()
+ if err := x.Close(); err != nil {
+ t.Errorf("error during close: %v", err)
+ }
+ }
+ }
+ if err != nil {
+ return x, deferFn
+ }
+
+ if len(syncModels) > 0 {
+ if err := x.Sync2(syncModels...); err != nil {
+ t.Errorf("error during sync: %v", err)
+ return x, deferFn
+ }
+ }
+
+ fixturesDir := filepath.Join(filepath.Dir(setting.AppPath), "models", "migrations", "fixtures", t.Name())
+
+ if _, err := os.Stat(fixturesDir); err == nil {
+ t.Logf("initializing fixtures from: %s", fixturesDir)
+ if err := models.InitFixtures(fixturesDir, x); err != nil {
+ t.Errorf("error whilst initializing fixtures from %s: %v", fixturesDir, err)
+ return x, deferFn
+ }
+ if err := models.LoadFixtures(x); err != nil {
+ t.Errorf("error whilst loading fixtures from %s: %v", fixturesDir, err)
+ return x, deferFn
+ }
+ } else if !os.IsNotExist(err) {
+ t.Errorf("unexpected error whilst checking for existence of fixtures: %v", err)
+ } else {
+ t.Logf("no fixtures found in: %s", fixturesDir)
+ }
+
+ return x, deferFn
+}
+
+func Test_dropTableColumns(t *testing.T) {
+ x, deferable := prepareTestEnv(t, 0)
+ if x == nil || t.Failed() {
+ defer deferable()
+ return
+ }
+ defer deferable()
+
+ type DropTest struct {
+ ID int64 `xorm:"pk autoincr"`
+ FirstColumn string
+ ToDropColumn string `xorm:"unique"`
+ AnotherColumn int64
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+ }
+
+ columns := []string{
+ "first_column",
+ "to_drop_column",
+ "another_column",
+ "created_unix",
+ "updated_unix",
+ }
+
+ for i := range columns {
+ x.SetMapper(names.GonicMapper{})
+ if err := x.Sync2(new(DropTest)); err != nil {
+ t.Errorf("unable to create DropTest table: %v", err)
+ return
+ }
+ sess := x.NewSession()
+ if err := sess.Begin(); err != nil {
+ sess.Close()
+ t.Errorf("unable to begin transaction: %v", err)
+ return
+ }
+ if err := dropTableColumns(sess, "drop_test", columns[i:]...); err != nil {
+ sess.Close()
+ t.Errorf("Unable to drop columns[%d:]: %s from drop_test: %v", i, columns[i:], err)
+ return
+ }
+ if err := sess.Commit(); err != nil {
+ sess.Close()
+ t.Errorf("unable to commit transaction: %v", err)
+ return
+ }
+ sess.Close()
+ if err := x.DropTables(new(DropTest)); err != nil {
+ t.Errorf("unable to drop table: %v", err)
+ return
+ }
+ for j := range columns[i+1:] {
+ x.SetMapper(names.GonicMapper{})
+ if err := x.Sync2(new(DropTest)); err != nil {
+ t.Errorf("unable to create DropTest table: %v", err)
+ return
+ }
+ dropcols := append([]string{columns[i]}, columns[j+i+1:]...)
+ sess := x.NewSession()
+ if err := sess.Begin(); err != nil {
+ sess.Close()
+ t.Errorf("unable to begin transaction: %v", err)
+ return
+ }
+ if err := dropTableColumns(sess, "drop_test", dropcols...); err != nil {
+ sess.Close()
+ t.Errorf("Unable to drop columns: %s from drop_test: %v", dropcols, err)
+ return
+ }
+ if err := sess.Commit(); err != nil {
+ sess.Close()
+ t.Errorf("unable to commit transaction: %v", err)
+ return
+ }
+ sess.Close()
+ if err := x.DropTables(new(DropTest)); err != nil {
+ t.Errorf("unable to drop table: %v", err)
+ return
+ }
+ }
+ }
+}
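With prepareTestEnv above, a new per-migration test only has to pass the models the migration touches and ship fixture YAML under models/migrations/fixtures/<TestName>/. A minimal sketch, assuming a hypothetical someMigration helper and SomeTable model:

package migrations

import "testing"

// Test_someMigration sketches the pattern; someMigration and SomeTable are
// placeholders, not part of this change.
func Test_someMigration(t *testing.T) {
	type SomeTable struct {
		ID    int64 `xorm:"pk autoincr"`
		Value string
	}

	// Fixtures in models/migrations/fixtures/Test_someMigration/ are loaded automatically.
	x, deferable := prepareTestEnv(t, 0, new(SomeTable))
	if x == nil || t.Failed() {
		defer deferable()
		return
	}
	defer deferable()

	if err := someMigration(x); err != nil {
		t.Errorf("unable to run someMigration: %v", err)
		return
	}

	var rows []*SomeTable
	if err := x.Find(&rows); err != nil {
		t.Errorf("unable to read back rows: %v", err)
		return
	}
	// ... assert on rows here ...
}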
diff --git a/models/migrations/testlogger_test.go b/models/migrations/testlogger_test.go
new file mode 100644
index 0000000000..8d6e61ae64
--- /dev/null
+++ b/models/migrations/testlogger_test.go
@@ -0,0 +1,188 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package migrations
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "runtime"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/queue"
+ jsoniter "github.com/json-iterator/go"
+)
+
+var (
+ prefix string
+ slowTest = 10 * time.Second
+ slowFlush = 5 * time.Second
+)
+
+// TestLogger is a logger which will write to the testing log
+type TestLogger struct {
+ log.WriterLogger
+}
+
+var writerCloser = &testLoggerWriterCloser{}
+
+type testLoggerWriterCloser struct {
+ sync.RWMutex
+ t []*testing.TB
+}
+
+func (w *testLoggerWriterCloser) setT(t *testing.TB) {
+ w.Lock()
+ w.t = append(w.t, t)
+ w.Unlock()
+}
+
+func (w *testLoggerWriterCloser) Write(p []byte) (int, error) {
+ w.RLock()
+ var t *testing.TB
+ if len(w.t) > 0 {
+ t = w.t[len(w.t)-1]
+ }
+ w.RUnlock()
+ if t != nil && *t != nil {
+ if len(p) > 0 && p[len(p)-1] == '\n' {
+ p = p[:len(p)-1]
+ }
+
+ defer func() {
+ err := recover()
+ if err == nil {
+ return
+ }
+ var errString string
+ errErr, ok := err.(error)
+ if ok {
+ errString = errErr.Error()
+ } else {
+ errString, ok = err.(string)
+ }
+ if !ok {
+ panic(err)
+ }
+ if !strings.HasPrefix(errString, "Log in goroutine after ") {
+ panic(err)
+ }
+ }()
+
+ (*t).Log(string(p))
+ return len(p), nil
+ }
+ return len(p), nil
+}
+
+func (w *testLoggerWriterCloser) Close() error {
+ w.Lock()
+ if len(w.t) > 0 {
+ w.t = w.t[:len(w.t)-1]
+ }
+ w.Unlock()
+ return nil
+}
+
+// PrintCurrentTest prints the current test to os.Stdout
+func PrintCurrentTest(t testing.TB, skip ...int) func() {
+ start := time.Now()
+ actualSkip := 1
+ if len(skip) > 0 {
+ actualSkip = skip[0]
+ }
+ _, filename, line, _ := runtime.Caller(actualSkip)
+
+ if log.CanColorStdout {
+ fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", fmt.Formatter(log.NewColoredValue(t.Name())), strings.TrimPrefix(filename, prefix), line)
+ } else {
+ fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", t.Name(), strings.TrimPrefix(filename, prefix), line)
+ }
+ writerCloser.setT(&t)
+ return func() {
+ took := time.Since(start)
+ if took > slowTest {
+ if log.CanColorStdout {
+ fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgYellow)), fmt.Formatter(log.NewColoredValue(took, log.Bold, log.FgYellow)))
+ } else {
+ fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", t.Name(), took)
+ }
+ }
+ timer := time.AfterFunc(slowFlush, func() {
+ if log.CanColorStdout {
+ fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), slowFlush)
+ } else {
+ fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", t.Name(), slowFlush)
+ }
+ })
+ if err := queue.GetManager().FlushAll(context.Background(), -1); err != nil {
+ t.Errorf("Flushing queues failed with error %v", err)
+ }
+ timer.Stop()
+ flushTook := time.Since(start) - took
+ if flushTook > slowFlush {
+ if log.CanColorStdout {
+ fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), fmt.Formatter(log.NewColoredValue(flushTook, log.Bold, log.FgRed)))
+ } else {
+ fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", t.Name(), flushTook)
+ }
+ }
+ _ = writerCloser.Close()
+ }
+}
+
+// Printf takes a format and args and prints the string to os.Stdout
+func Printf(format string, args ...interface{}) {
+ if log.CanColorStdout {
+ for i := 0; i < len(args); i++ {
+ args[i] = log.NewColoredValue(args[i])
+ }
+ }
+ fmt.Fprintf(os.Stdout, "\t"+format, args...)
+}
+
+// NewTestLogger creates a TestLogger as a log.LoggerProvider
+func NewTestLogger() log.LoggerProvider {
+ logger := &TestLogger{}
+ logger.Colorize = log.CanColorStdout
+ logger.Level = log.TRACE
+ return logger
+}
+
+// Init inits connection writer with json config.
+// json config only need key "level".
+func (log *TestLogger) Init(config string) error {
+ json := jsoniter.ConfigCompatibleWithStandardLibrary
+ err := json.Unmarshal([]byte(config), log)
+ if err != nil {
+ return err
+ }
+ log.NewWriterLogger(writerCloser)
+ return nil
+}
+
+// Flush when log should be flushed
+func (log *TestLogger) Flush() {
+}
+
+// ReleaseReopen does nothing
+func (log *TestLogger) ReleaseReopen() error {
+ return nil
+}
+
+// GetName returns the default name for this implementation
+func (log *TestLogger) GetName() string {
+ return "test"
+}
+
+func init() {
+ log.Register("test", NewTestLogger)
+ _, filename, _, _ := runtime.Caller(0)
+ prefix = strings.TrimSuffix(filename, "models/migrations/testlogger_test.go")
+}
diff --git a/models/migrations/v176.go b/models/migrations/v176.go
index cec23e08f4..6436330a8d 100644
--- a/models/migrations/v176.go
+++ b/models/migrations/v176.go
@@ -8,6 +8,9 @@ import (
"xorm.io/xorm"
)
+// removeInvalidLabels looks through the database for comments and issue_labels
+// that refer to labels that belong neither to the repository the issue is in
+// nor to the organization owning that repository, and removes them
func removeInvalidLabels(x *xorm.Engine) error {
type Comment struct {
ID int64 `xorm:"pk autoincr"`
diff --git a/models/migrations/v176_test.go b/models/migrations/v176_test.go
new file mode 100644
index 0000000000..2763e4f120
--- /dev/null
+++ b/models/migrations/v176_test.go
@@ -0,0 +1,128 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package migrations
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_removeInvalidLabels(t *testing.T) {
+ // Models used by the migration
+ type Comment struct {
+ ID int64 `xorm:"pk autoincr"`
+ Type int `xorm:"INDEX"`
+ IssueID int64 `xorm:"INDEX"`
+ LabelID int64
+ ShouldRemain bool // <- Flag for testing the migration
+ }
+
+ type Issue struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX UNIQUE(repo_index)"`
+ Index int64 `xorm:"UNIQUE(repo_index)"` // Index in one repository.
+ }
+
+ type Repository struct {
+ ID int64 `xorm:"pk autoincr"`
+ OwnerID int64 `xorm:"UNIQUE(s) index"`
+ LowerName string `xorm:"UNIQUE(s) INDEX NOT NULL"`
+ }
+
+ type Label struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX"`
+ OrgID int64 `xorm:"INDEX"`
+ }
+
+ type IssueLabel struct {
+ ID int64 `xorm:"pk autoincr"`
+ IssueID int64 `xorm:"UNIQUE(s)"`
+ LabelID int64 `xorm:"UNIQUE(s)"`
+ ShouldRemain bool // <- Flag for testing the migration
+ }
+
+ // load and prepare the test database
+ x, deferable := prepareTestEnv(t, 0, new(Comment), new(Issue), new(Repository), new(IssueLabel), new(Label))
+ if x == nil || t.Failed() {
+ defer deferable()
+ return
+ }
+ defer deferable()
+
+ var issueLabels []*IssueLabel
+ ilPreMigration := map[int64]*IssueLabel{}
+ ilPostMigration := map[int64]*IssueLabel{}
+
+ var comments []*Comment
+ comPreMigration := map[int64]*Comment{}
+ comPostMigration := map[int64]*Comment{}
+
+ // Get pre migration values
+ if err := x.Find(&issueLabels); err != nil {
+ t.Errorf("Unable to find issueLabels: %v", err)
+ return
+ }
+ for _, issueLabel := range issueLabels {
+ ilPreMigration[issueLabel.ID] = issueLabel
+ }
+ if err := x.Find(&comments); err != nil {
+ t.Errorf("Unable to find comments: %v", err)
+ return
+ }
+ for _, comment := range comments {
+ comPreMigration[comment.ID] = comment
+ }
+
+ // Run the migration
+ if err := removeInvalidLabels(x); err != nil {
+ t.Errorf("unable to RemoveInvalidLabels: %v", err)
+ }
+
+ // Get the post migration values
+ issueLabels = issueLabels[:0]
+ if err := x.Find(&issueLabels); err != nil {
+ t.Errorf("Unable to find issueLabels: %v", err)
+ return
+ }
+ for _, issueLabel := range issueLabels {
+ ilPostMigration[issueLabel.ID] = issueLabel
+ }
+ comments = comments[:0]
+ if err := x.Find(&comments); err != nil {
+ t.Errorf("Unable to find comments: %v", err)
+ return
+ }
+ for _, comment := range comments {
+ comPostMigration[comment.ID] = comment
+ }
+
+ // Finally test results of the migration
+ for id, comment := range comPreMigration {
+ post, ok := comPostMigration[id]
+ if ok {
+ if !comment.ShouldRemain {
+ t.Errorf("Comment[%d] remained but should have been deleted", id)
+ }
+ assert.Equal(t, comment, post)
+ } else if comment.ShouldRemain {
+ t.Errorf("Comment[%d] was deleted but should have remained", id)
+ }
+ }
+
+ for id, il := range ilPreMigration {
+ post, ok := ilPostMigration[id]
+ if ok {
+ if !il.ShouldRemain {
+ t.Errorf("IssueLabel[%d] remained but should have been deleted", id)
+ }
+ assert.Equal(t, il, post)
+ } else if il.ShouldRemain {
+ t.Errorf("IssueLabel[%d] was deleted but should have remained", id)
+ }
+ }
+
+}
diff --git a/models/migrations/v177.go b/models/migrations/v177.go
index 2329a8dc48..c65fd3de00 100644
--- a/models/migrations/v177.go
+++ b/models/migrations/v177.go
@@ -10,6 +10,7 @@ import (
"xorm.io/xorm"
)
+// deleteOrphanedIssueLabels looks through the database for issue_labels where the label no longer exists and deletes them.
func deleteOrphanedIssueLabels(x *xorm.Engine) error {
type IssueLabel struct {
ID int64 `xorm:"pk autoincr"`
diff --git a/models/migrations/v177_test.go b/models/migrations/v177_test.go
new file mode 100644
index 0000000000..02cb1d2652
--- /dev/null
+++ b/models/migrations/v177_test.go
@@ -0,0 +1,88 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package migrations
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/timeutil"
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_deleteOrphanedIssueLabels(t *testing.T) {
+ // Create the models used in the migration
+ type IssueLabel struct {
+ ID int64 `xorm:"pk autoincr"`
+ IssueID int64 `xorm:"UNIQUE(s)"`
+ LabelID int64 `xorm:"UNIQUE(s)"`
+ }
+
+ type Label struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX"`
+ OrgID int64 `xorm:"INDEX"`
+ Name string
+ Description string
+ Color string `xorm:"VARCHAR(7)"`
+ NumIssues int
+ NumClosedIssues int
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+ }
+
+ // Prepare and load the testing database
+ x, deferable := prepareTestEnv(t, 0, new(IssueLabel), new(Label))
+ if x == nil || t.Failed() {
+ defer deferable()
+ return
+ }
+ defer deferable()
+
+ var issueLabels []*IssueLabel
+ preMigration := map[int64]*IssueLabel{}
+ postMigration := map[int64]*IssueLabel{}
+
+ // Load issue labels that exist in the database pre-migration
+ if err := x.Find(&issueLabels); err != nil {
+ assert.NoError(t, err)
+ return
+ }
+ for _, issueLabel := range issueLabels {
+ preMigration[issueLabel.ID] = issueLabel
+ }
+
+ // Run the migration
+ if err := deleteOrphanedIssueLabels(x); err != nil {
+ assert.NoError(t, err)
+ return
+ }
+
+ // Load the remaining issue-labels
+ issueLabels = issueLabels[:0]
+ if err := x.Find(&issueLabels); err != nil {
+ assert.NoError(t, err)
+ return
+ }
+ for _, issueLabel := range issueLabels {
+ postMigration[issueLabel.ID] = issueLabel
+ }
+
+ // Now test what is left
+ if _, ok := postMigration[2]; ok {
+ t.Errorf("Orphaned Label[2] survived the migration")
+ return
+ }
+
+ if _, ok := postMigration[5]; ok {
+ t.Errorf("Orphaned Label[5] survived the migration")
+ return
+ }
+
+ for id, post := range postMigration {
+ pre := preMigration[id]
+ assert.Equal(t, pre, post, "migration changed issueLabel %d", id)
+ }
+
+}
diff --git a/models/models.go b/models/models.go
index 05cafccd14..73e65d828b 100644
--- a/models/models.go
+++ b/models/models.go
@@ -142,7 +142,8 @@ func init() {
}
}
-func getEngine() (*xorm.Engine, error) {
+// GetNewEngine returns a new xorm engine from the configuration
+func GetNewEngine() (*xorm.Engine, error) {
connStr, err := setting.DBConnStr()
if err != nil {
return nil, err
@@ -172,7 +173,7 @@ func getEngine() (*xorm.Engine, error) {
// NewTestEngine sets a new test xorm.Engine
func NewTestEngine() (err error) {
- x, err = getEngine()
+ x, err = GetNewEngine()
if err != nil {
return fmt.Errorf("Connect to database: %v", err)
}
@@ -185,7 +186,7 @@ func NewTestEngine() (err error) {
// SetEngine sets the xorm.Engine
func SetEngine() (err error) {
- x, err = getEngine()
+ x, err = GetNewEngine()
if err != nil {
return fmt.Errorf("Failed to connect to database: %v", err)
}
diff --git a/models/test_fixtures.go b/models/test_fixtures.go
index 5cca7c16e8..c1f84c6617 100644
--- a/models/test_fixtures.go
+++ b/models/test_fixtures.go
@@ -10,16 +10,22 @@ import (
"time"
"github.com/go-testfixtures/testfixtures/v3"
+ "xorm.io/xorm"
"xorm.io/xorm/schemas"
)
var fixtures *testfixtures.Loader
// InitFixtures initialize test fixtures for a test database
-func InitFixtures(dir string) (err error) {
+func InitFixtures(dir string, engine ...*xorm.Engine) (err error) {
+ e := x
+ if len(engine) == 1 {
+ e = engine[0]
+ }
+
testfiles := testfixtures.Directory(dir)
dialect := "unknown"
- switch x.Dialect().URI().DBType {
+ switch e.Dialect().URI().DBType {
case schemas.POSTGRES:
dialect = "postgres"
case schemas.MYSQL:
@@ -33,13 +39,13 @@ func InitFixtures(dir string) (err error) {
os.Exit(1)
}
loaderOptions := []func(loader *testfixtures.Loader) error{
- testfixtures.Database(x.DB().DB),
+ testfixtures.Database(e.DB().DB),
testfixtures.Dialect(dialect),
testfixtures.DangerousSkipTestDatabaseCheck(),
testfiles,
}
- if x.Dialect().URI().DBType == schemas.POSTGRES {
+ if e.Dialect().URI().DBType == schemas.POSTGRES {
loaderOptions = append(loaderOptions, testfixtures.SkipResetSequences())
}
@@ -52,7 +58,11 @@ func InitFixtures(dir string) (err error) {
}
// LoadFixtures load fixtures for a test database
-func LoadFixtures() error {
+func LoadFixtures(engine ...*xorm.Engine) error {
+ e := x
+ if len(engine) == 1 {
+ e = engine[0]
+ }
var err error
// Database transaction conflicts could occur and result in ROLLBACK
// As a simple workaround, we just retry 20 times.
@@ -67,8 +77,8 @@ func LoadFixtures() error {
fmt.Printf("LoadFixtures failed after retries: %v\n", err)
}
// Now if we're running postgres we need to tell it to update the sequences
- if x.Dialect().URI().DBType == schemas.POSTGRES {
- results, err := x.QueryString(`SELECT 'SELECT SETVAL(' ||
+ if e.Dialect().URI().DBType == schemas.POSTGRES {
+ results, err := e.QueryString(`SELECT 'SELECT SETVAL(' ||
quote_literal(quote_ident(PGT.schemaname) || '.' || quote_ident(S.relname)) ||
', COALESCE(MAX(' ||quote_ident(C.attname)|| '), 1) ) FROM ' ||
quote_ident(PGT.schemaname)|| '.'||quote_ident(T.relname)|| ';'
@@ -90,7 +100,7 @@ func LoadFixtures() error {
}
for _, r := range results {
for _, value := range r {
- _, err = x.Exec(value)
+ _, err = e.Exec(value)
if err != nil {
fmt.Printf("Failed to update sequence: %s Error: %v\n", value, err)
return err
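Together with GetNewEngine, the optional engine arguments added to InitFixtures and LoadFixtures are what let prepareTestEnv above load per-test fixtures into a standalone engine. A minimal sketch of that call sequence; loadMigrationFixtures is a hypothetical helper and dir is assumed to point at models/migrations/fixtures/<TestName>:

package migrations

import (
	"fmt"

	"code.gitea.io/gitea/models"
	"xorm.io/xorm"
)

// loadMigrationFixtures is a hypothetical helper, not part of this change.
func loadMigrationFixtures(x *xorm.Engine, dir string) error {
	if err := models.InitFixtures(dir, x); err != nil {
		return fmt.Errorf("init fixtures in %s: %v", dir, err)
	}
	if err := models.LoadFixtures(x); err != nil {
		return fmt.Errorf("load fixtures from %s: %v", dir, err)
	}
	return nil
}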