Merge branch 'go-gorm:master' into master
commit 8f5622a48d

.github/workflows/tests.yml (vendored, 12 changed lines)
@@ -30,7 +30,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: go mod package cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: ~/go/pkg/mod
           key: ${{ runner.os }}-go-${{ matrix.go }}-${{ hashFiles('tests/go.mod') }}
@@ -73,7 +73,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: go mod package cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
          path: ~/go/pkg/mod
           key: ${{ runner.os }}-go-${{ matrix.go }}-${{ hashFiles('tests/go.mod') }}
@@ -116,7 +116,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: go mod package cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: ~/go/pkg/mod
           key: ${{ runner.os }}-go-${{ matrix.go }}-${{ hashFiles('tests/go.mod') }}
@@ -159,7 +159,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: go mod package cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: ~/go/pkg/mod
           key: ${{ runner.os }}-go-${{ matrix.go }}-${{ hashFiles('tests/go.mod') }}
@@ -202,7 +202,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: go mod package cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: ~/go/pkg/mod
           key: ${{ runner.os }}-go-${{ matrix.go }}-${{ hashFiles('tests/go.mod') }}
@@ -235,7 +235,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: go mod package cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: ~/go/pkg/mod
           key: ${{ runner.os }}-go-${{ matrix.go }}-${{ hashFiles('tests/go.mod') }}

callbacks/preload.go

@@ -3,6 +3,7 @@ package callbacks
 import (
 	"fmt"
 	"reflect"
+	"sort"
 	"strings"
 
 	"gorm.io/gorm"
@@ -82,27 +83,80 @@ func embeddedValues(embeddedRelations *schema.Relationships) []string {
 	return names
 }
 
-func preloadEmbedded(tx *gorm.DB, relationships *schema.Relationships, s *schema.Schema, preloads map[string][]interface{}, as []interface{}) error {
-	if relationships == nil {
-		return nil
-	}
-	preloadMap := parsePreloadMap(s, preloads)
-	for name := range preloadMap {
-		if embeddedRelations := relationships.EmbeddedRelations[name]; embeddedRelations != nil {
-			if err := preloadEmbedded(tx, embeddedRelations, s, preloadMap[name], as); err != nil {
-				return err
-			}
-		} else if rel := relationships.Relations[name]; rel != nil {
-			if err := preload(tx, rel, append(preloads[name], as), preloadMap[name]); err != nil {
-				return err
-			}
-		} else {
-			return fmt.Errorf("%s: %w (embedded) for schema %s", name, gorm.ErrUnsupportedRelation, s.Name)
+// preloadEntryPoint enters layer by layer. It will call real preload if it finds the right entry point.
+// If the current relationship is embedded or joined, current query will be ignored.
+//
+//nolint:cyclop
+func preloadEntryPoint(db *gorm.DB, joins []string, relationships *schema.Relationships, preloads map[string][]interface{}, associationsConds []interface{}) error {
+	preloadMap := parsePreloadMap(db.Statement.Schema, preloads)
+
+	// avoid random traversal of the map
+	preloadNames := make([]string, 0, len(preloadMap))
+	for key := range preloadMap {
+		preloadNames = append(preloadNames, key)
+	}
+	sort.Strings(preloadNames)
+
+	isJoined := func(name string) (joined bool, nestedJoins []string) {
+		for _, join := range joins {
+			if _, ok := relationships.Relations[join]; ok && name == join {
+				joined = true
+				continue
+			}
+			joinNames := strings.SplitN(join, ".", 2)
+			if len(joinNames) == 2 {
+				if _, ok := relationships.Relations[joinNames[0]]; ok && name == joinNames[0] {
+					joined = true
+					nestedJoins = append(nestedJoins, joinNames[1])
+				}
+			}
+		}
+		return joined, nestedJoins
+	}
+
+	for _, name := range preloadNames {
+		if relations := relationships.EmbeddedRelations[name]; relations != nil {
+			if err := preloadEntryPoint(db, joins, relations, preloadMap[name], associationsConds); err != nil {
+				return err
+			}
+		} else if rel := relationships.Relations[name]; rel != nil {
+			if joined, nestedJoins := isJoined(name); joined {
+				reflectValue := rel.Field.ReflectValueOf(db.Statement.Context, db.Statement.ReflectValue)
+				tx := preloadDB(db, reflectValue, reflectValue.Interface())
+				if err := preloadEntryPoint(tx, nestedJoins, &tx.Statement.Schema.Relationships, preloadMap[name], associationsConds); err != nil {
+					return err
+				}
+			} else {
+				tx := db.Table("").Session(&gorm.Session{Context: db.Statement.Context, SkipHooks: db.Statement.SkipHooks})
+				tx.Statement.ReflectValue = db.Statement.ReflectValue
+				tx.Statement.Unscoped = db.Statement.Unscoped
+				if err := preload(tx, rel, append(preloads[name], associationsConds...), preloadMap[name]); err != nil {
+					return err
+				}
+			}
+		} else {
+			return fmt.Errorf("%s: %w for schema %s", name, gorm.ErrUnsupportedRelation, db.Statement.Schema.Name)
 		}
 	}
 	return nil
 }
 
+func preloadDB(db *gorm.DB, reflectValue reflect.Value, dest interface{}) *gorm.DB {
+	tx := db.Session(&gorm.Session{Context: db.Statement.Context, NewDB: true, SkipHooks: db.Statement.SkipHooks, Initialized: true})
+	db.Statement.Settings.Range(func(k, v interface{}) bool {
+		tx.Statement.Settings.Store(k, v)
+		return true
+	})
+
+	if err := tx.Statement.Parse(dest); err != nil {
+		tx.AddError(err)
+		return tx
+	}
+	tx.Statement.ReflectValue = reflectValue
+	tx.Statement.Unscoped = db.Statement.Unscoped
+	return tx
+}
+
 func preload(tx *gorm.DB, rel *schema.Relationship, conds []interface{}, preloads map[string][]interface{}) error {
 	var (
 		reflectValue = tx.Statement.ReflectValue
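The isJoined closure above is the heart of the new routing: a join whose name matches a relation exactly marks that relation as joined, while a dotted join such as "Nested.Join" marks the first segment as joined and queues the remainder as a nested join for the recursive preloadEntryPoint call. A minimal standalone sketch of that name-splitting, leaving out the relationships.Relations lookup the real code also performs:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	joins := []string{"Nested", "Nested.Join"}
	name := "Nested"

	joined, nestedJoins := false, []string(nil)
	for _, join := range joins {
		if join == name {
			// exact match: the relation itself is joined
			joined = true
			continue
		}
		// "Nested.Join" -> ["Nested", "Join"]: mark "Nested" as joined and
		// hand "Join" to the recursive call as a nested join
		if parts := strings.SplitN(join, ".", 2); len(parts) == 2 && parts[0] == name {
			joined = true
			nestedJoins = append(nestedJoins, parts[1])
		}
	}
	fmt.Println(joined, nestedJoins) // true [Join]
}
```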

callbacks/query.go

@@ -3,7 +3,6 @@ package callbacks
 import (
 	"fmt"
 	"reflect"
-	"sort"
 	"strings"
 
 	"gorm.io/gorm"
@@ -254,7 +253,6 @@ func BuildQuerySQL(db *gorm.DB) {
 			}
 
 			db.Statement.AddClause(fromClause)
-			db.Statement.Joins = nil
 		} else {
 			db.Statement.AddClauseIfNotExists(clause.From{})
 		}
@@ -272,38 +270,23 @@ func Preload(db *gorm.DB) {
 			return
 		}
 
-		preloadMap := parsePreloadMap(db.Statement.Schema, db.Statement.Preloads)
-		preloadNames := make([]string, 0, len(preloadMap))
-		for key := range preloadMap {
-			preloadNames = append(preloadNames, key)
+		joins := make([]string, 0, len(db.Statement.Joins))
+		for _, join := range db.Statement.Joins {
+			joins = append(joins, join.Name)
 		}
-		sort.Strings(preloadNames)
 
-		preloadDB := db.Session(&gorm.Session{Context: db.Statement.Context, NewDB: true, SkipHooks: db.Statement.SkipHooks, Initialized: true})
-		db.Statement.Settings.Range(func(k, v interface{}) bool {
-			preloadDB.Statement.Settings.Store(k, v)
-			return true
-		})
-
-		if err := preloadDB.Statement.Parse(db.Statement.Dest); err != nil {
+		tx := preloadDB(db, db.Statement.ReflectValue, db.Statement.Dest)
+		if tx.Error != nil {
 			return
 		}
-		preloadDB.Statement.ReflectValue = db.Statement.ReflectValue
-		preloadDB.Statement.Unscoped = db.Statement.Unscoped
 
-		for _, name := range preloadNames {
-			if relations := preloadDB.Statement.Schema.Relationships.EmbeddedRelations[name]; relations != nil {
-				db.AddError(preloadEmbedded(preloadDB.Table("").Session(&gorm.Session{Context: db.Statement.Context, SkipHooks: db.Statement.SkipHooks}), relations, db.Statement.Schema, preloadMap[name], db.Statement.Preloads[clause.Associations]))
-			} else if rel := preloadDB.Statement.Schema.Relationships.Relations[name]; rel != nil {
-				db.AddError(preload(preloadDB.Table("").Session(&gorm.Session{Context: db.Statement.Context, SkipHooks: db.Statement.SkipHooks}), rel, append(db.Statement.Preloads[name], db.Statement.Preloads[clause.Associations]...), preloadMap[name]))
-			} else {
-				db.AddError(fmt.Errorf("%s: %w for schema %s", name, gorm.ErrUnsupportedRelation, db.Statement.Schema.Name))
-			}
-		}
+		db.AddError(preloadEntryPoint(tx, joins, &tx.Statement.Schema.Relationships, db.Statement.Preloads, db.Statement.Preloads[clause.Associations]))
 	}
 }
 
 func AfterQuery(db *gorm.DB) {
+	// clear the joins after query because preload need it
+	db.Statement.Joins = nil
 	if db.Error == nil && db.Statement.Schema != nil && !db.Statement.SkipHooks && db.Statement.Schema.AfterFind && db.RowsAffected > 0 {
 		callMethod(db, func(value interface{}, tx *gorm.DB) bool {
 			if i, ok := value.(AfterFindInterface); ok {
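With Preload now collecting db.Statement.Joins and delegating to preloadEntryPoint, preloading a relation nested under a joined relation populates the already-joined values instead of failing with ErrUnsupportedRelation. A hedged usage sketch; the User/Company/Employee models are hypothetical, not part of this diff:

```go
package example

import "gorm.io/gorm"

// Hypothetical models for illustration only.
type Employee struct {
	ID        uint
	CompanyID uint
}

type Company struct {
	ID        uint
	Employees []Employee
}

type User struct {
	ID        uint
	CompanyID uint
	Company   Company
}

// loadUsers joins Company and preloads Employees into the joined Company
// values; preloadEntryPoint routes "Company.Employees" into the join.
func loadUsers(db *gorm.DB) ([]User, error) {
	var users []User
	err := db.Joins("Company").Preload("Company.Employees").Find(&users).Error
	return users, err
}
```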

migrator/migrator.go

@@ -110,15 +110,20 @@ func (m Migrator) FullDataTypeOf(field *schema.Field) (expr clause.Expr) {
 	return
 }
 
-// AutoMigrate auto migrate values
-func (m Migrator) AutoMigrate(values ...interface{}) error {
-	for _, value := range m.ReorderModels(values, true) {
-		queryTx := m.DB.Session(&gorm.Session{})
-		execTx := queryTx
-		if m.DB.DryRun {
-			queryTx.DryRun = false
-			execTx = m.DB.Session(&gorm.Session{Logger: &printSQLLogger{Interface: m.DB.Logger}})
-		}
+func (m Migrator) GetQueryAndExecTx() (queryTx, execTx *gorm.DB) {
+	queryTx = m.DB.Session(&gorm.Session{})
+	execTx = queryTx
+	if m.DB.DryRun {
+		queryTx.DryRun = false
+		execTx = m.DB.Session(&gorm.Session{Logger: &printSQLLogger{Interface: m.DB.Logger}})
+	}
+	return queryTx, execTx
+}
+
+// AutoMigrate auto migrate values
+func (m Migrator) AutoMigrate(values ...interface{}) error {
+	for _, value := range m.ReorderModels(values, true) {
+		queryTx, execTx := m.GetQueryAndExecTx()
 		if !queryTx.Migrator().HasTable(value) {
 			if err := execTx.Migrator().CreateTable(value); err != nil {
 				return err
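Extracting GetQueryAndExecTx makes the DryRun split explicit: schema inspection (HasTable and friends) always runs against the database because queryTx.DryRun is forced off, while DDL goes through a session whose printSQLLogger prints statements instead of executing them. A small sketch of the caller-side effect, assuming only the public gorm.Session API:

```go
package example

import "gorm.io/gorm"

// printMigrationSQL shows the DDL AutoMigrate would run without executing it:
// inspection queries still hit the database, while the CREATE/ALTER statements
// are only printed by the printSQLLogger session set up in GetQueryAndExecTx.
func printMigrationSQL(db *gorm.DB, models ...interface{}) error {
	return db.Session(&gorm.Session{DryRun: true}).AutoMigrate(models...)
}
```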
@@ -268,7 +273,7 @@ func (m Migrator) CreateTable(values ...interface{}) error {
 				}
 				if constraint := rel.ParseConstraint(); constraint != nil {
 					if constraint.Schema == stmt.Schema {
-						sql, vars := buildConstraint(constraint)
+						sql, vars := constraint.Build()
 						createTableSQL += sql + ","
 						values = append(values, vars...)
 					}
@@ -276,6 +281,11 @@ func (m Migrator) CreateTable(values ...interface{}) error {
 				}
 			}
 
+			for _, uni := range stmt.Schema.ParseUniqueConstraints() {
+				createTableSQL += "CONSTRAINT ? UNIQUE (?),"
+				values = append(values, clause.Column{Name: uni.Name}, clause.Expr{SQL: stmt.Quote(uni.Field.DBName)})
+			}
+
 			for _, chk := range stmt.Schema.ParseCheckConstraints() {
 				createTableSQL += "CONSTRAINT ? CHECK (?),"
 				values = append(values, clause.Column{Name: chk.Name}, clause.Expr{SQL: chk.Constraint})
@@ -439,6 +449,10 @@ func (m Migrator) RenameColumn(value interface{}, oldName, newName string) error
 
 // MigrateColumn migrate column
 func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnType gorm.ColumnType) error {
+	if field.IgnoreMigration {
+		return nil
+	}
+
 	// found, smart migrate
 	fullDataType := strings.TrimSpace(strings.ToLower(m.DB.Migrator().FullDataTypeOf(field).SQL))
 	realDataType := strings.ToLower(columnType.DatabaseTypeName())
@@ -499,7 +513,7 @@ func (m Migrator) MigrateColumn(value interface{}, field *schema.Field, columnTy
 	}
 
 	// check unique
-	if unique, ok := columnType.Unique(); ok && unique != field.Unique {
+	if unique, ok := columnType.Unique(); ok && unique != (field.Unique || field.UniqueIndex != "") {
 		// not primary key
 		if !field.PrimaryKey {
 			alterColumn = true
@@ -630,37 +644,36 @@ func (m Migrator) DropView(name string) error {
 	return m.DB.Exec("DROP VIEW IF EXISTS ?", clause.Table{Name: name}).Error
 }
 
-func buildConstraint(constraint *schema.Constraint) (sql string, results []interface{}) {
-	sql = "CONSTRAINT ? FOREIGN KEY ? REFERENCES ??"
-	if constraint.OnDelete != "" {
-		sql += " ON DELETE " + constraint.OnDelete
-	}
-
-	if constraint.OnUpdate != "" {
-		sql += " ON UPDATE " + constraint.OnUpdate
-	}
-
-	var foreignKeys, references []interface{}
-	for _, field := range constraint.ForeignKeys {
-		foreignKeys = append(foreignKeys, clause.Column{Name: field.DBName})
-	}
-
-	for _, field := range constraint.References {
-		references = append(references, clause.Column{Name: field.DBName})
-	}
-	results = append(results, clause.Table{Name: constraint.Name}, foreignKeys, clause.Table{Name: constraint.ReferenceSchema.Table}, references)
-	return
+// GuessConstraintAndTable guess statement's constraint and it's table based on name
+//
+// Deprecated: use GuessConstraintInterfaceAndTable instead.
+func (m Migrator) GuessConstraintAndTable(stmt *gorm.Statement, name string) (*schema.Constraint, *schema.CheckConstraint, string) {
+	constraint, table := m.GuessConstraintInterfaceAndTable(stmt, name)
+	switch c := constraint.(type) {
+	case *schema.Constraint:
+		return c, nil, table
+	case *schema.CheckConstraint:
+		return nil, c, table
+	default:
+		return nil, nil, table
+	}
 }
 
-// GuessConstraintAndTable guess statement's constraint and it's table based on name
-func (m Migrator) GuessConstraintAndTable(stmt *gorm.Statement, name string) (_ *schema.Constraint, _ *schema.Check, table string) {
+// GuessConstraintInterfaceAndTable guess statement's constraint and it's table based on name
+// nolint:cyclop
+func (m Migrator) GuessConstraintInterfaceAndTable(stmt *gorm.Statement, name string) (_ schema.ConstraintInterface, table string) {
 	if stmt.Schema == nil {
-		return nil, nil, stmt.Table
+		return nil, stmt.Table
 	}
 
 	checkConstraints := stmt.Schema.ParseCheckConstraints()
 	if chk, ok := checkConstraints[name]; ok {
-		return nil, &chk, stmt.Table
+		return &chk, stmt.Table
+	}
+
+	uniqueConstraints := stmt.Schema.ParseUniqueConstraints()
+	if uni, ok := uniqueConstraints[name]; ok {
+		return &uni, stmt.Table
 	}
 
 	getTable := func(rel *schema.Relationship) string {
@@ -675,7 +688,7 @@ func (m Migrator) GuessConstraintAndTable(stmt *gorm.Statement, name string) (_
 
 	for _, rel := range stmt.Schema.Relationships.Relations {
 		if constraint := rel.ParseConstraint(); constraint != nil && constraint.Name == name {
-			return constraint, nil, getTable(rel)
+			return constraint, getTable(rel)
 		}
 	}
 
@@ -683,40 +696,39 @@ func (m Migrator) GuessConstraintAndTable(stmt *gorm.Statement, name string) (_
 		for k := range checkConstraints {
 			if checkConstraints[k].Field == field {
 				v := checkConstraints[k]
-				return nil, &v, stmt.Table
+				return &v, stmt.Table
 			}
 		}
 
+		for k := range uniqueConstraints {
+			if uniqueConstraints[k].Field == field {
+				v := uniqueConstraints[k]
+				return &v, stmt.Table
+			}
+		}
+
 		for _, rel := range stmt.Schema.Relationships.Relations {
 			if constraint := rel.ParseConstraint(); constraint != nil && rel.Field == field {
-				return constraint, nil, getTable(rel)
+				return constraint, getTable(rel)
			}
 		}
 	}
 
-	return nil, nil, stmt.Schema.Table
+	return nil, stmt.Schema.Table
 }
 
 // CreateConstraint create constraint
 func (m Migrator) CreateConstraint(value interface{}, name string) error {
 	return m.RunWithValue(value, func(stmt *gorm.Statement) error {
-		constraint, chk, table := m.GuessConstraintAndTable(stmt, name)
-		if chk != nil {
-			return m.DB.Exec(
-				"ALTER TABLE ? ADD CONSTRAINT ? CHECK (?)",
-				m.CurrentTable(stmt), clause.Column{Name: chk.Name}, clause.Expr{SQL: chk.Constraint},
-			).Error
-		}
-
+		constraint, table := m.GuessConstraintInterfaceAndTable(stmt, name)
 		if constraint != nil {
 			vars := []interface{}{clause.Table{Name: table}}
 			if stmt.TableExpr != nil {
 				vars[0] = stmt.TableExpr
 			}
-			sql, values := buildConstraint(constraint)
+			sql, values := constraint.Build()
 			return m.DB.Exec("ALTER TABLE ? ADD "+sql, append(vars, values...)...).Error
 		}
 
 		return nil
 	})
 }
@@ -724,11 +736,9 @@ func (m Migrator) CreateConstraint(value interface{}, name string) error {
 
 // DropConstraint drop constraint
 func (m Migrator) DropConstraint(value interface{}, name string) error {
 	return m.RunWithValue(value, func(stmt *gorm.Statement) error {
-		constraint, chk, table := m.GuessConstraintAndTable(stmt, name)
+		constraint, table := m.GuessConstraintInterfaceAndTable(stmt, name)
 		if constraint != nil {
-			name = constraint.Name
-		} else if chk != nil {
-			name = chk.Name
+			name = constraint.GetName()
 		}
 		return m.DB.Exec("ALTER TABLE ? DROP CONSTRAINT ?", clause.Table{Name: table}, clause.Column{Name: name}).Error
 	})
@@ -739,11 +749,9 @@ func (m Migrator) HasConstraint(value interface{}, name string) bool {
 	var count int64
 	m.RunWithValue(value, func(stmt *gorm.Statement) error {
 		currentDatabase := m.DB.Migrator().CurrentDatabase()
-		constraint, chk, table := m.GuessConstraintAndTable(stmt, name)
+		constraint, table := m.GuessConstraintInterfaceAndTable(stmt, name)
 		if constraint != nil {
-			name = constraint.Name
-		} else if chk != nil {
-			name = chk.Name
+			name = constraint.GetName()
 		}
 
 		return m.DB.Raw(
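After this change, DropConstraint and HasConstraint resolve a name to a single schema.ConstraintInterface value, whether it denotes a foreign key, a CHECK, or one of the new UNIQUE constraints, and normalize the name via GetName(). A sketch of the unified caller-side path (dropIfExists is a hypothetical helper, not part of this diff):

```go
package example

import "gorm.io/gorm"

// dropIfExists removes a named constraint of any kind (foreign key, CHECK,
// or UNIQUE); the migrator resolves the kind internally via
// GuessConstraintInterfaceAndTable.
func dropIfExists(db *gorm.DB, model interface{}, name string) error {
	if db.Migrator().HasConstraint(model, name) {
		return db.Migrator().DropConstraint(model, name)
	}
	return nil
}
```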

schema/check.go (deleted)

@@ -1,35 +0,0 @@
-package schema
-
-import (
-	"regexp"
-	"strings"
-)
-
-// reg match english letters and midline
-var regEnLetterAndMidline = regexp.MustCompile("^[A-Za-z-_]+$")
-
-type Check struct {
-	Name       string
-	Constraint string // length(phone) >= 10
-	*Field
-}
-
-// ParseCheckConstraints parse schema check constraints
-func (schema *Schema) ParseCheckConstraints() map[string]Check {
-	checks := map[string]Check{}
-	for _, field := range schema.FieldsByDBName {
-		if chk := field.TagSettings["CHECK"]; chk != "" {
-			names := strings.Split(chk, ",")
-			if len(names) > 1 && regEnLetterAndMidline.MatchString(names[0]) {
-				checks[names[0]] = Check{Name: names[0], Constraint: strings.Join(names[1:], ","), Field: field}
-			} else {
-				if names[0] == "" {
-					chk = strings.Join(names[1:], ",")
-				}
-				name := schema.namer.CheckerName(schema.Table, field.DBName)
-				checks[name] = Check{Name: name, Constraint: chk, Field: field}
-			}
-		}
-	}
-	return checks
-}
schema/constraint.go (new file, 66 lines)

@@ -0,0 +1,66 @@
+package schema
+
+import (
+	"regexp"
+	"strings"
+
+	"gorm.io/gorm/clause"
+)
+
+// reg match english letters and midline
+var regEnLetterAndMidline = regexp.MustCompile("^[A-Za-z-_]+$")
+
+type CheckConstraint struct {
+	Name       string
+	Constraint string // length(phone) >= 10
+	*Field
+}
+
+func (chk *CheckConstraint) GetName() string { return chk.Name }
+
+func (chk *CheckConstraint) Build() (sql string, vars []interface{}) {
+	return "CONSTRAINT ? CHECK (?)", []interface{}{clause.Column{Name: chk.Name}, clause.Expr{SQL: chk.Constraint}}
+}
+
+// ParseCheckConstraints parse schema check constraints
+func (schema *Schema) ParseCheckConstraints() map[string]CheckConstraint {
+	checks := map[string]CheckConstraint{}
+	for _, field := range schema.FieldsByDBName {
+		if chk := field.TagSettings["CHECK"]; chk != "" {
+			names := strings.Split(chk, ",")
+			if len(names) > 1 && regEnLetterAndMidline.MatchString(names[0]) {
+				checks[names[0]] = CheckConstraint{Name: names[0], Constraint: strings.Join(names[1:], ","), Field: field}
+			} else {
+				if names[0] == "" {
+					chk = strings.Join(names[1:], ",")
+				}
+				name := schema.namer.CheckerName(schema.Table, field.DBName)
+				checks[name] = CheckConstraint{Name: name, Constraint: chk, Field: field}
+			}
+		}
+	}
+	return checks
+}
+
+type UniqueConstraint struct {
+	Name  string
+	Field *Field
+}
+
+func (uni *UniqueConstraint) GetName() string { return uni.Name }
+
+func (uni *UniqueConstraint) Build() (sql string, vars []interface{}) {
+	return "CONSTRAINT ? UNIQUE (?)", []interface{}{clause.Column{Name: uni.Name}, clause.Column{Name: uni.Field.DBName}}
+}
+
+// ParseUniqueConstraints parse schema unique constraints
+func (schema *Schema) ParseUniqueConstraints() map[string]UniqueConstraint {
+	uniques := make(map[string]UniqueConstraint)
+	for _, field := range schema.Fields {
+		if field.Unique {
+			name := schema.namer.UniqueName(schema.Table, field.DBName)
+			uniques[name] = UniqueConstraint{Name: name, Field: field}
+		}
+	}
+	return uniques
+}
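Both new types render themselves through Build, returning a placeholder SQL fragment plus the clause vars that GORM expands when the statement is built. A minimal sketch of what UniqueConstraint.Build yields; the constraint and field values are made up for illustration:

```go
package example

import (
	"fmt"

	"gorm.io/gorm/schema"
)

// demo prints the fragment produced by the new UniqueConstraint type.
func demo() {
	uni := schema.UniqueConstraint{
		Name:  "uni_users_name",
		Field: &schema.Field{DBName: "name"},
	}
	sql, vars := uni.Build()
	fmt.Println(sql)  // CONSTRAINT ? UNIQUE (?)
	fmt.Println(vars) // clause.Column "uni_users_name", clause.Column "name"
}
```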
schema/check_test.go

@@ -6,6 +6,7 @@ import (
 	"testing"
 
 	"gorm.io/gorm/schema"
+	"gorm.io/gorm/utils/tests"
 )
 
 type UserCheck struct {
@@ -20,7 +21,7 @@ func TestParseCheck(t *testing.T) {
 		t.Fatalf("failed to parse user check, got error %v", err)
 	}
 
-	results := map[string]schema.Check{
+	results := map[string]schema.CheckConstraint{
 		"name_checker": {
 			Name:       "name_checker",
 			Constraint: "name <> 'jinzhu'",
@@ -53,3 +54,31 @@ func TestParseCheck(t *testing.T) {
 		}
 	}
 }
+
+func TestParseUniqueConstraints(t *testing.T) {
+	type UserUnique struct {
+		Name1 string `gorm:"unique"`
+		Name2 string `gorm:"uniqueIndex"`
+	}
+
+	user, err := schema.Parse(&UserUnique{}, &sync.Map{}, schema.NamingStrategy{})
+	if err != nil {
+		t.Fatalf("failed to parse user unique, got error %v", err)
+	}
+	constraints := user.ParseUniqueConstraints()
+
+	results := map[string]schema.UniqueConstraint{
+		"uni_user_uniques_name1": {
+			Name:  "uni_user_uniques_name1",
+			Field: &schema.Field{Name: "Name1", Unique: true},
+		},
+	}
+	for k, result := range results {
+		v, ok := constraints[k]
+		if !ok {
+			t.Errorf("Failed to found unique constraint %v from parsed constraints %+v", k, constraints)
+		}
+		tests.AssertObjEqual(t, result, v, "Name")
+		tests.AssertObjEqual(t, result.Field, v.Field, "Name", "Unique", "UniqueIndex")
+	}
+}
schema/field.go

@@ -89,6 +89,12 @@ type Field struct {
 	Set                    func(context.Context, reflect.Value, interface{}) error
 	Serializer             SerializerInterface
 	NewValuePool           FieldNewValuePool
+
+	// In some db (e.g. MySQL), Unique and UniqueIndex are indistinguishable.
+	// When a column has a (not Mul) UniqueIndex, Migrator always reports its gorm.ColumnType is Unique.
+	// It causes field unnecessarily migration.
+	// Therefore, we need to record the UniqueIndex on this column (exclude Mul UniqueIndex) for MigrateColumnUnique.
+	UniqueIndex string
 }
 
 func (field *Field) BindName() string {

schema/index.go

@@ -14,7 +14,7 @@ type Index struct {
 	Where   string
 	Comment string
 	Option  string // WITH PARSER parser_name
-	Fields  []IndexOption
+	Fields  []IndexOption // Note: IndexOption's Field maybe the same
 }
 
 type IndexOption struct {
@@ -67,7 +67,7 @@ func (schema *Schema) ParseIndexes() map[string]Index {
 	}
 	for _, index := range indexes {
 		if index.Class == "UNIQUE" && len(index.Fields) == 1 {
-			index.Fields[0].Field.Unique = true
+			index.Fields[0].Field.UniqueIndex = index.Name
 		}
 	}
 	return indexes
schema/index_test.go

@@ -1,11 +1,11 @@
 package schema_test
 
 import (
-	"reflect"
 	"sync"
 	"testing"
 
 	"gorm.io/gorm/schema"
+	"gorm.io/gorm/utils/tests"
 )
 
 type UserIndex struct {
@@ -19,6 +19,7 @@ type UserIndex struct {
 	OID          int64  `gorm:"index:idx_id;index:idx_oid,unique"`
 	MemberNumber string `gorm:"index:idx_id,priority:1"`
 	Name7        string `gorm:"index:type"`
+	Name8        string `gorm:"index:,length:10;index:,collate:utf8"`
 
 	// Composite Index: Flattened structure.
 	Data0A string `gorm:"index:,composite:comp_id0"`
@@ -65,7 +66,7 @@ func TestParseIndex(t *testing.T) {
 		"idx_name": {
 			Name:   "idx_name",
 			Class:  "UNIQUE",
-			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name2", Unique: true}}},
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name2", UniqueIndex: "idx_name"}}},
 		},
 		"idx_user_indices_name3": {
 			Name: "idx_user_indices_name3",
@@ -81,7 +82,7 @@ func TestParseIndex(t *testing.T) {
 		"idx_user_indices_name4": {
 			Name:   "idx_user_indices_name4",
 			Class:  "UNIQUE",
-			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name4", Unique: true}}},
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name4", UniqueIndex: "idx_user_indices_name4"}}},
 		},
 		"idx_user_indices_name5": {
 			Name: "idx_user_indices_name5",
@@ -102,18 +103,27 @@ func TestParseIndex(t *testing.T) {
 		},
 		"idx_id": {
 			Name:   "idx_id",
-			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "MemberNumber"}}, {Field: &schema.Field{Name: "OID", Unique: true}}},
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "MemberNumber"}}, {Field: &schema.Field{Name: "OID", UniqueIndex: "idx_oid"}}},
 		},
 		"idx_oid": {
 			Name:   "idx_oid",
 			Class:  "UNIQUE",
-			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "OID", Unique: true}}},
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "OID", UniqueIndex: "idx_oid"}}},
 		},
 		"type": {
 			Name:   "type",
 			Type:   "",
 			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name7"}}},
 		},
+		"idx_user_indices_name8": {
+			Name: "idx_user_indices_name8",
+			Type: "",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "Name8"}, Length: 10},
+				// Note: Duplicate Columns
+				{Field: &schema.Field{Name: "Name8"}, Collate: "utf8"},
+			},
+		},
 		"idx_user_indices_comp_id0": {
 			Name: "idx_user_indices_comp_id0",
 			Type: "",
@@ -146,40 +156,109 @@ func TestParseIndex(t *testing.T) {
 		},
 	}
 
-	indices := user.ParseIndexes()
-
-	for k, result := range results {
-		v, ok := indices[k]
-		if !ok {
-			t.Fatalf("Failed to found index %v from parsed indices %+v", k, indices)
-		}
-
-		for _, name := range []string{"Name", "Class", "Type", "Where", "Comment", "Option"} {
-			if reflect.ValueOf(result).FieldByName(name).Interface() != reflect.ValueOf(v).FieldByName(name).Interface() {
-				t.Errorf(
-					"index %v %v should equal, expects %v, got %v",
-					k, name, reflect.ValueOf(result).FieldByName(name).Interface(), reflect.ValueOf(v).FieldByName(name).Interface(),
-				)
-			}
-		}
-
-		for idx, ef := range result.Fields {
-			rf := v.Fields[idx]
-			if rf.Field.Name != ef.Field.Name {
-				t.Fatalf("index field should equal, expects %v, got %v", rf.Field.Name, ef.Field.Name)
-			}
-			if rf.Field.Unique != ef.Field.Unique {
-				t.Fatalf("index field '%s' should equal, expects %v, got %v", rf.Field.Name, rf.Field.Unique, ef.Field.Unique)
-			}
-
-			for _, name := range []string{"Expression", "Sort", "Collate", "Length"} {
-				if reflect.ValueOf(ef).FieldByName(name).Interface() != reflect.ValueOf(rf).FieldByName(name).Interface() {
-					t.Errorf(
-						"index %v field #%v's %v should equal, expects %v, got %v", k, idx+1, name,
-						reflect.ValueOf(ef).FieldByName(name).Interface(), reflect.ValueOf(rf).FieldByName(name).Interface(),
-					)
-				}
-			}
-		}
-	}
-}
+	CheckIndices(t, results, user.ParseIndexes())
+}
+
+func TestParseIndexWithUniqueIndexAndUnique(t *testing.T) {
+	type IndexTest struct {
+		FieldA string `gorm:"unique;index"` // unique and index
+		FieldB string `gorm:"unique"`       // unique
+
+		FieldC string `gorm:"index:,unique"`     // uniqueIndex
+		FieldD string `gorm:"uniqueIndex;index"` // uniqueIndex and index
+
+		FieldE1 string `gorm:"uniqueIndex:uniq_field_e1_e2"` // mul uniqueIndex
+		FieldE2 string `gorm:"uniqueIndex:uniq_field_e1_e2"`
+
+		FieldF1 string `gorm:"uniqueIndex:uniq_field_f1_f2;index"` // mul uniqueIndex and index
+		FieldF2 string `gorm:"uniqueIndex:uniq_field_f1_f2;"`
+
+		FieldG string `gorm:"unique;uniqueIndex"` // unique and uniqueIndex
+
+		FieldH1 string `gorm:"unique;uniqueIndex:uniq_field_h1_h2"` // unique and mul uniqueIndex
+		FieldH2 string `gorm:"uniqueIndex:uniq_field_h1_h2"`        // unique and mul uniqueIndex
+	}
+	indexSchema, err := schema.Parse(&IndexTest{}, &sync.Map{}, schema.NamingStrategy{})
+	if err != nil {
+		t.Fatalf("failed to parse user index, got error %v", err)
+	}
+	indices := indexSchema.ParseIndexes()
+	CheckIndices(t, map[string]schema.Index{
+		"idx_index_tests_field_a": {
+			Name:   "idx_index_tests_field_a",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldA", Unique: true}}},
+		},
+		"idx_index_tests_field_c": {
+			Name:   "idx_index_tests_field_c",
+			Class:  "UNIQUE",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldC", UniqueIndex: "idx_index_tests_field_c"}}},
+		},
+		"idx_index_tests_field_d": {
+			Name:  "idx_index_tests_field_d",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldD"}},
+				// Note: Duplicate Columns
+				{Field: &schema.Field{Name: "FieldD"}},
+			},
+		},
+		"uniq_field_e1_e2": {
+			Name:  "uniq_field_e1_e2",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldE1"}},
+				{Field: &schema.Field{Name: "FieldE2"}},
+			},
+		},
+		"idx_index_tests_field_f1": {
+			Name:   "idx_index_tests_field_f1",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldF1"}}},
+		},
+		"uniq_field_f1_f2": {
+			Name:  "uniq_field_f1_f2",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldF1"}},
+				{Field: &schema.Field{Name: "FieldF2"}},
+			},
+		},
+		"idx_index_tests_field_g": {
+			Name:   "idx_index_tests_field_g",
+			Class:  "UNIQUE",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldG", Unique: true, UniqueIndex: "idx_index_tests_field_g"}}},
+		},
+		"uniq_field_h1_h2": {
+			Name:  "uniq_field_h1_h2",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldH1", Unique: true}},
+				{Field: &schema.Field{Name: "FieldH2"}},
+			},
+		},
+	}, indices)
+}
+
+func CheckIndices(t *testing.T, expected, actual map[string]schema.Index) {
+	for k, ei := range expected {
+		t.Run(k, func(t *testing.T) {
+			ai, ok := actual[k]
+			if !ok {
+				t.Errorf("expected index %q but actual missing", k)
+				return
+			}
+
+			tests.AssertObjEqual(t, ai, ei, "Name", "Class", "Type", "Where", "Comment", "Option")
+			if len(ei.Fields) != len(ai.Fields) {
+				t.Errorf("expected index %q field length is %d but actual %d", k, len(ei.Fields), len(ai.Fields))
+				return
+			}
+			for i, ef := range ei.Fields {
+				af := ai.Fields[i]
+				tests.AssertObjEqual(t, af, ef, "Name", "Unique", "UniqueIndex", "Expression", "Sort", "Collate", "Length")
+			}
+		})
+		delete(actual, k)
+	}
+	for k := range actual {
+		t.Errorf("unexpected index %q", k)
+	}
+}
schema/interfaces.go

@@ -4,6 +4,12 @@ import (
 	"gorm.io/gorm/clause"
 )
 
+// ConstraintInterface database constraint interface
+type ConstraintInterface interface {
+	GetName() string
+	Build() (sql string, vars []interface{})
+}
+
 // GormDataTypeInterface gorm data type interface
 type GormDataTypeInterface interface {
 	GormDataType() string
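Because Constraint, CheckConstraint, and UniqueConstraint all satisfy this interface, migrator code can render any of them uniformly. A sketch mirroring the CreateConstraint path above (addConstraint is a hypothetical helper, not part of this diff):

```go
package example

import (
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
	"gorm.io/gorm/schema"
)

// addConstraint turns any ConstraintInterface into an ALTER TABLE ... ADD,
// much as CreateConstraint does after resolving the constraint by name.
func addConstraint(db *gorm.DB, table string, c schema.ConstraintInterface) error {
	sql, vars := c.Build()
	args := append([]interface{}{clause.Table{Name: table}}, vars...)
	return db.Exec("ALTER TABLE ? ADD "+sql, args...).Error
}
```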

schema/relationship.go

@@ -605,6 +605,7 @@ func (schema *Schema) guessRelation(relation *Relationship, field *Field, cgl gu
 	}
 }
 
+// Constraint is ForeignKey Constraint
 type Constraint struct {
 	Name  string
 	Field *Field
@@ -616,6 +617,31 @@ type Constraint struct {
 	OnUpdate string
 }
 
+func (constraint *Constraint) GetName() string { return constraint.Name }
+
+func (constraint *Constraint) Build() (sql string, vars []interface{}) {
+	sql = "CONSTRAINT ? FOREIGN KEY ? REFERENCES ??"
+	if constraint.OnDelete != "" {
+		sql += " ON DELETE " + constraint.OnDelete
+	}
+
+	if constraint.OnUpdate != "" {
+		sql += " ON UPDATE " + constraint.OnUpdate
+	}
+
+	foreignKeys := make([]interface{}, 0, len(constraint.ForeignKeys))
+	for _, field := range constraint.ForeignKeys {
+		foreignKeys = append(foreignKeys, clause.Column{Name: field.DBName})
+	}
+
+	references := make([]interface{}, 0, len(constraint.References))
+	for _, field := range constraint.References {
+		references = append(references, clause.Column{Name: field.DBName})
+	}
+	vars = append(vars, clause.Table{Name: constraint.Name}, foreignKeys, clause.Table{Name: constraint.ReferenceSchema.Table}, references)
+	return
+}
+
 func (rel *Relationship) ParseConstraint() *Constraint {
 	str := rel.Field.TagSettings["CONSTRAINT"]
 	if str == "-" {
tests/preload_test.go

@@ -307,6 +307,63 @@ func TestNestedPreloadWithUnscoped(t *testing.T) {
 	CheckUserUnscoped(t, *user6, user)
 }
 
+func TestNestedPreloadWithNestedJoin(t *testing.T) {
+	type (
+		Preload struct {
+			ID       uint
+			Value    string
+			NestedID uint
+		}
+		Join struct {
+			ID       uint
+			Value    string
+			NestedID uint
+		}
+		Nested struct {
+			ID       uint
+			Preloads []*Preload
+			Join     Join
+			ValueID  uint
+		}
+		Value struct {
+			ID     uint
+			Name   string
+			Nested Nested
+		}
+	)
+
+	DB.Migrator().DropTable(&Preload{}, &Join{}, &Nested{}, &Value{})
+	DB.Migrator().AutoMigrate(&Preload{}, &Join{}, &Nested{}, &Value{})
+
+	value := Value{
+		Name: "value",
+		Nested: Nested{
+			Preloads: []*Preload{
+				{Value: "p1"}, {Value: "p2"},
+			},
+			Join: Join{Value: "j1"},
+		},
+	}
+	if err := DB.Create(&value).Error; err != nil {
+		t.Errorf("failed to create value, got err: %v", err)
+	}
+
+	var find1 Value
+	err := DB.Joins("Nested").Joins("Nested.Join").Preload("Nested.Preloads").First(&find1).Error
+	if err != nil {
+		t.Errorf("failed to find value, got err: %v", err)
+	}
+	AssertEqual(t, find1, value)
+
+	var find2 Value
+	// Joins will automatically add Nested queries.
+	err = DB.Joins("Nested.Join").Preload("Nested.Preloads").First(&find2).Error
+	if err != nil {
+		t.Errorf("failed to find value, got err: %v", err)
+	}
+	AssertEqual(t, find2, value)
+}
+
 func TestEmbedPreload(t *testing.T) {
 	type Country struct {
 		ID int `gorm:"primaryKey"`