add ParseIndex test
parent 387fca3478
commit 567f44f14e
@@ -13,8 +13,8 @@ type Index struct {
 	Type    string // btree, hash, gist, spgist, gin, and brin
 	Where   string
 	Comment string
 	Option  string // WITH PARSER parser_name
-	Fields  []IndexOption
+	Fields  []IndexOption // Note: IndexOption's Field maybe the same
 }
 
 type IndexOption struct {
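Review note, not part of the commit: the new comment on Fields is what the test changes below exercise. When one column carries two index: settings under the same unnamed index, ParseIndexes merges them into a single Index whose Fields slice holds two IndexOption entries pointing at the same Field. A minimal standalone sketch of that behaviour follows, assuming the map-returning ParseIndexes shown in this diff; the Article model, its field names, and the printed output are illustrative assumptions only.

package main

import (
	"fmt"
	"sync"

	"gorm.io/gorm/schema"
)

// Article is a hypothetical model: Title carries two settings for the same
// unnamed index, so the parsed index ends up with two entries for Title.
type Article struct {
	ID    uint
	Title string `gorm:"index:,length:10;index:,collate:utf8"`
}

func main() {
	s, err := schema.Parse(&Article{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		panic(err)
	}
	for name, idx := range s.ParseIndexes() {
		for _, opt := range idx.Fields {
			// Expect two lines for the auto-named index, both for field Title:
			// one with Length 10, one with Collate utf8.
			fmt.Println(name, opt.Field.Name, opt.Length, opt.Collate)
		}
	}
}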
@@ -1,11 +1,11 @@
 package schema_test
 
 import (
-	"reflect"
 	"sync"
 	"testing"
 
 	"gorm.io/gorm/schema"
+	"gorm.io/gorm/utils/tests"
 )
 
 type UserIndex struct {
@@ -19,6 +19,7 @@ type UserIndex struct {
 	OID          int64  `gorm:"index:idx_id;index:idx_oid,unique"`
 	MemberNumber string `gorm:"index:idx_id,priority:1"`
 	Name7        string `gorm:"index:type"`
+	Name8        string `gorm:"index:,length:10;index:,collate:utf8"`
 
 	// Composite Index: Flattened structure.
 	Data0A string `gorm:"index:,composite:comp_id0"`
@@ -114,6 +115,15 @@ func TestParseIndex(t *testing.T) {
 			Type:   "",
 			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "Name7"}}},
 		},
+		"idx_user_indices_name8": {
+			Name: "idx_user_indices_name8",
+			Type: "",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "Name8"}, Length: 10},
+				// Note: Duplicate Columns
+				{Field: &schema.Field{Name: "Name8"}, Collate: "utf8"},
+			},
+		},
 		"idx_user_indices_comp_id0": {
 			Name: "idx_user_indices_comp_id0",
 			Type: "",
@@ -146,40 +156,109 @@ func TestParseIndex(t *testing.T) {
 		},
 	}
 
-	indices := user.ParseIndexes()
-
-	for k, result := range results {
-		v, ok := indices[k]
-		if !ok {
-			t.Fatalf("Failed to found index %v from parsed indices %+v", k, indices)
-		}
-
-		for _, name := range []string{"Name", "Class", "Type", "Where", "Comment", "Option"} {
-			if reflect.ValueOf(result).FieldByName(name).Interface() != reflect.ValueOf(v).FieldByName(name).Interface() {
-				t.Errorf(
-					"index %v %v should equal, expects %v, got %v",
-					k, name, reflect.ValueOf(result).FieldByName(name).Interface(), reflect.ValueOf(v).FieldByName(name).Interface(),
-				)
-			}
-		}
-
-		for idx, ef := range result.Fields {
-			rf := v.Fields[idx]
-			if rf.Field.Name != ef.Field.Name {
-				t.Fatalf("index field should equal, expects %v, got %v", rf.Field.Name, ef.Field.Name)
-			}
-			if rf.Field.Unique != ef.Field.Unique {
-				t.Fatalf("index field '%s' should equal, expects %v, got %v", rf.Field.Name, rf.Field.Unique, ef.Field.Unique)
-			}
-
-			for _, name := range []string{"Expression", "Sort", "Collate", "Length"} {
-				if reflect.ValueOf(ef).FieldByName(name).Interface() != reflect.ValueOf(rf).FieldByName(name).Interface() {
-					t.Errorf(
-						"index %v field #%v's %v should equal, expects %v, got %v", k, idx+1, name,
-						reflect.ValueOf(ef).FieldByName(name).Interface(), reflect.ValueOf(rf).FieldByName(name).Interface(),
-					)
-				}
-			}
-		}
+	CheckIndices(t, results, user.ParseIndexes())
+}
+
+func TestParseIndexWithUniqueIndexAndUnique(t *testing.T) {
+	type IndexTest struct {
+		FieldA string `gorm:"unique;index"` // unique and index
+		FieldB string `gorm:"unique"`       // unique
+
+		FieldC string `gorm:"index:,unique"`     // uniqueIndex
+		FieldD string `gorm:"uniqueIndex;index"` // uniqueIndex and index
+
+		FieldE1 string `gorm:"uniqueIndex:uniq_field_e1_e2"` // mul uniqueIndex
+		FieldE2 string `gorm:"uniqueIndex:uniq_field_e1_e2"`
+
+		FieldF1 string `gorm:"uniqueIndex:uniq_field_f1_f2;index"` // mul uniqueIndex and index
+		FieldF2 string `gorm:"uniqueIndex:uniq_field_f1_f2;"`
+
+		FieldG string `gorm:"unique;uniqueIndex"` // unique and uniqueIndex
+
+		FieldH1 string `gorm:"unique;uniqueIndex:uniq_field_h1_h2"` // unique and mul uniqueIndex
+		FieldH2 string `gorm:"uniqueIndex:uniq_field_h1_h2"`        // unique and mul uniqueIndex
+	}
+	indexSchema, err := schema.Parse(&IndexTest{}, &sync.Map{}, schema.NamingStrategy{})
+	if err != nil {
+		t.Fatalf("failed to parse user index, got error %v", err)
+	}
+	indices := indexSchema.ParseIndexes()
+	CheckIndices(t, map[string]schema.Index{
+		"idx_index_tests_field_a": {
+			Name:   "idx_index_tests_field_a",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldA", Unique: true}}},
+		},
+		"idx_index_tests_field_c": {
+			Name:   "idx_index_tests_field_c",
+			Class:  "UNIQUE",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldC", UniqueIndex: true}}},
+		},
+		"idx_index_tests_field_d": {
+			Name:  "idx_index_tests_field_d",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldD"}},
+				// Note: Duplicate Columns
+				{Field: &schema.Field{Name: "FieldD"}},
+			},
+		},
+		"uniq_field_e1_e2": {
+			Name:  "uniq_field_e1_e2",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldE1"}},
+				{Field: &schema.Field{Name: "FieldE2"}},
+			},
+		},
+		"idx_index_tests_field_f1": {
+			Name:   "idx_index_tests_field_f1",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldF1"}}},
+		},
+		"uniq_field_f1_f2": {
+			Name:  "uniq_field_f1_f2",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldF1"}},
+				{Field: &schema.Field{Name: "FieldF2"}},
+			},
+		},
+		"idx_index_tests_field_g": {
+			Name:   "idx_index_tests_field_g",
+			Class:  "UNIQUE",
+			Fields: []schema.IndexOption{{Field: &schema.Field{Name: "FieldG", Unique: true, UniqueIndex: true}}},
+		},
+		"uniq_field_h1_h2": {
+			Name:  "uniq_field_h1_h2",
+			Class: "UNIQUE",
+			Fields: []schema.IndexOption{
+				{Field: &schema.Field{Name: "FieldH1", Unique: true}},
+				{Field: &schema.Field{Name: "FieldH2"}},
+			},
+		},
+	}, indices)
+}
+
+func CheckIndices(t *testing.T, expected, actual map[string]schema.Index) {
+	for k, ei := range expected {
+		t.Run(k, func(t *testing.T) {
+			ai, ok := actual[k]
+			if !ok {
+				t.Errorf("expected index %q but actual missing", k)
+				return
+			}
+			tests.AssertObjEqual(t, ai, ei, "Name", "Class", "Type", "Where", "Comment", "Option")
+			if len(ei.Fields) != len(ai.Fields) {
+				t.Errorf("expected index %q field length is %d but actual %d", k, len(ei.Fields), len(ai.Fields))
+				return
+			}
+			for i, ef := range ei.Fields {
+				af := ai.Fields[i]
+				tests.AssertObjEqual(t, af, ef, "Name", "Unique", "UniqueIndex", "Expression", "Sort", "Collate", "Length")
+			}
+		})
+		delete(actual, k)
+	}
+	for k := range actual {
+		t.Errorf("unexpected index %q", k)
 	}
 }
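Usage note, not part of the commit: the new CheckIndices helper is not tied to TestParseIndex and can be reused by any other test in the schema_test package in the same way. The sketch below is hedged: the Post model, the idx_title_body index name, and the expected values are assumptions, and the snippet presumes it sits in the same package so the existing imports and CheckIndices are in scope.

// TestParseIndexCompositeSketch is hypothetical; it shows the intended reuse of
// CheckIndices for a named composite index built from priority settings.
func TestParseIndexCompositeSketch(t *testing.T) {
	type Post struct {
		ID    uint
		Title string `gorm:"index:idx_title_body,priority:1"`
		Body  string `gorm:"index:idx_title_body,priority:2"`
	}
	s, err := schema.Parse(&Post{}, &sync.Map{}, schema.NamingStrategy{})
	if err != nil {
		t.Fatalf("failed to parse schema, got error %v", err)
	}
	CheckIndices(t, map[string]schema.Index{
		"idx_title_body": {
			Name: "idx_title_body",
			Fields: []schema.IndexOption{
				// priority decides the column order: Title first, then Body.
				{Field: &schema.Field{Name: "Title"}},
				{Field: &schema.Field{Name: "Body"}},
			},
		},
	}, s.ParseIndexes())
}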
@@ -1608,7 +1608,7 @@ func TestMigrateWithUniqueIndexAndUnique(t *testing.T) {
 	const table = "unique_struct"
 
 	checkColumnType := func(t *testing.T, fieldName string, unique bool) {
-		columnTypes, err := DB.Debug().Migrator().ColumnTypes(table)
+		columnTypes, err := DB.Migrator().ColumnTypes(table)
 		if err != nil {
 			t.Fatalf("%v: failed to get column types, got error: %v", utils.FileWithLineNum(), err)
 		}
@@ -1749,10 +1749,10 @@ func TestMigrateWithUniqueIndexAndUnique(t *testing.T) {
 			if err := DB.Migrator().DropTable(table); err != nil {
 				t.Fatalf("failed to drop table, got error: %v", err)
 			}
-			if err := DB.Debug().Table(table).AutoMigrate(test.from); err != nil {
+			if err := DB.Table(table).AutoMigrate(test.from); err != nil {
 				t.Fatalf("failed to migrate table, got error: %v", err)
 			}
-			if err := DB.Debug().Table(table).AutoMigrate(test.to); err != nil {
+			if err := DB.Table(table).AutoMigrate(test.to); err != nil {
 				t.Fatalf("failed to migrate table, got error: %v", err)
 			}
 			test.checkFunc(t)
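Side note on the two migration-test hunks above, not part of the commit: removing DB.Debug() drops per-statement SQL logging from these tests. If that output is needed again while chasing a failure locally, it can be restored for a single call chain; the helper below is a small sketch of the equivalent session (the package and function names are assumptions; Debug() itself is shorthand for a session whose logger runs at Info level).

package sketch // illustrative helper package, not part of the commit

import (
	"gorm.io/gorm"
	"gorm.io/gorm/logger"
)

// withSQLLogging returns a session that logs every statement, which is what
// the removed DB.Debug() calls provided for these migration tests.
func withSQLLogging(db *gorm.DB) *gorm.DB {
	return db.Session(&gorm.Session{Logger: db.Logger.LogMode(logger.Info)})
}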