add OnDuplicate/OnDuplicateEx feature for package gdb

John Guo 2021-06-16 21:44:31 +08:00
parent e4b0de0d4f
commit d450de8e0d
8 changed files with 430 additions and 193 deletions

View File

@@ -21,7 +21,7 @@ type StrSet struct {
data map[string]struct{}
}
// New creates and returns a new set, which contains un-repeated items.
// NewStrSet creates and returns a new set, which contains un-repeated items.
// The parameter <safe> is used to specify whether using set in concurrent-safety,
// which is false in default.
func NewStrSet(safe ...bool) *StrSet {

View File

@@ -96,7 +96,7 @@ type DB interface {
// ===========================================================================
DoGetAll(ctx context.Context, link Link, sql string, args ...interface{}) (result Result, err error) // See Core.DoGetAll.
DoInsert(ctx context.Context, link Link, table string, data interface{}, option int, batch int) (result sql.Result, err error) // See Core.DoInsert.
DoInsert(ctx context.Context, link Link, table string, data List, option DoInsertOption) (result sql.Result, err error) // See Core.DoInsert.
DoUpdate(ctx context.Context, link Link, table string, data interface{}, condition string, args ...interface{}) (result sql.Result, err error) // See Core.DoUpdate.
DoDelete(ctx context.Context, link Link, table string, condition string, args ...interface{}) (result sql.Result, err error) // See Core.DoDelete.
DoQuery(ctx context.Context, link Link, sql string, args ...interface{}) (rows *sql.Rows, err error) // See Core.DoQuery.
@@ -214,6 +214,14 @@ type Sql struct {
IsTransaction bool // IsTransaction marks whether this sql is executed in transaction.
}
// DoInsertOption is the input struct for function DoInsert.
type DoInsertOption struct {
OnDuplicateStr string // OnDuplicateStr is the raw content for the "ON DUPLICATE KEY UPDATE" statement, used as-is when set.
OnDuplicateMap map[string]interface{} // OnDuplicateMap maps column names to update expressions for the "ON DUPLICATE KEY UPDATE" statement.
InsertOption int // Insert operation.
BatchCount int // Batch count for batch inserting.
}
// TableField is the struct for table field.
type TableField struct {
Index int // For ordering purpose as map is unordered.
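For orientation, a minimal sketch of how this new option struct might be filled for a SAVE insert; the batch size, the `passport` column, and the literal value 2 for the unexported save option are illustrative assumptions:

package main

import (
	"fmt"

	"github.com/gogf/gf/database/gdb"
)

func main() {
	// Illustrative only: a DoInsertOption as the Model layer might build it
	// for a SAVE insert. The field values here are assumptions.
	option := gdb.DoInsertOption{
		InsertOption: 2,  // assumed value of the unexported save option ("2: save" in the DoInsert doc comment)
		BatchCount:   10, // rows per generated INSERT statement
		OnDuplicateMap: map[string]interface{}{
			"passport": "passport", // rendered as `passport`=VALUES(`passport`)
		},
	}
	fmt.Printf("%+v\n", option)
}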

View File

@@ -367,78 +367,15 @@ func (c *Core) Save(table string, data interface{}, batch ...int) (sql.Result, e
// 1: replace: if there's a unique/primary key in the data, it deletes the existing record from the table and inserts a new one;
// 2: save: if there's a unique/primary key in the data, it updates the existing record, or else inserts a new one;
// 3: ignore: if there's a unique/primary key in the data, it ignores the inserting;
func (c *Core) DoInsert(ctx context.Context, link Link, table string, data interface{}, option int, batch int) (result sql.Result, err error) {
table = c.QuotePrefixTableName(table)
func (c *Core) DoInsert(ctx context.Context, link Link, table string, list List, option DoInsertOption) (result sql.Result, err error) {
var (
keys []string // Field names.
values []string // Value holder string array, like: (?,?,?)
params []interface{} // Values that will be committed to underlying database driver.
listMap List // The data list that passed from caller.
keys []string // Field names.
values []string // Value holder string array, like: (?,?,?)
params []interface{} // Values that will be committed to underlying database driver.
onDuplicateStr string // onDuplicateStr is used in "ON DUPLICATE KEY UPDATE" statement.
)
switch value := data.(type) {
case Result:
listMap = value.List()
case Record:
listMap = List{value.Map()}
case List:
listMap = value
for i, v := range listMap {
listMap[i] = ConvertDataForTableRecord(v)
}
case Map:
listMap = List{ConvertDataForTableRecord(value)}
default:
var (
rv = reflect.ValueOf(data)
kind = rv.Kind()
)
if kind == reflect.Ptr {
rv = rv.Elem()
kind = rv.Kind()
}
switch kind {
// If it's slice type, it then converts it to List type.
case reflect.Slice, reflect.Array:
listMap = make(List, rv.Len())
for i := 0; i < rv.Len(); i++ {
listMap[i] = ConvertDataForTableRecord(rv.Index(i).Interface())
}
case reflect.Map:
listMap = List{ConvertDataForTableRecord(value)}
case reflect.Struct:
if v, ok := value.(apiInterfaces); ok {
var (
array = v.Interfaces()
list = make(List, len(array))
)
for i := 0; i < len(array); i++ {
list[i] = ConvertDataForTableRecord(array[i])
}
listMap = list
} else {
listMap = List{ConvertDataForTableRecord(value)}
}
default:
return result, gerror.New(fmt.Sprint("unsupported list type:", kind))
}
}
if len(listMap) < 1 {
return result, gerror.New("data list cannot be empty")
}
if link == nil {
if link, err = c.MasterLink(); err != nil {
return
}
}
// Handle the field names and place holders.
for k, _ := range listMap[0] {
for k, _ := range list[0] {
keys = append(keys, k)
}
// Prepare the batch result pointer.
@@ -446,54 +383,35 @@ func (c *Core) DoInsert(ctx context.Context, link Link, table string, data inter
charL, charR = c.db.GetChars()
batchResult = new(SqlResult)
keysStr = charL + strings.Join(keys, charR+","+charL) + charR
operation = GetInsertOperationByOption(option)
updateStr = ""
operation = GetInsertOperationByOption(option.InsertOption)
)
if option == insertOptionSave {
for _, k := range keys {
// If it's SAVE operation,
// do not automatically update the creating time.
if c.isSoftCreatedFiledName(k) {
continue
}
if len(updateStr) > 0 {
updateStr += ","
}
updateStr += fmt.Sprintf(
"%s%s%s=VALUES(%s%s%s)",
charL, k, charR,
charL, k, charR,
)
}
updateStr = fmt.Sprintf("ON DUPLICATE KEY UPDATE %s", updateStr)
}
if batch <= 0 {
batch = defaultBatchNumber
if option.InsertOption == insertOptionSave {
onDuplicateStr = c.formatOnDuplicate(keys, option)
}
var (
listMapLen = len(listMap)
listLength = len(list)
valueHolder = make([]string, 0)
)
for i := 0; i < listMapLen; i++ {
for i := 0; i < listLength; i++ {
values = values[:0]
// Note that the map type is unordered,
// so the ordered keys slice is used to retrieve the values.
for _, k := range keys {
if s, ok := listMap[i][k].(Raw); ok {
if s, ok := list[i][k].(Raw); ok {
values = append(values, gconv.String(s))
} else {
values = append(values, "?")
params = append(params, listMap[i][k])
params = append(params, list[i][k])
}
}
valueHolder = append(valueHolder, "("+gstr.Join(values, ",")+")")
// Batch packing check: flush when the batch count is reached or this is the last element.
if len(valueHolder) == batch || (i == listMapLen-1 && len(valueHolder) > 0) {
if len(valueHolder) == option.BatchCount || (i == listLength-1 && len(valueHolder) > 0) {
r, err := c.db.DoExec(ctx, link, fmt.Sprintf(
"%s INTO %s(%s) VALUES%s %s",
operation, table, keysStr,
operation, c.QuotePrefixTableName(table), keysStr,
gstr.Join(valueHolder, ","),
updateStr,
onDuplicateStr,
), params...)
if err != nil {
return r, err
@@ -511,6 +429,52 @@ func (c *Core) DoInsert(ctx context.Context, link Link, table string, data inter
return batchResult, nil
}
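A hedged sketch of what this rewritten DoInsert produces through the Model layer for a three-row SAVE with a batch count of 2; the `user` table, its columns, and a configured MySQL connection are assumptions:

package main

import (
	"github.com/gogf/gf/frame/g"
)

func main() {
	db := g.DB()
	// With Batch(2) and three rows, DoInsert flushes two statements, roughly:
	//   INSERT INTO `user`(`id`,`name`) VALUES(?,?),(?,?) ON DUPLICATE KEY UPDATE `id`=VALUES(`id`),`name`=VALUES(`name`)
	//   INSERT INTO `user`(`id`,`name`) VALUES(?,?) ON DUPLICATE KEY UPDATE `id`=VALUES(`id`),`name`=VALUES(`name`)
	_, err := db.Model("user").Data(g.List{
		g.Map{"id": 1, "name": "a"},
		g.Map{"id": 2, "name": "b"},
		g.Map{"id": 3, "name": "c"},
	}).Batch(2).Save()
	if err != nil {
		panic(err)
	}
}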
func (c *Core) formatOnDuplicate(columns []string, option DoInsertOption) string {
var (
onDuplicateStr string
)
if option.OnDuplicateStr != "" {
onDuplicateStr = option.OnDuplicateStr
} else if len(option.OnDuplicateMap) > 0 {
for k, v := range option.OnDuplicateMap {
if len(onDuplicateStr) > 0 {
onDuplicateStr += ","
}
switch v.(type) {
case Raw, *Raw:
onDuplicateStr += fmt.Sprintf(
"%s=%s",
c.QuoteWord(k),
v,
)
default:
onDuplicateStr += fmt.Sprintf(
"%s=VALUES(%s)",
c.QuoteWord(k),
c.QuoteWord(gconv.String(v)),
)
}
}
} else {
for _, column := range columns {
// If it's SAVE operation,
// do not automatically update the creating time.
if c.isSoftCreatedFilledName(column) {
continue
}
if len(onDuplicateStr) > 0 {
onDuplicateStr += ","
}
onDuplicateStr += fmt.Sprintf(
"%s=VALUES(%s)",
c.QuoteWord(column),
c.QuoteWord(column),
)
}
}
return fmt.Sprintf("ON DUPLICATE KEY UPDATE %s", onDuplicateStr)
}
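To make the default branch concrete, a simplified standalone stand-in (quoting hard-coded to MySQL backticks, soft-created-time filtering omitted) that produces the same clause shape when neither OnDuplicateStr nor OnDuplicateMap is set:

package main

import (
	"fmt"
	"strings"
)

// buildDefaultOnDuplicate mirrors the default branch of formatOnDuplicate:
// every inserted column is updated to its inserted value.
func buildDefaultOnDuplicate(columns []string) string {
	parts := make([]string, 0, len(columns))
	for _, column := range columns {
		parts = append(parts, fmt.Sprintf("`%s`=VALUES(`%s`)", column, column))
	}
	return fmt.Sprintf("ON DUPLICATE KEY UPDATE %s", strings.Join(parts, ","))
}

func main() {
	// Output: ON DUPLICATE KEY UPDATE `passport`=VALUES(`passport`),`password`=VALUES(`password`)
	fmt.Println(buildDefaultOnDuplicate([]string{"passport", "password"}))
}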
// Update does "UPDATE ... " statement for the table.
//
// The parameter `data` can be type of string/map/gmap/struct/*struct, etc.
@@ -711,8 +675,8 @@ func (c *Core) HasTable(name string) (bool, error) {
return false, nil
}
// isSoftCreatedFiledName checks and returns whether the given field name is an automatically filled created-time field.
func (c *Core) isSoftCreatedFiledName(fieldName string) bool {
// isSoftCreatedFilledName checks and returns whether the given field name is an automatically filled created-time field.
func (c *Core) isSoftCreatedFilledName(fieldName string) bool {
if fieldName == "" {
return false
}

View File

@@ -264,95 +264,40 @@ func (d *DriverOracle) getTableUniqueIndex(table string) (fields map[string]map[
return
}
func (d *DriverOracle) DoInsert(ctx context.Context, link Link, table string, list interface{}, option int, batch int) (result sql.Result, err error) {
func (d *DriverOracle) DoInsert(ctx context.Context, link Link, table string, list List, option DoInsertOption) (result sql.Result, err error) {
var (
keys []string
values []string
params []interface{}
)
listMap := (List)(nil)
switch v := list.(type) {
case Result:
listMap = v.List()
case Record:
listMap = List{v.Map()}
case List:
listMap = v
case Map:
listMap = List{v}
default:
var (
rv = reflect.ValueOf(list)
kind = rv.Kind()
)
if kind == reflect.Ptr {
rv = rv.Elem()
kind = rv.Kind()
}
switch kind {
case reflect.Slice, reflect.Array:
listMap = make(List, rv.Len())
for i := 0; i < rv.Len(); i++ {
listMap[i] = ConvertDataForTableRecord(rv.Index(i).Interface())
}
case reflect.Map:
fallthrough
case reflect.Struct:
listMap = List{ConvertDataForTableRecord(list)}
default:
return result, gerror.New(fmt.Sprint("unsupported list type:", kind))
}
}
if len(listMap) < 1 {
return result, gerror.New("empty data list")
}
if link == nil {
if link, err = d.MasterLink(); err != nil {
return
}
}
// Retrieve the table fields and length.
holders := []string(nil)
for k, _ := range listMap[0] {
var (
listLength = len(list)
valueHolder = make([]string, 0)
)
for k, _ := range list[0] {
keys = append(keys, k)
holders = append(holders, "?")
valueHolder = append(valueHolder, "?")
}
var (
batchResult = new(SqlResult)
charL, charR = d.db.GetChars()
keyStr = charL + strings.Join(keys, charL+","+charR) + charR
valueHolderStr = strings.Join(holders, ",")
valueHolderStr = strings.Join(valueHolder, ",")
)
if option != insertOptionDefault {
for _, v := range listMap {
r, err := d.DoInsert(ctx, link, table, v, option, 1)
if err != nil {
return r, err
}
if n, err := r.RowsAffected(); err != nil {
return r, err
} else {
batchResult.result = r
batchResult.affected += n
}
}
return batchResult, nil
}
if batch <= 0 {
batch = defaultBatchNumber
}
// Format "INSERT...INTO..." statement.
intoStr := make([]string, 0)
for i := 0; i < len(listMap); i++ {
for i := 0; i < len(list); i++ {
for _, k := range keys {
params = append(params, listMap[i][k])
params = append(params, list[i][k])
}
values = append(values, valueHolderStr)
intoStr = append(intoStr, fmt.Sprintf(" INTO %s(%s) VALUES(%s) ", table, keyStr, valueHolderStr))
if len(intoStr) == batch {
r, err := d.DoExec(ctx, link, fmt.Sprintf("INSERT ALL %s SELECT * FROM DUAL", strings.Join(intoStr, " ")), params...)
intoStr = append(intoStr, fmt.Sprintf("INTO %s(%s) VALUES(%s)", table, keyStr, valueHolderStr))
if len(intoStr) == option.BatchCount || (i == listLength-1 && len(valueHolder) > 0) {
r, err := d.DoExec(ctx, link, fmt.Sprintf(
"INSERT ALL %s SELECT * FROM DUAL",
strings.Join(intoStr, " "),
), params...)
if err != nil {
return r, err
}
@@ -366,18 +311,5 @@ func (d *DriverOracle) DoInsert(ctx context.Context, link Link, table string, li
intoStr = intoStr[:0]
}
}
// The leftover data.
if len(intoStr) > 0 {
r, err := d.DoExec(ctx, link, fmt.Sprintf("INSERT ALL %s SELECT * FROM DUAL", strings.Join(intoStr, " ")), params...)
if err != nil {
return r, err
}
if n, err := r.RowsAffected(); err != nil {
return r, err
} else {
batchResult.result = r
batchResult.affected += n
}
}
return batchResult, nil
}
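A hedged sketch of the batch statement shape the Oracle branch assembles; the table and columns are assumptions, and the snippet only mimics the string assembly rather than touching a database:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Mirrors the "INSERT ALL ... SELECT * FROM DUAL" shape built per batch;
	// the real call binds the collected params to the ? placeholders.
	intoStr := []string{
		"INTO user(id,name) VALUES(?,?)",
		"INTO user(id,name) VALUES(?,?)",
	}
	fmt.Printf(
		"INSERT ALL %s SELECT * FROM DUAL\n",
		strings.Join(intoStr, " "),
	)
}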

View File

@@ -164,12 +164,15 @@ func ConvertDataForTableRecord(value interface{}) map[string]interface{} {
// Convert the value to JSON.
data[k], _ = json.Marshal(v)
}
case reflect.Struct:
switch v.(type) {
case time.Time, *time.Time, gtime.Time, *gtime.Time:
continue
case Counter, *Counter:
continue
default:
// Use string conversion in default.
if s, ok := v.(apiString); ok {
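Since Counter values now pass through ConvertDataForTableRecord unconverted, an increment-style update keeps working; a hedged usage sketch, assuming a configured connection plus a `user` table with a `views` column:

package main

import (
	"github.com/gogf/gf/database/gdb"
	"github.com/gogf/gf/frame/g"
)

func main() {
	db := g.DB()
	// The Counter survives data conversion and renders roughly as
	// `views`=`views`+1 in the generated UPDATE statement.
	_, _ = db.Model("user").Data(g.Map{
		"views": &gdb.Counter{Field: "views", Value: 1},
	}).Where("id", 1).Update()
}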

View File

@@ -49,6 +49,8 @@ type Model struct {
cacheName string // Cache name for custom operation.
unscoped bool // Disables soft deleting features when select/delete operations.
safe bool // If true, it clones and returns a new model object whenever operation done; or else it changes the attribute of current model.
onDuplicate interface{} // onDuplicate is used for the "ON DUPLICATE KEY UPDATE" statement.
onDuplicateEx interface{} // onDuplicateEx is used for excluding some columns from the "ON DUPLICATE KEY UPDATE" statement.
}
// whereHolder is the holder for where condition preparing.

View File

@@ -8,6 +8,8 @@ package gdb
import (
"database/sql"
"fmt"
"github.com/gogf/gf/container/gset"
"reflect"
"github.com/gogf/gf/errors/gerror"
@@ -51,16 +53,20 @@ func (m *Model) Data(data ...interface{}) *Model {
switch params := data[0].(type) {
case Result:
model.data = params.List()
case Record:
model.data = params.Map()
case List:
list := make(List, len(params))
for k, v := range params {
list[k] = gutil.MapCopy(v)
}
model.data = list
case Map:
model.data = gutil.MapCopy(params)
default:
var (
rv = reflect.ValueOf(params)
@@ -100,6 +106,24 @@ func (m *Model) Data(data ...interface{}) *Model {
return model
}
// OnDuplicate sets the update operations to perform when a column conflict occurs.
// In MySQL, this is used for the "ON DUPLICATE KEY UPDATE" statement.
// The parameter `onDuplicate` can be of type string/Raw/*Raw/map/slice.
func (m *Model) OnDuplicate(onDuplicate interface{}) *Model {
model := m.getModel()
model.onDuplicate = onDuplicate
return model
}
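A minimal usage sketch, assuming a configured MySQL connection and a `user` table with a unique key on `passport`; the names mirror the tests added at the bottom of this commit:

package main

import (
	"github.com/gogf/gf/database/gdb"
	"github.com/gogf/gf/frame/g"
)

func main() {
	db := g.DB()
	data := g.Map{"id": 1, "passport": "pp1", "password": "pw1", "nickname": "n1"}

	// Only passport and password are updated on a key conflict:
	//   ... ON DUPLICATE KEY UPDATE `passport`=VALUES(`passport`),`password`=VALUES(`password`)
	_, _ = db.Model("user").OnDuplicate("passport,password").Data(data).Save()

	// A Raw value is written into the clause verbatim:
	//   ... ON DUPLICATE KEY UPDATE `passport`=CONCAT(VALUES(`passport`), '1')
	_, _ = db.Model("user").OnDuplicate(g.Map{
		"passport": gdb.Raw("CONCAT(VALUES(`passport`), '1')"),
	}).Data(data).Save()
}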
// OnDuplicateEx sets the columns that are excluded from the update operations when a column conflict occurs.
// In MySQL, this is used for the "ON DUPLICATE KEY UPDATE" statement.
// The parameter `onDuplicateEx` can be of type string/Raw/*Raw/map/slice.
func (m *Model) OnDuplicateEx(onDuplicateEx interface{}) *Model {
model := m.getModel()
model.onDuplicateEx = onDuplicateEx
return model
}
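And the excluding counterpart, under the same assumptions (configured connection, `user` table with the test columns):

package main

import (
	"github.com/gogf/gf/frame/g"
)

func main() {
	db := g.DB()
	data := g.Map{
		"id": 1, "passport": "pp1", "password": "pw1",
		"nickname": "n1", "create_time": "2016-06-06",
	}

	// Every column except nickname and create_time is updated on a key conflict:
	//   ... ON DUPLICATE KEY UPDATE `id`=VALUES(`id`),`passport`=VALUES(`passport`),`password`=VALUES(`password`)
	_, _ = db.Model("user").OnDuplicateEx("nickname,create_time").Data(data).Save()
}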
// Insert does "INSERT INTO ..." statement for the model.
// The optional parameter `data` is the same as the parameter of Model.Data function,
// see Model.Data.
@@ -156,7 +180,7 @@ func (m *Model) Save(data ...interface{}) (result sql.Result, err error) {
}
// doInsertWithOption inserts data with option parameter.
func (m *Model) doInsertWithOption(option int) (result sql.Result, err error) {
func (m *Model) doInsertWithOption(insertOption int) (result sql.Result, err error) {
defer func() {
if err == nil {
m.checkAndRemoveCache()
@@ -176,17 +200,66 @@ func (m *Model) doInsertWithOption(option int) (result sql.Result, err error) {
if err != nil {
return nil, err
}
// It converts any data to List type for inserting.
switch newData.(type) {
case Map:
list = List{newData.(Map)}
switch value := newData.(type) {
case Result:
list = value.List()
case Record:
list = List{value.Map()}
case List:
list = newData.(List)
list = value
for i, v := range list {
list[i] = ConvertDataForTableRecord(v)
}
case Map:
list = List{ConvertDataForTableRecord(value)}
default:
return nil, gerror.New("inserting into table with invalid data type")
var (
rv = reflect.ValueOf(newData)
kind = rv.Kind()
)
if kind == reflect.Ptr {
rv = rv.Elem()
kind = rv.Kind()
}
switch kind {
// If it's slice type, it then converts it to List type.
case reflect.Slice, reflect.Array:
list = make(List, rv.Len())
for i := 0; i < rv.Len(); i++ {
list[i] = ConvertDataForTableRecord(rv.Index(i).Interface())
}
case reflect.Map:
list = List{ConvertDataForTableRecord(value)}
case reflect.Struct:
if v, ok := value.(apiInterfaces); ok {
var (
array = v.Interfaces()
)
list = make(List, len(array))
for i := 0; i < len(array); i++ {
list[i] = ConvertDataForTableRecord(array[i])
}
} else {
list = List{ConvertDataForTableRecord(value)}
}
default:
return result, gerror.New(fmt.Sprint("unsupported list type:", kind))
}
}
if len(list) < 1 {
return result, gerror.New("data list cannot be empty")
}
// Automatic handling for creating/updating time.
if !m.unscoped && (fieldNameCreate != "" || fieldNameUpdate != "") {
for k, v := range list {
@@ -200,7 +273,117 @@ func (m *Model) doInsertWithOption(option int) (result sql.Result, err error) {
list[k] = v
}
}
return m.db.DoInsert(m.GetCtx(), m.getLink(true), m.tables, list, option, m.getBatch())
// Format DoInsertOption, especially for "ON DUPLICATE KEY UPDATE" statement.
columnNames := make([]string, 0, len(list[0]))
for k, _ := range list[0] {
columnNames = append(columnNames, k)
}
doInsertOption, err := m.formatDoInsertOption(insertOption, columnNames)
if err != nil {
return result, err
}
return m.db.DoInsert(m.GetCtx(), m.getLink(true), m.tables, list, doInsertOption)
}
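Since the conversion switch above now mirrors the one removed from Core.DoInsert, struct slices keep working through the Model layer; a hedged sketch, where the User type, its orm tags, and a configured connection are assumptions:

package main

import (
	"github.com/gogf/gf/frame/g"
)

// User is an illustrative struct for the sketch; the orm tags map its fields
// to assumed column names.
type User struct {
	Id       int    `orm:"id"`
	Passport string `orm:"passport"`
}

func main() {
	db := g.DB()
	// The struct slice is reflected into a List (one ConvertDataForTableRecord
	// call per element) and batch-inserted; Save() adds the ON DUPLICATE KEY
	// UPDATE clause built from the resulting column names.
	_, _ = db.Model("user").Data([]User{
		{Id: 1, Passport: "pp1"},
		{Id: 2, Passport: "pp2"},
	}).Save()
}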
func (m *Model) formatDoInsertOption(insertOption int, columnNames []string) (option DoInsertOption, err error) {
option = DoInsertOption{
InsertOption: insertOption,
BatchCount: m.getBatch(),
}
if insertOption == insertOptionSave {
onDuplicateExKeys, err := m.formatOnDuplicateExKeys(m.onDuplicateEx)
if err != nil {
return option, err
}
var (
onDuplicateExKeySet = gset.NewStrSetFrom(onDuplicateExKeys)
)
if m.onDuplicate != nil {
switch m.onDuplicate.(type) {
case Raw, *Raw:
option.OnDuplicateStr = gconv.String(m.onDuplicate)
default:
var (
reflectValue = reflect.ValueOf(m.onDuplicate)
reflectKind = reflectValue.Kind()
)
for reflectKind == reflect.Ptr {
reflectValue = reflectValue.Elem()
reflectKind = reflectValue.Kind()
}
switch reflectKind {
case reflect.String:
option.OnDuplicateMap = make(map[string]interface{})
for _, v := range gstr.SplitAndTrim(reflectValue.String(), ",") {
if onDuplicateExKeySet.Contains(v) {
continue
}
option.OnDuplicateMap[v] = v
}
case reflect.Map:
option.OnDuplicateMap = make(map[string]interface{})
for k, v := range gconv.Map(m.onDuplicate) {
if onDuplicateExKeySet.Contains(k) {
continue
}
option.OnDuplicateMap[k] = v
}
case reflect.Slice, reflect.Array:
option.OnDuplicateMap = make(map[string]interface{})
for _, v := range gconv.Strings(m.onDuplicate) {
if onDuplicateExKeySet.Contains(v) {
continue
}
option.OnDuplicateMap[v] = v
}
default:
return option, gerror.Newf(`unsupported OnDuplicate parameter type "%s"`, reflect.TypeOf(m.onDuplicate))
}
}
} else if onDuplicateExKeySet.Size() > 0 {
option.OnDuplicateMap = make(map[string]interface{})
for _, v := range columnNames {
if onDuplicateExKeySet.Contains(v) {
continue
}
option.OnDuplicateMap[v] = v
}
}
}
return
}
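A hedged sketch of the precedence this function implements when both settings are present (table and columns assumed): the OnDuplicateEx keys are filtered out of the OnDuplicate list before the map is built. Note that a Raw/*Raw OnDuplicate value bypasses this filtering, since it is copied verbatim into OnDuplicateStr.

package main

import (
	"github.com/gogf/gf/frame/g"
)

func main() {
	db := g.DB()
	data := g.Map{"id": 1, "passport": "pp1", "password": "pw1", "nickname": "n1"}

	// nickname is listed in OnDuplicate but excluded again by OnDuplicateEx,
	// so the clause updates only passport and password on a key conflict.
	_, _ = db.Model("user").
		OnDuplicate("passport,password,nickname").
		OnDuplicateEx("nickname").
		Data(data).
		Save()
}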
func (m *Model) formatOnDuplicateExKeys(onDuplicateEx interface{}) ([]string, error) {
if onDuplicateEx == nil {
return nil, nil
}
var (
reflectValue = reflect.ValueOf(onDuplicateEx)
reflectKind = reflectValue.Kind()
)
for reflectKind == reflect.Ptr {
reflectValue = reflectValue.Elem()
reflectKind = reflectValue.Kind()
}
switch reflectKind {
case reflect.String:
return gstr.SplitAndTrim(reflectValue.String(), ","), nil
case reflect.Map:
return gutil.Keys(onDuplicateEx), nil
case reflect.Slice, reflect.Array:
return gconv.Strings(onDuplicateEx), nil
default:
return nil, gerror.Newf(`unsupported OnDuplicateEx parameter type "%s"`, reflect.TypeOf(onDuplicateEx))
}
}
func (m *Model) getBatch() int {

View File

@@ -3551,3 +3551,148 @@ func Test_Model_Increment_Decrement(t *testing.T) {
t.Assert(count, 1)
})
}
func Test_Model_OnDuplicate(t *testing.T) {
table := createInitTable()
defer dropTable(table)
// string.
gtest.C(t, func(t *gtest.T) {
data := g.Map{
"id": 1,
"passport": "pp1",
"password": "pw1",
"nickname": "n1",
"create_time": "2016-06-06",
}
_, err := db.Model(table).OnDuplicate("passport,password").Data(data).Save()
t.AssertNil(err)
one, err := db.Model(table).FindOne(1)
t.AssertNil(err)
t.Assert(one["passport"], data["passport"])
t.Assert(one["password"], data["password"])
t.Assert(one["nickname"], "name_1")
})
// slice.
gtest.C(t, func(t *gtest.T) {
data := g.Map{
"id": 1,
"passport": "pp1",
"password": "pw1",
"nickname": "n1",
"create_time": "2016-06-06",
}
_, err := db.Model(table).OnDuplicate(g.Slice{"passport", "password"}).Data(data).Save()
t.AssertNil(err)
one, err := db.Model(table).FindOne(1)
t.AssertNil(err)
t.Assert(one["passport"], data["passport"])
t.Assert(one["password"], data["password"])
t.Assert(one["nickname"], "name_1")
})
// map.
gtest.C(t, func(t *gtest.T) {
data := g.Map{
"id": 1,
"passport": "pp1",
"password": "pw1",
"nickname": "n1",
"create_time": "2016-06-06",
}
_, err := db.Model(table).OnDuplicate(g.Map{
"passport": "nickname",
"password": "nickname",
}).Data(data).Save()
t.AssertNil(err)
one, err := db.Model(table).FindOne(1)
t.AssertNil(err)
t.Assert(one["passport"], data["nickname"])
t.Assert(one["password"], data["nickname"])
t.Assert(one["nickname"], "name_1")
})
// map+raw.
gtest.C(t, func(t *gtest.T) {
data := g.MapStrStr{
"id": "1",
"passport": "pp1",
"password": "pw1",
"nickname": "n1",
"create_time": "2016-06-06",
}
_, err := db.Model(table).OnDuplicate(g.Map{
"passport": gdb.Raw("CONCAT(VALUES(`passport`), '1')"),
"password": gdb.Raw("CONCAT(VALUES(`password`), '2')"),
}).Data(data).Save()
t.AssertNil(err)
one, err := db.Model(table).FindOne(1)
t.AssertNil(err)
t.Assert(one["passport"], data["passport"]+"1")
t.Assert(one["password"], data["password"]+"2")
t.Assert(one["nickname"], "name_1")
})
}
func Test_Model_OnDuplicateEx(t *testing.T) {
table := createInitTable()
defer dropTable(table)
// string.
gtest.C(t, func(t *gtest.T) {
data := g.Map{
"id": 1,
"passport": "pp1",
"password": "pw1",
"nickname": "n1",
"create_time": "2016-06-06",
}
_, err := db.Model(table).OnDuplicateEx("nickname,create_time").Data(data).Save()
t.AssertNil(err)
one, err := db.Model(table).FindOne(1)
t.AssertNil(err)
t.Assert(one["passport"], data["passport"])
t.Assert(one["password"], data["password"])
t.Assert(one["nickname"], "name_1")
})
// slice.
gtest.C(t, func(t *gtest.T) {
data := g.Map{
"id": 1,
"passport": "pp1",
"password": "pw1",
"nickname": "n1",
"create_time": "2016-06-06",
}
_, err := db.Model(table).OnDuplicateEx(g.Slice{"nickname", "create_time"}).Data(data).Save()
t.AssertNil(err)
one, err := db.Model(table).FindOne(1)
t.AssertNil(err)
t.Assert(one["passport"], data["passport"])
t.Assert(one["password"], data["password"])
t.Assert(one["nickname"], "name_1")
})
// map.
gtest.C(t, func(t *gtest.T) {
data := g.Map{
"id": 1,
"passport": "pp1",
"password": "pw1",
"nickname": "n1",
"create_time": "2016-06-06",
}
_, err := db.Model(table).OnDuplicateEx(g.Map{
"nickname": "nickname",
"create_time": "nickname",
}).Data(data).Save()
t.AssertNil(err)
one, err := db.Model(table).FindOne(1)
t.AssertNil(err)
t.Assert(one["passport"], data["passport"])
t.Assert(one["password"], data["password"])
t.Assert(one["nickname"], "name_1")
})
}