stream_writer.go
package sql

import (
	"context"
	"database/sql"
	"database/sql/driver"
	"fmt"
	"reflect"

	q "github.com/core-go/sql"
)
// StreamWriter buffers models of type T and saves them to tableName in batches.
type StreamWriter[T any] struct {
	db         *sql.DB
	tableName  string
	BuildParam func(i int) string // renders the i-th placeholder, e.g. "?" or "$1"
	Map        func(T)            // optional hook applied to each model before buffering
	schema     *q.Schema
	batchSize  int
	batch      []interface{}
	Driver     string
	ToArray    func(interface{}) interface { // optional converter for array-valued columns
		driver.Valuer
		sql.Scanner
	}
}
// NewStreamWriter creates a StreamWriter without array support. An optional
// mapper can be passed to transform each model before it is buffered.
func NewStreamWriter[T any](db *sql.DB, tableName string, batchSize int, options ...func(T)) *StreamWriter[T] {
	var mp func(T)
	if len(options) >= 1 {
		mp = options[0]
	}
	return NewSqlStreamWriter[T](db, tableName, batchSize, mp, nil)
}
// NewStreamWriterWithArray creates a StreamWriter with a converter for
// array-valued columns (e.g. pq.Array for PostgreSQL).
func NewStreamWriterWithArray[T any](db *sql.DB, tableName string, batchSize int, toArray func(interface{}) interface {
	driver.Valuer
	sql.Scanner
}, options ...func(T)) *StreamWriter[T] {
	var mp func(T)
	if len(options) >= 1 {
		mp = options[0]
	}
	return NewSqlStreamWriter[T](db, tableName, batchSize, mp, toArray)
}
// NewSqlStreamWriter creates a StreamWriter. The schema is derived from T by
// reflection; it panics if no primary key is defined for the model.
func NewSqlStreamWriter[T any](db *sql.DB, tableName string, batchSize int,
	mp func(T), toArray func(interface{}) interface {
		driver.Valuer
		sql.Scanner
	}, options ...func(i int) string) *StreamWriter[T] {
	var buildParam func(i int) string
	if len(options) > 0 && options[0] != nil {
		buildParam = options[0]
	} else {
		buildParam = q.GetBuild(db)
	}
	driver := q.GetDriver(db)
	// boolSupport := driver == DriverPostgres
	var t T
	modelType := reflect.TypeOf(t)
	if modelType.Kind() == reflect.Ptr {
		modelType = modelType.Elem()
	}
	schema := q.CreateSchema(modelType)
	if len(schema.Keys) <= 0 {
		panic(fmt.Sprintf("require primary key for table '%s'", tableName))
	}
	return &StreamWriter[T]{db: db, Driver: driver, schema: schema, tableName: tableName, batchSize: batchSize, BuildParam: buildParam, Map: mp, ToArray: toArray}
}
// Write applies the optional Map hook, appends the model to the in-memory
// batch, and flushes automatically once the batch reaches batchSize.
func (w *StreamWriter[T]) Write(ctx context.Context, model T) error {
	if w.Map != nil {
		w.Map(model)
	}
	w.batch = append(w.batch, model)
	if len(w.batch) >= w.batchSize {
		return w.Flush(ctx)
	}
	return nil
}
// Flush builds a save (upsert) statement for every buffered model and executes
// them in a single transaction. The buffer is cleared once the transaction has
// started, whether or not it commits.
func (w *StreamWriter[T]) Flush(ctx context.Context) error {
	var queryArgsArray []q.Statement
	for _, v := range w.batch {
		query, args, err := q.BuildToSaveWithArray(w.tableName, v, w.Driver, w.ToArray, w.schema)
		if err != nil {
			return err
		}
		queryArgs := q.Statement{
			Query:  query,
			Params: args,
		}
		queryArgsArray = append(queryArgsArray, queryArgs)
	}
	tx, err := w.db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer func() {
		w.batch = make([]interface{}, 0)
	}()
	for _, v := range queryArgsArray {
		_, err = tx.ExecContext(ctx, v.Query, v.Params...)
		if err != nil {
			tx.Rollback()
			return err
		}
	}
	err = tx.Commit()
	if err != nil {
		tx.Rollback()
		return err
	}
	return nil
}
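
A minimal usage sketch follows. The import path and alias, the driver, the connection string, the table name, and the gorm-style struct tags (which core-go/sql is assumed to read for the column mapping and primary key) are all illustrative, not taken from this repository; the alias is needed because this package is named sql and would otherwise collide with database/sql.

package main

import (
	"context"
	"database/sql"
	"log"

	writer "example.com/yourmodule/sql" // hypothetical import path for the package above
	_ "github.com/lib/pq"               // any database/sql driver works
)

// User is a hypothetical model; the tags below assume core-go/sql derives the
// column names and primary key from gorm-style tags.
type User struct {
	Id       string `gorm:"column:id;primary_key"`
	Username string `gorm:"column:username"`
}

func main() {
	db, err := sql.Open("postgres", "postgres://localhost/demo?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	ctx := context.Background()
	// Flush automatically after every 500 buffered rows.
	w := writer.NewStreamWriter[*User](db, "users", 500)

	for _, u := range []*User{{Id: "1", Username: "alice"}, {Id: "2", Username: "bob"}} {
		if err := w.Write(ctx, u); err != nil {
			log.Fatal(err)
		}
	}
	// Flush whatever remains in the final partial batch.
	if err := w.Flush(ctx); err != nil {
		log.Fatal(err)
	}
}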