@@ -23,6 +23,11 @@ import (
 	"time"
 )
 
+const (
+	batcherActive   = uint32(0)
+	batcherDisposed = uint32(1)
+)
+
 // Batcher provides an API for accumulating items into a batch for processing.
 type Batcher interface {
 	// Put adds items to the batcher.
@@ -55,10 +60,11 @@ type basicBatcher struct {
 	maxItems       uint
 	maxBytes       uint
 	calculateBytes CalculateBytes
-	disposed       bool
+	disposed       uint32
 	items          []interface{}
 	lock           sync.RWMutex
 	batchChan      chan []interface{}
+	disposeChan    chan struct{}
 	availableBytes uint
 	waiting        int32
 }
@@ -82,19 +88,19 @@ func New(maxTime time.Duration, maxItems, maxBytes, queueLen uint, calculate Cal
 		calculateBytes: calculate,
 		items:          make([]interface{}, 0, maxItems),
 		batchChan:      make(chan []interface{}, queueLen),
+		disposeChan:    make(chan struct{}),
 	}, nil
 }
 
 // Put adds items to the batcher. If Put is continually called without calls to
 // Get, an unbounded number of go-routines will be generated.
 // Note: there is no order guarantee for items entering/leaving the batcher.
 func (b *basicBatcher) Put(item interface{}) error {
-	b.lock.Lock()
-	if b.disposed {
-		b.lock.Unlock()
+	// Check to see if disposed before putting
+	if b.IsDisposed() {
 		return ErrDisposed
 	}
-
+	b.lock.Lock()
 	b.items = append(b.items, item)
 	if b.calculateBytes != nil {
 		b.availableBytes += b.calculateBytes(item)
@@ -121,18 +127,25 @@ func (b *basicBatcher) Get() ([]interface{}, error) {
 		timeout = time.After(b.maxTime)
 	}
 
+	// Check to see if disposed before blocking
+	if b.IsDisposed() {
+		return nil, ErrDisposed
+	}
+
 	select {
-	case items, ok := <-b.batchChan:
+	case items := <-b.batchChan:
+		return items, nil
+	case _, ok := <-b.disposeChan:
 		if !ok {
 			return nil, ErrDisposed
 		}
-		return items, nil
+		return nil, nil
 	case <-timeout:
-		b.lock.Lock()
-		if b.disposed {
-			b.lock.Unlock()
+		// Check to see if disposed before getting lock
+		if b.IsDisposed() {
 			return nil, ErrDisposed
 		}
+		b.lock.Lock()
 		items := b.items
 		b.items = make([]interface{}, 0, b.maxItems)
 		b.availableBytes = 0
@@ -143,11 +156,10 @@ func (b *basicBatcher) Get() ([]interface{}, error) {
 
 // Flush forcibly completes the batch currently being built
 func (b *basicBatcher) Flush() error {
-	b.lock.Lock()
-	if b.disposed {
-		b.lock.Unlock()
+	if b.IsDisposed() {
 		return ErrDisposed
 	}
+	b.lock.Lock()
 	b.flush()
 	b.lock.Unlock()
 	return nil
@@ -157,14 +169,14 @@ func (b *basicBatcher) Flush() error {
 // will return ErrDisposed, calls to Get will return an error iff
 // there are no more ready batches.
 func (b *basicBatcher) Dispose() {
-	b.lock.Lock()
-	if b.disposed {
-		b.lock.Unlock()
+	// Check to see if already disposed before attempting to dispose
+	if !atomic.CompareAndSwapUint32(&b.disposed, batcherActive, batcherDisposed) {
 		return
 	}
+	b.lock.Lock()
 	b.flush()
-	b.disposed = true
 	b.items = nil
+	close(b.disposeChan)
 
 	// Drain the batch channel and all routines waiting to put on the channel
 	for len(b.batchChan) > 0 || atomic.LoadInt32(&b.waiting) > 0 {
@@ -176,10 +188,7 @@ func (b *basicBatcher) Dispose() {
 
 // IsDisposed will determine if the batcher is disposed
 func (b *basicBatcher) IsDisposed() bool {
-	b.lock.RLock()
-	disposed := b.disposed
-	b.lock.RUnlock()
-	return disposed
+	return atomic.LoadUint32(&b.disposed) == batcherDisposed
 }
 
 // flush adds the batch currently being built to the queue of completed batches.
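For reviewers, the disposal pattern this diff adopts — a uint32 flag set exactly once via compare-and-swap, paired with a channel that is closed to broadcast shutdown to anything parked in a select — can be sketched in isolation. The sketch below is only an illustration of that pattern under the same assumptions as the diff; the `disposer` type and its names are hypothetical and not part of this package.

```go
package main

import (
	"errors"
	"fmt"
	"sync/atomic"
	"time"
)

var errDisposed = errors.New("disposed")

const (
	active   = uint32(0)
	disposed = uint32(1)
)

// disposer is a hypothetical stand-in for basicBatcher's disposal fields.
type disposer struct {
	state       uint32        // read with atomic.LoadUint32, set once via CAS
	disposeChan chan struct{} // closed exactly once to wake all blocked readers
	work        chan string   // stands in for batchChan
}

func newDisposer() *disposer {
	return &disposer{disposeChan: make(chan struct{}), work: make(chan string, 1)}
}

// isDisposed mirrors IsDisposed: a lock-free read of the flag.
func (d *disposer) isDisposed() bool {
	return atomic.LoadUint32(&d.state) == disposed
}

// dispose mirrors Dispose: only the caller that wins the CAS runs the cleanup,
// and closing disposeChan unblocks every pending get at once.
func (d *disposer) dispose() {
	if !atomic.CompareAndSwapUint32(&d.state, active, disposed) {
		return // already disposed by another caller
	}
	close(d.disposeChan)
}

// get mirrors Get's select: either a work item arrives or disposal is observed.
func (d *disposer) get() (string, error) {
	select {
	case item := <-d.work:
		return item, nil
	case <-d.disposeChan: // receive on a closed channel returns immediately
		return "", errDisposed
	}
}

func main() {
	d := newDisposer()
	go func() {
		time.Sleep(10 * time.Millisecond)
		d.dispose()
	}()
	_, err := d.get() // blocks until dispose closes disposeChan
	fmt.Println(err, d.isDisposed())
}
```

Closing `disposeChan` rather than sending on it is what lets every goroutine blocked in Get's select observe disposal simultaneously, which is the behaviour the Dispose hunk above relies on.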
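A caller-level view of the same semantics, offered as a hedged sketch: the import path below is a placeholder, and it assumes New returns `(Batcher, error)` as the constructor hunk suggests, that `CalculateBytes` is a `func(interface{}) uint` as implied by its use in Put, and that `ErrDisposed` is exported by this package as referenced throughout the diff.

```go
package main

import (
	"fmt"
	"time"

	"example.com/placeholder/batcher" // placeholder import path; adjust to the real module
)

func main() {
	// Batch up to 100 items or 1 KiB, flushing at the latest after 50ms;
	// a queueLen of 10 bounds how many completed batches can await Get.
	b, err := batcher.New(50*time.Millisecond, 100, 1024, 10,
		func(item interface{}) uint { return uint(len(item.(string))) })
	if err != nil {
		panic(err)
	}

	for i := 0; i < 3; i++ {
		if err := b.Put(fmt.Sprintf("item-%d", i)); err != nil {
			fmt.Println("put failed:", err)
		}
	}

	// With only three small items, Get returns on the maxTime timeout path.
	items, err := b.Get()
	fmt.Println(len(items), err)

	b.Dispose()
	fmt.Println(b.IsDisposed()) // true, via the atomic flag
	_, err = b.Get()            // disposed batcher: ErrDisposed
	fmt.Println(err == batcher.ErrDisposed)
}
```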