  import com.google.api.gax.batching.FlowController.FlowControlException;
  import com.google.api.gax.batching.FlowController.FlowControlRuntimeException;
  import com.google.api.gax.batching.FlowController.LimitExceededBehavior;
+ import com.google.api.gax.rpc.ApiCallContext;
  import com.google.api.gax.rpc.UnaryCallable;
  import com.google.common.annotations.VisibleForTesting;
  import com.google.common.base.Preconditions;
+ import com.google.common.base.Stopwatch;
  import com.google.common.util.concurrent.Futures;
  import java.lang.ref.Reference;
  import java.lang.ref.ReferenceQueue;
@@ -93,22 +95,26 @@ public class BatcherImpl<ElementT, ElementResultT, RequestT, ResponseT>
  private SettableApiFuture<Void> closeFuture;
  private final BatcherStats batcherStats = new BatcherStats();
  private final FlowController flowController;
+ private final ApiCallContext callContext;

  /**
   * @param batchingDescriptor a {@link BatchingDescriptor} for transforming individual elements
   *     into wrappers request and response
   * @param unaryCallable a {@link UnaryCallable} object
   * @param prototype a {@link RequestT} object
   * @param batchingSettings a {@link BatchingSettings} with configuration of thresholds
+  * @deprecated Please instantiate the Batcher with {@link FlowController} and {@link
+  *     ApiCallContext}
   */
+ @Deprecated
  public BatcherImpl(
      BatchingDescriptor<ElementT, ElementResultT, RequestT, ResponseT> batchingDescriptor,
      UnaryCallable<RequestT, ResponseT> unaryCallable,
      RequestT prototype,
      BatchingSettings batchingSettings,
      ScheduledExecutorService executor) {

-   this(batchingDescriptor, unaryCallable, prototype, batchingSettings, executor, null);
+   this(batchingDescriptor, unaryCallable, prototype, batchingSettings, executor, null, null);
  }

  /**
@@ -119,7 +125,9 @@ public BatcherImpl(
   * @param batchingSettings a {@link BatchingSettings} with configuration of thresholds
   * @param flowController a {@link FlowController} for throttling requests. If it's null, create a
   *     {@link FlowController} object from {@link BatchingSettings#getFlowControlSettings()}.
+  * @deprecated Please instantiate the Batcher with {@link ApiCallContext}
   */
+ @Deprecated
  public BatcherImpl(
      BatchingDescriptor<ElementT, ElementResultT, RequestT, ResponseT> batchingDescriptor,
      UnaryCallable<RequestT, ResponseT> unaryCallable,
@@ -128,6 +136,35 @@ public BatcherImpl(
      ScheduledExecutorService executor,
      @Nullable FlowController flowController) {

+   this(
+       batchingDescriptor,
+       unaryCallable,
+       prototype,
+       batchingSettings,
+       executor,
+       flowController,
+       null);
+ }
+
+ /**
+  * @param batchingDescriptor a {@link BatchingDescriptor} for transforming individual elements
+  *     into wrappers request and response
+  * @param unaryCallable a {@link UnaryCallable} object
+  * @param prototype a {@link RequestT} object
+  * @param batchingSettings a {@link BatchingSettings} with configuration of thresholds
+  * @param flowController a {@link FlowController} for throttling requests. If it's null, create a
+  *     {@link FlowController} object from {@link BatchingSettings#getFlowControlSettings()}.
+  * @param callContext a {@link ApiCallContext} object that'll be merged in unaryCallable
+  */
+ public BatcherImpl(
+     BatchingDescriptor<ElementT, ElementResultT, RequestT, ResponseT> batchingDescriptor,
+     UnaryCallable<RequestT, ResponseT> unaryCallable,
+     RequestT prototype,
+     BatchingSettings batchingSettings,
+     ScheduledExecutorService executor,
+     @Nullable FlowController flowController,
+     @Nullable ApiCallContext callContext) {
+
    this.batchingDescriptor =
        Preconditions.checkNotNull(batchingDescriptor, "batching descriptor cannot be null");
    this.unaryCallable = Preconditions.checkNotNull(unaryCallable, "callable cannot be null");
@@ -168,6 +205,7 @@ public BatcherImpl(
      scheduledFuture = Futures.immediateCancelledFuture();
    }
    currentBatcherReference = new BatcherReference(this);
+   this.callContext = callContext;
  }

  /** {@inheritDoc} */
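For orientation, here is a minimal sketch of how a caller might wire up the new seven-argument constructor. It is not part of this change: everything prefixed with my is a hypothetical placeholder for the caller's own batching descriptor, callable, element/request types, and seed request, and the gRPC default context is only one possible ApiCallContext.

// Sketch only. Assumes the usual gax imports (com.google.api.gax.batching.*,
// com.google.api.gax.grpc.GrpcCallContext) plus java.util.concurrent.Executors.
FlowController flowController =
    new FlowController(
        FlowControlSettings.newBuilder()
            .setLimitExceededBehavior(LimitExceededBehavior.Block)
            .setMaxOutstandingElementCount(1000L)
            .build());
BatcherImpl<MyElement, MyElementResult, MyRequest, MyResponse> batcher =
    new BatcherImpl<>(
        myDescriptor, // hypothetical BatchingDescriptor<MyElement, MyElementResult, MyRequest, MyResponse>
        myCallable, // hypothetical UnaryCallable<MyRequest, MyResponse>
        myPrototype, // hypothetical empty MyRequest used to seed each batch
        BatchingSettings.newBuilder()
            .setElementCountThreshold(100L)
            .setRequestByteThreshold(1000L)
            .build(),
        Executors.newSingleThreadScheduledExecutor(),
        flowController,
        GrpcCallContext.createDefault());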
@@ -192,16 +230,18 @@ public ApiFuture<ElementResultT> add(ElementT element) {
    // class, which made it seem unnecessary to have blocking and non-blocking semaphore
    // implementations. Some refactoring may be needed for the optimized implementation. So we'll
    // defer it till we decide on if refactoring FlowController is necessary.
+   Stopwatch stopwatch = Stopwatch.createStarted();
    try {
      flowController.reserve(1, batchingDescriptor.countBytes(element));
    } catch (FlowControlException e) {
      // This exception will only be thrown if the FlowController is set to ThrowException behavior
      throw FlowControlRuntimeException.fromFlowControlException(e);
    }
+   long throttledTimeMs = stopwatch.elapsed(TimeUnit.MILLISECONDS);

    SettableApiFuture<ElementResultT> result = SettableApiFuture.create();
    synchronized (elementLock) {
-     currentOpenBatch.add(element, result);
+     currentOpenBatch.add(element, result, throttledTimeMs);
    }

    if (currentOpenBatch.hasAnyThresholdReached()) {
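The timing added above is the standard Guava Stopwatch idiom: start before the potentially blocking reserve, read the elapsed time once it returns. A self-contained illustration of the same pattern, with a plain Semaphore standing in for FlowController.reserve:

import com.google.common.base.Stopwatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

public class ThrottleTimingSketch {
  public static void main(String[] args) throws InterruptedException {
    Semaphore permits = new Semaphore(0);
    // Release a permit after ~50 ms to simulate the flow controller unblocking us.
    new Thread(
            () -> {
              try {
                Thread.sleep(50);
              } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
              }
              permits.release();
            })
        .start();

    Stopwatch stopwatch = Stopwatch.createStarted();
    permits.acquire(); // blocks, like flowController.reserve(1, byteCount)
    long throttledTimeMs = stopwatch.elapsed(TimeUnit.MILLISECONDS);
    System.out.println("throttled for ~" + throttledTimeMs + " ms");
  }
}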
@@ -230,8 +270,14 @@ public void sendOutstanding() {
      currentOpenBatch = new Batch<>(prototype, batchingDescriptor, batchingSettings, batcherStats);
    }

+   // This check is for old clients that instantiated the batcher without ApiCallContext
+   ApiCallContext callContextWithOption = null;
+   if (callContext != null) {
+     callContextWithOption =
+         callContext.withOption(THROTTLED_TIME_KEY, accumulatedBatch.totalThrottledTimeMs);
+   }
    final ApiFuture<ResponseT> batchResponse =
-       unaryCallable.futureCall(accumulatedBatch.builder.build());
+       unaryCallable.futureCall(accumulatedBatch.builder.build(), callContextWithOption);

    numOfOutstandingBatches.incrementAndGet();
    ApiFutures.addCallback(
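Downstream, whatever receives the merged context (a tracer, an interceptor, or the callable itself) can read the recorded wait back out with the same key. A hedged sketch, assuming THROTTLED_TIME_KEY is the ApiCallContext.Key<Long> referenced above (its definition sits outside the hunks shown) and that the value is read via ApiCallContext#getOption:

// Sketch only: a helper that a tracer or interceptor could call with the
// ApiCallContext handed to unaryCallable.futureCall above.
static void reportThrottling(ApiCallContext context) {
  Long totalThrottledTimeMs = context.getOption(THROTTLED_TIME_KEY);
  if (totalThrottledTimeMs != null) {
    // Forward to a tracer or metrics sink of your choice.
    System.out.println("batch waited " + totalThrottledTimeMs + " ms on flow control");
  }
}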
@@ -367,6 +413,7 @@ private static class Batch<ElementT, ElementResultT, RequestT, ResponseT> {

    private long elementCounter = 0;
    private long byteCounter = 0;
+   private long totalThrottledTimeMs = 0;

    private Batch(
        RequestT prototype,
@@ -383,11 +430,12 @@ private Batch(
      this.batcherStats = batcherStats;
    }

-   void add(ElementT element, SettableApiFuture<ElementResultT> result) {
+   void add(ElementT element, SettableApiFuture<ElementResultT> result, long throttledTimeMs) {
      builder.add(element);
      entries.add(BatchEntry.create(element, result));
      elementCounter++;
      byteCounter += descriptor.countBytes(element);
+     totalThrottledTimeMs += throttledTimeMs;
    }

    void onBatchSuccess(ResponseT response) {
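Because each add() call contributes its own wait, totalThrottledTimeMs is the per-batch sum: for example, three adds that blocked for 5 ms, 0 ms, and 12 ms yield a batch total of 17 ms, which sendOutstanding() then attaches to the call context under THROTTLED_TIME_KEY.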