Skip to content

Commit d2fead7

Browse files
authored
Merge pull request #37769: [ErrorProne] Enable LongDoubleConversion check
Merge commit d2fead7 (2 parents: 52c6603 + 1317aa0)

24 files changed

Lines changed: 47 additions & 41 deletions

File tree

buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1572,7 +1572,6 @@ class BeamModulePlugin implements Plugin<Project> {
15721572
"DirectInvocationOnMock",
15731573
"Finalize",
15741574
"JUnitIncompatibleType",
1575-
"LongDoubleConversion",
15761575
"MockNotUsedInProduction",
15771576
"NarrowCalculation",
15781577
"NullableTypeParameter",

it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryIOLT.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -341,7 +341,7 @@ private void testRead() throws IOException {
341341
region,
342342
launchInfo.jobId(),
343343
getBeamMetricsName(PipelineMetricsType.COUNTER, READ_ELEMENT_METRIC_NAME));
344-
assertEquals(configuration.numRecords, numRecords, 0.5);
344+
assertEquals((double) configuration.numRecords, numRecords, 0.5);
345345

346346
// export metrics
347347
MetricsConfiguration metricsConfig =

it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/spanner/SpannerIOLT.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ public void testSpannerWriteAndRead() throws IOException {
156156
region,
157157
readInfo.jobId(),
158158
getBeamMetricsName(PipelineMetricsType.COUNTER, READ_ELEMENT_METRIC_NAME));
159-
assertEquals(configuration.numRecords, numRecords, 0.5);
159+
assertEquals((double) configuration.numRecords, numRecords, 0.5);
160160

161161
// export metrics
162162
MetricsConfiguration metricsConfig =

it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -221,7 +221,7 @@ public void testTextIOWriteThenRead() throws IOException {
221221
readInfo.jobId(),
222222
getBeamMetricsName(PipelineMetricsType.COUNTER, READ_ELEMENT_METRIC_NAME));
223223

224-
assertEquals(configuration.numRecords, numRecords, 0.5);
224+
assertEquals((double) configuration.numRecords, numRecords, 0.5);
225225

226226
// export metrics
227227
MetricsConfiguration metricsConfig =

runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ private int calculateShards(long totalRecords) {
138138
return (int) totalRecords;
139139
}
140140
// 100mil records before >7 output files
141-
int floorLogRecs = (int) Math.log10(totalRecords);
141+
int floorLogRecs = (int) Math.log10((double) totalRecords);
142142
return Math.max(floorLogRecs, MIN_SHARDS_FOR_LOG) + extraShards;
143143
}
144144
}

runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSources.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -661,7 +661,7 @@ public void close() throws IOException {
661661
@VisibleForTesting
662662
static @Nullable ReportedParallelism longToParallelism(long value) {
663663
if (value >= 0) {
664-
return new ReportedParallelism().setValue(Double.valueOf(value));
664+
return new ReportedParallelism().setValue(Double.valueOf((double) value));
665665
} else {
666666
return null;
667667
}

runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/counters/CounterFactory.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -226,7 +226,7 @@ public final CounterDistribution addValue(long value) {
226226
long count = this.getCount() + 1;
227227
long sum = this.getSum() + value;
228228
// TODO: Replace sum-of-squares with statistics for a better stddev algorithm.
229-
double sumOfSquares = this.getSumOfSquares() + Math.pow(value, 2);
229+
double sumOfSquares = this.getSumOfSquares() + Math.pow((double) value, 2);
230230

231231
int bucketIndex = calculateBucket(value);
232232
List<Long> buckets = incrementBucket(bucketIndex);

sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -298,7 +298,7 @@ public void splitRestriction(
298298
desiredChunkSize = 64 << 20; // 64mb
299299
} else {
300300
// 1mb --> 1 shard; 1gb --> 32 shards; 1tb --> 1000 shards, 1pb --> 32k shards
301-
desiredChunkSize = Math.max(1 << 20, (long) (1000 * Math.sqrt(estimatedSize)));
301+
desiredChunkSize = Math.max(1 << 20, (long) (1000 * Math.sqrt((double) estimatedSize)));
302302
}
303303
List<BoundedSourceT> splits =
304304
(List<BoundedSourceT>) restriction.split(desiredChunkSize, pipelineOptions);
@@ -1079,7 +1079,7 @@ private Progress tryGetProgressOrThrow() throws IOException {
10791079
if (size != UnboundedReader.BACKLOG_UNKNOWN) {
10801080
// The UnboundedSource/UnboundedReader API has no way of reporting how much work
10811081
// has been completed so runners can only see the work remaining changing.
1082-
return Progress.from(0, size);
1082+
return Progress.from(0, (double) size);
10831083
}
10841084

10851085
// TODO: Support "global" backlog reporting

sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateQuantiles.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -277,7 +277,7 @@ public ApproximateQuantilesCombineFn<T, ComparatorT> withEpsilon(double epsilon)
277277
* of {@code maxNumElements}.
278278
*/
279279
public ApproximateQuantilesCombineFn<T, ComparatorT> withMaxInputSize(long maxNumElements) {
280-
return create(numQuantiles, compareFn, maxNumElements, maxNumElements);
280+
return create(numQuantiles, compareFn, maxNumElements, (double) maxNumElements);
281281
}
282282

283283
/**
@@ -484,7 +484,8 @@ private QuantileBuffer<T> collapse(Iterable<QuantileBuffer<T>> buffers) {
484484
newLevel = Math.max(newLevel, buffer.level + 1);
485485
newWeight += buffer.weight;
486486
}
487-
List<T> newElements = interpolate(buffers, bufferSize, newWeight, offset(newWeight));
487+
List<T> newElements =
488+
interpolate(buffers, bufferSize, (double) newWeight, (double) offset(newWeight));
488489
return new QuantileBuffer<>(newLevel, newWeight, newElements);
489490
}
490491

sdks/java/core/src/main/java/org/apache/beam/sdk/util/FluentBackoff.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -234,7 +234,7 @@ public long nextBackOffMillis() {
234234
Math.min(
235235
backoffConfig.initialBackoff.getMillis()
236236
* Math.pow(backoffConfig.exponent, currentRetry),
237-
backoffConfig.maxBackoff.getMillis());
237+
(double) backoffConfig.maxBackoff.getMillis());
238238
double randomOffset =
239239
(Math.random() * 2 - 1) * DEFAULT_RANDOMIZATION_FACTOR * currentIntervalMillis;
240240
long nextBackoffMillis = Math.round(currentIntervalMillis + randomOffset);

0 commit comments

Comments (0)