Skip to content

Commit 6c42cc1

Browse files
authored
[ErrorProne] Enable BadImport ErrorProne check and fix violations (#37760)
* Fix BadImport ErrorProne violations across multiple modules
* spotless
1 parent b203f53 commit 6c42cc1

25 files changed

Lines changed: 363 additions & 310 deletions

File tree

buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1534,7 +1534,6 @@ class BeamModulePlugin implements Plugin<Project> {
15341534
"AutoValueImmutableFields",
15351535
"AutoValueImmutableFields",
15361536
"AutoValueSubclassLeaked",
1537-
"BadImport",
15381537
"BigDecimalEquals",
15391538
"ComparableType",
15401539
"DoNotMockAutoValue",

runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,6 @@
134134
import org.apache.beam.runners.dataflow.worker.windmill.Windmill.LatencyAttribution;
135135
import org.apache.beam.runners.dataflow.worker.windmill.Windmill.LatencyAttribution.State;
136136
import org.apache.beam.runners.dataflow.worker.windmill.Windmill.Timer;
137-
import org.apache.beam.runners.dataflow.worker.windmill.Windmill.Timer.Type;
138137
import org.apache.beam.runners.dataflow.worker.windmill.Windmill.WatermarkHold;
139138
import org.apache.beam.runners.dataflow.worker.windmill.Windmill.WorkItemCommitRequest;
140139
import org.apache.beam.runners.dataflow.worker.windmill.Windmill.WorkerMetadataResponse;
@@ -1644,7 +1643,7 @@ private Timer buildWatermarkTimer(String tagPrefix, long timestampMillis, boolea
16441643
Timer.Builder builder =
16451644
Timer.newBuilder()
16461645
.setTag(ByteString.copyFromUtf8(tagPrefix + ":" + timestampMillis))
1647-
.setType(Type.WATERMARK)
1646+
.setType(Timer.Type.WATERMARK)
16481647
.setStateFamily("MergeWindows");
16491648
if (!delete) {
16501649
builder.setTimestamp(timestampMillis * 1000);

runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactoryTest.java

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,6 @@
5050
import org.apache.beam.runners.fnexecution.artifact.ArtifactRetrievalService;
5151
import org.apache.beam.runners.fnexecution.data.GrpcDataService;
5252
import org.apache.beam.runners.fnexecution.environment.EnvironmentFactory;
53-
import org.apache.beam.runners.fnexecution.environment.EnvironmentFactory.Provider;
5453
import org.apache.beam.runners.fnexecution.environment.RemoteEnvironment;
5554
import org.apache.beam.runners.fnexecution.logging.GrpcLoggingService;
5655
import org.apache.beam.runners.fnexecution.provisioning.JobInfo;
@@ -190,7 +189,7 @@ public void createsMultipleEnvironmentOfSingleType() throws Exception {
190189
.thenReturn(envFactoryB);
191190
when(environmentProviderFactoryB.getServerFactory()).thenReturn(serverFactory);
192191

193-
Map<String, Provider> environmentFactoryProviderMap =
192+
Map<String, EnvironmentFactory.Provider> environmentFactoryProviderMap =
194193
ImmutableMap.of(
195194
environmentA.getUrn(), environmentProviderFactoryA,
196195
environmentB.getUrn(), environmentProviderFactoryB);
@@ -231,7 +230,7 @@ public void createsMultipleEnvironmentsWithSdkWorkerParallelism() throws Excepti
231230
.thenReturn(envFactoryA);
232231
when(environmentProviderFactoryA.getServerFactory()).thenReturn(serverFactory);
233232

234-
Map<String, Provider> environmentFactoryProviderMap =
233+
Map<String, EnvironmentFactory.Provider> environmentFactoryProviderMap =
235234
ImmutableMap.of(environmentA.getUrn(), environmentProviderFactoryA);
236235

237236
PortablePipelineOptions portableOptions =
@@ -309,7 +308,7 @@ public void creatingMultipleEnvironmentFromMultipleTypes() throws Exception {
309308
.thenReturn(envFactoryB);
310309
when(environmentProviderFactoryB.getServerFactory()).thenReturn(serverFactory);
311310

312-
Map<String, Provider> environmentFactoryProviderMap =
311+
Map<String, EnvironmentFactory.Provider> environmentFactoryProviderMap =
313312
ImmutableMap.of(
314313
environmentA.getUrn(), environmentProviderFactoryA,
315314
environmentB.getUrn(), environmentProviderFactoryB);
@@ -338,7 +337,7 @@ public void expiresEnvironment() throws Exception {
338337
.thenReturn(envFactoryA);
339338
when(environmentProviderFactoryA.getServerFactory()).thenReturn(serverFactory);
340339

341-
Map<String, Provider> environmentFactoryProviderMap =
340+
Map<String, EnvironmentFactory.Provider> environmentFactoryProviderMap =
342341
ImmutableMap.of(environmentA.getUrn(), environmentProviderFactoryA);
343342

344343
PortablePipelineOptions portableOptions =

runners/java-job-service/src/main/java/org/apache/beam/runners/jobsubmission/JobInvocation.java

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929
import org.apache.beam.model.jobmanagement.v1.JobApi;
3030
import org.apache.beam.model.jobmanagement.v1.JobApi.JobMessage;
3131
import org.apache.beam.model.jobmanagement.v1.JobApi.JobState;
32-
import org.apache.beam.model.jobmanagement.v1.JobApi.JobState.Enum;
3332
import org.apache.beam.model.jobmanagement.v1.JobApi.JobStateEvent;
3433
import org.apache.beam.model.pipeline.v1.RunnerApi;
3534
import org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline;
@@ -112,16 +111,16 @@ public void onSuccess(PortablePipelineResult pipelineResult) {
112111

113112
switch (state) {
114113
case DONE:
115-
setState(Enum.DONE);
114+
setState(JobState.Enum.DONE);
116115
break;
117116
case RUNNING:
118-
setState(Enum.RUNNING);
117+
setState(JobState.Enum.RUNNING);
119118
break;
120119
case CANCELLED:
121-
setState(Enum.CANCELLED);
120+
setState(JobState.Enum.CANCELLED);
122121
break;
123122
case FAILED:
124-
setState(Enum.FAILED);
123+
setState(JobState.Enum.FAILED);
125124
break;
126125
default:
127126
setState(JobState.Enum.UNSPECIFIED);
@@ -257,7 +256,7 @@ private synchronized void sendMessage(JobMessage message) {
257256
}
258257
}
259258

260-
public static Boolean isTerminated(Enum state) {
259+
public static Boolean isTerminated(JobState.Enum state) {
261260
switch (state) {
262261
case DONE:
263262
case FAILED:

runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919

2020
import static org.apache.beam.runners.spark.structuredstreaming.translation.helpers.EncoderFactory.invoke;
2121
import static org.apache.beam.runners.spark.structuredstreaming.translation.helpers.EncoderFactory.invokeIfNotNull;
22-
import static org.apache.beam.runners.spark.structuredstreaming.translation.helpers.EncoderFactory.newInstance;
2322
import static org.apache.beam.runners.spark.structuredstreaming.translation.utils.ScalaInterop.match;
2423
import static org.apache.beam.runners.spark.structuredstreaming.translation.utils.ScalaInterop.replace;
2524
import static org.apache.beam.runners.spark.structuredstreaming.translation.utils.ScalaInterop.seqOf;
@@ -416,7 +415,8 @@ private static <T> Expression serializeOneOfField(
416415
private static <T> Expression deserializeOneOfField(Expression in, Encoder<T> enc, int idx) {
417416
GetStructField field = new GetStructField(in, idx, Option.empty());
418417
Expression litNull = lit(null, TUPLE2_TYPE);
419-
Expression newTuple = newInstance(Tuple2.class, TUPLE2_TYPE, lit(idx), deserialize(field, enc));
418+
Expression newTuple =
419+
EncoderFactory.newInstance(Tuple2.class, TUPLE2_TYPE, lit(idx), deserialize(field, enc));
420420
return new If(new IsNull(field), litNull, newTuple);
421421
}
422422

runners/spark/src/main/java/org/apache/beam/runners/spark/util/CachedSideInputReader.java

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,6 @@
1919

2020
import java.util.concurrent.ExecutionException;
2121
import org.apache.beam.runners.core.SideInputReader;
22-
import org.apache.beam.runners.spark.util.SideInputStorage.Key;
23-
import org.apache.beam.runners.spark.util.SideInputStorage.Value;
2422
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
2523
import org.apache.beam.sdk.values.PCollectionView;
2624
import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.cache.Cache;
@@ -57,13 +55,13 @@ private CachedSideInputReader(SideInputReader delegate) {
5755
@Override
5856
public <T> @Nullable T get(PCollectionView<T> view, BoundedWindow window) {
5957
@SuppressWarnings("unchecked")
60-
final Cache<Key<T>, Value<T>> materializedCasted =
58+
final Cache<SideInputStorage.Key<T>, SideInputStorage.Value<T>> materializedCasted =
6159
(Cache) SideInputStorage.getMaterializedSideInputs();
6260

63-
Key<T> sideInputKey = new Key<>(view, window);
61+
SideInputStorage.Key<T> sideInputKey = new SideInputStorage.Key<>(view, window);
6462

6563
try {
66-
Value<T> cachedResult =
64+
SideInputStorage.Value<T> cachedResult =
6765
materializedCasted.get(
6866
sideInputKey,
6967
() -> {
@@ -73,7 +71,7 @@ private CachedSideInputReader(SideInputReader delegate) {
7371
sideInputKey,
7472
SizeEstimator.estimate(result));
7573

76-
return new Value<>(result);
74+
return new SideInputStorage.Value<>(result);
7775
});
7876
return cachedResult.getValue();
7977
} catch (ExecutionException e) {

sdks/java/core/src/main/java/org/apache/beam/sdk/fn/channel/AddHarnessIdInterceptor.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,12 +21,12 @@
2121

2222
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.ClientInterceptor;
2323
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.Metadata;
24-
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.Metadata.Key;
2524
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.stub.MetadataUtils;
2625

2726
/** A {@link ClientInterceptor} that attaches a provided SDK Harness ID to outgoing messages. */
2827
public class AddHarnessIdInterceptor {
29-
private static final Key<String> ID_KEY = Key.of("worker_id", Metadata.ASCII_STRING_MARSHALLER);
28+
private static final Metadata.Key<String> ID_KEY =
29+
Metadata.Key.of("worker_id", Metadata.ASCII_STRING_MARSHALLER);
3030

3131
public static ClientInterceptor create(String harnessId) {
3232
checkArgument(harnessId != null, "harnessId must not be null");

sdks/java/core/src/main/java/org/apache/beam/sdk/fn/server/GrpcContextHeaderAccessorProvider.java

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.Context;
2121
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.Contexts;
2222
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.Metadata;
23-
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.Metadata.Key;
2423
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.ServerCall;
2524
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.ServerCall.Listener;
2625
import org.apache.beam.vendor.grpc.v1p69p0.io.grpc.ServerCallHandler;
@@ -36,8 +35,8 @@
3635
})
3736
public class GrpcContextHeaderAccessorProvider {
3837

39-
private static final Key<String> WORKER_ID_KEY =
40-
Key.of("worker_id", Metadata.ASCII_STRING_MARSHALLER);
38+
private static final Metadata.Key<String> WORKER_ID_KEY =
39+
Metadata.Key.of("worker_id", Metadata.ASCII_STRING_MARSHALLER);
4140
private static final Context.Key<String> SDK_WORKER_CONTEXT_KEY = Context.key("worker_id");
4241
private static final GrpcHeaderAccessor HEADER_ACCESSOR = new GrpcHeaderAccessor();
4342

sdks/java/extensions/avro/src/main/java/org/apache/beam/sdk/extensions/avro/schemas/utils/AvroUtils.java

Lines changed: 36 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,6 @@
5656
import org.apache.avro.Conversions;
5757
import org.apache.avro.LogicalType;
5858
import org.apache.avro.LogicalTypes;
59-
import org.apache.avro.Schema.Type;
6059
import org.apache.avro.generic.GenericData;
6160
import org.apache.avro.generic.GenericFixed;
6261
import org.apache.avro.generic.GenericRecord;
@@ -238,15 +237,17 @@ public static TypeWithNullability create(org.apache.avro.Schema avroSchema) {
238237
}
239238

240239
TypeWithNullability(org.apache.avro.Schema avroSchema) {
241-
if (avroSchema.getType() == Type.UNION) {
240+
if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) {
242241
List<org.apache.avro.Schema> types = avroSchema.getTypes();
243242

244243
// optional fields in AVRO have form of:
245244
// {"name": "foo", "type": ["null", "something"]}
246245

247246
// don't need recursion because nested unions aren't supported in AVRO
248247
List<org.apache.avro.Schema> nonNullTypes =
249-
types.stream().filter(x -> x.getType() != Type.NULL).collect(Collectors.toList());
248+
types.stream()
249+
.filter(x -> x.getType() != org.apache.avro.Schema.Type.NULL)
250+
.collect(Collectors.toList());
250251

251252
if (nonNullTypes.size() == types.size() || nonNullTypes.isEmpty()) {
252253
// union without `null` or all 'null' union, keep as is.
@@ -303,7 +304,7 @@ && checkNotNull(fieldType.getLogicalType())
303304

304305
/** Create a {@link FixedBytesField} from an AVRO type. */
305306
public static @Nullable FixedBytesField fromAvroType(org.apache.avro.Schema type) {
306-
if (type.getType().equals(Type.FIXED)) {
307+
if (type.getType().equals(org.apache.avro.Schema.Type.FIXED)) {
307308
return new FixedBytesField(type.getFixedSize());
308309
} else {
309310
return null;
@@ -672,7 +673,9 @@ public static <T> SerializableFunction<Row, T> getFromRowFunction(Class<T> clazz
672673
public static @Nullable <T> Schema getSchema(
673674
Class<T> clazz, org.apache.avro.@Nullable Schema schema) {
674675
if (schema != null) {
675-
return schema.getType().equals(Type.RECORD) ? toBeamSchema(schema) : null;
676+
return schema.getType().equals(org.apache.avro.Schema.Type.RECORD)
677+
? toBeamSchema(schema)
678+
: null;
676679
}
677680
if (GenericRecord.class.equals(clazz)) {
678681
throw new IllegalArgumentException("No schema provided for getSchema(GenericRecord)");
@@ -1118,44 +1121,45 @@ private static org.apache.avro.Schema getFieldSchema(
11181121
case BYTE:
11191122
case INT16:
11201123
case INT32:
1121-
baseType = org.apache.avro.Schema.create(Type.INT);
1124+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT);
11221125
break;
11231126

11241127
case INT64:
1125-
baseType = org.apache.avro.Schema.create(Type.LONG);
1128+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.LONG);
11261129
break;
11271130

11281131
case DECIMAL:
11291132
baseType =
11301133
LogicalTypes.decimal(Integer.MAX_VALUE)
1131-
.addToSchema(org.apache.avro.Schema.create(Type.BYTES));
1134+
.addToSchema(org.apache.avro.Schema.create(org.apache.avro.Schema.Type.BYTES));
11321135
break;
11331136

11341137
case FLOAT:
1135-
baseType = org.apache.avro.Schema.create(Type.FLOAT);
1138+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.FLOAT);
11361139
break;
11371140

11381141
case DOUBLE:
1139-
baseType = org.apache.avro.Schema.create(Type.DOUBLE);
1142+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.DOUBLE);
11401143
break;
11411144

11421145
case STRING:
1143-
baseType = org.apache.avro.Schema.create(Type.STRING);
1146+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.STRING);
11441147
break;
11451148

11461149
case DATETIME:
11471150
// TODO: There is a desire to move Beam schema DATETIME to a micros representation. When
11481151
// this is done, this logical type needs to be changed.
11491152
baseType =
1150-
LogicalTypes.timestampMillis().addToSchema(org.apache.avro.Schema.create(Type.LONG));
1153+
LogicalTypes.timestampMillis()
1154+
.addToSchema(org.apache.avro.Schema.create(org.apache.avro.Schema.Type.LONG));
11511155
break;
11521156

11531157
case BOOLEAN:
1154-
baseType = org.apache.avro.Schema.create(Type.BOOLEAN);
1158+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.BOOLEAN);
11551159
break;
11561160

11571161
case BYTES:
1158-
baseType = org.apache.avro.Schema.create(Type.BYTES);
1162+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.BYTES);
11591163
break;
11601164

11611165
case LOGICAL_TYPE:
@@ -1167,7 +1171,7 @@ private static org.apache.avro.Schema getFieldSchema(
11671171
baseType = fixedBytesField.toAvroType("fixed", namespace + "." + fieldName);
11681172
} else if (VariableBytes.IDENTIFIER.equals(identifier)) {
11691173
// treat VARBINARY as bytes as that is what avro supports
1170-
baseType = org.apache.avro.Schema.create(Type.BYTES);
1174+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.BYTES);
11711175
} else if (FixedString.IDENTIFIER.equals(identifier)
11721176
|| "CHAR".equals(identifier)
11731177
|| "NCHAR".equals(identifier)) {
@@ -1190,19 +1194,24 @@ private static org.apache.avro.Schema getFieldSchema(
11901194
.map(x -> getFieldSchema(x.getType(), x.getName(), namespace))
11911195
.collect(Collectors.toList()));
11921196
} else if ("DATE".equals(identifier) || SqlTypes.DATE.getIdentifier().equals(identifier)) {
1193-
baseType = LogicalTypes.date().addToSchema(org.apache.avro.Schema.create(Type.INT));
1197+
baseType =
1198+
LogicalTypes.date()
1199+
.addToSchema(org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
11941200
} else if ("TIME".equals(identifier)) {
1195-
baseType = LogicalTypes.timeMillis().addToSchema(org.apache.avro.Schema.create(Type.INT));
1201+
baseType =
1202+
LogicalTypes.timeMillis()
1203+
.addToSchema(org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
11961204
} else if (SqlTypes.TIMESTAMP.getIdentifier().equals(identifier)) {
11971205
baseType =
1198-
LogicalTypes.timestampMicros().addToSchema(org.apache.avro.Schema.create(Type.LONG));
1206+
LogicalTypes.timestampMicros()
1207+
.addToSchema(org.apache.avro.Schema.create(org.apache.avro.Schema.Type.LONG));
11991208
} else if (Timestamp.IDENTIFIER.equals(identifier)) {
12001209
int precision = checkNotNull(logicalType.getArgument());
12011210
if (precision != 9) {
12021211
throw new RuntimeException(
12031212
"Timestamp logical type precision not supported:" + precision);
12041213
}
1205-
baseType = org.apache.avro.Schema.create(Type.LONG);
1214+
baseType = org.apache.avro.Schema.create(org.apache.avro.Schema.Type.LONG);
12061215
baseType.addProp("logicalType", TIMESTAMP_NANOS_LOGICAL_TYPE);
12071216
} else {
12081217
throw new RuntimeException(
@@ -1242,10 +1251,11 @@ private static org.apache.avro.Schema getFieldSchema(
12421251
private static final Map<org.apache.avro.Schema, Function<Number, ? extends Number>>
12431252
NUMERIC_CONVERTERS =
12441253
ImmutableMap.of(
1245-
org.apache.avro.Schema.create(Type.INT), Number::intValue,
1246-
org.apache.avro.Schema.create(Type.LONG), Number::longValue,
1247-
org.apache.avro.Schema.create(Type.FLOAT), Number::floatValue,
1248-
org.apache.avro.Schema.create(Type.DOUBLE), Number::doubleValue);
1254+
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT), Number::intValue,
1255+
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.LONG), Number::longValue,
1256+
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.FLOAT), Number::floatValue,
1257+
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.DOUBLE),
1258+
Number::doubleValue);
12491259

12501260
/** Convert a value from Beam Row to a value used for Avro GenericRecord. */
12511261
private static @Nullable Object genericFromBeamField(
@@ -1290,10 +1300,10 @@ private static org.apache.avro.Schema getFieldSchema(
12901300
return result;
12911301

12921302
case DATETIME:
1293-
if (typeWithNullability.type.getType() == Type.INT) {
1303+
if (typeWithNullability.type.getType() == org.apache.avro.Schema.Type.INT) {
12941304
ReadableInstant instant = (ReadableInstant) value;
12951305
return (int) Days.daysBetween(Instant.EPOCH, instant).getDays();
1296-
} else if (typeWithNullability.type.getType() == Type.LONG) {
1306+
} else if (typeWithNullability.type.getType() == org.apache.avro.Schema.Type.LONG) {
12971307
ReadableInstant instant = (ReadableInstant) value;
12981308
return (long) instant.getMillis();
12991309
} else {
@@ -1418,7 +1428,7 @@ private static Object convertLogicalType(
14181428

14191429
// TODO: Remove this workaround once Avro is upgraded to 1.12+ where timestamp-nanos
14201430
if (TIMESTAMP_NANOS_LOGICAL_TYPE.equals(type.type.getProp("logicalType"))) {
1421-
if (type.type.getType() == Type.LONG) {
1431+
if (type.type.getType() == org.apache.avro.Schema.Type.LONG) {
14221432
Long nanos = (Long) value;
14231433
// Check if Beam expects Timestamp logical type
14241434
if (fieldType.getTypeName() == TypeName.LOGICAL_TYPE

0 commit comments

Comments
 (0)