Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enabling ErrorProne and NullAway with initial fixes #1248

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,7 @@ private <T> List<StructureField<T>> elementToFields(
// we don't reset the stack, then we should handle null returns.
T child = transform(visitor, element, structureDefinition, new ArrayDeque<>());
Verify.verify(
child != null, "Unexpected null choice type {} for element {}", typeCode, element);
child != null, "Unexpected null choice type %s for element %s", typeCode, element);
choiceTypes.put(typeCode, child);
}
}
Expand Down
15 changes: 15 additions & 0 deletions doc/developers.md
Original file line number Diff line number Diff line change
Expand Up @@ -130,3 +130,18 @@ To regenerate this jar file:

NOTE: Parquet tools will be replaced with parquet cli in the next release
`apache-parquet-1.12.0`

## Error Prone and NullAway
We use [Error Prone](https://errorprone.info/index) and
[NullAway](https://github.com/uber/NullAway) to catch common issues at build
time, in particular `NullPointerException`s. These are enabled in
[pom.xml](../pipelines/pom.xml) according to
[installation instructions](https://errorprone.info/docs/installation#maven).
An unfortunate side effect of this is that we need to manually add every
annotation processor that we care about, as discussed
[here](https://errorprone.info/docs/installation#using-error-prone-together-with-other-annotation-processors).

Currently, these errors are only reported as WARNINGs and do not block the
build, but please review any new warnings introduced by your changes and fix
them, as we are gradually working to address all of them. You can also enable
Error Prone in your IDE; see
[here](https://errorprone.info/docs/installation#intellij-idea).
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,9 @@
import com.google.fhir.analytics.model.BulkExportResponse;
import java.io.IOException;
import java.util.List;
import javax.annotation.Nullable;
import org.apache.commons.collections.CollectionUtils;
import org.apache.http.HttpStatus;
import org.jspecify.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
import java.io.IOException;
import java.sql.SQLException;
import java.util.Set;
import javax.annotation.Nullable;
import javax.sql.DataSource;
import org.apache.beam.sdk.metrics.Counter;
import org.apache.beam.sdk.metrics.Metrics;
Expand All @@ -37,6 +36,7 @@
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.Resource;
import org.jspecify.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -96,13 +96,13 @@ abstract class FetchSearchPageFn<T> extends DoFn<T, KV<String, Integer>> {

private final int recursiveDepth;

protected final DataSourceConfig sinkDbConfig;
@Nullable protected final DataSourceConfig sinkDbConfig;

protected final String viewDefinitionsDir;

private final int maxPoolSize;

@VisibleForTesting protected ParquetUtil parquetUtil;
@VisibleForTesting @Nullable protected ParquetUtil parquetUtil;

protected FetchUtil fetchUtil;

Expand Down Expand Up @@ -137,6 +137,7 @@ abstract class FetchSearchPageFn<T> extends DoFn<T, KV<String, Integer>> {
this.parquetFile = options.getOutputParquetPath();
this.secondsToFlush = options.getSecondsToFlushParquetFiles();
this.rowGroupSize = options.getRowGroupSizeForParquetFiles();
// TODO enable the caching feature for all runners.
if (DATAFLOW_RUNNER.equals(options.getRunner().getSimpleName())) {
this.cacheBundle = options.getCacheBundleForParquetWrites();
} else {
Expand Down Expand Up @@ -250,14 +251,11 @@ public void setup() throws SQLException, ProfileException {
public void finishBundle(FinishBundleContext context) {
try {
if (DATAFLOW_RUNNER.equals(context.getPipelineOptions().getRunner().getSimpleName())) {
if (cacheBundle) {
parquetUtil.emptyCache();
}
if (parquetUtil != null) {
parquetUtil.flushAll();
parquetUtil.flushAllWriters();
}
}
} catch (IOException | ProfileException e) {
} catch (IOException e) {
// There is not much that we can do at finishBundle so just throw a RuntimeException
log.error("At finishBundle caught exception ", e);
throw new IllegalStateException(e);
Expand All @@ -271,7 +269,7 @@ public void teardown() throws IOException {
// DataflowRunner and that's why we have the finishBundle method above:
// https://beam.apache.org/releases/javadoc/current/org/apache/beam/sdk/transforms/DoFn.Teardown.html
if (parquetUtil != null) {
parquetUtil.closeAllWriters();
parquetUtil.flushAllWriters();
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright 2020-2023 Google LLC
* Copyright 2020-2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand All @@ -18,10 +18,10 @@
import com.google.auto.value.AutoValue;
import java.io.Serializable;
import java.util.List;
import javax.annotation.Nullable;
import lombok.Data;
import org.apache.beam.sdk.coders.DefaultCoder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.jspecify.annotations.Nullable;

@DefaultCoder(SerializableCoder.class)
@AutoValue
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.beam.sdk.Pipeline;
Expand All @@ -56,6 +55,7 @@
import org.apache.beam.sdk.values.TupleTag;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.hl7.fhir.r4.model.codesystems.ActionType;
import org.jspecify.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright 2020-2023 Google LLC
* Copyright 2020-2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand All @@ -15,11 +15,11 @@
*/
package com.google.fhir.analytics.metrics;

import javax.annotation.Nullable;
import org.apache.beam.runners.flink.FlinkRunner;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.execution.JobListener;
import org.jspecify.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright 2020-2023 Google LLC
* Copyright 2020-2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand All @@ -15,8 +15,8 @@
*/
package com.google.fhir.analytics.metrics;

import javax.annotation.Nullable;
import org.apache.beam.sdk.PipelineRunner;
import org.jspecify.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -41,8 +41,7 @@ public static PipelineMetrics getPipelineMetrics(Class<? extends PipelineRunner>
return flinkPipelineMetrics;
} else {
logger.warn(
"Metrics is not supported for the pipeline runner {}",
pipelineRunner.getClass().getSimpleName());
"Metrics is not supported for the pipeline runner {}", pipelineRunner.getSimpleName());
return null;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,13 @@
import java.nio.channels.ReadableByteChannel;
import java.sql.SQLException;
import java.util.Set;
import javax.annotation.Nullable;
import org.apache.beam.sdk.io.FileIO;
import org.apache.beam.sdk.io.FileSystems;
import org.apache.beam.sdk.io.fs.ResourceId;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Observation;
import org.jspecify.annotations.Nullable;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
Expand Down
Loading
Loading