Commit 906f6a7

Author: Christopher Gross (cogross)

    fix major issues from sonarqube

1 parent f66c239

File tree

72 files changed: +239 -76 lines changed
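Every hunk in this commit applies the same SonarQube fix ("InterruptedException" should not be ignored, rule java:S2142): catching an InterruptedException, or a broad Exception that may wrap one, clears the thread's interrupt flag, so code further up the stack can no longer tell that cancellation was requested. The remedy is to call Thread.currentThread().interrupt() before logging, wrapping, or swallowing the exception. A minimal before/after sketch of the pattern (class and method names here are illustrative, not from this commit):

// Illustrative sketch only -- not code from this commit.
public final class InterruptFlagSketch {

    // Before: the catch clears the interrupt flag and discards it.
    static void sleepQuietlyBroken() {
        try {
            Thread.sleep(500L);
        } catch (InterruptedException e) {
            throw new RuntimeException(e); // interrupt status is lost here
        }
    }

    // After: restore the flag first, then handle the exception as before.
    static void sleepQuietlyFixed() {
        try {
            Thread.sleep(500L);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // re-set the interrupt flag
            throw new RuntimeException(e);
        }
    }
}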

core/connection-pool/src/main/java/datawave/core/common/cache/SharedCacheCoordinator.java

Lines changed: 6 additions & 0 deletions
@@ -726,11 +726,17 @@ protected void reapEvictions() {
                         String recursiveDeletePath = ZKPaths.makePath(curatorClient.getNamespace(), path);
                         ZKUtil.deleteRecursive(curatorClient.getZookeeperClient().getZooKeeper(), recursiveDeletePath);
                     } catch (Exception e) {
+                        if (e instanceof InterruptedException) {
+                            Thread.currentThread().interrupt();
+                        }
                         log.trace("Problem deleting " + path + " (this may be ok): " + e.getMessage(), e);
                     }
                 }
             }
         } catch (Exception e) {
+            if (e instanceof InterruptedException) {
+                Thread.currentThread().interrupt();
+            }
             log.warn("Error cleaning up eviction notices: " + e.getMessage(), e);
         }
     }
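Because this method deliberately catches the broad Exception (cleanup failures here "may be ok"), the fix restores the flag through an instanceof check rather than a dedicated catch arm. A hedged sketch of that variant, with hypothetical names standing in for the ZooKeeper cleanup call:

import java.util.concurrent.Callable;

public final class BroadCatchSketch {
    // cleanupQuietly is a hypothetical stand-in; any checked exception may surface.
    static void cleanupQuietly(Callable<Void> task) {
        try {
            task.call();
        } catch (Exception e) {
            // The broad catch would otherwise swallow the interrupt signal;
            // re-set the flag so the owning thread can still shut down.
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            // then continue without rethrowing, as reapEvictions() does
        }
    }
}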

core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsFileOutputMapper.java

Lines changed: 3 additions & 0 deletions
@@ -118,6 +118,9 @@ protected void map(Key key, Value value, org.apache.hadoop.mapreduce.Mapper<Key,
             }
             context.write(key, val);
         } catch (Exception e) {
+            if (e instanceof InterruptedException) {
+                Thread.currentThread().interrupt();
+            }
             throw new RuntimeException("Unable to serialize response of class: " + response.getClass().getName(), e);
         }
         context.progress();

core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsTableOutputMapper.java

Lines changed: 3 additions & 0 deletions
@@ -89,6 +89,9 @@ protected void map(Key key, Value value, org.apache.hadoop.mapreduce.Mapper<Key,
             m.put(key.getColumnFamily(), key.getColumnQualifier(), new ColumnVisibility(key.getColumnVisibility()), key.getTimestamp(), val);
             context.write(this.tableName, m);
         } catch (Exception e) {
+            if (e instanceof InterruptedException) {
+                Thread.currentThread().interrupt();
+            }
             throw new RuntimeException("Unable to serialize response of class: " + response.getClass().getName(), e);
         }
         context.progress();

core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogic.java

Lines changed: 2 additions & 0 deletions
@@ -150,6 +150,7 @@ public void run() {
                     log.debug(Thread.currentThread().getName() + ": Got null result");
                 }
             } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
                 // if this was on purpose, then just log and the loop will naturally exit
                 if (interrupted) {
                     log.warn("QueryLogic thread interrupted", e);
@@ -478,6 +479,7 @@ public void close() {
             try {
                 holder.join();
             } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
                 log.error("Error joining query logic thread", e);
                 throw new RuntimeException("Error joining query logic thread", e);
            }
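The first hunk restores the flag even when the interrupt was expected: whether or not the interrupted field was set on purpose, re-setting the status keeps the cancellation visible to anything else running on the thread, and the loop still exits naturally. A small sketch of that decision flow, with assumed field and method names:

public final class CooperativeLoopSketch {
    private volatile boolean interrupted; // set intentionally by close(), as in the diff

    void runLoop() {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                Thread.sleep(100L); // stand-in for blocking result-queue work
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore first, always
                if (interrupted) {
                    return; // intentional shutdown: log and exit quietly
                }
                throw new RuntimeException("unexpected interrupt", e);
            }
        }
    }
}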

core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicResultsIterator.java

Lines changed: 1 addition & 0 deletions
@@ -68,6 +68,7 @@ public boolean hasNext() {
                 if (failure != null) {
                     Throwables.propagate(failure);
                 }
+                Thread.currentThread().interrupt();
                 throw new RuntimeException(e);
             }
         }

warehouse/assemble/webservice/src/main/java/datawave/webservice/mr/input/SecureEventSequenceFileInputFormat.java

Lines changed: 1 addition & 0 deletions
@@ -17,6 +17,7 @@ public RecordReader<K,RawRecordContainer> createRecordReader(InputSplit split, T
         try {
             reader.initialize(split, context);
         } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
             throw new IOException("Error initializing SecureEventSequenceFileRecordReader", e);
         }
         return reader;

warehouse/ingest-core/src/main/java/datawave/ingest/data/tokenize/TokenizationHelper.java

Lines changed: 3 additions & 0 deletions
@@ -1,6 +1,7 @@
 package datawave.ingest.data.tokenize;
 
 import java.io.IOException;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
@@ -25,6 +26,7 @@ public static class HeartBeatThread extends Thread {
 
     public static final long INTERVAL = 500; // half second resolution
     public static volatile int counter = 0;
+
     public static long lastRun;
 
     static {
@@ -41,6 +43,7 @@ public void run() {
             try {
                 Thread.sleep(INTERVAL);
             } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
                 throw new RuntimeException(e);
             }
 

warehouse/ingest-core/src/main/java/datawave/ingest/input/reader/AbstractEventRecordReader.java

Lines changed: 3 additions & 0 deletions
@@ -175,6 +175,9 @@ public RawRecordContainer getEvent() {
 
         try {
             event.setRawRecordNumber(getCurrentKey().get());
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new RuntimeException("Unable to get current key", e);
         } catch (Exception e) {
             throw new RuntimeException("Unable to get current key", e);
         }

warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/EventMapper.java

Lines changed: 8 additions & 1 deletion
@@ -253,6 +253,9 @@ public void setup(Context context) throws IOException, InterruptedException {
         try {
             contextWriter = contextWriterClass.getDeclaredConstructor().newInstance();
             contextWriter.setup(filterConf, filterConf.getBoolean(CONTEXT_WRITER_OUTPUT_TABLE_COUNTERS, false));
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new IOException("Failed to initialized " + contextWriterClass + " from property " + CONTEXT_WRITER_CLASS, e);
         } catch (Exception e) {
             throw new IOException("Failed to initialized " + contextWriterClass + " from property " + CONTEXT_WRITER_CLASS, e);
         }
@@ -480,7 +483,11 @@ public void map(K1 key, V1 value, Context context) throws IOException, Interrupt
                 NDC.push(origFiles.iterator().next());
                 reprocessedNDCPush = true;
             }
-
+        } catch (InterruptedException e) {
+            contextWriter.rollback();
+            Thread.currentThread().interrupt();
+            log.error("Failed to clean event from error table. Terminating map", e);
+            throw new IOException("Failed to clean event from error table, Terminating map", e);
         } catch (Exception e) {
             contextWriter.rollback();
             log.error("Failed to clean event from error table. Terminating map", e);

warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/facet/HashTableFunction.java

Lines changed: 4 additions & 1 deletion
@@ -107,7 +107,10 @@ public Collection<NormalizedContentInterface> apply(@Nullable Collection<Normali
 
         try {
             contextWriter.write(map, context);
-        } catch (IOException | InterruptedException e) {
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new RuntimeException(e);
+        } catch (IOException e) {
            throw new RuntimeException(e);
         }
 
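This last hunk splits a multi-catch: Thread.currentThread().interrupt() is only appropriate for the InterruptedException arm, so catch (IOException | InterruptedException e) becomes two separate handlers even though both rethrow identically. A minimal sketch, with a hypothetical write() standing in for contextWriter.write(map, context):

import java.io.IOException;

public final class SplitCatchSketch {
    // Hypothetical stand-in for contextWriter.write(map, context).
    static void write() throws IOException, InterruptedException {
        /* ... */
    }

    static void apply() {
        try {
            write();
        } catch (InterruptedException e) {
            // Only this arm should touch the interrupt flag; a shared
            // multi-catch would need an instanceof check instead, as in
            // SharedCacheCoordinator above.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}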
