Skip to content

Commit 1d7b9e4

Browse files
committed
Fixes various warnings.
1 parent 00f0fdb commit 1d7b9e4

7 files changed

Lines changed: 47 additions & 56 deletions

File tree

contrib/storage-ipfs/src/main/java/org/apache/drill/exec/store/ipfs/IPFSGroupScan.java

Lines changed: 16 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@
2828
import io.ipfs.api.MerkleNode;
2929
import io.ipfs.multihash.Multihash;
3030
import org.apache.drill.common.PlanStringBuilder;
31-
import org.apache.drill.common.exceptions.ExecutionSetupException;
3231
import org.apache.drill.common.expression.SchemaPath;
3332
import org.apache.drill.common.util.DrillVersionInfo;
3433
import org.apache.drill.exec.coord.ClusterCoordinator;
@@ -70,12 +69,12 @@
7069
@JsonTypeName("ipfs-scan")
7170
public class IPFSGroupScan extends AbstractGroupScan {
7271
private static final Logger logger = LoggerFactory.getLogger(IPFSGroupScan.class);
73-
private IPFSContext ipfsContext;
74-
private IPFSScanSpec ipfsScanSpec;
75-
private IPFSStoragePluginConfig config;
72+
private final IPFSContext ipfsContext;
73+
private final IPFSScanSpec ipfsScanSpec;
74+
private final IPFSStoragePluginConfig config;
7675
private List<SchemaPath> columns;
7776

78-
private static long DEFAULT_NODE_SIZE = 1000l;
77+
private static final long DEFAULT_NODE_SIZE = 1000L;
7978

8079
private ListMultimap<Integer, IPFSWork> assignments;
8180
private List<IPFSWork> ipfsWorkList = Lists.newArrayList();
@@ -86,9 +85,9 @@ public class IPFSGroupScan extends AbstractGroupScan {
8685
public IPFSGroupScan(@JsonProperty("IPFSScanSpec") IPFSScanSpec ipfsScanSpec,
8786
@JsonProperty("IPFSStoragePluginConfig") IPFSStoragePluginConfig ipfsStoragePluginConfig,
8887
@JsonProperty("columns") List<SchemaPath> columns,
89-
@JacksonInject StoragePluginRegistry pluginRegistry) throws IOException, ExecutionSetupException {
88+
@JacksonInject StoragePluginRegistry pluginRegistry) {
9089
this(
91-
((IPFSStoragePlugin) pluginRegistry.getPlugin(ipfsStoragePluginConfig)).getIPFSContext(),
90+
pluginRegistry.resolve(ipfsStoragePluginConfig, IPFSStoragePlugin.class).getIPFSContext(),
9291
ipfsScanSpec,
9392
columns
9493
);
@@ -248,13 +247,13 @@ public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) {
248247
public IPFSSubScan getSpecificScan(int minorFragmentId) {
249248
logger.debug(String.format("getSpecificScan: minorFragmentId = %d", minorFragmentId));
250249
List<IPFSWork> workList = assignments.get(minorFragmentId);
251-
logger.debug("workList == null: " + (workList == null? "true": "false"));
252-
logger.debug(String.format("workList.size(): %d", workList.size()));
253-
254250
List<Multihash> scanSpecList = Lists.newArrayList();
251+
if (workList != null) {
252+
logger.debug("workList.size(): {}", workList.size());
255253

256-
for (IPFSWork work : workList) {
257-
scanSpecList.add(work.getPartialRootHash());
254+
for (IPFSWork work : workList) {
255+
scanSpecList.add(work.getPartialRootHash());
256+
}
258257
}
259258

260259
return new IPFSSubScan(ipfsContext, scanSpecList, ipfsScanSpec.getFormatExtension(), columns);
@@ -301,9 +300,9 @@ public String toString() {
301300
.toString();
302301
}
303302

304-
private class IPFSWork implements CompleteWork {
305-
private EndpointByteMapImpl byteMap = new EndpointByteMapImpl();
306-
private Multihash partialRoot;
303+
private static class IPFSWork implements CompleteWork {
304+
private final EndpointByteMapImpl byteMap = new EndpointByteMapImpl();
305+
private final Multihash partialRoot;
307306
private DrillbitEndpoint onEndpoint = null;
308307

309308

@@ -378,7 +377,7 @@ public Map<Multihash, String> compute() {
378377
try {
379378
if (isProvider) {
380379
IPFSPeer peer = peerCache.getUnchecked(hash);
381-
ret.put(hash, peer.hasDrillbitAddress() ? peer.getDrillbitAddress().get() : null);
380+
ret.put(hash, peer.getDrillbitAddress().orElse(null));
382381
return ret;
383382
}
384383

@@ -403,9 +402,7 @@ public Map<Multihash, String> compute() {
403402
} else {
404403
logger.debug("{} is a simple node", hash);
405404
List<IPFSPeer> providers = helper.findprovsTimeout(hash).stream()
406-
.map(id ->
407-
peerCache.getUnchecked(id)
408-
)
405+
.map(peerCache::getUnchecked)
409406
.collect(Collectors.toList());
410407
providers = providers.stream()
411408
.filter(IPFSPeer::isDrillReady)

contrib/storage-ipfs/src/main/java/org/apache/drill/exec/store/ipfs/IPFSHelper.java

Lines changed: 9 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -103,8 +103,7 @@ public List<Multihash> findprovsTimeout(Multihash id) {
103103
List<String> providers;
104104
providers = clientCompat.dht.findprovsListTimeout(id, maxPeersPerLeaf, timeouts.get(IPFSTimeOut.FIND_PROV), executorService);
105105

106-
List<Multihash> ret = providers.stream().map(str -> Multihash.fromBase58(str)).collect(Collectors.toList());
107-
return ret;
106+
return providers.stream().map(Multihash::fromBase58).collect(Collectors.toList());
108107
}
109108

110109
public List<MultiAddress> findpeerTimeout(Multihash peerId) {
@@ -116,12 +115,9 @@ public List<MultiAddress> findpeerTimeout(Multihash peerId) {
116115

117116
List<String> addrs;
118117
addrs = clientCompat.dht.findpeerListTimeout(peerId, timeouts.get(IPFSTimeOut.FIND_PEER_INFO), executorService);
119-
List<MultiAddress>
120-
ret = addrs
121-
.stream()
118+
return addrs.stream()
122119
.filter(addr -> !addr.equals(""))
123-
.map(str -> new MultiAddress(str)).collect(Collectors.toList());
124-
return ret;
120+
.map(MultiAddress::new).collect(Collectors.toList());
125121
}
126122

127123
public byte[] getObjectDataTimeout(Multihash object) throws IOException {
@@ -144,7 +140,7 @@ public IPFSPeer getMyself() throws IOException {
144140
// But is it safe to assume IPFS always listens on loopback and local addresses?
145141
List<MultiAddress> myAddrs = ((List<String>) res.get("Addresses"))
146142
.stream()
147-
.map(addr -> new MultiAddress(addr))
143+
.map(MultiAddress::new)
148144
.filter(addr -> {
149145
try {
150146
InetAddress inetAddress = InetAddress.getByName(addr.getHost());
@@ -192,11 +188,11 @@ public interface ThrowingSupplier<R, E extends Exception> {
192188
* @param op a Function that represents the operation to perform
193189
* @param in the parameter for op
194190
* @param timeout consider the execution has timed out after this amount of time in seconds
195-
* @param <T>
196-
* @param <R>
197-
* @param <E>
191+
* @param <T> Input type
192+
* @param <R> Return type
193+
* @param <E> Type of checked exception op throws
198194
* @return R the result of the operation
199-
* @throws E
195+
* @throws E when the function throws an E
200196
*/
201197
public <T, R, E extends Exception> R timedFailure(ThrowingFunction<T, R, E> op, T in, int timeout) throws E {
202198
Callable<R> task = () -> op.apply(in);
@@ -242,8 +238,7 @@ public static Optional<String> pickPeerHost(List<MultiAddress> peerAddrs) {
242238
} else {
243239
return Optional.of(host);
244240
}
245-
} catch (UnknownHostException e) {
246-
continue;
241+
} catch (UnknownHostException ignored) {
247242
}
248243
}
249244

contrib/storage-ipfs/src/main/java/org/apache/drill/exec/store/ipfs/IPFSScanSpec.java

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@
2727
import com.fasterxml.jackson.annotation.JsonTypeName;
2828
import io.ipfs.multihash.Multihash;
2929
import org.apache.drill.common.PlanStringBuilder;
30-
import org.apache.drill.common.exceptions.ExecutionSetupException;
3130
import org.apache.drill.common.exceptions.UserException;
3231
import org.apache.drill.exec.store.StoragePluginRegistry;
3332
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableSet;
@@ -51,7 +50,7 @@ public enum Prefix {
5150
IPNS("ipns");
5251

5352
@JsonProperty("prefix")
54-
private String name;
53+
private final String name;
5554
Prefix(String prefix) {
5655
this.name = prefix;
5756
}
@@ -81,7 +80,7 @@ public enum Format {
8180
CSV("csv");
8281

8382
@JsonProperty("format")
84-
private String name;
83+
private final String name;
8584
Format(String prefix) {
8685
this.name = prefix;
8786
}
@@ -108,15 +107,15 @@ public static Format of(String what) {
108107
private Prefix prefix;
109108
private String path;
110109
private Format formatExtension;
111-
private IPFSContext ipfsContext;
110+
private final IPFSContext ipfsContext;
112111

113112
@JsonCreator
114113
public IPFSScanSpec (@JacksonInject StoragePluginRegistry registry,
115114
@JsonProperty("IPFSStoragePluginConfig") IPFSStoragePluginConfig ipfsStoragePluginConfig,
116115
@JsonProperty("prefix") Prefix prefix,
117116
@JsonProperty("format") Format format,
118-
@JsonProperty("path") String path) throws ExecutionSetupException {
119-
this.ipfsContext = ((IPFSStoragePlugin) registry.getPlugin(ipfsStoragePluginConfig)).getIPFSContext();
117+
@JsonProperty("path") String path) {
118+
this.ipfsContext = registry.resolve(ipfsStoragePluginConfig, IPFSStoragePlugin.class).getIPFSContext();
120119
this.prefix = prefix;
121120
this.formatExtension = format;
122121
this.path = path;

contrib/storage-ipfs/src/main/java/org/apache/drill/exec/store/ipfs/IPFSSchemaFactory.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,10 +55,10 @@ public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) {
5555
}
5656

5757
class IPFSTables extends AbstractSchema {
58-
private Set<String> tableNames = Sets.newHashSet();
58+
private final Set<String> tableNames = Sets.newHashSet();
5959
private final ConcurrentMap<String, Table> tables = new ConcurrentSkipListMap<>(String::compareToIgnoreCase);
6060
public IPFSTables (String name) {
61-
super(ImmutableList.<String>of(), name);
61+
super(ImmutableList.of(), name);
6262
tableNames.add(name);
6363
}
6464

contrib/storage-ipfs/src/main/java/org/apache/drill/exec/store/ipfs/IPFSStoragePlugin.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ public IPFSGroupScan getPhysicalScan(String userName, JSONOptions selection, Lis
7272
}
7373

7474
@Override
75-
public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) throws IOException {
75+
public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) {
7676
schemaFactory.registerSchemas(schemaConfig, parent);
7777
}
7878

contrib/storage-ipfs/src/main/java/org/apache/drill/exec/store/ipfs/IPFSSubScan.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@
5252

5353
@JsonTypeName("ipfs-sub-scan")
5454
public class IPFSSubScan extends AbstractBase implements SubScan {
55-
private static int IPFS_SUB_SCAN_VALUE = 19155;
55+
private static final int IPFS_SUB_SCAN_VALUE = 19155;
5656
private final IPFSContext ipfsContext;
5757
private final List<Multihash> ipfsSubScanSpecList;
5858
private final IPFSScanSpec.Format format;
@@ -67,7 +67,7 @@ public IPFSSubScan(@JacksonInject StoragePluginRegistry registry,
6767
@JsonProperty("columns") List<SchemaPath> columns
6868
) throws ExecutionSetupException {
6969
super((String) null);
70-
IPFSStoragePlugin plugin = (IPFSStoragePlugin) registry.getPlugin(ipfsStoragePluginConfig);
70+
IPFSStoragePlugin plugin = registry.resolve(ipfsStoragePluginConfig, IPFSStoragePlugin.class);
7171
ipfsContext = plugin.getIPFSContext();
7272
this.ipfsSubScanSpecList = ipfsSubScanSpecList;
7373
this.format = format;

contrib/storage-ipfs/src/test/java/org/apache/drill/exec/store/ipfs/IPFSTestConstants.java

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -10,18 +10,18 @@
1010
import java.util.stream.Collectors;
1111

1212
public interface IPFSTestConstants {
13-
static String MOCK_NODE_ID_STRING = "QmP14kRKf1mR6LAYgfuuMirscgZNYbzMCHQ1ebe4bBKdah";
14-
static Multihash MOCK_NODE_ID_MULTIHASH = Multihash.fromBase58(MOCK_NODE_ID_STRING);
15-
static String MOCK_NODE_ADDR = "127.0.0.1";
16-
static int MOCK_NODE_IPFS_SWARM_PORT = 4001;
17-
static int MOCK_NODE_IPFS_API_PORT = 5001;
18-
static List<String> MOCK_NODE_ADDRS = ImmutableList.of(
13+
String MOCK_NODE_ID_STRING = "QmP14kRKf1mR6LAYgfuuMirscgZNYbzMCHQ1ebe4bBKdah";
14+
Multihash MOCK_NODE_ID_MULTIHASH = Multihash.fromBase58(MOCK_NODE_ID_STRING);
15+
String MOCK_NODE_ADDR = "127.0.0.1";
16+
int MOCK_NODE_IPFS_SWARM_PORT = 4001;
17+
int MOCK_NODE_IPFS_API_PORT = 5001;
18+
List<String> MOCK_NODE_ADDRS = ImmutableList.of(
1919
String.format("/ip4/%s/tcp/%d/ipfs/%s", MOCK_NODE_ADDR, MOCK_NODE_IPFS_SWARM_PORT, MOCK_NODE_ID_STRING)
2020
);
21-
static List<MultiAddress> MOCK_NODE_MULTIADDRS = MOCK_NODE_ADDRS.stream().map(MultiAddress::new).collect(Collectors.toList());
21+
List<MultiAddress> MOCK_NODE_MULTIADDRS = MOCK_NODE_ADDRS.stream().map(MultiAddress::new).collect(Collectors.toList());
2222

23-
static String SIMPLE_DATASET_HASH_STRING = "QmcbeavnEofA6NjG7vkpe1yLJo6En6ML4JnDooDn1BbKmR";
24-
static Multihash SIMPLE_DATASET_MULTIHASH = Multihash.fromBase58(SIMPLE_DATASET_HASH_STRING);
23+
String SIMPLE_DATASET_HASH_STRING = "QmcbeavnEofA6NjG7vkpe1yLJo6En6ML4JnDooDn1BbKmR";
24+
Multihash SIMPLE_DATASET_MULTIHASH = Multihash.fromBase58(SIMPLE_DATASET_HASH_STRING);
2525

2626
/**
2727
* Chunked dataset layout:
@@ -30,9 +30,9 @@ public interface IPFSTestConstants {
3030
* +-- 2 QmQVBWTZ7MZjwHv5q9qG3zLzczsh8PGAVRWhF2gKsrj1hP chunked-json-2.json (159 bytes)
3131
* +-- 3 QmY8ghdB3mwdUAdBmft3bdgzPVcq8bCvtqTRd9wu3LjyTd chunked-json-3.json (89 bytes)
3232
*/
33-
static String CHUNKED_DATASET_HASH_STRING = "QmSeX1YAGWMXoPrgeKBTq2Be6NdRzTVESeeWyt7mQFuvzo";
34-
static Multihash CHUNKED_DATASET_MULTIHASH = Multihash.fromBase58(CHUNKED_DATASET_HASH_STRING);
35-
static Map<String, Multihash> CHUNKS_MULTIHASH = ImmutableMap.of(
33+
String CHUNKED_DATASET_HASH_STRING = "QmSeX1YAGWMXoPrgeKBTq2Be6NdRzTVESeeWyt7mQFuvzo";
34+
Multihash CHUNKED_DATASET_MULTIHASH = Multihash.fromBase58(CHUNKED_DATASET_HASH_STRING);
35+
Map<String, Multihash> CHUNKS_MULTIHASH = ImmutableMap.of(
3636
"chunked-json-1.json", Multihash.fromBase58("QmSmDFd1GcLPyYtscdtkBCj7gbNKiJ8MkaBPEFMz9orPEi"),
3737
"chunked-json-2.json", Multihash.fromBase58("QmQVBWTZ7MZjwHv5q9qG3zLzczsh8PGAVRWhF2gKsrj1hP"),
3838
"chunked-json-3.json", Multihash.fromBase58("QmY8ghdB3mwdUAdBmft3bdgzPVcq8bCvtqTRd9wu3LjyTd")

0 commit comments

Comments (0)