Skip to content

Commit efcb778

Browse files
authored
HDDS-14157. Change SCM ha and pipeline code to throw more specific exceptions (#9482)
1 parent 897c610 commit efcb778

16 files changed

Lines changed: 119 additions & 148 deletions

File tree

hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/DBTransactionBuffer.java

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,18 +17,20 @@
1717

1818
package org.apache.hadoop.hdds.scm.metadata;
1919

20-
import java.io.Closeable;
21-
import java.io.IOException;
20+
import org.apache.hadoop.hdds.utils.db.CodecException;
21+
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
2222
import org.apache.hadoop.hdds.utils.db.Table;
2323

2424
/**
2525
* DB transaction that abstracts the updates to the underlying datastore.
2626
*/
27-
public interface DBTransactionBuffer extends Closeable {
27+
public interface DBTransactionBuffer {
2828

2929
<KEY, VALUE> void addToBuffer(Table<KEY, VALUE> table, KEY key, VALUE value)
30-
throws IOException;
30+
throws RocksDatabaseException, CodecException;
3131

3232
<KEY, VALUE> void removeFromBuffer(Table<KEY, VALUE> table, KEY key)
33-
throws IOException;
33+
throws RocksDatabaseException, CodecException;
34+
35+
void close() throws RocksDatabaseException;
3436
}

hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMDBTransactionBufferImpl.java

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,8 @@
1717

1818
package org.apache.hadoop.hdds.scm.metadata;
1919

20-
import java.io.IOException;
20+
import org.apache.hadoop.hdds.utils.db.CodecException;
21+
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
2122
import org.apache.hadoop.hdds.utils.db.Table;
2223

2324
/**
@@ -30,14 +31,14 @@ public SCMDBTransactionBufferImpl() {
3031
}
3132

3233
@Override
33-
public <KEY, VALUE> void addToBuffer(
34-
Table<KEY, VALUE> table, KEY key, VALUE value) throws IOException {
34+
public <KEY, VALUE> void addToBuffer(Table<KEY, VALUE> table, KEY key, VALUE value)
35+
throws RocksDatabaseException, CodecException {
3536
table.put(key, value);
3637
}
3738

3839
@Override
3940
public <KEY, VALUE>void removeFromBuffer(Table<KEY, VALUE> table, KEY key)
40-
throws IOException {
41+
throws RocksDatabaseException, CodecException {
4142
table.delete(key);
4243
}
4344

hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/BatchOperationHandler.java

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,6 @@
1717

1818
package org.apache.hadoop.hdds.utils.db;
1919

20-
import java.io.IOException;
21-
2220
/**
2321
* Create and commit batch operation for one DB.
2422
*/
@@ -37,7 +35,6 @@ public interface BatchOperationHandler {
3735
* Commit the batch operations.
3836
*
3937
* @param operation which contains all the required batch operation.
40-
* @throws IOException on Failure.
4138
*/
42-
void commitBatchOperation(BatchOperation operation) throws IOException;
39+
void commitBatchOperation(BatchOperation operation) throws RocksDatabaseException;
4340
}

hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMHADBTransactionBuffer.java

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,10 +17,11 @@
1717

1818
package org.apache.hadoop.hdds.scm.ha;
1919

20-
import java.io.IOException;
2120
import java.util.concurrent.atomic.AtomicReference;
2221
import org.apache.hadoop.hdds.scm.metadata.DBTransactionBuffer;
2322
import org.apache.hadoop.hdds.utils.TransactionInfo;
23+
import org.apache.hadoop.hdds.utils.db.CodecException;
24+
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
2425
import org.apache.ratis.statemachine.SnapshotInfo;
2526

2627
/**
@@ -41,9 +42,9 @@ public interface SCMHADBTransactionBuffer
4142

4243
AtomicReference<SnapshotInfo> getLatestSnapshotRef();
4344

44-
void flush() throws IOException;
45-
45+
void flush() throws RocksDatabaseException, CodecException;
46+
4647
boolean shouldFlush(long snapshotWaitTime);
4748

48-
void init() throws IOException;
49+
void init() throws RocksDatabaseException, CodecException;
4950
}

hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMHADBTransactionBufferImpl.java

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
import static org.apache.hadoop.ozone.OzoneConsts.TRANSACTION_INFO_KEY;
2121

2222
import com.google.common.base.Preconditions;
23-
import java.io.IOException;
2423
import java.util.concurrent.atomic.AtomicLong;
2524
import java.util.concurrent.atomic.AtomicReference;
2625
import java.util.concurrent.locks.ReentrantReadWriteLock;
@@ -31,6 +30,8 @@
3130
import org.apache.hadoop.hdds.utils.IOUtils;
3231
import org.apache.hadoop.hdds.utils.TransactionInfo;
3332
import org.apache.hadoop.hdds.utils.db.BatchOperation;
33+
import org.apache.hadoop.hdds.utils.db.CodecException;
34+
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
3435
import org.apache.hadoop.hdds.utils.db.Table;
3536
import org.apache.ratis.statemachine.SnapshotInfo;
3637
import org.slf4j.Logger;
@@ -55,8 +56,7 @@ public class SCMHADBTransactionBufferImpl implements SCMHADBTransactionBuffer {
5556
private long lastSnapshotTimeMs = 0;
5657
private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
5758

58-
public SCMHADBTransactionBufferImpl(StorageContainerManager scm)
59-
throws IOException {
59+
public SCMHADBTransactionBufferImpl(StorageContainerManager scm) throws RocksDatabaseException, CodecException {
6060
this.scm = scm;
6161
init();
6262
}
@@ -66,8 +66,8 @@ private BatchOperation getCurrentBatchOperation() {
6666
}
6767

6868
@Override
69-
public <KEY, VALUE> void addToBuffer(
70-
Table<KEY, VALUE> table, KEY key, VALUE value) throws IOException {
69+
public <KEY, VALUE> void addToBuffer(Table<KEY, VALUE> table, KEY key, VALUE value)
70+
throws RocksDatabaseException, CodecException {
7171
rwLock.readLock().lock();
7272
try {
7373
txFlushPending.getAndIncrement();
@@ -78,8 +78,7 @@ public <KEY, VALUE> void addToBuffer(
7878
}
7979

8080
@Override
81-
public <KEY, VALUE> void removeFromBuffer(Table<KEY, VALUE> table, KEY key)
82-
throws IOException {
81+
public <KEY, VALUE> void removeFromBuffer(Table<KEY, VALUE> table, KEY key) throws CodecException {
8382
rwLock.readLock().lock();
8483
try {
8584
txFlushPending.getAndIncrement();
@@ -122,7 +121,7 @@ public AtomicReference<SnapshotInfo> getLatestSnapshotRef() {
122121
}
123122

124123
@Override
125-
public void flush() throws IOException {
124+
public void flush() throws RocksDatabaseException, CodecException {
126125
rwLock.writeLock().lock();
127126
try {
128127
// write latest trx info into trx table in the same batch
@@ -150,7 +149,7 @@ public void flush() throws IOException {
150149
}
151150

152151
@Override
153-
public void init() throws IOException {
152+
public void init() throws RocksDatabaseException, CodecException {
154153
metadataStore = scm.getScmMetadataStore();
155154

156155
rwLock.writeLock().lock();
@@ -191,7 +190,7 @@ public String toString() {
191190
}
192191

193192
@Override
194-
public void close() throws IOException {
193+
public void close() {
195194
if (currentBatchOperation != null) {
196195
currentBatchOperation.close();
197196
}

hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMHADBTransactionBufferStub.java

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,14 @@
1717

1818
package org.apache.hadoop.hdds.scm.ha;
1919

20-
import java.io.IOException;
2120
import java.util.concurrent.atomic.AtomicReference;
2221
import java.util.concurrent.locks.ReentrantReadWriteLock;
2322
import org.apache.hadoop.hdds.utils.TransactionInfo;
2423
import org.apache.hadoop.hdds.utils.db.BatchOperation;
24+
import org.apache.hadoop.hdds.utils.db.CodecException;
2525
import org.apache.hadoop.hdds.utils.db.DBStore;
2626
import org.apache.hadoop.hdds.utils.db.RDBBatchOperation;
27+
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
2728
import org.apache.hadoop.hdds.utils.db.Table;
2829
import org.apache.ratis.statemachine.SnapshotInfo;
2930

@@ -55,8 +56,8 @@ private BatchOperation getCurrentBatchOperation() {
5556
}
5657

5758
@Override
58-
public <KEY, VALUE> void addToBuffer(
59-
Table<KEY, VALUE> table, KEY key, VALUE value) throws IOException {
59+
public <KEY, VALUE> void addToBuffer(Table<KEY, VALUE> table, KEY key, VALUE value)
60+
throws RocksDatabaseException, CodecException {
6061
rwLock.readLock().lock();
6162
try {
6263
table.putWithBatch(getCurrentBatchOperation(), key, value);
@@ -66,8 +67,7 @@ public <KEY, VALUE> void addToBuffer(
6667
}
6768

6869
@Override
69-
public <KEY, VALUE> void removeFromBuffer(Table<KEY, VALUE> table, KEY key)
70-
throws IOException {
70+
public <KEY, VALUE> void removeFromBuffer(Table<KEY, VALUE> table, KEY key) throws CodecException {
7171
rwLock.readLock().lock();
7272
try {
7373
table.deleteWithBatch(getCurrentBatchOperation(), key);
@@ -102,7 +102,7 @@ public AtomicReference<SnapshotInfo> getLatestSnapshotRef() {
102102
}
103103

104104
@Override
105-
public void flush() throws IOException {
105+
public void flush() throws RocksDatabaseException {
106106
rwLock.writeLock().lock();
107107
try {
108108
if (dbStore != null) {
@@ -123,12 +123,11 @@ public boolean shouldFlush(long snapshotWaitTime) {
123123
}
124124

125125
@Override
126-
public void init() throws IOException {
127-
126+
public void init() {
128127
}
129128

130129
@Override
131-
public void close() throws IOException {
130+
public void close() throws RocksDatabaseException {
132131
flush();
133132
}
134133
}

hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMHAManagerImpl.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -394,7 +394,7 @@ public void stop() throws IOException {
394394
*/
395395
@Override
396396
public void close() {
397-
IOUtils.close(LOG, transactionBuffer);
397+
IOUtils.close(LOG, transactionBuffer::close);
398398
}
399399

400400
@Override

hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMHAManagerStub.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ public void stop() throws IOException {
9696

9797
@Override
9898
public void close() {
99-
IOUtils.closeQuietly(transactionBuffer);
99+
IOUtils.closeQuietly(transactionBuffer::close);
100100
}
101101

102102
/**
@@ -141,7 +141,7 @@ public boolean addSCM(AddSCMRequest request) throws IOException {
141141
}
142142

143143
@Override
144-
public boolean removeSCM(RemoveSCMRequest request) throws IOException {
144+
public boolean removeSCM(RemoveSCMRequest request) {
145145
return false;
146146
}
147147

@@ -273,12 +273,12 @@ public SCMStateMachine getSCMStateMachine() {
273273
}
274274

275275
@Override
276-
public boolean addSCM(AddSCMRequest request) throws IOException {
276+
public boolean addSCM(AddSCMRequest request) {
277277
return false;
278278
}
279279

280280
@Override
281-
public boolean removeSCM(RemoveSCMRequest request) throws IOException {
281+
public boolean removeSCM(RemoveSCMRequest request) {
282282
return false;
283283
}
284284

hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineManager.java

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@
2727
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
2828
import org.apache.hadoop.hdds.scm.container.ContainerID;
2929
import org.apache.hadoop.hdds.scm.container.ContainerReplica;
30+
import org.apache.hadoop.hdds.utils.db.CodecException;
31+
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
3032
import org.apache.hadoop.hdds.utils.db.Table;
3133

3234
/**
@@ -90,7 +92,7 @@ int getPipelineCount(
9092
);
9193

9294
void addContainerToPipeline(PipelineID pipelineID, ContainerID containerID)
93-
throws IOException;
95+
throws PipelineNotFoundException, InvalidPipelineStateException;
9496

9597
/**
9698
* Add container to pipeline during SCM Start.
@@ -99,16 +101,13 @@ void addContainerToPipeline(PipelineID pipelineID, ContainerID containerID)
99101
* @param containerID ID of the container which is added to the pipeline.
100102
* @throws IOException in case of any Exception
101103
*/
102-
void addContainerToPipelineSCMStart(PipelineID pipelineID,
103-
ContainerID containerID) throws IOException;
104+
void addContainerToPipelineSCMStart(PipelineID pipelineID, ContainerID containerID) throws PipelineNotFoundException;
104105

105-
void removeContainerFromPipeline(PipelineID pipelineID,
106-
ContainerID containerID) throws IOException;
106+
void removeContainerFromPipeline(PipelineID pipelineID, ContainerID containerID);
107107

108-
NavigableSet<ContainerID> getContainersInPipeline(PipelineID pipelineID)
109-
throws IOException;
108+
NavigableSet<ContainerID> getContainersInPipeline(PipelineID pipelineID) throws PipelineNotFoundException;
110109

111-
int getNumberOfContainers(PipelineID pipelineID) throws IOException;
110+
int getNumberOfContainers(PipelineID pipelineID) throws PipelineNotFoundException;
112111

113112
void openPipeline(PipelineID pipelineId) throws IOException;
114113

@@ -183,7 +182,7 @@ default Pipeline waitOnePipelineReady(Collection<PipelineID> pipelineIDs,
183182
* during SCM reload.
184183
*/
185184
void reinitialize(Table<PipelineID, Pipeline> pipelineStore)
186-
throws IOException;
185+
throws RocksDatabaseException, DuplicatedPipelineIdException, CodecException;
187186

188187
/**
189188
* Ask pipeline manager to not create any new pipelines.

hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/PipelineManagerImpl.java

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,8 @@
5858
import org.apache.hadoop.hdds.scm.node.NodeManager;
5959
import org.apache.hadoop.hdds.scm.server.upgrade.FinalizationManager;
6060
import org.apache.hadoop.hdds.server.events.EventPublisher;
61+
import org.apache.hadoop.hdds.utils.db.CodecException;
62+
import org.apache.hadoop.hdds.utils.db.RocksDatabaseException;
6163
import org.apache.hadoop.hdds.utils.db.Table;
6264
import org.apache.hadoop.metrics2.util.MBeans;
6365
import org.apache.hadoop.ozone.ClientVersion;
@@ -385,34 +387,32 @@ public int getPipelineCount(ReplicationConfig config,
385387
}
386388

387389
@Override
388-
public void addContainerToPipeline(
389-
PipelineID pipelineID, ContainerID containerID) throws IOException {
390+
public void addContainerToPipeline(PipelineID pipelineID, ContainerID containerID)
391+
throws PipelineNotFoundException, InvalidPipelineStateException {
390392
// should not lock here, since no ratis operation happens.
391393
stateManager.addContainerToPipeline(pipelineID, containerID);
392394
}
393395

394396
@Override
395-
public void addContainerToPipelineSCMStart(
396-
PipelineID pipelineID, ContainerID containerID) throws IOException {
397+
public void addContainerToPipelineSCMStart(PipelineID pipelineID, ContainerID containerID)
398+
throws PipelineNotFoundException {
397399
// should not lock here, since no ratis operation happens.
398400
stateManager.addContainerToPipelineForce(pipelineID, containerID);
399401
}
400402

401403
@Override
402-
public void removeContainerFromPipeline(
403-
PipelineID pipelineID, ContainerID containerID) throws IOException {
404+
public void removeContainerFromPipeline(PipelineID pipelineID, ContainerID containerID) {
404405
// should not lock here, since no ratis operation happens.
405406
stateManager.removeContainerFromPipeline(pipelineID, containerID);
406407
}
407408

408409
@Override
409-
public NavigableSet<ContainerID> getContainersInPipeline(
410-
PipelineID pipelineID) throws IOException {
410+
public NavigableSet<ContainerID> getContainersInPipeline(PipelineID pipelineID) throws PipelineNotFoundException {
411411
return stateManager.getContainers(pipelineID);
412412
}
413413

414414
@Override
415-
public int getNumberOfContainers(PipelineID pipelineID) throws IOException {
415+
public int getNumberOfContainers(PipelineID pipelineID) throws PipelineNotFoundException {
416416
return stateManager.getNumberOfContainers(pipelineID);
417417
}
418418

@@ -804,7 +804,7 @@ public boolean getSafeModeStatus() {
804804

805805
@Override
806806
public void reinitialize(Table<PipelineID, Pipeline> pipelineStore)
807-
throws IOException {
807+
throws RocksDatabaseException, DuplicatedPipelineIdException, CodecException {
808808
stateManager.reinitialize(pipelineStore);
809809
}
810810

0 commit comments

Comments (0)