Skip to content

Commit 5a9b98f

Browse files
committed
S3UTILS-222: run fix script inside vault container with aws-sdk v2
Switch from @aws-sdk/client-iam v3 to aws-sdk v2 so the fix script can run inside the vault container of older S3C versions (pre-9.5.2) where only v2 is available. Update README workflow to copy and execute the fix script inside the vault container instead of running it externally.
1 parent bae6257 commit 5a9b98f

4 files changed

Lines changed: 73 additions & 41 deletions

File tree

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
},
2727
"homepage": "https://github.com/scality/s3utils#readme",
2828
"dependencies": {
29-
"@aws-sdk/client-iam": "^3.962.0",
3029
"@aws-sdk/client-s3": "^3.873.0",
3130
"@aws-sdk/node-http-handler": "^3.374.0",
3231
"@scality/cloudserverclient": "^1.0.4",
@@ -55,6 +54,8 @@
5554
"string-width": "4.2.3"
5655
},
5756
"devDependencies": {
57+
"@aws-sdk/client-iam": "^3.962.0",
58+
"aws-sdk": "^2.1692.0",
5859
"@scality/eslint-config-scality": "scality/Guidelines#8.3.0",
5960
"@sinonjs/fake-timers": "^14.0.0",
6061
"eslint": "^9.14.0",

replicationAudit/README.md

Lines changed: 31 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,15 @@
11
# TL;DR Complete Workflow Example
22

3-
Here's a complete example running the two scripts and audit the IAM policies used by CRR:
3+
Here's a complete example running the scripts and auditing the IAM policies used by CRR.
4+
5+
Use the fix-missing-replication-permissions.js script (step 6) to correct any missing permissions found by the check script. If the fix script fails with "missing ownerDisplayName", re-run check-replication-permissions.js — this field was added in s3utils 1.17.5.
46

57
From your local machine: copy scripts to the supervisor
68

79
```bash
810
scp replicationAudit/list-buckets-with-replication.sh root@<supervisor-ip>:/root/
911
scp replicationAudit/check-replication-permissions.js root@<supervisor-ip>:/root/
12+
scp replicationAudit/fix-missing-replication-permissions.js root@<supervisor-ip>:/root/
1013
```
1114

1215
Connect to the supervisor
@@ -26,6 +29,8 @@ ansible -i env/$ENV_DIR/inventory runners_s3[0] -m copy \
2629
-a 'src=/root/list-buckets-with-replication.sh dest=/root/'
2730
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m copy \
2831
-a 'src=/root/check-replication-permissions.js dest={{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs'
32+
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m copy \
33+
-a 'src=/root/fix-missing-replication-permissions.js dest={{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs'
2934

3035
# Step 2: Run list-buckets-with-replication.sh
3136
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m shell \
@@ -50,19 +55,34 @@ ansible -i env/$ENV_DIR/inventory runners_s3[0] -m shell \
5055
-a 'cat {{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/missing.json' \
5156
| grep -v CHANGED | tee /root/replicationAudit_missing.json
5257

53-
# Step 6: Clean up remote files
58+
# Step 6: Fix missing permissions
59+
# Copy admin credentials and missing.json to vault container and run inside it
60+
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m copy \
61+
-a "src=/srv/scality/s3/s3-offline/federation/env/$ENV_DIR/vault/admin-clientprofile/admin1.json dest={{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs"
62+
63+
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m copy \
64+
-a 'src=/root/replicationAudit_missing.json dest={{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/missing.json'
65+
66+
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m shell \
67+
-a 'ctrctl exec scality-vault{{ container_name_suffix | default("")}} env NODE_PATH=/home/scality/vault/node_modules node /logs/fix-missing-replication-permissions.js \
68+
/logs/missing.json localhost /logs/admin1.json /logs/replication-fix-results.json'
69+
70+
# Retrieve fix results
71+
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m shell \
72+
-a 'cat {{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/replication-fix-results.json' \
73+
| grep -v CHANGED | tee /root/replicationAudit_fix_results.json
74+
75+
# Step 7: Re-run check to verify fixes (repeat steps 3-5)
76+
77+
# Step 8: Clean up remote files
5478
ansible -i env/$ENV_DIR/inventory runners_s3[0] -m shell \
5579
-a 'rm -f {{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/missing.json \
5680
{{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/check-replication-permissions.js \
81+
{{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/fix-missing-replication-permissions.js \
5782
{{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/buckets-with-replication.json \
83+
{{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/replication-fix-results.json \
84+
{{ env_host_logs}}/scality-vault{{ container_name_suffix | default("")}}/logs/admin1.json \
5885
/root/list-buckets-with-replication.sh'
59-
60-
# Step 7 (optional): Fix missing permissions
61-
# Run from your local machine (requires vaultclient and @aws-sdk/client-iam)
62-
node replicationAudit/fix-missing-replication-permissions.js \
63-
/root/replicationAudit_missing.json <supervisor-ip> admin1.json
64-
65-
# Step 8: Re-run check to verify fixes (repeat steps 3-5)
6686
```
6787

6888
# Scripts Documentation
@@ -497,8 +517,7 @@ if the policy already exists, its document is guaranteed to be identical.
497517
```
498518
/srv/scality/s3/s3-offline/federation/env/<ENV_DIR>/vault/admin-clientprofile/admin1.json
499519
```
500-
- Network access to Vault admin/IAM API (port 8600) from the machine running the script
501-
- `vaultclient` and `@aws-sdk/client-iam` installed (both in s3utils dependencies)
520+
- The script runs inside the vault container (`scality-vault`), which has `vaultclient` and `aws-sdk` pre-installed
502521

503522
### Usage
504523

@@ -509,7 +528,7 @@ node fix-missing-replication-permissions.js <input-file> <vault-host> <admin-con
509528
| Argument | Default | Description |
510529
|----------|---------|-------------|
511530
| `input-file` | (required) | Path to missing.json from check script |
512-
| `vault-host` | (required) | Vault admin host (e.g., 13.50.166.21) |
531+
| `vault-host` | (required) | Vault admin host (use `localhost` when running inside the vault container) |
513532
| `admin-config` | (required) | Path to admin credentials JSON |
514533
| `output-file` | replication-fix-results.json | Output file path |
515534
| `--iam-port <port>` | 8600 | Vault admin and IAM API port |

replicationAudit/fix-missing-replication-permissions.js

Lines changed: 24 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -19,21 +19,20 @@
1919
*
2020
* Usage: node fix-missing-replication-permissions.js <input-file> <vault-host> <admin-config> [output-file] [--iam-port <port>] [--https] [--dry-run]
2121
*
22-
* Requires: vaultclient, @aws-sdk/client-iam (both in s3utils dependencies)
22+
* Requires: vaultclient, aws-sdk (both available in vault container)
23+
*
24+
* Note: This script uses aws-sdk v2 (not @aws-sdk/client-iam v3) because it
25+
* is meant to be copied into the vault container of older S3C versions
26+
* (before S3C 9.5.2 / Vault 7.84.0) where aws-sdk v2 is available but
27+
* @aws-sdk/client-iam v3 is not.
2328
*/
2429

2530
const fs = require('fs');
2631
const http = require('http');
2732
const https = require('https');
2833
const { parseArgs } = require('util');
2934
const { Client: VaultClient } = require('vaultclient');
30-
const {
31-
IAMClient,
32-
CreatePolicyCommand,
33-
AttachRolePolicyCommand,
34-
DeleteAccessKeyCommand,
35-
} = require('@aws-sdk/client-iam');
36-
const { NodeHttpHandler } = require('@aws-sdk/node-http-handler');
35+
const AWS = require('aws-sdk');
3736

3837
// ===========================================================================
3938
// Constants
@@ -121,19 +120,17 @@ function generateAccountAccessKeyAsync(client, accountName, options) {
121120
/** Create an IAM client for a given account */
122121
function createIAMClient(config, accessKeyId, secretKey) {
123122
const protocol = config.useHttps ? 'https' : 'http';
124-
return new IAMClient({
125-
region: 'us-east-1',
123+
return new AWS.IAM({
126124
endpoint: `${protocol}://${config.vaultHost}:${config.iamPort}`,
127-
credentials: { accessKeyId, secretAccessKey: secretKey },
128-
requestHandler: new NodeHttpHandler({
129-
httpAgent: new http.Agent({ keepAlive: true }),
130-
// TBD: rejectUnauthorized: false disables certificate validation.
131-
// Consider accepting a CA cert path via CLI option instead.
132-
httpsAgent: new https.Agent({
133-
keepAlive: true,
134-
rejectUnauthorized: false,
135-
}),
136-
}),
125+
region: 'us-east-1',
126+
accessKeyId,
127+
secretAccessKey: secretKey,
128+
sslEnabled: config.useHttps,
129+
httpOptions: {
130+
agent: config.useHttps
131+
? new https.Agent({ keepAlive: true, rejectUnauthorized: false })
132+
: new http.Agent({ keepAlive: true }),
133+
},
137134
});
138135
}
139136

@@ -271,16 +268,15 @@ async function main() {
271268
// a no-op if the policy is already attached to the role.
272269
let policyArn;
273270
try {
274-
const resp = await iamClient.send(new CreatePolicyCommand({
271+
const resp = await iamClient.createPolicy({
275272
PolicyName: policyName,
276273
PolicyDocument: JSON.stringify(policyDocument),
277-
}));
274+
}).promise();
278275
policyArn = resp.Policy.Arn;
279276
outcome.metadata.counts.policiesCreated++;
280277
log(` Created policy "${policyName}"`);
281278
} catch (err) {
282-
if (err.name === 'EntityAlreadyExistsException'
283-
|| err.Code === 'EntityAlreadyExists') {
279+
if (err.code === 'EntityAlreadyExists') {
284280
policyArn = `arn:aws:iam::${accountId}:policy/${policyName}`;
285281
log(` Policy "${policyName}" already exists, skipping`);
286282
} else {
@@ -290,10 +286,10 @@ async function main() {
290286

291287
fix.policyArn = policyArn;
292288

293-
await iamClient.send(new AttachRolePolicyCommand({
289+
await iamClient.attachRolePolicy({
294290
RoleName: roleName,
295291
PolicyArn: policyArn,
296-
}));
292+
}).promise();
297293
outcome.metadata.counts.policiesAttached++;
298294
log(` Attached policy to role "${roleName}"`);
299295

@@ -320,9 +316,9 @@ async function main() {
320316
// Cleanup: delete all temporary keys via IAM DeleteAccessKey
321317
for (const [accountId, { accountName, accessKeyId, iamClient }] of accountCache) {
322318
try {
323-
await iamClient.send(new DeleteAccessKeyCommand({
319+
await iamClient.deleteAccessKey({
324320
AccessKeyId: accessKeyId,
325-
}));
321+
}).promise();
326322
outcome.metadata.counts.keysDeleted++;
327323
log(`Deleted temp key for account "${accountName}" (${accountId})`);
328324
} catch (err) {

yarn.lock

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3554,6 +3554,22 @@ aws-sdk@^2.1691.0:
35543554
uuid "8.0.0"
35553555
xml2js "0.6.2"
35563556

3557+
aws-sdk@^2.1692.0:
3558+
version "2.1693.0"
3559+
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1693.0.tgz#fda38671af3dc5fa8117e9aa09cf6ce37e34010e"
3560+
integrity sha512-cJmb8xEnVLT+R6fBS5sn/EFJiX7tUnDaPtOPZ1vFbOJtd0fnZn/Ky2XGgsvvoeliWeH7mL3TWSX5zXXGSQV6gQ==
3561+
dependencies:
3562+
buffer "4.9.2"
3563+
events "1.1.1"
3564+
ieee754 "1.1.13"
3565+
jmespath "0.16.0"
3566+
querystring "0.2.0"
3567+
sax "1.2.1"
3568+
url "0.10.3"
3569+
util "^0.12.4"
3570+
uuid "8.0.0"
3571+
xml2js "0.6.2"
3572+
35573573
b4a@^1.6.4:
35583574
version "1.7.3"
35593575
resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.7.3.tgz#24cf7ccda28f5465b66aec2bac69e32809bf112f"

0 commit comments

Comments
 (0)