Skip to content

Commit f921bf3

Browse files
committed
Automation Toolkit Release v2026.1.1
1 parent f914894 commit f921bf3

74 files changed

Lines changed: 3015 additions & 1879 deletions

File tree

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

cd3_automation_toolkit/Release-Notes

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,12 @@
1+
-------------------------------------
2+
CD3 Automation Toolkit Tag v2026.1.1
3+
May 8th, 2026
4+
-------------------------------------
5+
1. Support for export of resources from all child compartments of a parent without specifying them separately on the command line.
6+
2. Deprecated the Python script createTenancyConfig.py; use connectCloud.py instead.
7+
3. Fixed ordering issue for views for export of DNS resolvers.
8+
4. Minor internal bug fixes.
9+
110
-------------------------------------
211
CD3 Automation Toolkit Tag v2026.1.0
312
Mar 27th, 2026
Lines changed: 29 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -1,56 +1,40 @@
1-
import boto3
2-
from botocore.exceptions import ClientError, NoCredentialsError
1+
# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
2+
#
3+
# Common tools for ODB @AWS CD3 scripts.
4+
# awscommontools.py
5+
#
36

4-
def read_aws_auth_properties(filepath):
5-
6-
aws_access_key_id = None
7-
aws_secret_access_key = None
8-
9-
with open(filepath, "r") as f:
10-
11-
for line in f:
12-
line = line.strip()
13-
if line == "" or line.startswith("#") or line.startswith("["):
14-
continue
15-
16-
if line.startswith("aws_access_key_id"):
17-
aws_access_key_id = line.split("=", 1)[1].strip()
18-
19-
elif line.startswith("aws_secret_access_key"):
20-
aws_secret_access_key = line.split("=", 1)[1].strip()
21-
22-
23-
if not aws_access_key_id or not aws_secret_access_key:
24-
print("Missing AWS authentication parameters in properties file")
25-
exit(1)
26-
27-
return aws_access_key_id, aws_secret_access_key
7+
from typing import Dict, Optional
8+
from common.python.commonTools import *
289

2910

3011
class awsCommonTools():
    """Shared helpers for the ODB @AWS CD3 scripts: parsing tag columns from
    the CD3 excel and serialising tag mappings back to string form.

    NOTE(review): indentation was lost in the diff capture — `_flatten_tags`
    is assumed to live inside the class like `split_tag_values`; confirm
    against the committed file.
    """

    # Lower-cased CD3 sheet headers whose cell values hold key=value tag lists.
    tagColumns = {'common tags', 'common_tags', 'peering tags', 'peering_tags'}

    def split_tag_values(columnname, columnvalue, tempdict):
        """Parse a tag cell into {normalised_header: [[key, value], ...]}.

        Multiple tags are ';'-separated, each tag being 'key=value'. The
        incoming `tempdict` is not consulted; a fresh dict is returned.
        """
        cell = columnvalue.replace("\n", "")
        header = commonTools.check_column_headers(columnname)
        if ";" not in cell:
            # Single tag: split on "=", strip each piece, wrap the pair in a list.
            pair = [str(piece).strip() for piece in cell.split("=") if piece]
            return {header: [pair]}
        # Several tags: one [key, value] list per non-empty ';'-separated chunk
        # (pieces are intentionally left unstripped, matching prior behaviour).
        pairs = [chunk.split("=") for chunk in cell.split(";") if chunk]
        return {header: pairs}

    def _flatten_tags(tags: Optional[Dict[str, str]]) -> str:
        """Serialise a tag mapping back to 'k1=v1;k2=v2'; '' for empty/None."""
        if not tags:
            return ""
        try:
            # Entries with a None value are dropped rather than rendered.
            return ";".join(f"{key}={value}" for key, value in tags.items() if value is not None)
        except Exception:
            # Best-effort: malformed input yields an empty string, never raises.
            return ""
Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
#!/usr/bin/python3
2+
# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
3+
#
4+
# This script will produce a Terraform file that will be used to set up OCI Database
5+
# Exadata Infrastructure @AWS
6+
#
7+
# Author: Oracle Consulting
8+
#
9+
10+
import os
11+
import sys
12+
from jinja2 import Environment, FileSystemLoader
13+
from pathlib import Path
14+
sys.path.append(os.getcwd()+"/..")
15+
from common.python.commonTools import *
16+
import awscloud.python.awsCommonTools as awsCommonTools
17+
18+
19+
######
20+
# Required Inputs- CD3 excel file, prefix AND outdir
21+
######
22+
# Execution of the code begins here
23+
def create_terraform_exa_infra_aws(inputfile, outdir, prefix):
    """Render <prefix>_exa-infra-aws.auto.tfvars for Exadata Infrastructure
    @AWS from the 'EXA-Infra-AWS' sheet of the CD3 excel workbook.

    inputfile -- path to the CD3 excel workbook
    outdir    -- directory the .auto.tfvars file is written into
    prefix    -- tenancy/customer prefix used in the output filename
    """

    filename = inputfile
    sheetName = "EXA-Infra-AWS"
    auto_tfvars_filename = prefix + '_' + sheetName.lower() + '.auto.tfvars'
    resource = sheetName.lower()

    # Load the template file
    file_loader = FileSystemLoader(f'{Path(__file__).parent}/templates')
    env = Environment(loader=file_loader, keep_trailing_newline=True, trim_blocks=True, lstrip_blocks=True)
    template = env.get_template('exa-infra-aws-template')

    # Read cd3 using pandas dataframe
    df, col_headers = commonTools.read_cd3(filename, sheetName)

    # Remove empty rows
    df = df.dropna(how='all')
    df = df.reset_index(drop=True)
    tfStr = ''

    # List of the column headers
    dfcolumns = df.columns.values.tolist()

    # Iterate over rows
    for i in df.index:
        display_name = str(df.loc[i, 'Display Name']).strip()

        # Encountered <End>
        if (display_name in commonTools.endNames):
            break

        # pandas renders empty cells as NaN; skip rows with no Display Name
        if display_name.lower() == 'nan':
            continue

        display_name = display_name.strip()

        # temporary dictionary1 and dictionary2
        tempStr = {}
        tempdict = {}

        # Check if values are entered for mandatory fields
        if str(df.loc[i, 'Display Name']).lower() == 'nan' or \
           str(df.loc[i, 'Region']).lower() == 'nan' or \
           str(df.loc[i, 'Availability Zone ID']).lower() == 'nan' or \
           str(df.loc[i, 'Shape']).lower() == 'nan' or \
           str(df.loc[i, 'Compute Count']).lower() == 'nan' or \
           str(df.loc[i, 'Storage Count']).lower() == 'nan' or \
           str(df.loc[i, 'Database Server Type']).lower() == 'nan' or \
           str(df.loc[i, 'Storage Server Type']).lower() == 'nan':
            print("\nAll fields except Maintenance Window, Customer Contacts and Common Tags are mandatory. Please enter a value and try again !!")
            exit(1)

        # NOTE(review): each check below rebinds tempdict wholesale, so for a
        # given column only the last matching rule contributes; columns are
        # expected to match at most one rule — confirm intended.
        for columnname in dfcolumns:
            # Column value
            columnvalue = str(df[columnname][i]).strip()

            # Check for boolean/null in column values
            columnvalue = commonTools.check_columnvalue(columnvalue)

            # Check for multivalued columns
            tempdict = commonTools.check_multivalues_columnvalue(columnvalue, columnname, tempdict)

            # Process Defined and Freeform Tags
            if columnname.lower() in awsCommonTools.awsCommonTools.tagColumns:
                tempdict = awsCommonTools.awsCommonTools.split_tag_values(columnname, columnvalue, tempdict)

            if columnname == "Display Name":
                display_name = columnvalue.strip()
                display_tf_name = commonTools.check_tf_variable(display_name)
                tempdict = {'display_name': display_name, 'display_tf_name': display_tf_name}

            columnname = commonTools.check_column_headers(columnname)
            tempStr[columnname] = str(columnvalue).strip()
            tempStr.update(tempdict)

        # Write all info to TF string
        tfStr = tfStr + template.render(tempStr)

    # Write TF string to the file
    if (tfStr != ''):
        outfile = outdir + "/" + auto_tfvars_filename
        commonTools.backup_file(outdir, resource, auto_tfvars_filename)
        # Splice the rendered rows in front of the template's anchor marker,
        # then drop blank lines before writing the tfvars file.
        src = "##Add New Exa-Infra @AWS here##"
        tfStr = template.render(count=0).replace(src, tfStr + "\n" + src)
        tfStr = "".join([s for s in tfStr.strip().splitlines(True) if s.strip("\r\n").strip()])
        oname = open(outfile, 'w')
        oname.write(tfStr)
        oname.close()
        print(outfile + " containing TF for Exa-Infra @AWS has been created")
Lines changed: 124 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,124 @@
1+
#!/usr/bin/python3
2+
# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
3+
#
4+
# This script will produce a Terraform file that will be used to set up OCI Database
5+
# Exadata VM Cluster @AWS
6+
#
7+
# Author: Oracle Consulting
8+
#
9+
10+
import os
11+
import sys
12+
from jinja2 import Environment, FileSystemLoader
13+
from pathlib import Path
14+
sys.path.append(os.getcwd()+"/..")
15+
from common.python.commonTools import *
16+
import awscloud.python.awsCommonTools as awsCommonTools
17+
18+
19+
######
20+
# Required Inputs- CD3 excel file, prefix AND outdir
21+
######
22+
# Execution of the code begins here
23+
def create_terraform_exa_vmclusters_aws(inputfile, outdir, prefix):
    """Render <prefix>_exa-vmcluster-aws.auto.tfvars for Exadata VM Cluster
    @AWS from the 'EXA-VMCluster-AWS' sheet of the CD3 excel workbook.

    inputfile -- path to the CD3 excel workbook
    outdir    -- directory the .auto.tfvars file is written into
    prefix    -- tenancy/customer prefix used in the output filename
    """

    filename = inputfile
    sheetName = "EXA-VMCluster-AWS"
    auto_tfvars_filename = prefix + '_' + sheetName.lower() + '.auto.tfvars'
    resource = sheetName.lower()

    # Load the template file
    file_loader = FileSystemLoader(f'{Path(__file__).parent}/templates')
    env = Environment(loader=file_loader, keep_trailing_newline=True, trim_blocks=True, lstrip_blocks=True)
    template = env.get_template('exa-vmcluster-aws-template')

    # Read cd3 using pandas dataframe
    df, col_headers = commonTools.read_cd3(filename, sheetName)
    tfStr = ''

    # Remove empty rows
    df = df.dropna(how='all')
    df = df.reset_index(drop=True)

    # List of the column headers
    dfcolumns = df.columns.values.tolist()

    # Iterate over rows
    for i in df.index:
        display_name = str(df.loc[i, 'Display Name']).strip()

        # Encountered <End>
        if (display_name in commonTools.endNames):
            break

        # pandas renders empty cells as NaN; skip rows with no Display Name
        if display_name.lower() == 'nan':
            continue

        display_name = display_name.strip()

        # temporary dictionary1 and dictionary2
        tempStr = {}
        tempdict = {}

        # Check if values are entered for mandatory fields
        if str(df.loc[i, 'Display Name']).lower() == 'nan' or \
           str(df.loc[i, 'Create ODB Network']).lower() == 'nan' or \
           str(df.loc[i, 'ODB Network Name']).lower() == 'nan' or \
           str(df.loc[i, 'Exadata Infrastructure Name']).lower() == 'nan' or \
           str(df.loc[i, 'Region']).lower() == 'nan' or \
           str(df.loc[i, 'Cluster Name']).lower() == 'nan' or \
           str(df.loc[i, 'Hostname Prefix']).lower() == 'nan' or \
           str(df.loc[i, 'GI Version']).lower() == 'nan' or \
           str(df.loc[i, 'CPU Core Count']).lower() == 'nan' or \
           str(df.loc[i, 'Memory Size in GBS']).lower() == 'nan' or \
           str(df.loc[i, 'Data Storage Size in TBS']).lower() == 'nan' or \
           str(df.loc[i, 'DB Node Storage Size in GBS']).lower() == 'nan' or \
           str(df.loc[i, 'SSH Public Keys']).lower() == 'nan' or \
           str(df.loc[i, 'License Model']).lower() == 'nan' or \
           str(df.loc[i, 'Timezone']).lower() == 'nan' or \
           str(df.loc[i, 'Is Local Backup Enabled']).lower() == 'nan' or \
           str(df.loc[i, 'Is Sparse Diskgroup Enabled']).lower() == 'nan':
            print("\nDisplay Name, Create ODB Network, ODB Network Name, Exadata Infrastructure Name, Region, Cluster Name, Hostname Prefix, GI Version, CPU Core Count, Memory Size in GBS, Data Storage Size in TBS, DB Node Storage Size in GBS, SSH Public Keys, License Model, Timezone, Is Local Backup Enabled and Is Sparse Diskgroup Enabled are mandatory. Please enter a value and try again !!")
            exit(1)

        # NOTE(review): each check below rebinds tempdict wholesale, and the
        # Display Name check runs BEFORE the tag-column check here (opposite
        # order to the Exa-Infra script) — confirm the order is intentional.
        for columnname in dfcolumns:
            # Column value
            columnvalue = str(df[columnname][i]).strip()

            # Check for boolean/null in column values
            columnvalue = commonTools.check_columnvalue(columnvalue)

            # Check for multivalued columns
            tempdict = commonTools.check_multivalues_columnvalue(columnvalue, columnname, tempdict)

            if columnname == "Display Name":
                display_name = columnvalue.strip()
                display_tf_name = commonTools.check_tf_variable(display_name)
                tempdict = {'display_tf_name': display_tf_name, 'display_name': display_name}

            # Process Defined and Freeform Tags
            if columnname.lower() in awsCommonTools.awsCommonTools.tagColumns:
                tempdict = awsCommonTools.awsCommonTools.split_tag_values(columnname, columnvalue, tempdict)

            if columnname == "Exadata Infrastructure Name":
                exadata_infrastructure_name = columnvalue.strip()
                tempdict = {'exadata_infrastructure_name': exadata_infrastructure_name}

            columnname = commonTools.check_column_headers(columnname)
            tempStr[columnname] = str(columnvalue).strip()
            tempStr.update(tempdict)

        # Write all info to TF string
        tfStr = tfStr + template.render(tempStr)

    # Write TF string to the file
    if (tfStr != ''):
        outfile = outdir + "/" + auto_tfvars_filename
        commonTools.backup_file(outdir, resource, auto_tfvars_filename)
        # Splice the rendered rows in front of the template's anchor marker,
        # then drop blank lines before writing the tfvars file.
        src = "##Add New Exa-VMCluster @AWS here##"
        tfStr = template.render(count=0).replace(src, tfStr + "\n" + src)
        tfStr = "".join([s for s in tfStr.strip().splitlines(True) if s.strip("\r\n").strip()])
        oname = open(outfile, 'w')
        oname.write(tfStr)
        oname.close()
        print(outfile + " containing TF for Exa-VMCluster @AWS has been created")

0 commit comments

Comments
 (0)