diff --git a/pytest-Tests/hdfs/conftest.py b/pytest-Tests/hdfs/conftest.py
new file mode 100644
index 0000000000..19472b1592
--- /dev/null
+++ b/pytest-Tests/hdfs/conftest.py
@@ -0,0 +1,106 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
# Pytest fixtures that prepare the dockerised Hadoop/KMS environment before
# the HDFS encryption tests run: wires the KMS provider into core-site.xml,
# forces simple authentication, creates the keyadmin user and leaves safemode.
+
+import docker
+import pytest
+import time
+from test_config import (HADOOP_CONTAINER, HDFS_USER,KMS_PROPERTY,CORE_SITE_XML_PATH)
+
# Docker client bound to the local daemon; shared by every fixture in this module.
client = docker.from_env()
+
@pytest.fixture(scope="session")
def hadoop_container():
    """Session-scoped handle to the already-running Hadoop container."""
    # Look the container up by name; fails fast if the compose stack is down.
    return client.containers.get(HADOOP_CONTAINER)
+
def ensure_key_provider_and_simple_auth(container) -> bool:
    """
    Point core-site.xml at the Ranger KMS and force simple authentication.

    Ensures:
      1) the ``hadoop.security.key.provider.path`` property exists
      2) ``hadoop.security.authentication`` is set to ``simple``

    Args:
        container: docker container handle exposing ``exec_run``.

    Returns True if core-site.xml was modified (the caller should restart the
    container so the NameNode re-reads the config).
    """
    changed = False

    # 1) Insert the KMS provider property just before </configuration> if absent.
    exit_code, _ = container.exec_run(
        f"grep -q 'hadoop.security.key.provider.path' {CORE_SITE_XML_PATH}",
        user="root",
    )
    if exit_code != 0:
        container.exec_run(
            f"sed -i '/<\\/configuration>/i {KMS_PROPERTY}' {CORE_SITE_XML_PATH}",
            user="root",
        )
        changed = True

    # 2) Force auth to simple (replace value if the property exists, else insert
    #    a brand-new property element).
    exit_code, _ = container.exec_run(
        f"grep -q 'hadoop.security.authentication' {CORE_SITE_XML_PATH}",
        user="root",
    )
    if exit_code == 0:
        # Within the <property> element for hadoop.security.authentication,
        # rewrite whatever sits inside <value>...</value> to "simple".
        container.exec_run(
            "sed -i "
            "'/hadoop.security.authentication<\\/name>/,/<\\/property>/ "
            "s/[^<]*<\\/value>/simple<\\/value>/' "
            f"{CORE_SITE_XML_PATH}",
            user="root",
        )
        changed = True
    else:
        # BUGFIX: this string had lost its XML tags (angle brackets stripped),
        # so sed would have inserted invalid text into core-site.xml.
        # Reconstructed as a single <property> element, consistent with the
        # </name> and </property> markers used by the sed range above.
        simple_prop = (
            "<property>"
            "<name>hadoop.security.authentication</name>"
            "<value>simple</value>"
            "</property>"
        )
        container.exec_run(
            f"sed -i '/<\\/configuration>/i {simple_prop}' {CORE_SITE_XML_PATH}",
            user="root",
        )
        changed = True

    return changed
+
def ensure_user_exists(container, username: str) -> None:
    """Idempotently create *username* in the container and add it to the
    hadoop group so it can run HDFS commands."""
    rc, _ = container.exec_run(f"id -u {username}", user="root")
    if rc != 0:
        # User missing: create it with a home dir and bash shell, then add group.
        container.exec_run(f"useradd -m -s /bin/bash {username}", user="root")
        container.exec_run(f"usermod -aG hadoop {username}", user="root")
+
+
@pytest.fixture(scope="session", autouse=True)
def setup_environment(hadoop_container):
    """One-time environment prep: wire up the KMS config, create the key
    admin user and make sure HDFS accepts writes before any test runs."""
    if ensure_key_provider_and_simple_auth(hadoop_container):
        # Config changed on disk -> restart so the NameNode re-reads it.
        hadoop_container.restart()

    # Give the services time to settle (always, matching original behaviour).
    time.sleep(30)

    ensure_user_exists(hadoop_container, "keyadmin")
    # Safemode blocks writes; leave it explicitly so tests can create files.
    hadoop_container.exec_run("hdfs dfsadmin -safemode leave", user=HDFS_USER)

    yield
diff --git a/pytest-Tests/hdfs/readme.md b/pytest-Tests/hdfs/readme.md
new file mode 100644
index 0000000000..95a503a6be
--- /dev/null
+++ b/pytest-Tests/hdfs/readme.md
@@ -0,0 +1,95 @@
+
+
+# This is the main directory for testing HDFS encryption cycle
+
+## Structure
+```
+test_hdfs/
+├── test_encryption.py
+├── test_encryption02.py
+├── test_encryption03.py
+├── test_config.py #stores all constants and HDFS commands
+├── conftest.py #sets up the environment
+├── utils.py #utility methods
+
+```
+
+---
+
+## Features
+
+- **Markers:**
+ Markers can be used to selectively run specific test cases, improving test efficiency and organization.
+
+---
+
+### `setup_environment`
+
+Handled in `conftest.py` file
+Before running the test cases, some environment configurations are needed:
+- HDFS must communicate with KMS to fetch key details.
+- Specific KMS properties are added to the `core-site.xml` file.
+- Containers are restarted to apply the changes effectively.
+
+---
+
+### Utility Methods
+
+- **get_error_logs:**
+ Fetches logs from both KMS and HDFS containers. Helps in identifying issues when errors or exceptions occur during testing.
+
+- **run_command:**
+ Executes all necessary HDFS commands inside the containers.
+
+---
+
+## `test_encryption.py`
+
+Handles the **full HDFS encryption cycle**, including setup, positive and negative test scenarios, and cleanup.
+
+### Main Highlights:
+- Encryption Zone (EZ) creation in HDFS.
+- Granting permissions to specific users for read/write operations within the EZ.
+- Validating read/write attempts by unauthorized users inside the EZ.
+
+
## `test_encryption02.py`

Covers **key rollover and key deletion scenarios**:
- Checks that files written before a key rollover can still be read afterwards.
- Checks that new files can be written and read after a key rollover.
- Checks that reading a file fails after its EZ key has been deleted.
+
+---
+
## `test_encryption03.py`

Covers **cross-encryption-zone operations**: a user granted access to one encryption zone performs operations there successfully, and is denied on a second zone where no permission was granted.
+
+
+## Summary
+
+This test suite ensures that **HDFS encryption and access control mechanisms** function as expected, validating both authorized and unauthorized access scenarios.
diff --git a/pytest-Tests/hdfs/test_config.py b/pytest-Tests/hdfs/test_config.py
new file mode 100644
index 0000000000..31c194efa7
--- /dev/null
+++ b/pytest-Tests/hdfs/test_config.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
# Constants shared by the HDFS encryption tests: container users, Docker
# container names, KMS REST settings, file paths and HDFS command templates.
+
## Contains all constant values regarding USER, PATH, HDFS commands ----------

# Users inside the hadoop container used to exercise permission checks.
HDFS_USER = "hdfs"
HIVE_USER = "hive"
HBASE_USER = "hbase"
KEY_ADMIN = "keyadmin"

# KMS REST API access (key create / rollover / delete).
HEADERS = {"Content-Type": "application/json", "Accept": "application/json"}
PARAMS = {"user.name": "keyadmin"}
BASE_URL = "http://localhost:9292/kms/v1"

HADOOP_CONTAINER = "ranger-hadoop"
KMS_CONTAINER = "ranger-kms"

# KMS configs that need to be added to the XML file -------- add more if needed
# NOTE(review): the XML tags had been stripped from this string (angle
# brackets lost); reconstructed as a single <property> element, which is what
# the sed insert in conftest.py expects (its patterns match </name> and
# </property> tags).
KMS_PROPERTY = (
    "<property>"
    "<name>hadoop.security.key.provider.path</name>"
    "<value>kms://http@host.docker.internal:9292/kms</value>"
    "</property>"
)

CORE_SITE_XML_PATH = "/opt/hadoop/etc/hadoop/core-site.xml"
HADOOP_NAMENODE_LOG_PATH = "/opt/hadoop/logs/hadoop-hdfs-namenode-ranger-hadoop.rangernw.log"
KMS_LOG_PATH = "/var/log/ranger/kms/ranger-kms-ranger-kms.rangernw-root.log"


# HDFS commands --------------------------------------------------------------
# Templates are filled via str.format(); the tests pass key_name, ez_name,
# filename, user and dirname as needed.  The "{filename}" placeholders were
# restored (they had been replaced by a placeholder artifact) to match the
# .format(filename=...) calls in the test modules.
CREATE_KEY_COMMAND = "hadoop key create {key_name} -size 128 -provider kms://http@host.docker.internal:9292/kms"

VALIDATE_KEY_COMMAND = "hadoop key list -provider kms://http@host.docker.internal:9292/kms"

CREATE_EZ_COMMANDS = [
    "hdfs dfs -mkdir /{ez_name}",
    "hdfs crypto -createZone -keyName {key_name} -path /{ez_name}",
    "hdfs crypto -listZones"
]

GRANT_PERMISSIONS_COMMANDS = [
    "hdfs dfs -chmod -R 700 /{ez_name}",
    "hdfs dfs -chown -R {user}:{user} /{ez_name}"
]

CREATE_FILE_COMMAND = ['echo "{filecontent}" > /home/{user}/{filename}.txt && ls -l /home/{user}/{filename}.txt']

ACTIONS_COMMANDS = [
    "hdfs dfs -put /home/{user}/{filename}.txt /{ez_name}/",
    "hdfs dfs -ls /{ez_name}/",
    "hdfs dfs -cat /{ez_name}/{filename}.txt"
]

CROSS_EZ_ACTION_COMMANDS = [
    "hdfs dfs -put /home/{user}/{filename}.txt /{ez_name}/{dirname}/",
    "hdfs dfs -ls /{ez_name}/",
    "hdfs dfs -cat /{ez_name}/{dirname}/{filename}.txt"
]

READ_EZ_FILE = [
    "hdfs dfs -cat /{ez_name}/{filename}.txt"
]

READ_EZ = [
    "hdfs dfs -cat /{ez_name}/"
]

UNAUTHORIZED_WRITE_COMMAND = "hdfs dfs -put /home/{user}/{filename}.txt /{ez_name}/"

UNAUTHORIZED_READ_COMMAND = "hdfs dfs -cat /{ez_name}/{filename}.txt"

CLEANUP_COMMANDS = [
    "hdfs dfs -rm /{ez_name}/{filename}.txt",
    "hdfs dfs -rm -R /{ez_name}"
]
CLEANUP_EZ = [
    "hdfs dfs -rm -R /{ez_name}"
]
CLEANUP_EZ_FILE = [
    "hdfs dfs -rm /{ez_name}/{filename}.txt"
]
# 'Y' is piped in to answer the interactive deletion confirmation prompt.
KEY_DELETION_CMD = "bash -c \"echo 'Y' | hadoop key delete {key_name} -provider kms://http@host.docker.internal:9292/kms\""
diff --git a/pytest-Tests/hdfs/test_encryption.py b/pytest-Tests/hdfs/test_encryption.py
new file mode 100644
index 0000000000..10817c054f
--- /dev/null
+++ b/pytest-Tests/hdfs/test_encryption.py
@@ -0,0 +1,142 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
# Full HDFS encryption cycle: key creation, encryption-zone setup, authorized
# and unauthorized read/write attempts, and cleanup.
+
+import pytest
+from utils import run_command,get_error_logs
+from test_config import (HDFS_USER,HIVE_USER,HBASE_USER,KEY_ADMIN,
+ CREATE_KEY_COMMAND, VALIDATE_KEY_COMMAND, CREATE_EZ_COMMANDS,GRANT_PERMISSIONS_COMMANDS,
+ UNAUTHORIZED_WRITE_COMMAND, ACTIONS_COMMANDS,
+ UNAUTHORIZED_READ_COMMAND,KEY_DELETION_CMD,
+ CLEANUP_COMMANDS,CREATE_FILE_COMMAND)
+
# Shared values for this module's encryption-cycle tests.
key_name = "hdfs-key"                       # EZ key created in KMS
ez_name = "secure_zone"                     # encryption zone root directory
filename = "hdfs-test-file"                 # test file name (no extension)
filecontent = "Welcome to hdfs encryption"  # payload written into the EZ
+
+# EZ key creation before creating an EZ---------------------------------------------
# EZ key creation before creating an EZ ---------------------------------------
def test_create_key(hadoop_container):
    """Create the EZ key as keyadmin, then confirm it appears in `key list`."""
    output = run_command(
        hadoop_container, CREATE_KEY_COMMAND.format(key_name=key_name), KEY_ADMIN
    )
    print("Key Creation Output:", output)

    # List the keys on the same KMS provider to validate creation.
    validation_output = run_command(hadoop_container, VALIDATE_KEY_COMMAND, KEY_ADMIN)

    print("Key List Output:", validation_output)

    # Fail with service logs attached if the key never materialised.
    if key_name not in validation_output:
        error_logs = get_error_logs()  # Fetch logs on failure
        pytest.fail(f"Key creation failed. Logs:\n{error_logs}")
+
+
+# Create Encryption Zone -----------------------------------------------------------
# Create Encryption Zone ------------------------------------------------------
@pytest.mark.createEZ
def test_create_encryption_zone(hadoop_container):
    """mkdir the zone dir, bind it to the EZ key, then list zones (as hdfs)."""
    for template in CREATE_EZ_COMMANDS:
        cmd = template.format(ez_name=ez_name, key_name=key_name)
        print(run_command(hadoop_container, cmd, HDFS_USER))
+
+
+# Grant Permissions to 'Hive' User to above EZ----------------------------------------
+def test_grant_permissions(hadoop_container):
+ grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS]
+
+ for cmd in grant_permission_commands:
+ output = run_command(hadoop_container,cmd,HDFS_USER)
+ print(output)
+
# Testing read/write permission for the hive user -----------------------------
def test_hive_user_write_read(hadoop_container):
    """'hive' owns the zone, so creating, putting and reading a file works."""
    # Create the local file as 'hive'.
    local_create = CREATE_FILE_COMMAND[0].format(
        filename=filename, filecontent=filecontent, user=HIVE_USER
    )
    run_command(hadoop_container, ["bash", "-c", local_create], HIVE_USER)

    # put / ls / cat inside the EZ as 'hive'.
    for template in ACTIONS_COMMANDS:
        cmd = template.format(filename=filename, ez_name=ez_name, user=HIVE_USER)
        run_command(hadoop_container, cmd, HIVE_USER)
+
+
# Negative Test - Unauthorized user ('hbase') cannot write --------------------
def test_unauthorized_write(hadoop_container):
    """'hbase' has no permission on the EZ, so the put must exit non-zero."""
    cmd = UNAUTHORIZED_WRITE_COMMAND.format(
        filename="hdfs-test-file2", user=HBASE_USER, ez_name=ez_name
    )
    output, exit_code = run_command(
        hadoop_container, cmd, HBASE_USER, fail_on_error=False, return_exit_code=True
    )

    print(f"Command Output:\n{output}")

    # A non-zero exit code is the expected permission failure.
    failure_detected = exit_code != 0
    assert failure_detected, "Expected failure due to no permission on EZ, but command succeeded."
+
+
# Negative Test - Unauthorized User 'HBASE' Cannot Read ------------------------------
def test_unauthorized_read(hadoop_container):
    """'hbase' must not be able to cat a file inside the EZ.

    BUGFIX: ``failure_detected`` was only assigned inside the
    ``exit_code != 0`` branch, so an unexpected *successful* read raised
    ``NameError`` instead of a clean assertion failure.  It is now
    initialised to ``False``, matching ``test_unauthorized_write``.
    """
    failure_detected = False

    unauth_read = UNAUTHORIZED_READ_COMMAND.format(filename=filename, ez_name=ez_name, user=HBASE_USER)
    output, exit_code = run_command(hadoop_container, unauth_read, HBASE_USER,
                                    fail_on_error=False, return_exit_code=True)

    print(f"Command Output:\n{output}")

    # A non-zero exit code is the expected permission failure.
    if exit_code != 0:
        failure_detected = True

    # Assert that the failure was detected as expected.
    assert failure_detected, "Expected failure due to no permission on EZ, but command succeeded."
+
+
# Clean Up - Remove test file and EZ ------------------------------------------
@pytest.mark.cleanEZ
def test_cleanup(hadoop_container):
    """Remove the EZ contents and the zone, then delete the EZ key from KMS."""
    output = None
    for template in CLEANUP_COMMANDS:
        output = run_command(hadoop_container,
                             template.format(filename=filename, ez_name=ez_name),
                             HDFS_USER)

    print(output)

    # Clean up the EZ key ('Y' is piped in to confirm deletion).
    output = run_command(hadoop_container,
                         KEY_DELETION_CMD.format(key_name=key_name),
                         KEY_ADMIN)
    print(output)
+
+
+
+
+
+
+
diff --git a/pytest-Tests/hdfs/test_encryption02.py b/pytest-Tests/hdfs/test_encryption02.py
new file mode 100644
index 0000000000..f26cce9ae7
--- /dev/null
+++ b/pytest-Tests/hdfs/test_encryption02.py
@@ -0,0 +1,243 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
# Key-lifecycle tests: reads after key rollover (old and new files) and
# read behaviour after the EZ key has been deleted from KMS.
+
+import pytest
+import requests
+from utils import run_command,get_error_logs
+from test_config import (HDFS_USER,HIVE_USER,HEADERS,PARAMS,BASE_URL,
+ CREATE_EZ_COMMANDS ,GRANT_PERMISSIONS_COMMANDS,
+ CREATE_FILE_COMMAND, ACTIONS_COMMANDS,READ_EZ_FILE,
+ CLEANUP_COMMANDS)
+
# ****** ********************Test Case 01 ********************************************
# ***** Check if after key roll over old files can be read or not
# ***********************************************************************************
def test_read_old_file_after_rollover(hadoop_container):
    """Files written before a key rollover must stay readable afterwards.

    Flow: create key -> create EZ -> grant 'hive' -> write file -> roll key
    over via the KMS REST API -> re-read the old file -> clean up.
    """
    key_name="test-key1"
    ez_name = "secure_zone1"
    filename="testfile1"
    filecontent="Hello Human"

    # Create the EZ key via the KMS REST API (expects HTTP 201 Created).
    key_data={
        "name":key_name
    }
    response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=HEADERS)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # Create the encryption zone bound to that key (as the hdfs superuser).
    create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS]

    for cmd in create_ez_commands:
        output = run_command(hadoop_container, cmd, HDFS_USER)
        print(output)

    # Grant the 'hive' user ownership/permissions on the zone.
    grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS]

    for cmd in grant_permission_commands:
        output = run_command(hadoop_container,cmd,HDFS_USER)
        print(output)

    # Create a local file as the 'hive' user.
    create_file_cmd = [cmd.format(
        filename=filename,
        filecontent=filecontent,
        user=HIVE_USER
    ) for cmd in CREATE_FILE_COMMAND]

    run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER)

    # put/ls/cat inside the zone as 'hive' (file now encrypted with key v1).
    read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS]
    for cmd in read_write_cmd:
        run_command(hadoop_container,cmd,HIVE_USER)

    # Roll the key over via KMS (expects HTTP 200 OK).
    response=requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=HEADERS, params=PARAMS)
    assert response.status_code == 200, f"Key roll over failed: {response.text}"

    # Read the same (pre-rollover) file; run_command fails the test on error.
    read_ez_file=[cmd.format(filename=filename, ez_name=ez_name) for cmd in READ_EZ_FILE]
    for cmd in read_ez_file:
        run_command(hadoop_container,cmd,HIVE_USER)

    # Cleanup: remove the file and then the whole zone.
    cleanup_cmd=[cmd.format(filename=filename, ez_name=ez_name) for cmd in CLEANUP_COMMANDS]
    for cmd in cleanup_cmd:
        run_command(hadoop_container,cmd,HDFS_USER)

    # Delete the EZ key (best effort; response intentionally not asserted).
    delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
    print(delete_output2)
+
+
# ****** ********************Test Case 02 ********************************************
# ***** Check if after key roll over new files can be written and read too
# ***********************************************************************************
def test_writeAndRead_Newfile_after_rollover(hadoop_container):
    """After a key rollover, brand-new files must be writable and readable
    (they pick up the new key version), and the pre-rollover file still works."""
    key_name="test-key2"
    ez_name = "secure_zone1"
    filename="testfile2"
    filename2="testfile3"
    filecontent="Hello First"
    filecontent2="Hello Second"

    # Create the EZ key via the KMS REST API (expects HTTP 201 Created).
    key_data={
        "name":key_name
    }
    response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=HEADERS)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # Create the encryption zone bound to that key (as the hdfs superuser).
    create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS]

    for cmd in create_ez_commands:
        output = run_command(hadoop_container, cmd, HDFS_USER)
        print(output)

    # Grant the 'hive' user ownership/permissions on the zone.
    grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS]

    for cmd in grant_permission_commands:
        output = run_command(hadoop_container,cmd,HDFS_USER)
        print(output)

    # Create the first local file as 'hive' (pre-rollover).
    create_file_cmd = [cmd.format(
        filename=filename,
        filecontent=filecontent,
        user=HIVE_USER
    ) for cmd in CREATE_FILE_COMMAND]

    run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER)

    # put/ls/cat the first file inside the zone as 'hive'.
    read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS]
    for cmd in read_write_cmd:
        output=run_command(hadoop_container,cmd,HIVE_USER)
        print(output)

    # Roll the key over via KMS (expects HTTP 200 OK).
    response=requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=HEADERS, params=PARAMS)
    assert response.status_code == 200, f"Key roll over failed: {response.text}"

    # Write a second local file after the rollover.
    create_file_cmd = [cmd.format(
        filename=filename2,
        filecontent=filecontent2,
        user=HIVE_USER
    ) for cmd in CREATE_FILE_COMMAND]

    run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER)

    # put/ls/cat the new file; it should use the rolled-over key version.
    read_write_cmd= [cmd.format(filename=filename2, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS]
    for cmd in read_write_cmd:
        output=run_command(hadoop_container,cmd,HIVE_USER)
        print(output)

    # Cleanup: removes the first file, then the whole zone (which also takes
    # the second file with it).
    cleanup_cmd=[cmd.format(filename=filename, ez_name=ez_name) for cmd in CLEANUP_COMMANDS]
    for cmd in cleanup_cmd:
        run_command(hadoop_container,cmd,HDFS_USER)

    # Delete the EZ key (best effort; response intentionally not asserted).
    delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
    print(delete_output2)
+
+
# ****** ********************Test Case 03 ********************************************
# ***** Check read operation on file after key deletion
# ***********************************************************************************
def test_Readfile_after_keyDeletion(hadoop_container):
    """Once the EZ key is deleted from KMS, reading a file in the zone must
    fail (the file's encrypted data key can no longer be decrypted)."""
    key_name="test-key3"
    ez_name = "secure_zone1"
    filename="testfile4"
    filecontent="You are reading it before key deletion"

    # Create the EZ key via the KMS REST API (expects HTTP 201 Created).
    key_data={
        "name":key_name
    }
    response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=HEADERS)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # Create the encryption zone bound to that key (as the hdfs superuser).
    create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS]

    for cmd in create_ez_commands:
        output = run_command(hadoop_container, cmd, HDFS_USER)
        print(output)

    # Grant the 'hive' user ownership/permissions on the zone.
    grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS]

    for cmd in grant_permission_commands:
        output = run_command(hadoop_container,cmd,HDFS_USER)
        print(output)

    # Create a local file as 'hive'.
    create_file_cmd = [cmd.format(
        filename=filename,
        filecontent=filecontent,
        user=HIVE_USER
    ) for cmd in CREATE_FILE_COMMAND]

    run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER)

    # put/ls/cat while the key still exists -- must succeed.
    read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS]
    for cmd in read_write_cmd:
        output=run_command(hadoop_container,cmd,HIVE_USER)
        print(output)


    # Delete the EZ key from KMS.
    delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
    print(delete_output2)


    # Try reading the file after key deletion; with fail_on_error=False the
    # failure must be detected by scanning the command output instead.
    read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in READ_EZ_FILE]
    failure_detected = False

    for cmd in read_write_cmd:
        output = run_command(hadoop_container, cmd, HIVE_USER, fail_on_error=False)
        print(f"Command Output:\n{output}")

        # Known failure indicators in the command output.
        if any(err in output.lower() for err in ["error", "exception", "failed", "not found"]):
            failure_detected = True

    # The read must have failed -- the key is gone.
    assert failure_detected, "Expected failure due to deleted EZ key, but command succeeded."


    # Cleanup: remove the file entry and the zone.
    cleanup_cmd=[cmd.format(filename=filename, ez_name=ez_name) for cmd in CLEANUP_COMMANDS]
    for cmd in cleanup_cmd:
        run_command(hadoop_container,cmd,HDFS_USER)
+
diff --git a/pytest-Tests/hdfs/test_encryption03.py b/pytest-Tests/hdfs/test_encryption03.py
new file mode 100644
index 0000000000..1d08b7e673
--- /dev/null
+++ b/pytest-Tests/hdfs/test_encryption03.py
@@ -0,0 +1,141 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
# Cross-encryption-zone tests: a user with access to one zone operates on it
# and is denied on a second zone where it has no permission.
+
+import pytest
+import requests
+from utils import run_command,get_error_logs
+from test_config import (HDFS_USER,HIVE_USER,HEADERS,PARAMS,BASE_URL,
+ CREATE_EZ_COMMANDS ,GRANT_PERMISSIONS_COMMANDS,
+ CREATE_FILE_COMMAND, ACTIONS_COMMANDS,READ_EZ_FILE,
+ CLEANUP_COMMANDS,CROSS_EZ_ACTION_COMMANDS,CLEANUP_EZ)
+
+
# ****** ********************Test Case 01 ********************************************
# ***** Cross EZ operation where one user has given access to one EZ and does operation on that zone and another second zone where he has no permission
# ***********************************************************************************
def test_cross_EZ_operations(hadoop_container):
    """Cross-EZ access control: 'hive' gets permissions on zone 1 only, so
    read/write works in zone 1 (both subdirs) and must fail in zone 2."""
    key_name="cross-key"
    key_name2="cross-key2"

    ez_name = "secure_zone1"
    ez_name2 = "secure_zone2"

    filename="testfile1"
    filecontent="Cross operation on Encryption zone"

    dirname="dir1"
    dirname2="dir2"

    # Create the two EZ keys via the KMS REST API (each expects HTTP 201).
    key_data1={
        "name":key_name
    }
    key_data2={
        "name":key_name2
    }
    response=requests.post(f"{BASE_URL}/keys",json=key_data1,params=PARAMS,headers=HEADERS)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    response2=requests.post(f"{BASE_URL}/keys",json=key_data2,params=PARAMS,headers=HEADERS)
    assert response2.status_code == 201, f"Key creation failed: {response2.text}"

    # Create both encryption zones, each bound to its own key.
    create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS]

    for cmd in create_ez_commands:
        output = run_command(hadoop_container, cmd, HDFS_USER)
        print(output)

    create_ez_commands = [cmd.format(ez_name=ez_name2, key_name=key_name2) for cmd in CREATE_EZ_COMMANDS]

    for cmd in create_ez_commands:
        output = run_command(hadoop_container, cmd, HDFS_USER)
        print(output)

    # Create the subdirectories inside the first encryption zone as HDFS user.
    create_dirs_cmds = [
        f"hdfs dfs -mkdir -p /{ez_name}/{dirname}",
        f"hdfs dfs -mkdir -p /{ez_name}/{dirname2}"
    ]
    for cmd in create_dirs_cmds:
        run_command(hadoop_container, cmd, HDFS_USER)

    # Grant 'hive' ownership/permissions on the FIRST zone only.
    grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS]

    for cmd in grant_permission_commands:
        output = run_command(hadoop_container,cmd,HDFS_USER)
        print(output)

    # Create a local file as the 'hive' user.
    create_file_cmd = [cmd.format(
        filename=filename,
        filecontent=filecontent,
        user=HIVE_USER
    ) for cmd in CREATE_FILE_COMMAND]

    run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER)

    # Write it to dir1 in EZ1 as 'hive' and read it back -- must succeed.
    read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name,dirname=dirname, user=HIVE_USER) for cmd in CROSS_EZ_ACTION_COMMANDS]
    for cmd in read_write_cmd:
        run_command(hadoop_container,cmd,HIVE_USER)

    # Write it to dir2 in EZ1 as 'hive' and read it back -- must succeed.
    read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name,dirname=dirname2, user=HIVE_USER) for cmd in CROSS_EZ_ACTION_COMMANDS]
    for cmd in read_write_cmd:
        run_command(hadoop_container,cmd,HIVE_USER)

    # Now try EZ2 as 'hive' -- should fail: no permission on the second zone.
    failure_detected = False
    read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name2, user=HIVE_USER) for cmd in ACTIONS_COMMANDS]

    for cmd in read_write_cmd:
        output,exit_code=run_command(hadoop_container,cmd,HIVE_USER, fail_on_error=False,return_exit_code=True)
        print(f"Command Output:\n{output}")

        # A non-zero exit code is the expected permission failure.
        if exit_code != 0:
            failure_detected = True
            break

    # Assert that the failure was detected as expected.
    assert failure_detected, "Expected failure due to no permission on EZ, but command succeeded."

    # Cleanup: remove both zones recursively.
    cleanup_cmd=[cmd.format(ez_name=ez_name) for cmd in CLEANUP_EZ]
    for cmd in cleanup_cmd:
        run_command(hadoop_container,cmd,HDFS_USER)

    cleanup_cmd=[cmd.format(ez_name=ez_name2) for cmd in CLEANUP_EZ]
    for cmd in cleanup_cmd:
        run_command(hadoop_container,cmd,HDFS_USER)

    # Delete both EZ keys (best effort; responses intentionally not asserted).
    delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
    print(delete_output2)

    delete_output2=requests.delete(f"{BASE_URL}/key/{key_name2}", params=PARAMS)
    print(delete_output2)
+
diff --git a/pytest-Tests/hdfs/utils.py b/pytest-Tests/hdfs/utils.py
new file mode 100644
index 0000000000..f70166d835
--- /dev/null
+++ b/pytest-Tests/hdfs/utils.py
@@ -0,0 +1,77 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Helper utilities for the HDFS encryption-zone pytest suite:
+# run_command() executes commands inside the Hadoop container and
+# get_error_logs() collects recent error lines from the Hadoop NameNode
+# and KMS logs.
+
+import pytest
+import docker
+from test_config import (KMS_CONTAINER,HADOOP_NAMENODE_LOG_PATH,KMS_LOG_PATH)
+
+# Setup Docker Client
+client = docker.from_env()
+
+# Run a shell command inside the given container as the given user.
+#
+# Args:
+#     container: docker-py Container to exec into.
+#     cmd: command string passed to exec_run.
+#     user: OS user the command runs as inside the container.
+#     fail_on_error: when True (default), a non-zero exit code fails the
+#         current pytest test and the failure message embeds recent error
+#         lines from the Hadoop NameNode and KMS logs.
+#     return_exit_code: when True, return (output, exit_code) instead of
+#         only the decoded output.
+#
+# Returns:
+#     Decoded command output, or (output, exit_code) when
+#     return_exit_code is True.
+def run_command(container, cmd, user, fail_on_error=True,return_exit_code=False):
+    exit_code, output = container.exec_run(cmd, user=user)
+    output_response = output.decode()
+
+    if exit_code != 0 and fail_on_error:
+        # Pull recent error lines from both services so the pytest
+        # failure message is self-contained for debugging.
+        kms_container = client.containers.get(KMS_CONTAINER)
+        hadoop_logs, kms_logs = get_error_logs(container, kms_container)
+
+        pytest.fail(f"""
+        Command failed: {cmd}
+        Exit Code: {exit_code}
+
+        Output:
+        {output_response}
+
+        Hadoop Container Logs:
+        {hadoop_logs}
+
+        KMS Container Logs:
+        {kms_logs}
+        """)
+    if return_exit_code:
+        return output_response, exit_code
+
+    return output_response
+
+
+# Collect recent error/warning lines from the Hadoop NameNode and KMS logs.
+#
+# Args:
+#     hadoop_container: docker container running the Hadoop NameNode.
+#     kms_container: docker container running the KMS.
+#
+# Returns:
+#     (hadoop_error_text, kms_error_text): newline-joined lines from the
+#     last 50 log lines of each service that contain "ERROR", "Exception"
+#     or "WARN", or a placeholder string when none are found.
+def get_error_logs(hadoop_container, kms_container):
+
+    # Get Hadoop NameNode logs (exec exit code intentionally ignored:
+    # log collection is best-effort)
+    hadoop_log_cmd = f"tail -n 50 {HADOOP_NAMENODE_LOG_PATH}"
+    _, hadoop_logs = hadoop_container.exec_run(hadoop_log_cmd, user='hdfs')
+    hadoop_logs_decoded = hadoop_logs.decode()
+    hadoop_error_lines = [line for line in hadoop_logs_decoded.split("\n") if "ERROR" in line or "Exception" in line or "WARN" in line]
+    hadoop_error_text = "\n".join(hadoop_error_lines) if hadoop_error_lines else "No recent errors in Hadoop Namenode logs."
+
+    # Get KMS logs
+    kms_log_cmd = f"tail -n 50 {KMS_LOG_PATH}"
+    _, kms_logs = kms_container.exec_run(kms_log_cmd, user='root')
+    kms_logs_decoded = kms_logs.decode()
+    kms_error_lines = [line for line in kms_logs_decoded.split("\n") if "ERROR" in line or "Exception" in line or "WARN" in line]
+    kms_error_text = "\n".join(kms_error_lines) if kms_error_lines else "No recent errors in KMS logs."
+
+    return hadoop_error_text, kms_error_text
diff --git a/pytest-Tests/kms/conftest.py b/pytest-Tests/kms/conftest.py
new file mode 100644
index 0000000000..9b28dc8b23
--- /dev/null
+++ b/pytest-Tests/kms/conftest.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Shared pytest fixtures for the KMS API tests: the default-headers
+# fixture and the class-scoped create_test_key fixture that creates
+# 'key1' and deletes it on teardown.
+
+import pytest
+import requests
+
+from utils import fetch_logs
+
+BASE_URL="http://localhost:9292/kms/v1"
+PARAMS={"user.name":"keyadmin"}
+HEADERS={"Content-Type": "application/json"}
+
+@pytest.fixture(scope="session")
+def headers():
+    """Session-wide default HTTP headers (JSON content type) for KMS calls."""
+    return HEADERS
+
+@pytest.fixture(scope="class")
+def create_test_key(headers):
+    """Create KMS key 'key1' for the test class; delete it on teardown.
+
+    Yields the creation payload dict so tests can reference the key name.
+    Fails the test (with recent KMS logs attached) when creation does not
+    return HTTP 201.
+    """
+    data={
+        "name":"key1",
+        "cipher": "AES/CTR/NoPadding", # material can be provided (optional)
+        "length": 128,
+        "description": "Test key"
+    }
+
+    key_creation_response=requests.post(f"{BASE_URL}/keys",headers=headers,json=data,params=PARAMS)
+
+    if key_creation_response.status_code != 201:
+        error_logs = fetch_logs() # Fetch logs on failure
+        pytest.fail(f"Key creation failed. API Response: {key_creation_response.text}\nLogs:\n{error_logs}")
+
+    yield data
+    # Teardown: delete key1 so reruns start from a clean state.
+    requests.delete(f"{BASE_URL}/key/key1",params=PARAMS)
+
+
+
diff --git a/pytest-Tests/kms/readme.md b/pytest-Tests/kms/readme.md
new file mode 100644
index 0000000000..87629c8245
--- /dev/null
+++ b/pytest-Tests/kms/readme.md
@@ -0,0 +1,137 @@
+
+
+# This is the main directory for running KMS API functionality tests
+
+## Structure
+```
+test_kms/
+├── test_keys.py
+├── test_keys_02.py
+├── test_keyDetails.py
+├── test_keyOps.py
+├── test_keyOps_policy.py
+├── test_blacklisting.py
+├── conftest.py
+├── utils.py
+```
+
+
+## Features and Functionalities Used:
+
+- **Parametrization:** For running multiple test cases handling the same functionality in a single method.
+
+- **fetch_logs:** Fetches errors or exceptions from logs when something goes wrong.
+
+- **cleanup:** Cleans up all resources used while testing, so test cases can be re-run cleanly.
+
+---
+
+## `conftest.py`
+
+Special file used to define fixtures and shared configurations that pytest can automatically discover and use across tests.
+Pytest automatically loads this file, aiding code reusability.
+
+---
+
+## `utils.py`
+
+Consists of helper functions or classes used in tests.
+You need to import it wherever required.
+
+---
+
+## `test_keys.py`
+
+Handles **key creation operations**.
+
+1. **test_create_key:**
+ Used to create a key with the necessary payload, checks for errors, and cleans up the created key.
+
+2. **test_key_name_validation:**
+ Validates creation of a key with different valid and invalid name formats.
+
+3. **test_duplicate_key_creation:**
+ Verifies that creating a duplicate EZ key fails as expected.
+
+> Similarly, other validations can be implemented on keys.
+
+---
+
+## `test_keys_02.py`
+
+Handles **bulk key operations and other extra cases**.
+
+---
+
+## `test_keyDetails.py`
+
+Handles **retrieval of key-related data**.
+
+1. **test_get_key_names:**
+ Fetches all created keys and checks the presence of a specific key.
+
+2. **test_get_key_metadata:**
+ Checks metadata of existing and non-existing keys and validates the response.
+
+3. **test_get_key_versions:**
+ Checks key versions for existing and non-existing keys.
+
+---
+
+## `test_keyOps.py`
+
+Handles **operations on keys**.
+
+1. **test_temp_key:**
+ Creates a temporary key used for further roll-over functionality.
+
+2. **test_roll_over_key:**
+ Handles proper roll-over of the key.
+
+3. **test_roll_over_new_material:**
+ Checks whether the rolled-over key has new material.
+
+4. **test_generate_data_key_and_decrypt:**
+ - Generation of data key from EZ key and checks for presence of EDEK and DEK.
+ - Decryption of EDEK to get back DEK.
+
+---
+
+## `test_keyOps_policy.py`
+
+Handles **operations on keys based on policy enforcement**.
+Checks each key operation by granting incremental access to one operation at a time,
+i.e. `create, rollover, getKeyVersion, getMetadata, generateeek, decrypteek, delete`.
+
+## `test_blacklisting.py`
+
+Handles **operations on keys before and after blacklisting a user**.
+Checks Key operation by blacklisting a specific user and checks again after unblacklisting
+i.e `create, rollover,delete` key operation
+
+
+
diff --git a/pytest-Tests/kms/test_blacklisting.py b/pytest-Tests/kms/test_blacklisting.py
new file mode 100644
index 0000000000..ca7f1ca2e6
--- /dev/null
+++ b/pytest-Tests/kms/test_blacklisting.py
@@ -0,0 +1,285 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Tests for KMS user blacklisting: key operations are exercised before
+# blacklisting, after adding a user to hadoop.kms.blacklist.<OP> in
+# dbks-site.xml inside the KMS container, and again after removing the
+# user from the blacklist.
+
+import os
+import xml.etree.ElementTree as ET
+import requests
+import pytest
+import time
+import docker
+import tarfile
+import io
+
+KMS_SERVICE_NAME = "dev_kms"
+BASE_URL = "http://localhost:9292/kms/v1"
+RANGER_AUTH = ('keyadmin', 'rangerR0cks!')
+BASE_URL_RANGER = "http://localhost:6080/service/public/v2/api/policy"
+HEADERS={"Content-Type": "application/json"}
+KMS_CONTAINER_NAME = "ranger-kms"
+RANGER_CONTAINER_NAME = "ranger"
+
+TEST_USER = "keyadmin"
+PARAMS = {"user.name": TEST_USER}
+
+client = docker.from_env()
+container = client.containers.get(KMS_CONTAINER_NAME)
+
+@pytest.fixture(scope="module")
+def headers():
+    """Module-wide default HTTP headers (JSON content type) for KMS calls."""
+    return HEADERS
+
+@pytest.fixture(scope="module")
+def user1():
+    """User exercised by the blacklist tests (same user supplied in PARAMS)."""
+    return TEST_USER
+
+
+# **************** create KMS policy for user1 --------------------------------------
+@pytest.fixture(scope="module", autouse=True)
+def create_initial_kms_policy(user1):
+ policy_data = {
+ "policyName": "blacklist-policy",
+ "service": KMS_SERVICE_NAME,
+ "resources": {
+ "keyname": {
+ "values": ["blacklist-*"], # Match any key starting with 'blacklist-'
+ "isExcludes": False,
+ "isRecursive": False
+ }
+ },
+ "policyItems": [{
+ "accesses": [
+ {"type": "CREATE", "isAllowed": True},
+ {"type": "ROLLOVER", "isAllowed": True},
+ {"type": "DELETE", "isAllowed": True}
+ ],
+ "users": [user1]
+ }]
+ }
+
+ response = requests.post(BASE_URL_RANGER, auth=RANGER_AUTH, json=policy_data)
+ time.sleep(30) # Wait for policy propagation
+ if response.status_code not in [200, 201]:
+ raise Exception(f"Failed to create policy: {response.text}")
+
+ created_policy = response.json()
+ policy_id = created_policy["id"]
+ yield policy_id
+
+ # Optionally delete policy after tests
+ requests.delete(f"{BASE_URL_RANGER}/{policy_id}", auth=RANGER_AUTH)
+
+
+# ************************** Main Function to add or remove blacklist property--------------------------------
+
+def modify_blacklist_property(operation, users, action="add"):
+    """Add or remove *users* in the hadoop.kms.blacklist.<operation>
+    property of dbks-site.xml inside the KMS container.
+
+    Args:
+        operation: KMS op name, e.g. 'CREATE', 'ROLLOVER', 'DELETE'.
+        users: iterable of user names to add or remove.
+        action: "add" to blacklist, "remove" to unblacklist.
+
+    The callers restart the KMS container afterwards; presumably the
+    file is only re-read on restart — TODO confirm.
+    """
+    dbks_site_path = "/opt/ranger/ranger-3.0.0-SNAPSHOT-kms/ews/webapp/WEB-INF/classes/conf/dbks-site.xml"
+
+    # Step 1: Read the current XML content
+    result = container.exec_run(f"cat {dbks_site_path}", user='root')
+    xml_content = result.output.decode('utf-8')
+
+    # Step 2: Parse and modify
+    tree = ET.ElementTree(ET.fromstring(xml_content))
+    root = tree.getroot()
+    prop_name = f"hadoop.kms.blacklist.{operation}"
+
+    # Locate the <property> whose <name> matches prop_name, if any.
+    prop = None
+    for elem in root.findall("property"):
+        name = elem.find("name")
+        if name is not None and name.text == prop_name:
+            prop = elem
+            break
+
+    if prop is None:
+        print(f"Property {prop_name} does not exist. Creating it.")
+        prop = ET.SubElement(root, "property")
+        ET.SubElement(prop, "name").text = prop_name
+        ET.SubElement(prop, "value").text = ""
+
+    val_elem = prop.find("value")
+    # NOTE(review): assumes an existing <property> always carries a <value>
+    # child; a <value>-less entry would leave val_elem as None and raise
+    # AttributeError here — confirm against the shipped dbks-site.xml.
+    current = val_elem.text.split(",") if val_elem.text else []
+    updated = set(current)
+
+    if action == "add":
+        updated.update(users)
+    elif action == "remove":
+        updated -= set(users)
+
+    # Sorted join keeps the value deterministic across runs.
+    val_elem.text = ",".join(sorted(updated))
+
+    # Step 3: Convert XML back to string
+    modified_xml = ET.tostring(root, encoding='utf-8', method='xml').decode()
+
+    # Step 4: Package XML file into a tarball for `put_archive`
+    # (docker-py's put_archive only accepts tar archives)
+    tarstream = io.BytesIO()
+    with tarfile.open(fileobj=tarstream, mode='w') as tar:
+        file_data = modified_xml.encode()
+        tarinfo = tarfile.TarInfo(name="dbks-site.xml")
+        tarinfo.size = len(file_data)
+        tar.addfile(tarinfo, io.BytesIO(file_data))
+    tarstream.seek(0)
+
+    # Step 5: Upload and replace the file inside the container
+    container.put_archive(
+        path="/opt/ranger/ranger-3.0.0-SNAPSHOT-kms/ews/webapp/WEB-INF/classes/conf/",
+        data=tarstream
+    )
+
+    print(f"Successfully {'added' if action == 'add' else 'removed'} {users} in {prop_name}")
+
+#-------------------------------------------------------------------------------------------------------
+
+# Blacklist a user operation
+def blacklist_op_users(operation, users=[]):
+ modify_blacklist_property(operation, users, action="add")
+
+# Unblacklist a user operation
+def unblacklist_op_users(operation, users=[]):
+ modify_blacklist_property(operation, users, action="remove")
+
+
+# ****** ******************** Test Case 01 ********************************************
+# ***** check creation, rollover, deletion of key before applying blacklist
+# ***** user1 has permission for above operation so will pass
+# ***********************************************************************************
+
+def test_user_keyOperation_before_blacklist(headers):
+ key_name = "blacklist-key1"
+ key_data = {
+ "name": key_name
+ }
+ #create key
+ create_response = requests.post(f"{BASE_URL}/keys",headers=headers, json=key_data,params=PARAMS)
+ assert create_response.status_code == 201, f"key creation failed"
+
+ #roll over
+ rollover_response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS)
+ assert rollover_response.status_code == 200 , f"roll over failed"
+
+ #delete
+ delete_response = requests.delete(f"{BASE_URL}/key/{key_name}",params=PARAMS)
+ assert delete_response.status_code == 200 , f"deletion of key got failed"
+
+
+# ****** ******************** Test Case 02 ********************************************
+# ***** Test to blacklist a user for CREATE operation
+# ***** user1 will be blacklisted from CREATE so cant create keys
+# ***** Then Unblacklist that operation and now should succeed
+# ***********************************************************************************
+
+def test_blacklist_create(headers,user1):
+    """Blacklisting user1 for CREATE must block key creation (403);
+    removing the blacklist must allow creation again (201)."""
+    # blacklist the user for CREATE operation
+    blacklist_op_users('CREATE', [user1])
+    container.restart()   # restart so KMS picks up the edited dbks-site.xml
+    time.sleep(30)        # give the KMS time to come back up
+
+    key_name = "blacklist-key2"
+    key_data = {
+        "name": key_name
+    }
+    response = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
+
+    # Assert that the user is blocked from creating the key
+    # NOTE(review): if this assertion fails, the unblacklist/cleanup below
+    # never runs and later tests inherit the blacklist — consider a
+    # try/finally or fixture finalizer.
+    assert response.status_code == 403, f"User {user1} should be blocked from creating the key but got succeeded"
+
+    # Remove blacklist after test
+    unblacklist_op_users('CREATE', [user1])
+    container.restart()
+    time.sleep(30)
+
+    # Retry key creation after unblacklisting
+    response = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
+    assert response.status_code == 201, f"User {user1} should be able to create the key after unblacklisting"
+
+    # Cleanup so the test can be re-run
+    requests.delete(f"{BASE_URL}/key/{key_name}",params=PARAMS)
+
+
+# ****** ******************** Test Case 03 ********************************************
+# ***** Test to blacklist a user for ROLLOVER operation
+# ***** user1 will be blacklisted from ROLLOVER so cant roll over keys
+# ***** Then Unblacklist that operation and now should succeed
+# ***********************************************************************************
+
+def test_blacklist_rollOver(headers,user1):
+    """Blacklisting user1 for ROLLOVER must block key roll-over (403);
+    removing the blacklist must allow roll-over again (200)."""
+    # blacklist the user for rollover operation
+    blacklist_op_users('ROLLOVER', [user1])
+    container.restart()   # restart so KMS picks up the edited dbks-site.xml
+    time.sleep(30)        # give the KMS time to come back up
+
+    key_name = "blacklist-key3"
+    key_data = {
+        "name": key_name
+    }
+    # create key (CREATE is not blacklisted here, so this should succeed)
+    requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
+
+    # roll over
+    response_after_blacklist = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS)
+
+    # Assert that the user is blocked from rolling over the key
+    assert response_after_blacklist.status_code == 403, f"User {user1} should be blocked from rolling over the key but got succeeded"
+
+    # Remove blacklist after test
+    unblacklist_op_users('ROLLOVER', [user1])
+    container.restart()
+    time.sleep(30)
+
+    # Retry key rollover after unblacklisting
+    response_after_unblacklist = requests.post(f"{BASE_URL}/key/{key_name}", headers=headers, json={}, params=PARAMS)
+    assert response_after_unblacklist.status_code == 200, f"User {user1} should be able to roll over the key but failed"
+
+    # Cleanup so the test can be re-run
+    requests.delete(f"{BASE_URL}/key/{key_name}",params=PARAMS)
+
+
+# ****** ******************** Test Case 04 ********************************************
+# ***** Test to blacklist a user for DELETE operation
+# ***** user1 will be blacklisted from DELETE so cant delete keys
+# ***** Then Unblacklist that operation and now should succeed
+# ***********************************************************************************
+
+def test_blacklist_delete(headers,user1):
+    """Blacklisting user1 for DELETE must block key deletion (403);
+    removing the blacklist must allow deletion again (200)."""
+    # blacklist the user for DELETE operation
+    blacklist_op_users('DELETE', [user1])
+    container.restart()   # restart so KMS picks up the edited dbks-site.xml
+    time.sleep(30)        # give the KMS time to come back up
+
+    key_name = "blacklist-key4"
+    key_data = {
+        "name": key_name
+    }
+    # create key (CREATE is not blacklisted here, so this should succeed)
+    response = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
+
+    #try deleting key after blacklisting
+    delete_response_before= requests.delete(f"{BASE_URL}/key/{key_name}",params=PARAMS)
+    assert delete_response_before.status_code == 403, f"User {user1} should be blocked from deleting the key but got succeeded"
+
+    # Remove blacklist after test
+    unblacklist_op_users('DELETE', [user1])
+    container.restart()
+    time.sleep(30)
+
+    # Retry deletion now (this is also the cleanup for blacklist-key4)
+    delete_response_after = requests.delete(f"{BASE_URL}/key/{key_name}",params=PARAMS)
+    assert delete_response_after.status_code == 200, f"Deletion of key got failed"
+
diff --git a/pytest-Tests/kms/test_keyDetails.py b/pytest-Tests/kms/test_keyDetails.py
new file mode 100644
index 0000000000..4f7c682481
--- /dev/null
+++ b/pytest-Tests/kms/test_keyDetails.py
@@ -0,0 +1,161 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Tests for KMS key-detail endpoints: key names, key metadata
+# (single and bulk) and key versions.
+
+
+import requests
+import pytest
+from utils import fetch_logs
+
+BASE_URL = "http://localhost:9292/kms/v1"
+PARAMS = {"user.name": "keyadmin"}
+
+class TestKeyDetails:
+
+ @pytest.fixture(autouse=True)
+ def setup_class(self, create_test_key):
+ self.test_key = create_test_key
+
+ # ***********************************************************************************
+ # Get key names
+ # ***********************************************************************************
+ def test_get_key_names(self):
+ response = requests.get(f"{BASE_URL}/keys/names",params=PARAMS)
+
+ if response.status_code!=200: #log check
+ logs=fetch_logs()
+ pytest.fail(f"Get key operation failed. API Response: {response.text}\nLogs:\n{logs}")
+
+ print(response.json())
+ assert self.test_key["name"] in response.json()
+
+
+ # ***********************************************************************************
+ # Parametrized Get key metadata check for existent and non existent key
+ # Note: key1 is coming from create_test_key fixture in conftest.py
+ # ***********************************************************************************
+
+ @pytest.mark.parametrize("key_name, expected_status, expected_response", [
+ ("key1", 200, "valid"), # Key exists, should return valid metadata
+ ("non-existent-key", 200, "invalid"), # Key does not exist but returns 200 with [] should give 404
+ ])
+ def test_get_key_metadata(self, headers, key_name, expected_status, expected_response):
+ response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", headers=headers, params=PARAMS)
+
+ logs=fetch_logs() #log check
+ assert response.status_code==expected_status,f"Get key metadata operation failed. API Response: {response.text}\nLogs:\n{logs}"
+
+ if expected_response == "invalid":
+ assert response.text.strip() in ["", "[ ]", "{ }"], f"Expected blank response for non-existent key, got: {response.text}"
+
+
+ # ***********************************************************************************
+ # Parametrized Get Key version for existent and non existent key
+ # Note: key1 is coming from create_test_key fixture in conftest.py
+ # ***********************************************************************************
+
+ @pytest.mark.parametrize("key_name, expected_status, expected_response", [
+ ("key1", 200, "valid"), # Key exists
+ ("non-existent-key", 200,"invalid"), # Key does not exist but returns 200 with [] should give 404
+ ])
+ def test_get_key_versions(self, headers, key_name, expected_status,expected_response):
+ response = requests.get(f"{BASE_URL}/key/{key_name}/_versions", headers=headers, params=PARAMS)
+
+ logs=fetch_logs() #log check
+ assert response.status_code == expected_status,f"Get key version operation failed. API Response: {response.text}\nLogs:\n{logs}"
+
+ if expected_response == "invalid":
+ assert response.text.strip() in ["", "[ ]", "{ }"], f"Expected blank response for non-existent key, got: {response.text}"
+
+
+ # ***********************************************************************************
+ # Get Key metadata for multiple keys at once
+ # Note: key1 is coming from create_test_key fixture in conftest.py
+ # ***********************************************************************************
+
+ def test_get_keys_metadata(self, headers):
+ #Create second key (key2)
+ key_name="key2"
+ data = {
+ "name":key_name
+ }
+ create_response = requests.post(f"{BASE_URL}/keys", headers=headers, json=data, params=PARAMS)
+ assert create_response.status_code == 201, f"Key2 creation failed: {create_response.text}"
+
+ try:
+ # Check metadata for existing keys (key1 and key2)
+ existing_keys = ["key1", "key2"]
+ params = [("key", k) for k in existing_keys]
+ params.append(("user.name", "keyadmin"))
+
+ response = requests.get(f"{BASE_URL}/keys/metadata", headers=headers, params=params)
+ assert response.status_code == 200, f"Metadata fetch failed: {response.status_code}"
+
+ metadata = response.json()
+ returned_keys = [entry["name"] for entry in metadata]
+ for key in existing_keys:
+ assert key in returned_keys, f"Expected key '{key}' not found in metadata response"
+
+ # Check metadata for non-existent keys
+ fake_keys = ["nonExistent_key_1", "nonExistent_key_2"]
+ params = [("key", k) for k in fake_keys]
+ params.append(("user.name", "keyadmin"))
+
+ response = requests.get(f"{BASE_URL}/keys/metadata", headers=headers, params=params)
+ assert response.status_code == 200, f"Metadata fetch failed for non-existent keys: {response.status_code}"
+
+ assert response.json() == [{}, {}], f"Expected blank response for non-existent key, got: {response.text}"
+
+ finally:
+ # Cleanup key2
+ requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+ # ***********************************************************************************
+ # Test for endpoint 'get key version'
+ # Note: key1 is coming from create_test_key fixture in conftest.py
+ # ***********************************************************************************
+
+ @pytest.mark.parametrize("version_name, expected_status, expected_valid", [
+ ("key1@0", 200, True), # Valid version
+ ("non-existent-key@0", 200, False), # Key does not exist but returns 200 with [] should give 404
+ ])
+
+ def test_get_key_version(self, headers, version_name, expected_status, expected_valid):
+ response = requests.get(f"{BASE_URL}/keyversion/{version_name}", headers=headers, params=PARAMS)
+
+ logs = fetch_logs()
+ assert response.status_code == expected_status,f"Get key version failed. Response: {response.text}\nLogs:\n{logs}"
+
+ if expected_valid:
+ try:
+ version_data = response.json()
+ assert "versionName" in version_data, "versionName missing in response"
+ assert version_data["versionName"] == version_name, f"Mismatch in version name: expected {version_name}"
+ except ValueError:
+ pytest.fail(f"Expected valid JSON response, got: {response.text}")
+ else:
+ assert response.text.strip() in [" ", "{ }", "[ ]"], f"Expected empty for invalid version, got: {response.text}"
+
+
+
diff --git a/pytest-Tests/kms/test_keyOps.py b/pytest-Tests/kms/test_keyOps.py
new file mode 100644
index 0000000000..d9fc349c74
--- /dev/null
+++ b/pytest-Tests/kms/test_keyOps.py
@@ -0,0 +1,231 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Tests for KMS key operations: key roll-over, data-key (DEK/EDEK)
+# generation and decryption, and EDEK re-encryption.
+
+
+
+import requests
+import pytest
+import time
+from utils import fetch_logs
+
+BASE_URL = "http://localhost:9292/kms/v1"
+PARAMS = {"user.name": "keyadmin"}
+
+@pytest.mark.usefixtures("create_test_key")
+class TestKeyOperations:
+
+    # Temporary key for testing roll over
+    def test_temp_key(self, headers):
+        """Create 'rollover-key', consumed by the roll-over tests below."""
+        data = {
+            "name": "rollover-key",
+            "cipher": "AES/CTR/NoPadding",
+            "length": 128,
+            "description": "Key to check roll over functionality"
+        }
+        key_creation_response = requests.post(f"{BASE_URL}/keys", headers=headers, json=data, params=PARAMS)
+
+        if key_creation_response.status_code != 201:  # log check
+            logs = fetch_logs()
+            pytest.fail(f"Create key operation failed. API Response: {key_creation_response.text}\nLogs:\n{logs}")
+
+
+ # ***********************************************************************************
+ # Parametrized Roll over of key
+ # ***********************************************************************************
+    @pytest.mark.parametrize("key_name, expected_status", [
+        ("rollover-key", 200),      # Valid key rollover
+        ("non-existent-key", 500)   # Rollover on a non-existent key
+    ])
+    def test_roll_over_key(self, headers, key_name, expected_status):
+        """Roll over each parametrized key and expect the given status."""
+        response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS)
+
+        if response.status_code != expected_status:  # log check
+            logs = fetch_logs()
+            pytest.fail(f"Rollover key operation failed. API Response: {response.text}\nLogs:\n{logs}")
+
+        # Cleanup after test
+        # NOTE(review): this delete runs once per parametrized case and is
+        # skipped when pytest.fail() fires above — consider moving cleanup
+        # into a fixture finalizer.
+        requests.delete(f"{BASE_URL}/key/rollover-key", params=PARAMS)
+
+
+ # ***********************************************************************************
+ # Test for checking roll overed key has new material
+ # ***********************************************************************************
+    def test_roll_over_new_material(self, headers):
+        """After a roll-over, key1's metadata must differ from before."""
+        old_metadata = requests.get(f"{BASE_URL}/key/key1/_metadata", headers=headers, params=PARAMS)
+        print("Old Metadata:", old_metadata.json())
+
+        requests.post(f"{BASE_URL}/key/key1", json={}, headers=headers, params=PARAMS)  # roll-over here
+
+        new_metadata = requests.get(f"{BASE_URL}/key/key1/_metadata", headers=headers, params=PARAMS)
+        print("New Metadata:", new_metadata.json())
+
+        assert old_metadata.json() != new_metadata.json(), "Key rollover should create new key material."
+
+
+ # ***********************************************************************************
+ # Data key generation and decrypting EDEK to get DEK
+ # ***********************************************************************************
def test_generate_data_key_and_decrypt(self, headers, create_test_key):
    """Generate a DEK/EDEK pair for the test key, then decrypt the EDEK
    via the _eek decrypt op and verify the result matches the original DEK.
    """
    # Generate Data Key
    key_name = create_test_key["name"]
    response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", headers=headers, params=PARAMS)

    if response.status_code != 200:  # log check
        logs = fetch_logs()
        pytest.fail(f"generation of data key operation failed. API Response: {response.text}\nLogs:\n{logs}")

    data_key_response = response.json()
    dek = data_key_response.get("dek")
    edek = data_key_response.get("edek")

    assert dek is not None, "Generated DEK should not be None"
    assert edek is not None, "Generated EDEK should not be None"

    # Extracting details for decryption from EDEK
    encrypted_key_version = edek.get("encryptedKeyVersion")
    encrypted_material = encrypted_key_version.get("material")
    name = encrypted_key_version.get("name")
    version_name = edek.get("versionName")
    iv = edek.get("iv")

    decrypt_payload = {
        "name": name,
        "iv": iv,
        "material": encrypted_material,
    }

    DECRYPT_PARAMS = {"user.name": "keyadmin", "eek_op": "decrypt"}
    decrypt_response = requests.post(f"{BASE_URL}/keyversion/{version_name}/_eek",
                                     json=decrypt_payload, headers=headers, params=DECRYPT_PARAMS)

    if decrypt_response.status_code != 200:  # log check
        logs = fetch_logs()
        # BUG FIX: report the decrypt response here; the original reported the
        # earlier (successful) generate response, hiding the real error.
        pytest.fail(f"Decryption of key operation failed. API Response: {decrypt_response.text}\nLogs:\n{logs}")

    decrypted_data = decrypt_response.json()

    # checking the decrypted key matches the original DEK
    assert decrypted_data == dek, "Decrypted DEK should match the original DEK"
+
+
+ # ***********************************************************************************
+ # re encryption of encrypted keys---------------------------------
+ # verifies: that the EDEK is updated (i.e., versionName changes) after key rotation
+ # ***********************************************************************************
def test_reencrypt_encrypted_keys(self, headers):
    """EDEK re-encryption: after a key rollover, _reencryptbatch must return
    an EDEK pointing at the new key version (its versionName changes).
    """
    # Step 1: Create the key
    key_name = "reencrypt-key"
    data = {"name": key_name}
    create_response = requests.post(f"{BASE_URL}/keys", headers=headers, json=data, params=PARAMS)
    if create_response.status_code != 201:
        # Fetch container logs only on failure; pulling them on every run is slow.
        pytest.fail(f"Key creation failed: {create_response.text}\nLogs:\n{fetch_logs()}")

    try:
        # Step 2: Generate an Encrypted DEK (EDEK) using the key
        generate_response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", headers=headers, params=PARAMS)
        if generate_response.status_code != 200:
            pytest.fail(f"EEK generation failed: {generate_response.text}\nLogs:\n{fetch_logs()}")

        edek = generate_response.json()["edek"]
        encrypted_key_version = edek["encryptedKeyVersion"]

        # The batch endpoint expects a list of EDEKs to re-encrypt.
        edek_payload = [
            {
                "versionName": edek["versionName"],
                "iv": edek["iv"],
                "encryptedKeyVersion": {
                    "versionName": "EEK",  # fixed marker required by the KMS API
                    "material": encrypted_key_version["material"]
                }
            }
        ]

        # Step 3: Rotate the key (to create a new version)
        rollover_response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS)
        assert rollover_response.status_code == 200, f"Key rollover failed: {rollover_response.text}"

        # Step 4: Call the reencryptEncryptedKeys API
        reencrypt_url = f"{BASE_URL}/key/{key_name}/_reencryptbatch"
        reencrypt_response = requests.post(reencrypt_url, headers=headers, json=edek_payload, params=PARAMS)
        assert reencrypt_response.status_code == 200, f"Re-encrypt call failed: {reencrypt_response.text}"

        # Step 5: Validate the response EDEKs
        reencrypted_edeks = reencrypt_response.json()

        assert isinstance(reencrypted_edeks, list), "Expected list of re-encrypted EDEKs"
        assert len(reencrypted_edeks) == 1, "Expected exactly one re-encrypted EDEK"
        assert reencrypted_edeks[0]["versionName"] != edek["versionName"], \
            "Expected EDEK version to change after re-encryption"

    finally:
        # Cleanup key; pass auth headers so cleanup authenticates like the other calls.
        requests.delete(f"{BASE_URL}/key/{key_name}", headers=headers, params=PARAMS)
+
+
+
+ # ***********************************************************************************
+ # invalidate cache use
+ # ***********************************************************************************
def test_generate_data_key_after_invalidate_cache(self, headers):
    """Delete/recreate a rolled-over key, invalidate the server-side key cache,
    and verify DEK generation succeeds against the freshly loaded version.
    """
    key_name = "cache_key"
    data = {"name": key_name}

    # Step 1: Create a key
    create_response = requests.post(f"{BASE_URL}/keys", headers=headers, json=data, params=PARAMS)
    assert create_response.status_code == 201, "Key creation failed"

    try:
        # Step 2: Roll over (creates @1, cached)
        roll_response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS)
        assert roll_response.status_code == 200, "Rollover failed"

        # Step 3: Delete the key (DB is clean, cache still references @1)
        delete_response = requests.delete(f"{BASE_URL}/key/{key_name}", headers=headers, params=PARAMS)
        assert delete_response.status_code == 200, "Key deletion failed"

        # Give the deletion time to settle before recreating the same name.
        time.sleep(5)

        # Step 4: Recreate the key (creates only @0 in DB, cache still stale @1)
        recreate_response = requests.post(f"{BASE_URL}/keys", headers=headers, json=data, params=PARAMS)
        assert recreate_response.status_code == 201, "Key recreation failed"

        # Step 5: Invalidate cache - forces KMS to reload latest version from DB
        # NOTE(review): invalidation is requested via a POST with action=invalidateCache
        # on the key URL — confirm this matches the deployed KMS API contract.
        invalidate_params = {"user.name": "keyadmin", "action": "invalidateCache"}
        invalidate_response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=invalidate_params)
        assert invalidate_response.status_code == 200, "Invalidate cache failed"

        # Step 6: try DEK - should succeed (correct version @0 loaded)
        dek_response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", headers=headers, params=PARAMS)
        assert dek_response.status_code == 200, "DEK generation should succeed after cache invalidation"
    finally:
        # Cleanup always runs; a 404 here (key already gone) is harmless.
        requests.delete(f"{BASE_URL}/key/{key_name}", headers=headers, params=PARAMS)
diff --git a/pytest-Tests/kms/test_keyOps_policy.py b/pytest-Tests/kms/test_keyOps_policy.py
new file mode 100644
index 0000000000..bddc92b72a
--- /dev/null
+++ b/pytest-Tests/kms/test_keyOps_policy.py
@@ -0,0 +1,466 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-maven
+
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+
+import requests
+import pytest
+import time
+
# Ranger KMS REST endpoint under test (kms/v1 API).
BASE_URL = "http://localhost:9292/kms/v1"

# Ranger admin REST endpoints: policy CRUD (public v2 API) and user management.
BASE_URL_RANGER = "http://localhost:6080/service/public/v2/api/policy"
BASE_URL_RANGER_USERS = "http://localhost:6080/service/xusers/secure/users"

# Lookup/delete a Ranger user by login name (append "/{username}").
BASE_URL_RANGER_USERS_BY_NAME = "http://localhost:6080/service/xusers/users/userName"

# Default query params: perform KMS calls as the 'keyadmin' user.
PARAMS={"user.name":"keyadmin"}

# Basic-auth credentials for the Ranger admin and key-admin accounts.
RANGER_ADMIN_AUTH = ("admin", "rangerR0cks!")
RANGER_KMS_AUTH = ('keyadmin', 'rangerR0cks!') # Ranger key admin user
KMS_SERVICE_NAME = "dev_kms"  # name of the KMS service definition in Ranger
TEST_USER = "testuser"        # unprivileged user the policy tests act as
+
def ensure_test_user_exists(username: str) -> None:
    """Create *username* in Ranger if it does not already exist.

    Idempotent: a user left over from a previous (aborted) session is treated
    as success instead of failing the whole session fixture.

    Raises:
        RuntimeError: if the create call fails for a genuinely new user.
    """
    # Fast path: the user is already present (e.g. leftover from a prior run).
    # NOTE(review): reuses the by-name endpoint the delete helper uses — confirm
    # it supports GET in the target Ranger version.
    existing = requests.get(f"{BASE_URL_RANGER_USERS_BY_NAME}/{username}", auth=RANGER_ADMIN_AUTH)
    if existing.status_code == 200:
        return

    payload = {
        "name": username,
        "firstName": "Test",
        "lastName": "User",
        "password": "Password123!",
        "description": "pytest dummy user created via API",
        "status": 1,
        "isVisible": 1,
        "userSource": 0,
        "userRoleList": ["ROLE_USER"],
    }

    r = requests.post(BASE_URL_RANGER_USERS, auth=RANGER_ADMIN_AUTH, json=payload)
    if r.status_code in (200, 201):
        return
    raise RuntimeError(f"Failed to create Ranger user {username}: {r.status_code} {r.text}")
+
def delete_test_user(username: str) -> None:
    """Force-delete the pytest Ranger user; a missing user (404) counts as success."""
    r = requests.delete(
        f"{BASE_URL_RANGER_USERS_BY_NAME}/{username}",
        params={"forceDelete": "true"},
        auth=RANGER_ADMIN_AUTH,
    )
    # 200/204 = deleted, 404 = never existed; anything else is a real failure.
    if r.status_code not in (200, 204, 404):
        raise RuntimeError(f"Failed to delete Ranger user {username}: {r.status_code} {r.text}")
+
+
@pytest.fixture(scope="session", autouse=True)
def test_user_lifecycle():
    """Session-wide fixture: provision the dummy Ranger user before any test
    runs and guarantee it is removed again at session teardown, even when the
    session aborts mid-way.
    """
    ensure_test_user_exists(TEST_USER)
    try:
        yield
    finally:
        delete_test_user(TEST_USER)
+
+
+# create base policy ------------------------------------------------------------------
@pytest.fixture(scope="function", autouse=True)
def create_initial_kms_policy():
    """Per-test fixture: create an empty Ranger KMS policy covering keys named
    'pytest-*', yield its id, and delete it at teardown.

    Yields:
        int: the Ranger policy id, to be passed to update_kms_policy().
    """
    policy_data = {
        "policyName": "pytest-policy",
        "service": KMS_SERVICE_NAME,
        "resources": {
            "keyname": {
                "values": ["pytest-*"], # All keys starting with 'pytest-'
                "isExcludes": False,
                "isRecursive": False
            }
        },
        "policyItems": []
    }

    # Create policy — fail fast BEFORE the propagation wait; the original
    # slept 30s even when the create call had already failed.
    response = requests.post(BASE_URL_RANGER, auth=RANGER_KMS_AUTH, json=policy_data)
    if response.status_code not in (200, 201):
        raise Exception(f"Failed to create initial policy: {response.text}")

    # Wait for the policy to propagate to the KMS plugin.
    time.sleep(30)

    policy_id = response.json()["id"]
    yield policy_id

    # Delete policy after the test so runs stay independent.
    requests.delete(f"{BASE_URL_RANGER}/{policy_id}", auth=RANGER_KMS_AUTH)
+
+# method to update policy---------------------------------------------------------------
def update_kms_policy(policy_id, username, accesses):
    """Append a policy item granting *accesses* to *username* on the policy.

    Args:
        policy_id: Ranger policy id returned by create_initial_kms_policy.
        username: user the accesses are granted to.
        accesses: iterable of Ranger access types (e.g. ["create", "delete"]);
            falsy/None leaves the policy unchanged (user has no access).

    Raises:
        Exception: if the policy cannot be fetched or updated.
    """
    update_url = f"{BASE_URL_RANGER}/{policy_id}"

    # Fetch existing policy
    response = requests.get(update_url, auth=RANGER_KMS_AUTH)
    if response.status_code != 200:
        raise Exception(f"Failed to fetch policy: {response.text}")

    policy_data = response.json()

    # Ensure policyItems key exists (older Ranger versions may omit it).
    policy_data.setdefault("policyItems", [])

    # Only add a policy item if accesses are provided
    if accesses:
        policy_data["policyItems"].append({
            "accesses": [{"type": access, "isAllowed": True} for access in accesses],
            "users": [username],
            "delegateAdmin": False
        })

    # Update the policy — check the result BEFORE sleeping; the original
    # waited 30s even when the PUT had already failed.
    response = requests.put(update_url, auth=RANGER_KMS_AUTH, json=policy_data)
    if response.status_code != 200:
        raise Exception(f"Failed to update policy: {response.text}")

    # Give the KMS plugin time to pick up the updated policy.
    time.sleep(30)
+
+
+
+
+# ****** ********************Test Case 01 ********************************************
+# ***** user has "create" access only
+# ***********************************************************************************
def test_policy_01(create_initial_kms_policy, headers):
    """User granted only "create": key creation succeeds, all other ops return 403."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Update policy for this test
    update_kms_policy(policy_id, username, accesses=["create"])

    key_name = "pytest-key-01"

    # create key - the only permitted operation
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    try:
        # get current version - should be denied
        # (message fixed: the original said "Get current version failed" on a 403 expectation)
        response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
        assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

        # Try getting key metadata - should be denied
        response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
        assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

        # Try rollover - should be denied
        response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
        assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

        # generate DEK - should be denied
        response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", params=user_params)
        assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

        # delete key - should be denied
        response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
        assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"
    finally:
        # cleanup as keyadmin, who does have delete access; runs even if an assert failed
        requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+# ****** ********************Test Case 02 ********************************************
+# ***** user has "create, delete" access only
+# ***********************************************************************************
def test_policy_02(create_initial_kms_policy, headers):
    """User granted "create" + "delete": those succeed, reads/rollover/DEK return 403."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Update policy for this test
    update_kms_policy(policy_id, username, accesses=["create", "delete"])

    key_name = "pytest-key-02"

    # create key - allowed
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # get current version - should be denied
    # (message fixed: the original said "Get current version failed" on a 403 expectation)
    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # Try getting key metadata - should be denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # Try rollover - should be denied
    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # generate DEK - should be denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", params=user_params)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # delete key - allowed (also serves as cleanup)
    response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
    assert response.status_code == 200, f"Key deletion failed :{response.text}"
+
+
+# ****** ********************Test Case 03 ********************************************
+# ***** user has "create, rollover, delete" access only
+# ***********************************************************************************
def test_policy_03(create_initial_kms_policy, headers):
    """User granted "create" + "delete" + "rollover": those succeed, reads/DEK return 403."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Update policy for this test
    update_kms_policy(policy_id, username, accesses=["create", "delete", "rollover"])

    key_name = "pytest-key-03"

    # create key - allowed
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # Try rollover - allowed
    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # get current version - should be denied
    # (message fixed: the original said "Get current version failed" on a 403 expectation)
    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # Try getting key metadata - should be denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # generate DEK - should be denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", params=user_params)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # delete key - allowed (also serves as cleanup)
    response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
    assert response.status_code == 200, f"Key deletion failed :{response.text}"
+
+
+# ****** ********************Test Case 04 ********************************************
+# ***** user has "create, rollover, getKeyVersion, delete" access only
+# ***********************************************************************************
def test_policy_04(create_initial_kms_policy, headers):
    """User granted create/delete/rollover/get: those succeed; metadata and DEK are denied."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Grant the accesses under test
    update_kms_policy(policy_id, username, accesses=["create", "delete", "rollover", "get"])

    key_name = "pytest-key-04"

    # create key - allowed
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # rollover - allowed
    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # get current version - allowed ("get" access)
    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
    assert response.status_code == 200, f"Get current version failed: {response.text}"

    # key metadata - denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # generate DEK - denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", params=user_params)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # delete key - allowed (doubles as cleanup)
    response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
    assert response.status_code == 200, f"Key deletion failed :{response.text}"
+
+
+
+# ****** ********************Test Case 05 ********************************************
+# ***** user has "create, rollover, getKeyVersion, getMetadata, delete" access only
+# ***********************************************************************************
def test_policy_05(create_initial_kms_policy, headers):
    """User granted create/delete/rollover/get/getmetadata: those succeed; DEK is denied."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Update policy for this test
    update_kms_policy(policy_id, username, accesses=["create", "delete", "rollover", "get", "getmetadata"])

    key_name = "pytest-key-05"

    # create key - allowed
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # Try rollover - allowed
    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # get current version - allowed
    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
    assert response.status_code == 200, f"Get current version failed: {response.text}"

    # Try getting key metadata - allowed
    # (message fixed: the original asserted 200 but its message claimed "Expected 403")
    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # generate DEK - should be denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", params=user_params)
    assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}"

    # delete key - allowed (also serves as cleanup)
    response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
    assert response.status_code == 200, f"Key deletion failed :{response.text}"
+
+
+
+# ****** ********************Test Case 06 ********************************************
+# ***** user has "create, rollover, getKeyVersion, getMetadata, generateeek, delete" access only
+# ***********************************************************************************
def test_policy_06(create_initial_kms_policy, headers):
    """User granted everything except decrypteek: EEK generation works, all listed ops succeed."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Grant the accesses under test
    update_kms_policy(policy_id, username, accesses=["create", "delete", "rollover", "get", "getmetadata", "generateeek"])

    key_name = "pytest-key-06"

    # create key - allowed
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # rollover - allowed
    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # get current version - allowed
    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
    assert response.status_code == 200, f"Get current version failed: {response.text}"

    # key metadata - allowed
    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # generate an EEK - allowed ("generateeek" access)
    DEK_PARAMS = {"eek_op": "generate", "num_keys": 1, "user.name": username}
    response = requests.get(f"{BASE_URL}/key/{key_name}/_eek", params=DEK_PARAMS)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # delete key - allowed (doubles as cleanup)
    response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
    assert response.status_code == 200, f"Key deletion failed :{response.text}"
+
+
+
+# ****** ********************Test Case 07 ********************************************
+# ***** user has all access "create, rollover, getKeyVersion, getMetadata, generateeek, decrypteek, delete" access
+# ***********************************************************************************
def test_policy_07(create_initial_kms_policy, headers):
    """User granted the full access set, including decrypteek: every operation must succeed."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Grant all accesses
    update_kms_policy(policy_id, username, accesses=["create", "delete", "rollover", "get", "getmetadata", "generateeek", "decrypteek"])

    key_name = "pytest-key-07"

    # create key
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 201, f"Key creation failed: {response.text}"

    # rollover
    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # current version
    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
    assert response.status_code == 200, f"Get current version failed: {response.text}"

    # metadata
    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # generate an EEK
    DEK_PARAMS = {"eek_op": "generate", "num_keys": 1, "user.name": username}
    response = requests.get(f"{BASE_URL}/key/{key_name}/_eek", params=DEK_PARAMS)
    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"

    # decrypt the EEK we just generated ("decrypteek" access)
    eek_response = response.json()[0]
    material = eek_response["encryptedKeyVersion"]["material"]
    name = eek_response["encryptedKeyVersion"]["name"]
    iv = eek_response["iv"]
    version_name = eek_response["versionName"]

    decrypt_payload = {
        "name": name,
        "iv": iv,
        "material": material,
    }

    DECRYPT_PARAMS = {"eek_op": "decrypt", "user.name": username}
    decrypt_response = requests.post(f"{BASE_URL}/keyversion/{version_name}/_eek",
                                     params=DECRYPT_PARAMS, headers=headers, json=decrypt_payload)
    assert decrypt_response.status_code == 200, f"Decryption of EDEK got failed {decrypt_response.status_code}: {decrypt_response.text}"

    # delete key (doubles as cleanup)
    response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
    assert response.status_code == 200, f"Key deletion failed :{response.text}"
+
+
+
+# ****** ********************Test Case 08 ********************************************
+# ***** user has no access
+# ***********************************************************************************
def test_policy_08(create_initial_kms_policy, headers):
    """User with no accesses at all: every operation must be denied (403)."""
    policy_id = create_initial_kms_policy
    username = TEST_USER
    user_params = {"user.name": username}

    # Update policy with an empty access list for this test
    update_kms_policy(policy_id, username, accesses=None)

    key_name = "pytest-key-08"

    # create key - denied
    # (messages made consistent with the other tests: include the status code)
    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params=user_params, headers=headers)
    assert response.status_code == 403, f"Creation of key, Expected 403 but got {response.status_code}: {response.text}"

    # Try rollover - denied
    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params=user_params, headers=headers)
    assert response.status_code == 403, f"Rollover of key, Expected 403 but got {response.status_code}: {response.text}"

    # get current version - denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params=user_params, headers=headers)
    assert response.status_code == 403, f"Get current version, Expected 403 but got {response.status_code}: {response.text}"

    # Try getting key metadata - denied
    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params=user_params, headers=headers)
    assert response.status_code == 403, f"Get keyMetaData, Expected 403 but got {response.status_code}: {response.text}"

    # generate DEK - denied
    DEK_PARAMS = {"eek_op": "generate", "num_keys": 1, "user.name": username}
    response = requests.get(f"{BASE_URL}/key/{key_name}/_eek", params=DEK_PARAMS)
    assert response.status_code == 403, f"Generate DEK, Expected 403 but got {response.status_code}: {response.text}"

    # delete key - denied
    response = requests.delete(f"{BASE_URL}/key/{key_name}", params=user_params)
    assert response.status_code == 403, f"Delete key, Expected 403 but got {response.status_code}: {response.text}"
diff --git a/pytest-Tests/kms/test_keys.py b/pytest-Tests/kms/test_keys.py
new file mode 100644
index 0000000000..09dcc3db1e
--- /dev/null
+++ b/pytest-Tests/kms/test_keys.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-maven
+
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+
+import requests
+import pytest
+from utils import fetch_logs
+
# Ranger KMS REST endpoint under test (kms/v1 API).
BASE_URL = "http://localhost:9292/kms/v1"
# Default query params: perform KMS calls as the 'keyadmin' user.
PARAMS={"user.name":"keyadmin"}
+
class TestKeyManagement:
    """CRUD-level KMS key tests: creation, name validation, and duplicate handling."""

    @pytest.fixture(autouse=True)
    def setup_class(self, create_test_key):
        # Per-test key provisioned by the conftest create_test_key fixture.
        self.test_key = create_test_key

    def test_create_key(self, headers):
        """A well-formed key creation request must return 201."""
        key_data = {
            "name": "key2",
            "cipher": "AES/CTR/NoPadding",
            "length": 128,
            "description": "New key for checking key creation functionality"
        }
        response = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)

        try:
            if response.status_code != 201:
                error_logs = fetch_logs()  # Fetch logs on failure
                pytest.fail(f"Key creation failed. API Response: {response.text}\nLogs:\n{error_logs}")
        finally:
            # cleanup key2 even when the assertion above fails; a 404 is harmless
            requests.delete(f"{BASE_URL}/key/key2", params=PARAMS)

    # ---------------------------------creation key validation------------------------------
    @pytest.mark.parametrize("name, expected_status", [
        ("valid-key", 201),
        ("", 400),              # Invalid case: Empty name
        ("@invalid!", 400),     # Invalid case: Special characters
        ("invalid--key", 400)   # -- or __ or _- -_ not allowed
    ])
    def test_key_name_validation(self, headers, name, expected_status):
        """Key names must follow the KMS naming rules; invalid names return 400."""
        key_data = {
            "name": name,
            "cipher": "AES/CTR/NoPadding",
            "length": 128,
            "description": "Validation test"
        }
        response = requests.post(f"{BASE_URL}/keys", json=key_data, headers=headers, params=PARAMS)

        try:
            if response.status_code != expected_status:
                error_logs = fetch_logs()  # Fetch logs on failure
                pytest.fail(f"Key validation failed. API Response: {response.text}\nLogs:\n{error_logs}")
        finally:
            # only the valid-name case can have created a key that needs cleanup
            if expected_status == 201:
                requests.delete(f"{BASE_URL}/key/{name}", params=PARAMS)

    # Negative test----duplicate key creation test ----------------------------------------------
    def test_duplicate_key_creation(self, headers):
        """Creating the same key name twice must fail on the second attempt."""
        key_name = "duplicate-key"
        key_data = {
            "name": key_name,
            "cipher": "AES/CTR/NoPadding",
            "length": 128,
            "description": "Testing duplicate key creation"
        }

        response1 = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
        assert response1.status_code == 201, f"Initial key creation failed: {response1.text}"

        try:
            # creating the same key again
            response2 = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)

            # NOTE(review): the KMS currently surfaces duplicates as 500 rather than
            # a 4xx — confirm this is the intended contract.
            # (fixed: the original message was an f-string with no placeholders)
            assert response2.status_code == 500, (
                f"Duplicate key got created (status {response2.status_code}), expected to fail"
            )
        finally:
            # Cleanup runs even if the duplicate assertion fails
            requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+
+
diff --git a/pytest-Tests/kms/test_keys_02.py b/pytest-Tests/kms/test_keys_02.py
new file mode 100644
index 0000000000..83a02ce963
--- /dev/null
+++ b/pytest-Tests/kms/test_keys_02.py
@@ -0,0 +1,170 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Functional tests for the Ranger KMS REST API: key version rollover,
+# key-material round-trip, key deletion, and bulk key operations.
+# Requires the Ranger docker containers (see run-tests.sh) to be running;
+# the `headers` fixture is provided by conftest.py.
+
+
+import requests
+import pytest
+from utils import fetch_logs
+
+BASE_URL = "http://localhost:9292/kms/v1"
+PARAMS={"user.name":"keyadmin"}
+
+# ***********************************************************************************
+# Test to check after key roll over -> new version= old version+1
+# ***********************************************************************************
+def test_versionIncrement_after_rollover(headers):
+ key_name="key_roll"
+ key_data={
+ "name":key_name
+ }
+ #create key
+ response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=headers)
+ assert response.status_code == 201, f"Key creation failed: {response.text}"
+
+ #check version before roll over
+ response_before= requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", headers=headers, params=PARAMS)
+ assert response_before.status_code == 200, f"Failed to get current version. Response: {response_before.text}"
+
+ #extract version number
+ version_before = response_before.json().get("versionName") # e.g "test-key@0"
+ version_num_before = int(version_before.split("@")[1])
+ print(f"version before: {version_num_before}" )
+
+ #roll over
+ response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS)
+ assert response.status_code==200, f"failed to perform roll over . Response:{response.text}"
+
+ #check version after roll over
+ response_after= requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", headers=headers, params=PARAMS)
+ assert response_after.status_code == 200, f"Failed to get current version. Response: {response_after.text}"
+
+ #extract new version number
+ version_after = response_after.json().get("versionName")
+ version_num_after = int(version_after.split("@")[1])
+ print(f"version after: {version_num_after}")
+
+ assert version_num_after == version_num_before + 1 , (
+ f"Expected version to increment. Before: {version_before}, After: {version_after}"
+ )
+
+ # Cleanup key after test
+ requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+# ***********************************************************************************
+# Test to check if material which is used to create key matches material from get key material
+# ***********************************************************************************
+def test_key_material(headers):
+ key_name="test-key"
+ key_material="G90ZtTKOWIICXG_wpqx0tA"
+
+ key_data={
+ "name":key_name,
+ "material":key_material
+ }
+
+ #create key
+ response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=headers)
+ assert response.status_code == 201, f"Key creation failed: {response.text}"
+
+ #check material from currentversion
+ version_response= requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", headers=headers, params=PARAMS)
+ assert version_response.status_code == 200, f"Failed to get current version. Response: {version_response.text}"
+
+ response_keyMaterial= version_response.json()
+ response_keyMaterial=response_keyMaterial["material"]
+
+ assert key_material== response_keyMaterial, f"Key material not matching. Passed key material: {key_material}, Got Key material: {response_keyMaterial}"
+
+ # Cleanup key after test
+ requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+# ***********************************************************************************
+# Tests key is not present after deletion
+# ***********************************************************************************
+def test_deleted_key_not_in_list(headers):
+ key_name="Delete-key"
+
+ key_data={
+ "name":key_name,
+ }
+
+ #create key
+ response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=headers)
+ assert response.status_code == 201, f"Key creation failed: {response.text}"
+
+ # Delete key
+ requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+ list_response= requests.get(f"{BASE_URL}/keys/names",params=PARAMS)
+
+ key_list= list_response.json()
+
+ assert key_name not in key_list, f"Deleted key still exists, Deletion might have failed"
+
+
+# ***********************************************************************************
+# Test to check key operations in bulk
+# ***********************************************************************************
+def test_bulk_key_operation(headers):
+ key_names = [f"key{i}" for i in range(5)]
+ created_keys = []
+
+ # Create 5 EZ keys
+ for name in key_names:
+ key_data = {
+ "name": name,
+ }
+
+ response = requests.post(f"{BASE_URL}/keys", json=key_data, params=PARAMS, headers=headers)
+ assert response.status_code == 201, f"Failed to create key {name}: {response.text}"
+ created_keys.append(name)
+
+ # Get all keys and verify they exist
+ list_response = requests.get(f"{BASE_URL}/keys/names", headers=headers, params=PARAMS)
+ assert list_response.status_code == 200, f"Fetching key list failed: {list_response.text}"
+
+ all_keys = list_response.json()
+
+ for name in created_keys:
+ assert name in all_keys, f"Key '{name}' not found in key list."
+
+ # Get metadata for each key
+ for name in created_keys:
+ meta_response = requests.get(f"{BASE_URL}/key/{name}", headers=headers, params=PARAMS)
+ assert meta_response.status_code == 200, f"Failed to get metadata for key {name}"
+
+ # Delete all 5 keys
+ for name in created_keys:
+ del_response = requests.delete(f"{BASE_URL}/key/{name}", params=PARAMS)
+ assert del_response.status_code==200, f"Failed to delete key {name}: {del_response.text}"
+
+ # Verify keys are deleted
+ final_list_response = requests.get(f"{BASE_URL}/keys/names", headers=headers, params=PARAMS)
+ assert final_list_response.status_code == 200, f"Fetching key list after deletion failed"
+ final_keys = final_list_response.json()
+
+ for name in created_keys:
+ assert name not in final_keys, f"Deleted key '{name}' still found in key list"
diff --git a/pytest-Tests/kms/utils.py b/pytest-Tests/kms/utils.py
new file mode 100644
index 0000000000..7d5dbe2951
--- /dev/null
+++ b/pytest-Tests/kms/utils.py
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Shared helpers for the KMS API tests: fetches recent ERROR/Exception lines
+# from the Ranger KMS container log so they can be attached to test failures.
+
+import subprocess
+
+KMS_CONTAINER_NAME = "ranger-kms"
+KMS_LOG_FILE = "/var/log/ranger/kms/ranger-kms-ranger-kms.rangernw-root.log"
+
+def fetch_logs():
+ try:
+ cmd = f"docker exec {KMS_CONTAINER_NAME} tail -n 100 {KMS_LOG_FILE}"
+ logs = subprocess.check_output(cmd, shell=True, text=True)
+ error_logs = [line for line in logs.split("\n") if "ERROR" in line or "Exception" in line]
+ return "\n".join(error_logs) if error_logs else "No recent errors in logs."
+ except subprocess.CalledProcessError as e:
+ return f"Failed to fetch logs from container. Command failed with exit code {e.returncode}: {e.output}"
diff --git a/pytest-Tests/pytest.ini b/pytest-Tests/pytest.ini
new file mode 100644
index 0000000000..c0e584a9c3
--- /dev/null
+++ b/pytest-Tests/pytest.ini
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Registers the custom pytest markers used by the hdfs/ test suite so that
+# pytest does not emit unknown-marker warnings.
+
+[pytest]
+markers =
+ cleanEZ: clean up the encryption zone
+ createEZ: create encryption zone
\ No newline at end of file
diff --git a/pytest-Tests/readme.md b/pytest-Tests/readme.md
new file mode 100644
index 0000000000..a87b8c3490
--- /dev/null
+++ b/pytest-Tests/readme.md
@@ -0,0 +1,95 @@
+
+
+
+## Pytest Functional Test-suite
+
+
+This test suite validates REST API endpoints for KMS (Key Management Service) and tests HDFS encryption functionalities including key management and file operations within encryption zones.
+
+**hdfs :** contains test cases for checking KMS functionality through hdfs encryption lifecycle
+
+**kms :** contains test cases for checking KMS API functionality
+
+
+### Directory Structure
+
+```
+pytest-Tests/
+├── hdfs/ # Tests on HDFS encryption cycle
+├── kms/ # Tests on KMS API
+├── pytest.ini # Registers custom pytest markers
+├── run-tests.sh # Script to automate test execution
+├── requirements.txt
+├── readme.md
+
+```
+
+### Running Tests
+
+#### Container Setup
+
+Before running the tests, configure container behavior using the following environment variable:
+~~~
+ # To force a fresh container setup:
+ export CLEAN_CONTAINERS=1
+~~~
+
+After the initial setup, you can disable fresh container creation by setting below for the next re-runs:
+~~~
+ export CLEAN_CONTAINERS=0
+~~~
+
+#### Executing Tests
+
+Run the test script using:
+~~~
+ ./run-tests.sh [db-type] [additional-services]
+
+ # valid values for db-type: mysql/postgres/oracle , postgres is the default
+ # additional-services: multiple services can be specified separated by space
+
+ # e.g. for running tests within kms and hdfs use below command:
+
+ ./run-tests.sh postgres hadoop
+
+ # Note: If additional-services are specified, db-type must also be explicitly specified
+
+~~~
+
+#### Note (Optional)
+
+If you only need to start the infrastructure, i.e. the containers (without running tests):
+~~~
+ export RUN_TESTS=0
+~~~
+This is useful when tests are failing due to incomplete container setup.
+
+After the infrastructure is successfully up, set RUN_TESTS=1 and rerun the script to execute the tests without setup issues.
+
+#### Note
+
+After test execution, the generated HTML reports (`report_hdfs.html` and `report_kms.html`) can be opened in any browser for detailed test results.
diff --git a/pytest-Tests/requirements.txt b/pytest-Tests/requirements.txt
new file mode 100644
index 0000000000..deec233bd6
--- /dev/null
+++ b/pytest-Tests/requirements.txt
@@ -0,0 +1,20 @@
+annotated-types==0.7.0
+certifi==2025.1.31
+charset-normalizer==3.4.1
+docker==7.1.0
+idna==3.10
+iniconfig==2.0.0
+Jinja2==3.1.6
+MarkupSafe==3.0.2
+packaging==24.2
+pluggy==1.5.0
+pydantic==2.11.0
+pydantic_core==2.33.0
+pytest==8.3.5
+pytest-html==4.1.1
+pytest-metadata==3.1.1
+python-on-whales==0.76.1
+requests==2.32.3
+typing-inspection==0.4.0
+typing_extensions==4.13.0
+urllib3==2.3.0
diff --git a/pytest-Tests/run-tests.sh b/pytest-Tests/run-tests.sh
new file mode 100755
index 0000000000..4368ce7d3b
--- /dev/null
+++ b/pytest-Tests/run-tests.sh
@@ -0,0 +1,175 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Brings up the Ranger docker infrastructure (admin, db, zk, solr, usersync,
+# tagsync, KMS, plus any extra services given on the command line) and then
+# runs the pytest suites under hdfs/ and kms/.
+#
+# Usage: ./run-tests.sh [db-type] [additional-services...]
+#   db-type: mysql/postgres/oracle (default: postgres)
+# Environment:
+#   CLEAN_CONTAINERS=1  force a fresh container setup (and clean up afterwards)
+#   RUN_TESTS=0         only bring up the infrastructure, skip the tests
+
+# handle input
+DB_TYPE="${1:-postgres}"
+shift || true
+EXTRA_SERVICES=("$@")
+
+# Cleanup is optional if you need fresh containers install (export CLEAN_CONTAINERS=1 to force fresh start)
+CLEAN_CONTAINERS="${CLEAN_CONTAINERS:-0}"
+
+echo "Using DB type: ${DB_TYPE}"
+if [ "${#EXTRA_SERVICES[@]}" -gt 0 ]; then
+    echo "Extra services: ${EXTRA_SERVICES[*]}"
+fi
+
+echo "CLEAN_CONTAINERS=${CLEAN_CONTAINERS}"
+
+# Remove all containers and clean up docker space
+if [[ "${CLEAN_CONTAINERS}" == "1" ]]; then
+    docker rm -f $(docker ps -aq --filter "name=ranger") 2>/dev/null || true
+    docker system prune --all --force --volumes
+fi
+
+# path setup
+RANGER_DOCKER_PATH="../dev-support/ranger-docker"
+TESTS_PATH="../../pytest-Tests"
+
+cd "$RANGER_DOCKER_PATH" || exit 1
+
+# Ensure scripts are executable
+chmod +x scripts/**/*.sh || true
+chmod +x download-archives.sh || true
+
+# Download archives for any extra services
+if [ "${#EXTRA_SERVICES[@]}" -gt 0 ]; then
+    ./download-archives.sh "${EXTRA_SERVICES[@]}"
+fi
+
+export RANGER_DB_TYPE="${DB_TYPE}"
+
+# Build Apache ranger (admin) only if missing (or CLEAN_CONTAINERS=1)
+ADMIN_SERVICE="ranger"
+admin_missing=false
+
+if [[ "${CLEAN_CONTAINERS}" == "1" ]]; then
+    admin_missing=true
+elif [[ -z "$(docker compose -f docker-compose.ranger-build.yml ps -q "${ADMIN_SERVICE}" 2>/dev/null)" ]]; then
+    admin_missing=true
+fi
+
+if [[ "${admin_missing}" == "true" ]]; then
+    echo "Admin service (${ADMIN_SERVICE}) missing. Building and starting"
+    docker compose -f docker-compose.ranger-build.yml build
+    docker compose -f docker-compose.ranger-build.yml up -d
+else
+    echo "Admin service (${ADMIN_SERVICE}) already exists. Skipping build/up."
+    docker compose -f docker-compose.ranger-build.yml up -d
+fi
+
+# Bring up basic services
+DOCKER_FILES=(
+    "-f" "docker-compose.ranger.yml"
+    "-f" "docker-compose.ranger-usersync.yml"
+    "-f" "docker-compose.ranger-tagsync.yml"
+    "-f" "docker-compose.ranger-kms.yml"
+)
+
+# add extra services from input
+for service in "${EXTRA_SERVICES[@]}"; do
+    DOCKER_FILES+=("-f" "docker-compose.ranger-${service}.yml")
+done
+
+BASE_SERVICES=(ranger "ranger-${RANGER_DB_TYPE}" ranger-zk ranger-solr ranger-usersync ranger-tagsync ranger-kms)
+ALL_SERVICES=("${BASE_SERVICES[@]}")
+for service in "${EXTRA_SERVICES[@]}"; do
+    ALL_SERVICES+=("ranger-${service}")
+done
+
+# only create/build if containers do not exist
+missing=false
+for container in "${ALL_SERVICES[@]}"; do
+    if ! docker container inspect "$container" >/dev/null 2>&1; then
+        missing=true
+        break
+    fi
+done
+
+if [[ "${missing}" == "true" ]]; then
+    echo "Some containers are missing. Creating services..."
+    docker compose "${DOCKER_FILES[@]}" up -d --build
+else
+    echo "All containers already exist. Starting without rebuild..."
+    docker compose "${DOCKER_FILES[@]}" up -d
+fi
+
+echo "Waiting for containers to start..."
+if [[ "${missing}" == "true" || "${admin_missing}" == "true" ]]; then
+    sleep 60
+else
+    echo "No rebuild/start needed. Skipping wait."
+fi
+
+echo "Checking container status..."
+flag=true
+for container in "${ALL_SERVICES[@]}"; do
+    if [[ $(docker inspect -f '{{.State.Running}}' "$container" 2>/dev/null) == "true" ]]; then
+        echo "Container $container is running!"
+    else
+        echo "Container $container is NOT running!"
+        flag=false
+    fi
+done
+
+# RUN TESTS--------
+# Use export RUN_TESTS=0 to only bring up infra and allow all services to
+# initialise properly (NOTE: it will skip tests to avoid early startup failures).
+RUN_TESTS="${RUN_TESTS:-1}"
+
+if [[ $flag == true ]]; then
+    if [[ "${RUN_TESTS}" == "1" ]]; then
+        echo "All required containers are up. Running test cases..."
+        cd "$TESTS_PATH" || exit 1 # Switch to the tests directory
+
+        python3 -m venv myenv || { echo "Failed to create venv"; exit 1; } # Create a new virtual environment
+        source myenv/bin/activate || { echo "Failed to activate venv"; exit 1; } # Activate it
+        pip install --upgrade pip
+        pip install -r requirements.txt || { echo "Failed to install requirements"; exit 1; } # Install dependencies
+
+        pytest -vs hdfs/ --html=report_hdfs.html # Runs all tests in the hdfs directory
+        pytest -vs kms/ --html=report_kms.html # Runs all tests in the kms directory
+    else
+        # Fixed: message previously interpolated the undefined variable TESTS_RUN.
+        echo "All required containers are up. Skipping tests (RUN_TESTS=${RUN_TESTS})."
+    fi
+else
+    echo "Some containers failed to start. Exiting..."
+    if [[ "${CLEAN_CONTAINERS}" == "1" ]]; then
+        docker stop $(docker ps -q --filter "name=ranger") 2>/dev/null || true
+        docker rm $(docker ps -aq --filter "name=ranger") 2>/dev/null || true
+    fi
+    exit 1
+fi
+
+if [[ "${CLEAN_CONTAINERS}" == "1" ]]; then
+    echo "Cleaning up containers..."
+    docker stop $(docker ps -q --filter "name=ranger") 2>/dev/null || true
+    docker rm $(docker ps -aq --filter "name=ranger") 2>/dev/null || true
+else
+    echo "Skipping cleanup (CLEAN_CONTAINERS!=1)."
+fi
+
+echo "Test execution complete!"
+exit 0