Skip to content

Commit 35c838f

Browse files
rameeshm (Ramesh Mani) authored
RANGER-5513:Enhance Ranger lookup API input validation (#880)
* RANGER-5513: Enhance Ranger lookup API input validation
* RANGER-5513: Enhance Ranger lookup API input validation — fixed copilot comments (sets 1–4)
* RANGER-5513: Enhance Ranger lookup API input validation — exception propagation issue fix
* RANGER-5513: Enhance Ranger lookup API input validation — HiveClient HMS API call validation fix
* RANGER-5513: Enhance Ranger lookup API input validation — remove HadoopException dependency on schema-registry client
* RANGER-5513: Enhance Ranger lookup API input validation — HBase lookup issue fix
* RANGER-5513: Enhance Ranger lookup API input validation — fix BaseClient compilation issue

Co-authored-by: Ramesh Mani <rmani@apache.org>
1 parent 3e4a3c1 commit 35c838f

22 files changed

Lines changed: 2381 additions & 406 deletions

File tree

agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java

Lines changed: 163 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@
3131
import java.util.ArrayList;
3232
import java.util.List;
3333
import java.util.Map;
34+
import java.util.regex.PatternSyntaxException;
3435

3536
public abstract class BaseClient {
3637
private static final Logger LOG = LoggerFactory.getLogger(BaseClient.class);
@@ -184,6 +185,168 @@ private void init() {
184185
}
185186
}
186187

188+
protected void validateSqlIdentifier(String identifier, String identifierType) throws HadoopException {
189+
if (StringUtils.isBlank(identifier)) {
190+
return;
191+
}
192+
if (identifier.contains("..") || identifier.contains("//") || identifier.contains("\\")) {
193+
String msgDesc = "Invalid " + identifierType + ": [" + identifier + "]. Path traversal patterns are not allowed.";
194+
HadoopException hdpException = new HadoopException(msgDesc);
195+
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
196+
LOG.error(msgDesc);
197+
throw hdpException;
198+
}
199+
if (!identifier.matches("^[a-zA-Z0-9*?\\[\\]\\-\\$%\\{\\}\\=\\/\\._]+$")) {
200+
String msgDesc = "Invalid " + identifierType + ": [" + identifier + "]. Only alphanumeric characters along with ( ., _, -, *, ?, [], {}, %, $, = / ) are allowed.";
201+
HadoopException hdpException = new HadoopException(msgDesc);
202+
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
203+
LOG.error(msgDesc);
204+
throw hdpException;
205+
}
206+
}
207+
208+
protected String convertToSqlPattern(String pattern) throws HadoopException {
209+
if (pattern == null || pattern.isEmpty()) {
210+
return "%";
211+
}
212+
// Convert custom wildcards to SQL LIKE pattern:
213+
// '*' -> '%' (multi-character wildcard)
214+
// '?' -> '_' (single-character wildcard)
215+
String sqlPattern = pattern.replace("*", "%").replace("?", "_");
216+
return sqlPattern;
217+
}
218+
219+
protected boolean matchesSqlPattern(String value, String pattern) throws HadoopException {
220+
if (pattern == null || pattern.equals("%")) {
221+
return true;
222+
}
223+
224+
String regex = convertSqlPatternToRegex(pattern);
225+
try {
226+
return value.matches(regex);
227+
} catch (PatternSyntaxException pe) {
228+
String msgDesc = "Invalid value: [" + value + "]. Only alphanumeric characters along with ( ., _, -, *, ?, [], {}, %, $, = / ) are allowed.";
229+
HadoopException hdpException = new HadoopException(msgDesc);
230+
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
231+
LOG.error(msgDesc);
232+
throw hdpException;
233+
}
234+
}
235+
236+
protected void validateUrlResourceName(String resourceName, String resourceType) throws HadoopException {
237+
if (resourceName == null) {
238+
return;
239+
}
240+
if (resourceName.contains("..") || resourceName.contains("//") || resourceName.contains("\\")) {
241+
String msgDesc = "Invalid " + resourceType + ": [" + resourceName + "]. Path traversal patterns are not allowed.";
242+
HadoopException hdpException = new HadoopException(msgDesc);
243+
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
244+
LOG.error(msgDesc);
245+
throw hdpException;
246+
}
247+
if (!resourceName.matches("^[a-zA-Z0-9_.*\\-]+$")) {
248+
String msgDesc = "Invalid " + resourceType + ": [" + resourceName + "]. Only alphanumeric characters with ( ., _, *, -) are allowed.";
249+
HadoopException hdpException = new HadoopException(msgDesc);
250+
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
251+
LOG.error(msgDesc);
252+
throw hdpException;
253+
}
254+
}
255+
256+
public void validateWildcardPattern(String pattern, String patternType) throws HadoopException {
257+
if (pattern == null || pattern.isEmpty()) {
258+
return;
259+
}
260+
if (pattern.contains("..") || pattern.contains("//") || pattern.contains("\\")) {
261+
String msgDesc = "Invalid " + patternType + ": [" + pattern + "]. Path traversal patterns are not allowed.";
262+
HadoopException hdpException = new HadoopException(msgDesc);
263+
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
264+
LOG.error(msgDesc);
265+
throw hdpException;
266+
}
267+
if (!pattern.matches("^[a-zA-Z0-9_.*?\\[\\]\\-\\$%\\{\\}\\=\\/]+$")) {
268+
String msgDesc = "Invalid " + patternType + ": [" + pattern + "]. Only alphanumeric characters along with ( ., _, -, *, ?, [], {}, %, $, = / ) are allowed.";
269+
HadoopException hdpException = new HadoopException(msgDesc);
270+
hdpException.generateResponseDataMap(false, msgDesc, msgDesc + DEFAULT_ERROR_MESSAGE, null, null);
271+
LOG.error(msgDesc);
272+
throw hdpException;
273+
}
274+
}
275+
276+
protected String convertSqlPatternToRegex(String pattern) {
277+
StringBuilder regexBuilder = new StringBuilder("^");
278+
279+
for (int i = 0; i < pattern.length(); i++) {
280+
char c = pattern.charAt(i);
281+
switch (c) {
282+
case '%':
283+
// SQL LIKE wildcard: zero or more characters
284+
regexBuilder.append(".*");
285+
break;
286+
case '_':
287+
// SQL LIKE wildcard: exactly one character
288+
regexBuilder.append('.');
289+
break;
290+
case '.':
291+
case '^':
292+
case '$':
293+
case '+':
294+
case '?':
295+
case '{':
296+
case '}':
297+
case '[':
298+
case ']':
299+
case '(':
300+
case ')':
301+
case '|':
302+
case '\\':
303+
// Escape regex metacharacters so they are treated literally
304+
regexBuilder.append('\\').append(c);
305+
break;
306+
default:
307+
regexBuilder.append(c);
308+
break;
309+
}
310+
}
311+
312+
return regexBuilder.toString();
313+
}
314+
315+
public String convertWildcardToRegex(String wildcard) {
316+
if (wildcard == null || wildcard.isEmpty()) {
317+
return ".*";
318+
}
319+
StringBuilder regex = new StringBuilder("^");
320+
for (int i = 0; i < wildcard.length(); i++) {
321+
char c = wildcard.charAt(i);
322+
switch (c) {
323+
case '*':
324+
regex.append(".*");
325+
break;
326+
case '?':
327+
regex.append(".");
328+
break;
329+
case '.':
330+
case '\\':
331+
case '^':
332+
case '$':
333+
case '|':
334+
regex.append('\\').append(c);
335+
break;
336+
case '{':
337+
case '}':
338+
case '[':
339+
case ']':
340+
regex.append('\\').append(c);
341+
break;
342+
default:
343+
regex.append(c);
344+
}
345+
}
346+
regex.append('$');
347+
return regex.toString();
348+
}
349+
187350
// Wraps a login failure for this service in a HadoopException with a
// standard message; delegates to the two-arg createException overload
// (defined elsewhere in this class — presumably it populates the
// response data map; confirm against that overload).
private HadoopException createException(Exception exp) {
    return createException("Unable to login to Hadoop environment [" + serviceName + "]", exp);
}

agents-common/src/test/java/org/apache/ranger/plugin/client/TestBaseClient.java

Lines changed: 110 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -326,4 +326,114 @@ class TestClient extends BaseClient {
326326
assertEquals(IllegalArgumentException.class, ex.getClass());
327327
}
328328
}
329+
330+
// Verifies BaseClient.convertWildcardToRegex(): null/empty input maps to the
// match-all regex ".*", '*'/'?' translate to ".*"/".", and regex
// metacharacters in the wildcard are escaped so they match literally.
@Test
public void test15_convertWildcardToRegex() {
    // Minimal concrete subclass: BaseClient is abstract and login() is
    // overridden as a no-op so no real connection is attempted.
    class TestClient extends BaseClient {
        TestClient() {
            super("test", new HashMap<>());
        }

        @Override
        protected void login() {
        }

        // Thin wrapper so the assertions can call the conversion directly.
        public String convert(String s) {
            return convertWildcardToRegex(s);
        }
    }

    TestClient client = new TestClient();
    assertEquals(".*", client.convert(null));              // null -> match everything
    assertEquals(".*", client.convert(""));                // empty -> match everything
    assertEquals("^atlas.*$", client.convert("atlas*"));   // '*' -> ".*"
    assertEquals("^atlas\\..*$", client.convert("atlas.*")); // '.' escaped, '*' expanded
    assertEquals("^.*atlas.*$", client.convert("*atlas*"));
    assertEquals("^at.as$", client.convert("at?as"));      // '?' -> '.'
    assertEquals("^atlas\\.$", client.convert("atlas."));  // metacharacters escaped:
    assertEquals("^atlas\\$$", client.convert("atlas$"));
    assertEquals("^atlas\\^$", client.convert("atlas^"));
    assertEquals("^atlas\\[\\]$", client.convert("atlas[]"));
}
358+
359+
// Verifies BaseClient.convertToSqlPattern(): null/empty input maps to the
// universal SQL LIKE pattern "%", '*' translates to '%' and '?' to '_'.
@Test
public void test16_convertToSqlPattern() throws Exception {
    // Minimal concrete subclass; login() is a no-op.
    class TestClient extends BaseClient {
        TestClient() {
            super("test", new HashMap<>());
        }

        @Override
        protected void login() {
        }

        // Thin wrapper so the assertions can call the conversion directly.
        public String convert(String s) throws Exception {
            return convertToSqlPattern(s);
        }
    }

    TestClient client = new TestClient();
    assertEquals("%", client.convert(null));        // null -> match everything
    assertEquals("%", client.convert(""));          // empty -> match everything
    assertEquals("atlas%", client.convert("atlas*")); // '*' -> '%'
    assertEquals("at_as", client.convert("at?as"));   // '?' -> '_'
}
381+
382+
// Verifies BaseClient.matchesSqlPattern(): null/"%" patterns match anything,
// '%' matches any run of characters, '_' matches exactly one character, and
// a pattern shorter than the value does not match.
@Test
public void test17_matchesSqlPattern() throws Exception {
    // Minimal concrete subclass; login() is a no-op.
    class TestClient extends BaseClient {
        TestClient() {
            super("test", new HashMap<>());
        }

        @Override
        protected void login() {
        }

        // Thin wrapper so the assertions can call the matcher directly.
        public boolean match(String v, String p) throws Exception {
            return matchesSqlPattern(v, p);
        }
    }

    TestClient client = new TestClient();
    assertEquals(true, client.match("atlas", null));        // null pattern matches all
    assertEquals(true, client.match("atlas", "%"));         // universal pattern
    assertEquals(true, client.match("atlas", "atlas%"));    // '%' matches empty suffix
    assertEquals(true, client.match("atlas_test", "atlas%"));
    assertEquals(true, client.match("atlas", "at_as"));     // '_' matches one char
    assertEquals(false, client.match("atlas", "at_a"));     // pattern too short
}
406+
407+
// Verifies BaseClient.validateWildcardPattern(): whitelisted wildcard
// patterns pass, while path-traversal sequences ("..") are rejected with
// an exception.
@Test
public void test18_validateWildcardPattern() {
    // Minimal concrete subclass; login() is a no-op.
    class TestClient extends BaseClient {
        TestClient() {
            super("test", new HashMap<>());
        }

        @Override
        protected void login() {
        }

        // Thin wrapper so the assertions can call the validator directly.
        public void validate(String s) throws Exception {
            validateWildcardPattern(s, "test");
        }
    }

    TestClient client = new TestClient();

    // Valid patterns must not throw. NOTE(review): if the first call throws,
    // the remaining valid patterns are not exercised — assertThrows-per-input
    // would isolate failures better.
    try {
        client.validate("atlas*");
        client.validate("atlas.*");
        client.validate("atlas?");
    } catch (Exception e) {
        org.junit.jupiter.api.Assertions.fail("Should not throw exception for valid patterns");
    }

    // A ".." sequence must be rejected as path traversal.
    try {
        client.validate("atlas../test");
        org.junit.jupiter.api.Assertions.fail("Should throw exception for path traversal");
    } catch (Exception e) {
        // Expected
    }
}
329439
}

hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java

Lines changed: 18 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -191,6 +191,12 @@ public List<String> getTableList(final String tableNameMatching, final List<Stri
191191
ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
192192
@Override
193193
public List<String> run() {
194+
String wildcard = tableNameMatching;
195+
if (wildcard != null) {
196+
wildcard = wildcard.replace(".*", "*");
197+
}
198+
validateWildcardPattern(wildcard, "table pattern");
199+
String safeTablePattern = convertWildcardToRegex(wildcard);
194200
List<String> tableList = new ArrayList<>();
195201
Admin admin = null;
196202

@@ -205,8 +211,7 @@ public List<String> run() {
205211
LOG.info("getTableList: no exception: HbaseAvailability true");
206212

207213
admin = conn.getAdmin();
208-
209-
List<TableDescriptor> htds = admin.listTableDescriptors(Pattern.compile(tableNameMatching));
214+
List<TableDescriptor> htds = admin.listTableDescriptors(Pattern.compile(safeTablePattern));
210215

211216
if (htds != null) {
212217
for (TableDescriptor htd : htds) {
@@ -240,6 +245,8 @@ public List<String> run() {
240245
LOG.error(msgDesc + mnre);
241246

242247
throw hdpException;
248+
} catch (HadoopException he) {
249+
throw he;
243250
} catch (IOException io) {
244251
String msgDesc = "getTableList: Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "].";
245252
HadoopException hdpException = new HadoopException(msgDesc, io);
@@ -291,14 +298,18 @@ public List<String> getColumnFamilyList(final String columnFamilyMatching, final
291298

292299
@Override
293300
public List<String> run() {
301+
String wildcard = columnFamilyMatching;
302+
if (wildcard != null) {
303+
wildcard = wildcard.replace(".*", "*");
304+
}
305+
validateWildcardPattern(wildcard, "column family pattern");
306+
String safeColumnPattern = convertWildcardToRegex(wildcard);
294307
List<String> colfList = new ArrayList<>();
295308
Admin admin = null;
296309

297310
try {
298311
LOG.info("getColumnFamilyList: setting config values from client");
299-
300312
setClientConfigValues(conf);
301-
302313
LOG.info("getColumnFamilyList: checking HbaseAvailability with the new config");
303314

304315
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -314,8 +325,7 @@ public List<String> run() {
314325
if (htd != null) {
315326
for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
316327
String colf = hcd.getNameAsString();
317-
318-
if (colf.matches(columnFamilyMatching)) {
328+
if (colf.matches(safeColumnPattern)) {
319329
if (existingColumnFamilies != null && existingColumnFamilies.contains(colf)) {
320330
continue;
321331
} else {
@@ -345,6 +355,8 @@ public List<String> run() {
345355
LOG.error(msgDesc + mnre);
346356

347357
throw hdpException;
358+
} catch (HadoopException he) {
359+
throw he;
348360
} catch (IOException io) {
349361
String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for [repository:" + getConfigHolder().getDatasourceName() + ",table:" + tblName + ", table-match:" + columnFamilyMatching + "] ";
350362
HadoopException hdpException = new HadoopException(msgDesc, io);

0 commit comments

Comments
 (0)