Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 12 additions & 7 deletions .github/scripts/commit_prefix_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,10 @@
repo = Repo(".")

# Regex patterns
PREFIX_RE = re.compile(r"^((?:[a-z0-9_]+:\s+)+)\S", re.IGNORECASE)
PREFIX_RE = re.compile(
r"^([a-z0-9_]+:(?:\s+[a-z0-9_]+:)*)\s+\S",
re.IGNORECASE,
)
SIGNED_OFF_RE = re.compile(r"Signed-off-by:", re.IGNORECASE)
FENCED_BLOCK_RE = re.compile(
r"""
Expand Down Expand Up @@ -241,6 +244,7 @@ def is_version_bump(commit):
if not subject_prefix:
return False, f"Missing prefix in commit subject: '{first_line}'"

subject_root_prefix = subject_prefix.split()[0]
# Run squash detection (but ignore multi-signoff errors)
bad_squash, reason = detect_bad_squash(body)

Expand Down Expand Up @@ -291,6 +295,7 @@ def is_version_bump(commit):

expected_lower = {p.lower() for p in expected}
subj_lower = subject_prefix.lower()
subj_root_lower = subject_root_prefix.lower()


# ------------------------------------------------
Expand Down Expand Up @@ -329,10 +334,10 @@ def is_version_bump(commit):
# (because the corresponding file exists). Only reject if it's not in the expected list
# or if it's an umbrella prefix that doesn't match.
if len(non_build_prefixes) > 1:
if subj_lower in umbrella_prefixes:
if subj_root_lower in umbrella_prefixes:
norm_paths = [p.replace(os.sep, "/") for p in files]

if subj_lower == "lib:":
if subj_root_lower == "lib:":
if not all(p.startswith("lib/") for p in norm_paths):
expected_list = sorted(expected)
expected_str = ", ".join(expected_list)
Expand All @@ -341,7 +346,7 @@ def is_version_bump(commit):
f"Expected one of: {expected_str}"
)

elif subj_lower == "tests:":
elif subj_root_lower == "tests:":
if not all(p.startswith("tests/") for p in norm_paths):
expected_list = sorted(expected)
expected_str = ", ".join(expected_list)
Expand All @@ -359,7 +364,7 @@ def is_version_bump(commit):
f"Expected one of: {expected_str}"
)

elif subj_lower == "http_server:":
elif subj_root_lower == "http_server:":
if not all(is_http_server_interface_path(p) for p in norm_paths):
expected_list = sorted(expected)
expected_str = ", ".join(expected_list)
Expand All @@ -377,7 +382,7 @@ def is_version_bump(commit):
)

# Subject prefix must be one of the expected ones
if subj_lower not in expected_lower:
if subj_lower not in expected_lower and subj_root_lower not in expected_lower:
expected_list = sorted(expected)
expected_str = ", ".join(expected_list)
return False, (
Expand All @@ -388,7 +393,7 @@ def is_version_bump(commit):

# If build is NOT optional and build: exists among expected,
# then subject MUST be build:
if not build_optional and "build:" in expected_lower and subj_lower != "build:":
if not build_optional and "build:" in expected_lower and subj_root_lower != "build:":
return False, (
f"Subject prefix '{subject_prefix}' does not match files changed.\n"
f"Expected one of: build:"
Expand Down
24 changes: 24 additions & 0 deletions .github/scripts/tests/test_commit_lint.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,30 @@ def test_valid_commit_single_prefix():
ok, _ = validate_commit(commit)
assert ok is True

def test_valid_commit_internal_tests_prefix():
    """Verify that a commit touching tests/internal passes validation when
    its subject carries the documented 'tests: internal:' compound prefix."""
    subject = "tests: internal: add root-key coverage\n\nSigned-off-by: User"
    touched_files = ["tests/internal/cfl_record_accessor.c"]

    result, _reason = validate_commit(make_commit(subject, touched_files))

    assert result is True


def test_valid_commit_runtime_tests_prefix():
    """Verify that a commit touching tests/runtime passes validation when
    its subject carries the documented 'tests: runtime:' compound prefix."""
    subject = "tests: runtime: add router coverage\n\nSigned-off-by: User"
    touched_files = ["tests/runtime/filter.c"]

    result, _reason = validate_commit(make_commit(subject, touched_files))

    assert result is True



def test_valid_commit_multiple_signoffs_allowed():
"""
Expand Down
8 changes: 4 additions & 4 deletions src/flb_cfl_ra_key.c
Original file line number Diff line number Diff line change
Expand Up @@ -402,14 +402,14 @@ int flb_cfl_ra_key_regex_match(flb_sds_t ckey, struct cfl_variant vobj,
if (result) {
/* Regex + capture mode */
return flb_regex_do(regex,
(char *) out_val->data.as_string,
cfl_sds_len(out_val->data.as_string),
(char *) val->data.as_string,
cfl_sds_len(val->data.as_string),
result);
}
else {
/* No capture */
return flb_regex_match(regex, (unsigned char *) out_val->data.as_string,
cfl_sds_len(out_val->data.as_string));
return flb_regex_match(regex, (unsigned char *) val->data.as_string,
cfl_sds_len(val->data.as_string));
}

return -1;
Expand Down
28 changes: 19 additions & 9 deletions src/flb_cfl_record_accessor.c
Original file line number Diff line number Diff line change
Expand Up @@ -575,13 +575,16 @@ static int cfl_to_json(struct cfl_variant *var, flb_sds_t buf)
loop = cfl_array_size(array);

flb_sds_cat_safe(&buf, "[", 1);
if (loop != 0) {
for (i = 0; i < loop - 1; i++) {
cfl_to_json(array->entries[i], buf);
for (i = 0; i < loop; i++) {
ret = cfl_to_json(array->entries[i], buf);
if (ret == -1) {
return -1;
}

if (i + 1 < loop) {
flb_sds_cat_safe(&buf, ",", 1);
}
}
cfl_to_json(array->entries[loop-1], buf);
flb_sds_cat_safe(&buf, "]", 1);
break;
}
Expand All @@ -604,9 +607,13 @@ static int cfl_to_json(struct cfl_variant *var, flb_sds_t buf)
if (ret == -1) {
return -1;
}
break;

if (head->next != &kvlist->list) {
flb_sds_cat_safe(&buf, ",", 1);
}
}
flb_sds_cat_safe(&buf, "}", 1);
break;
}

return 0;
Expand All @@ -617,9 +624,9 @@ static flb_sds_t cfl_ra_translate_keymap(struct flb_ra_parser *rp, flb_sds_t buf
{
int ret;
int len;
char *js;
char str[32];
flb_sds_t tmp = NULL;
flb_sds_t js = NULL;
struct flb_cfl_ra_value *crv;

/* Lookup key or subkey value */
Expand All @@ -644,10 +651,13 @@ static flb_sds_t cfl_ra_translate_keymap(struct flb_ra_parser *rp, flb_sds_t buf
js = flb_sds_create_size(1024);
/* Convert cfl_variant to JSON string */
ret = cfl_to_json(&crv->v, js);
if (ret == -1) {
if (ret == 0) {
len = strlen(js);
tmp = flb_sds_cat(buf, js, len);
flb_free(js);
flb_sds_destroy(js);
}
else {
flb_sds_destroy(js);
}
}
else if (crv->v.type == CFL_VARIANT_BOOL) {
Expand Down Expand Up @@ -808,7 +818,7 @@ int flb_cfl_ra_strcmp(struct flb_cfl_record_accessor *ra, struct cfl_variant var

rp = mk_list_entry_first(&ra->list, struct flb_ra_parser, _head);
return flb_cfl_ra_key_strcmp(rp->key->name, var, rp->key->subkeys,
rp->key->name, flb_sds_len(rp->key->name));
str, len);
}

/*
Expand Down
109 changes: 108 additions & 1 deletion tests/internal/cfl_record_accessor.c
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
#include <fluent-bit/flb_info.h>
#include <fluent-bit/flb_mem.h>
#include <fluent-bit/flb_error.h>
#include <fluent-bit/flb_regex.h>
#include <fluent-bit/flb_sds.h>
#include <fluent-bit/flb_pack.h>
#include <fluent-bit/flb_sds.h>
Expand Down Expand Up @@ -1700,6 +1701,110 @@ void cb_mixed_array_map_access()
cfl_variant_destroy(vobj);
}

void cb_translate_container_map()
{
    /* Translating '$obj' over {"obj": {"first": "alpha", "second": "beta"}}
     * must render the nested kvlist as its exact JSON representation. */
    flb_sds_t pattern;
    flb_sds_t rendered;
    char *expected;
    struct flb_cfl_record_accessor *ra;
    struct cfl_kvlist *root = NULL;
    struct cfl_kvlist *child = NULL;
    struct cfl_variant *record = NULL;

    root = cfl_kvlist_create();
    child = cfl_kvlist_create();

    TEST_CHECK(root != NULL);
    TEST_CHECK(child != NULL);
    if (root == NULL || child == NULL) {
        exit(EXIT_FAILURE);
    }

    /* build the nested map and attach it under the "obj" key */
    cfl_kvlist_insert_string(child, "first", "alpha");
    cfl_kvlist_insert_string(child, "second", "beta");
    cfl_kvlist_insert_kvlist(root, "obj", child);

    record = cfl_variant_create_from_kvlist(root);
    TEST_CHECK(record != NULL);
    if (record == NULL) {
        exit(EXIT_FAILURE);
    }

    pattern = flb_sds_create("$obj");
    expected = "{\"first\":\"alpha\",\"second\":\"beta\"}";

    ra = flb_cfl_ra_create(pattern, FLB_FALSE);
    TEST_CHECK(ra != NULL);
    if (ra == NULL) {
        exit(EXIT_FAILURE);
    }

    rendered = flb_cfl_ra_translate(ra, NULL, -1, *record, NULL);
    TEST_CHECK(rendered != NULL);
    if (rendered == NULL) {
        exit(EXIT_FAILURE);
    }

    /* the rendered JSON must match the expected string byte for byte */
    TEST_CHECK(flb_sds_len(rendered) == strlen(expected));
    TEST_CHECK(memcmp(rendered, expected, strlen(expected)) == 0);

    flb_sds_destroy(rendered);
    flb_sds_destroy(pattern);
    flb_cfl_ra_destroy(ra);
    cfl_variant_destroy(record);
}

void cb_strcmp_and_regex_root_key()
{
int ret;
char *fmt;
struct flb_regex *regex;
struct flb_cfl_record_accessor *cra;
struct cfl_kvlist *kvlist = NULL;
struct cfl_variant *vobj = NULL;

kvlist = cfl_kvlist_create();
TEST_CHECK(kvlist != NULL);
if (!kvlist) {
exit(EXIT_FAILURE);
}

cfl_kvlist_insert_string(kvlist, "message", "hello world");

vobj = cfl_variant_create_from_kvlist(kvlist);
TEST_CHECK(vobj != NULL);
if (!vobj) {
exit(EXIT_FAILURE);
}

fmt = flb_sds_create("$message");
cra = flb_cfl_ra_create(fmt, FLB_FALSE);
TEST_CHECK(cra != NULL);
if (!cra) {
exit(EXIT_FAILURE);
}

ret = flb_cfl_ra_strcmp(cra, *vobj, "hello world", 11);
TEST_CHECK(ret == 0);

ret = flb_cfl_ra_strcmp(cra, *vobj, "goodbye", 7);
TEST_CHECK(ret != 0);

regex = flb_regex_create("hello");
TEST_CHECK(regex != NULL);
if (!regex) {
exit(EXIT_FAILURE);
}

ret = flb_cfl_ra_regex_match(cra, *vobj, regex, NULL);
TEST_CHECK(ret > 0);

flb_regex_destroy(regex);
flb_sds_destroy(fmt);
flb_cfl_ra_destroy(cra);
cfl_variant_destroy(vobj);
}

TEST_LIST = {
{ "keys" , cb_keys},
{ "dash_key" , cb_dash_key},
Expand All @@ -1721,5 +1826,7 @@ TEST_LIST = {
{ "direct_array_access" , cb_direct_array_access},
{ "nested_array_access" , cb_nested_array_access},
{ "mixed_array_map_access" , cb_mixed_array_map_access},
{ "translate_container_map", cb_translate_container_map},
{ "strcmp_and_regex_root_key", cb_strcmp_and_regex_root_key},
{ NULL }
};
};
Loading