Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 23 additions & 8 deletions cli/src/commands/scan.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1088,15 +1088,30 @@ mod output_handler {
})
.map(|rule| {
let meta = self.output_options.include_meta.then(|| {
rule.metadata()
.map(|(meta_key, meta_val)| {
let meta_key = meta_key.to_owned();
let meta_val = serde_json::to_value(meta_val)
.expect(
"Derived Serialize impl should never fail",
);
// Group metadata by key to handle duplicate keys.
let mut grouped: HashMap<
String,
Vec<serde_json::Value>,
> = HashMap::new();

for (meta_key, meta_val) in rule.metadata() {
let key = meta_key.to_owned();
let val = serde_json::to_value(meta_val).expect(
"Derived Serialize impl should never fail",
);
grouped.entry(key).or_default().push(val);
}

(meta_key, meta_val)
// Single values stay as-is, multiple values become arrays.
grouped
.into_iter()
.map(|(k, mut v)| {
let val = if v.len() == 1 {
v.pop().unwrap()
} else {
serde_json::Value::Array(v)
};
(k, val)
})
.collect::<HashMap<_, _>>()
});
Expand Down
63 changes: 63 additions & 0 deletions cli/src/tests/scan.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ use assert_cmd::{cargo_bin, Command};
use assert_fs::prelude::*;
use assert_fs::TempDir;
use predicates::prelude::*;
use serde_json;

#[test]
fn always_true() {
Expand Down Expand Up @@ -356,3 +357,65 @@ fn issue_280() {
.assert()
.success();
}

#[test]
fn json_output_duplicate_meta_keys() {
    // Duplicate metadata keys must be merged into a JSON array in the
    // `--output-format=json` output, while keys that appear only once
    // must remain plain scalar values.
    let assert = Command::new(cargo_bin!("yr"))
        .arg("scan")
        .arg("--output-format=json")
        .arg("--print-meta")
        .arg("src/tests/testdata/duplicate_meta.yar")
        .arg("src/tests/testdata/dummy.file")
        .assert()
        .success();

    // Parse directly from the captured stdout; cloning the byte buffer
    // (`.stdout.clone()`) is unnecessary since we only need a borrow.
    let json: serde_json::Value =
        serde_json::from_slice(&assert.get_output().stdout)
            .expect("valid JSON output");

    // Navigate to the meta object of the first match.
    let meta = &json["matches"][0]["meta"];

    // Single-value keys should remain as single values.
    assert_eq!(meta["author"], "Test Author");
    assert_eq!(meta["description"], "Rule with duplicate metadata keys");

    // Duplicate keys should become arrays. Order is not guaranteed
    // (grouping goes through a HashMap), so check membership only.
    let hash = &meta["hash"];
    assert!(hash.is_array(), "hash should be an array");
    let hash_array = hash.as_array().unwrap();
    assert_eq!(hash_array.len(), 3);
    assert!(hash_array.contains(&serde_json::json!("aaa111")));
    assert!(hash_array.contains(&serde_json::json!("bbb222")));
    assert!(hash_array.contains(&serde_json::json!("ccc333")));
}

#[test]
fn json_output_single_meta_not_array() {
    // A rule whose metadata keys are all unique must serialize each value
    // as a plain scalar — never as a one-element array.
    let stdout = Command::new(cargo_bin!("yr"))
        .args([
            "scan",
            "--output-format=json",
            "--print-meta",
            "src/tests/testdata/foo.yar",
            "src/tests/testdata/dummy.file",
        ])
        .assert()
        .success()
        .get_output()
        .stdout
        .clone();

    let parsed: serde_json::Value =
        serde_json::from_slice(&stdout).expect("valid JSON output");

    let meta = &parsed["matches"][0]["meta"];

    // Each metadata value keeps its native JSON type (no array wrapping).
    assert!(meta["string"].is_string());
    assert!(meta["bool"].is_boolean());
    assert!(meta["int"].is_i64());
    assert!(meta["float"].is_f64());
}
10 changes: 10 additions & 0 deletions cli/src/tests/testdata/duplicate_meta.yar
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
rule duplicate_meta {
meta:
author = "Test Author"
hash = "aaa111"
hash = "bbb222"
hash = "ccc333"
description = "Rule with duplicate metadata keys"
condition:
true
}
30 changes: 23 additions & 7 deletions ls/src/features/completion.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use async_lsp::lsp_types::{
CompletionItem, CompletionItemKind, CompletionItemLabelDetails,
InsertTextFormat, InsertTextMode, Position,
CompletionContext, CompletionItem, CompletionItemKind,
CompletionItemLabelDetails, CompletionTriggerKind, InsertTextFormat,
InsertTextMode, Position,
};
use itertools::Itertools;

Expand Down Expand Up @@ -76,6 +77,7 @@ const CONDITION_SUGGESTIONS: [(&str, Option<&str>); 16] = [
pub fn completion(
document: &Document,
pos: Position,
context: Option<CompletionContext>,
) -> Option<Vec<CompletionItem>> {
let cst = &document.cst;
// Get the token before cursor. There might be no token at cursor when the
Expand All @@ -84,13 +86,29 @@ pub fn completion(
.and_then(|token| token.prev_token())
.or_else(|| cst.root().last_token())?;

// Trigger characters are: `.`, `!`, `$`, `@`, `#`.
let is_trigger_character = context.is_some_and(|ctx| {
ctx.trigger_kind == CompletionTriggerKind::TRIGGER_CHARACTER
});

// If the token is a direct child of `SOURCE_FILE`, return top-level suggestions.
if non_error_parent(&token)?.kind() == SyntaxKind::SOURCE_FILE {
if !is_trigger_character
&& non_error_parent(&token)?.kind() == SyntaxKind::SOURCE_FILE
{
return Some(source_file_suggestions());
}

let prev_token = prev_non_trivia_token(&token)?;

if prev_token.ancestors().any(|n| n.kind() == SyntaxKind::CONDITION_BLK) {
return condition_suggestions(cst, token);
}

// Trigger characters are recognized in the condition block only.
if is_trigger_character {
return Some(vec![]);
}

if prev_token.kind() == SyntaxKind::IMPORT_KW {
#[cfg(feature = "full-compiler")]
return Some(import_suggestions());
Expand All @@ -104,10 +122,6 @@ pub fn completion(
return Some(pattern_modifier_suggestions(pattern_def));
}

if prev_token.ancestors().any(|n| n.kind() == SyntaxKind::CONDITION_BLK) {
return condition_suggestions(cst, token);
}

if prev_token.ancestors().any(|n| n.kind() == SyntaxKind::RULE_DECL) {
return Some(rule_suggestions());
}
Expand Down Expand Up @@ -192,6 +206,8 @@ fn condition_suggestions(
}
}
}
// Do not propose keywords for condition block after a dot.
SyntaxKind::DOT => {}
_ => {
CONDITION_SUGGESTIONS.iter().for_each(|(kw, insert)| {
result.push(CompletionItem {
Expand Down
5 changes: 3 additions & 2 deletions ls/src/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -290,13 +290,14 @@ impl LanguageServer for YARALanguageServer {
{
let uri = params.text_document_position.text_document.uri;
let position = params.text_document_position.position;
let context = params.context;
let document = match self.documents.get(&uri) {
Some(entry) => entry,
None => return Box::pin(async { Ok(None) }),
};

let completions =
completion(document, position).map(CompletionResponse::Array);
let completions = completion(document, position, context)
.map(CompletionResponse::Array);

Box::pin(async move { Ok(completions) })
}
Expand Down
6 changes: 6 additions & 0 deletions ls/src/tests/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -267,6 +267,12 @@ async fn completion() {

#[cfg(all(feature = "full-compiler", not(feature = "magic-module")))]
test_lsp_request::<_, Completion>("completion11.yar").await;

#[cfg(all(feature = "full-compiler", not(feature = "magic-module")))]
test_lsp_request::<_, Completion>("completion12.yar").await;

#[cfg(all(feature = "full-compiler", not(feature = "magic-module")))]
test_lsp_request::<_, Completion>("completion13.yar").await;
}

#[tokio::test]
Expand Down
13 changes: 13 additions & 0 deletions ls/src/tests/testdata/completion12.request.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"textDocument": {
"uri": "file:///completion12.yar"
},
"position": {
"line": 2,
"character": 15
},
"context": {
"triggerKind": 2,
"triggerCharacter": "."
}
}
1 change: 1 addition & 0 deletions ls/src/tests/testdata/completion12.response.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[]
6 changes: 6 additions & 0 deletions ls/src/tests/testdata/completion12.yar
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
rule test {
strings:
$a = "foo".
condition:
$a
}
13 changes: 13 additions & 0 deletions ls/src/tests/testdata/completion13.request.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"textDocument": {
"uri": "file:///completion13.yar"
},
"position": {
"line": 3,
"character": 24
},
"context": {
"triggerKind": 2,
"triggerCharacter": "."
}
}
1 change: 1 addition & 0 deletions ls/src/tests/testdata/completion13.response.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[]
5 changes: 5 additions & 0 deletions ls/src/tests/testdata/completion13.yar
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
import "pe"
rule test {
condition:
pe.data_directories.
}
Loading