diff --git a/rules/integrations/azure/credential_access_entra_signin_brute_force_microsoft_365_repeat_source.toml b/rules/_deprecated/credential_access_entra_signin_brute_force_microsoft_365_repeat_source.toml similarity index 98% rename from rules/integrations/azure/credential_access_entra_signin_brute_force_microsoft_365_repeat_source.toml rename to rules/_deprecated/credential_access_entra_signin_brute_force_microsoft_365_repeat_source.toml index 98bc858fcff..f5050278864 100644 --- a/rules/integrations/azure/credential_access_entra_signin_brute_force_microsoft_365_repeat_source.toml +++ b/rules/_deprecated/credential_access_entra_signin_brute_force_microsoft_365_repeat_source.toml @@ -1,8 +1,9 @@ [metadata] creation_date = "2024/09/06" +deprecation_date = "2025/07/16" integration = ["azure"] -maturity = "production" -updated_date = "2025/06/06" +maturity = "deprecated" +updated_date = "2025/07/16" [rule] author = ["Elastic"] diff --git a/rules/cross-platform/execution_potential_widespread_malware_infection.toml b/rules/cross-platform/execution_potential_widespread_malware_infection.toml index f7ca76e64fa..3f52c996a02 100644 --- a/rules/cross-platform/execution_potential_widespread_malware_infection.toml +++ b/rules/cross-platform/execution_potential_widespread_malware_infection.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/05/08" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -67,8 +67,8 @@ query = ''' from logs-endpoint.alerts-* | where event.code in ("malicious_file", "memory_signature", "shellcode_thread") and rule.name is not null | keep host.id, rule.name, event.code -| stats hosts = count_distinct(host.id) by rule.name, event.code -| where hosts >= 3 +| stats Esql.host_id_count_distinct = count_distinct(host.id) by rule.name, event.code +| where Esql.host_id_count_distinct >= 3 ''' diff --git a/rules/cross-platform/initial_access_azure_o365_with_network_alert.toml b/rules/cross-platform/initial_access_azure_o365_with_network_alert.toml index 329c2fd6ea7..5632c7110c3 100644 --- a/rules/cross-platform/initial_access_azure_o365_with_network_alert.toml +++ b/rules/cross-platform/initial_access_azure_o365_with_network_alert.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/29" integration = ["azure", "o365"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -77,22 +77,48 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-*, .alerts-security.* -// query runs every 1 hour looking for activities occured during last 8 hours to match on disparate events -| where @timestamp > NOW() - 8 hours -// filter for Azure or M365 sign-in and External Alerts with source.ip not null -| where TO_IP(source.ip) is not null and (event.dataset in ("o365.audit", "azure.signinlogs") or kibana.alert.rule.name == "External Alerts") and -// exclude private IP ranges - not CIDR_MATCH(TO_IP(source.ip), "10.0.0.0/8", "127.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", "192.0.0.0/24", "192.0.0.0/29", "192.0.0.8/32", "192.0.0.9/32", "192.0.0.10/32", "192.0.0.170/32", "192.0.0.171/32", "192.0.2.0/24", "192.31.196.0/24", "192.52.193.0/24", "192.168.0.0/16", "192.88.99.0/24", "224.0.0.0/4", "100.64.0.0/10", "192.175.48.0/24","198.18.0.0/15", "198.51.100.0/24", "203.0.113.0/24", "240.0.0.0/4", "::1","FE80::/10", "FF00::/8") +from logs-*, .alerts-security.* +// query runs every 1 hour looking for activities occurred during last 8 hours to match on disparate events +| 
where @timestamp > now() - 8 hours +// filter for azure or m365 sign-in and external alerts with source.ip not null +| where to_ip(source.ip) is not null + and (event.dataset in ("o365.audit", "azure.signinlogs") or kibana.alert.rule.name == "External Alerts") + and not cidr_match( + to_ip(source.ip), + "10.0.0.0/8", "127.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", "192.0.0.0/24", "192.0.0.0/29", + "192.0.0.8/32", "192.0.0.9/32", "192.0.0.10/32", "192.0.0.170/32", "192.0.0.171/32", "192.0.2.0/24", + "192.31.196.0/24", "192.52.193.0/24", "192.168.0.0/16", "192.88.99.0/24", "224.0.0.0/4", + "100.64.0.0/10", "192.175.48.0/24", "198.18.0.0/15", "198.51.100.0/24", "203.0.113.0/24", + "240.0.0.0/4", "::1", "FE80::/10", "FF00::/8" + ) + +// capture relevant raw fields | keep source.ip, event.action, event.outcome, event.dataset, kibana.alert.rule.name, event.category -// split alerts to 3 buckets - M365 mail access, azure sign-in and network related external alerts like NGFW and IDS -| eval mail_access_src_ip = case(event.dataset == "o365.audit" and event.action == "MailItemsAccessed" and event.outcome == "success", TO_IP(source.ip), null), - azure_src_ip = case(event.dataset == "azure.signinlogs" and event.outcome == "success", TO_IP(source.ip), null), - network_alert_src_ip = case(kibana.alert.rule.name == "External Alerts" and not event.dataset in ("o365.audit", "azure.signinlogs"), TO_IP(source.ip), null) -// aggregated alerts count by bucket and by source.ip -| stats total_alerts = count(*), is_mail_access = COUNT_DISTINCT(mail_access_src_ip), is_azure = COUNT_DISTINCT(azure_src_ip), unique_dataset = COUNT_DISTINCT(event.dataset),is_network_alert = COUNT_DISTINCT(network_alert_src_ip), datasets = VALUES(event.dataset), rules = VALUES(kibana.alert.rule.name), cat = VALUES(event.category) by source_ip = TO_IP(source.ip) -// filter for cases where there is a successful sign-in to azure or m365 mail and the source.ip is reported by a network external alert. 
-| where is_network_alert > 0 and unique_dataset >= 2 and (is_mail_access > 0 or is_azure > 0) and total_alerts <= 100 + +// classify each source ip based on alert type +| eval + Esql.source_ip_mail_access_case = case(event.dataset == "o365.audit" and event.action == "MailItemsAccessed" and event.outcome == "success", to_ip(source.ip), null), + Esql.source_ip_azure_signin_case = case(event.dataset == "azure.signinlogs" and event.outcome == "success", to_ip(source.ip), null), + Esql.source_ip_network_alert_case = case(kibana.alert.rule.name == "External Alerts" and not event.dataset in ("o365.audit", "azure.signinlogs"), to_ip(source.ip), null) + +// aggregate by source ip +| stats + Esql.event_count = count(*), + Esql.source_ip_mail_access_case_count_distinct = count_distinct(Esql.source_ip_mail_access_case), + Esql.source_ip_azure_signin_case_count_distinct = count_distinct(Esql.source_ip_azure_signin_case), + Esql.source_ip_network_alert_case_count_distinct = count_distinct(Esql.source_ip_network_alert_case), + Esql.event_dataset_count_distinct = count_distinct(event.dataset), + Esql.event_dataset_values = values(event.dataset), + Esql.kibana_alert_rule_name_values = values(kibana.alert.rule.name), + Esql.event_category_values = values(event.category) + by Esql.source_ip = to_ip(source.ip) + +// correlation condition +| where + Esql.source_ip_network_alert_case_count_distinct > 0 + and Esql.event_dataset_count_distinct >= 2 + and (Esql.source_ip_mail_access_case_count_distinct > 0 or Esql.source_ip_azure_signin_case_count_distinct > 0) + and Esql.event_count <= 100 ''' diff --git a/rules/integrations/aws/discovery_ec2_multi_region_describe_instances.toml b/rules/integrations/aws/discovery_ec2_multi_region_describe_instances.toml index b44db7e2dc0..cdf6c4d7929 100644 --- a/rules/integrations/aws/discovery_ec2_multi_region_describe_instances.toml +++ b/rules/integrations/aws/discovery_ec2_multi_region_describe_instances.toml @@ -2,7 +2,7 @@ creation_date = "2024/08/26" integration = ["aws"] maturity = "production" -updated_date = "2025/01/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -89,30 +89,35 @@ query = ''' from logs-aws.cloudtrail-* // filter for DescribeInstances API calls -| where event.dataset == "aws.cloudtrail" and event.provider == "ec2.amazonaws.com" and event.action == "DescribeInstances" +| where event.dataset == "aws.cloudtrail" + and event.provider == "ec2.amazonaws.com" + and event.action == "DescribeInstances" // truncate the timestamp to a 30-second window -| eval target_time_window = DATE_TRUNC(30 seconds, @timestamp) +| eval Esql.time_window_date_trunc = date_trunc(30 seconds, @timestamp) -// keep only the relevant fields -| keep target_time_window, aws.cloudtrail.user_identity.arn, cloud.region +// keep only the relevant raw fields +| keep Esql.time_window_date_trunc, aws.cloudtrail.user_identity.arn, cloud.region // count the number of unique regions and total API calls within the 30-second window -| stats region_count = count_distinct(cloud.region), window_count = count(*) by target_time_window, aws.cloudtrail.user_identity.arn +| stats + Esql.cloud_region_count_distinct = count_distinct(cloud.region), + Esql.event_count = count(*) + by Esql.time_window_date_trunc, aws.cloudtrail.user_identity.arn // filter for resources making DescribeInstances API calls in more than 10 regions within the 30-second window -| where region_count >= 10 and window_count >= 10 +| where Esql.cloud_region_count_distinct >= 10 and Esql.event_count >= 10 -// sort the 
results by time windows in descending order -| sort target_time_window desc +// sort the results by time window in descending order +| sort Esql.time_window_date_trunc desc ''' [rule.investigation_fields] field_names = [ - "aws.cloudtrail.user_identity.arn", - "target_time_window", - "region_count", - "window_count" + "aws.cloudtrail.user_identity.arn", + "Esql.time_window_date_trunc", + "Esql.cloud_region_count_distinct", + "Esql.event_count" ] [[rule.threat]] diff --git a/rules/integrations/aws/discovery_ec2_multiple_discovery_api_calls_via_cli.toml b/rules/integrations/aws/discovery_ec2_multiple_discovery_api_calls_via_cli.toml index f4588bc7f12..8aeac9ba3d7 100644 --- a/rules/integrations/aws/discovery_ec2_multiple_discovery_api_calls_via_cli.toml +++ b/rules/integrations/aws/discovery_ec2_multiple_discovery_api_calls_via_cli.toml @@ -2,7 +2,7 @@ creation_date = "2024/11/04" integration = ["aws"] maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -83,11 +83,11 @@ query = ''' from logs-aws.cloudtrail* // create time window buckets of 10 seconds -| eval time_window = date_trunc(10 seconds, @timestamp) +| eval Esql.time_window_date_trunc = date_trunc(10 seconds, @timestamp) | where event.dataset == "aws.cloudtrail" - // filter on CloudTrail audit logs for IAM, EC2, and S3 events only + // filter on CloudTrail audit logs for IAM, EC2, S3, etc. and event.provider in ( "iam.amazonaws.com", "ec2.amazonaws.com", @@ -97,8 +97,7 @@ from logs-aws.cloudtrail* "dynamodb.amazonaws.com", "kms.amazonaws.com", "cloudfront.amazonaws.com", - "elasticloadbalancing.amazonaws.com", - "cloudfront.amazonaws.com" + "elasticloadbalancing.amazonaws.com" ) // ignore AWS service actions @@ -117,19 +116,24 @@ from logs-aws.cloudtrail* starts_with(event.action, "List"), starts_with(event.action, "Generate") ) + // extract owner, identity type, and actor from the ARN -| dissect aws.cloudtrail.user_identity.arn "%{}::%{owner}:%{identity_type}/%{actor}" -| where starts_with(actor, "AWSServiceRoleForConfig") != true -| keep @timestamp, time_window, event.action, aws.cloudtrail.user_identity.arn +| dissect aws.cloudtrail.user_identity.arn "%{}::%{Esql_priv.aws_cloudtrail_user_identity_arn_owner}:%{Esql.aws_cloudtrail_user_identity_arn_type}/%{Esql.aws_cloudtrail_user_identity_arn_roles}" +| where starts_with(Esql.aws_cloudtrail_user_identity_arn_roles, "AWSServiceRoleForConfig") != true + +// keep relevant fields (preserving ECS fields and computed time window) +| keep @timestamp, Esql.time_window_date_trunc, event.action, aws.cloudtrail.user_identity.arn + +// count the number of unique API calls per time window and actor | stats - // count the number of unique API calls per time window and actor - unique_api_count = count_distinct(event.action) by time_window, aws.cloudtrail.user_identity.arn + Esql.event_action_count_distinct = count_distinct(event.action) + by Esql.time_window_date_trunc, aws.cloudtrail.user_identity.arn -// filter for more than 5 unique API calls per time window -| where unique_api_count > 5 +// filter for more than 5 unique API calls per 10s window +| where Esql.event_action_count_distinct > 5 // sort the results by the number of unique API calls in descending order -| sort unique_api_count desc +| sort Esql.event_action_count_distinct desc ''' diff --git a/rules/integrations/aws/discovery_servicequotas_multi_region_service_quota_requests.toml b/rules/integrations/aws/discovery_servicequotas_multi_region_service_quota_requests.toml index 3b171b77e62..afb9e3f6d35 100644 
--- a/rules/integrations/aws/discovery_servicequotas_multi_region_service_quota_requests.toml +++ b/rules/integrations/aws/discovery_servicequotas_multi_region_service_quota_requests.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/08/26" maturity = "production" -updated_date = "2025/01/15" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -15,52 +15,6 @@ from = "now-9m" language = "esql" license = "Elastic License v2" name = "AWS Service Quotas Multi-Region `GetServiceQuota` Requests" -references = [ - "https://www.sentinelone.com/labs/exploring-fbot-python-based-malware-targeting-cloud-and-payment-services/", - "https://docs.aws.amazon.com/servicequotas/2019-06-24/apireference/API_GetServiceQuota.html", -] -risk_score = 21 -rule_id = "19be0164-63d2-11ef-8e38-f661ea17fbce" -severity = "low" -tags = [ - "Domain: Cloud", - "Data Source: AWS", - "Data Source: Amazon Web Services", - "Data Source: AWS Service Quotas", - "Use Case: Threat Detection", - "Tactic: Discovery", - "Resources: Investigation Guide", -] -timestamp_override = "event.ingested" -type = "esql" - -query = ''' -from logs-aws.cloudtrail-* - -// filter for GetServiceQuota API calls -| where event.dataset == "aws.cloudtrail" and event.provider == "servicequotas.amazonaws.com" and event.action == "GetServiceQuota" - -// truncate the timestamp to a 30-second window -| eval target_time_window = DATE_TRUNC(30 seconds, @timestamp) - -// pre-process the request parameters to extract the service code and quota code -| dissect aws.cloudtrail.request_parameters "{%{?service_code_key}=%{service_code}, %{?quota_code_key}=%{quota_code}}" - -// filter for EC2 service quota L-1216C47A (vCPU on-demand instances) -| where service_code == "ec2" and quota_code == "L-1216C47A" - -// keep only the relevant fields -| keep target_time_window, aws.cloudtrail.user_identity.arn, cloud.region, service_code, quota_code - -// count the number of unique regions and total API calls within the 30-second window -| stats region_count = count_distinct(cloud.region), window_count = count(*) by target_time_window, aws.cloudtrail.user_identity.arn - -// filter for resources making DescribeInstances API calls in more than 10 regions within the 30-second window -| where region_count >= 10 and window_count >= 10 - -// sort the results by time windows in descending order -| sort target_time_window desc -''' note = """## Triage and analysis > **Disclaimer**: @@ -95,6 +49,66 @@ AWS Service Quotas manage resource limits across AWS services, crucial for maint - Notify the security operations team and relevant stakeholders about the potential compromise and the steps being taken to remediate the issue. - If evidence of compromise is confirmed, consider engaging AWS Support or a third-party incident response team for further investigation and assistance. 
- Review and update IAM policies and permissions to ensure the principle of least privilege is enforced, reducing the risk of future unauthorized access attempts.""" +references = [ + "https://www.sentinelone.com/labs/exploring-fbot-python-based-malware-targeting-cloud-and-payment-services/", + "https://docs.aws.amazon.com/servicequotas/2019-06-24/apireference/API_GetServiceQuota.html", +] +risk_score = 21 +rule_id = "19be0164-63d2-11ef-8e38-f661ea17fbce" +severity = "low" +tags = [ + "Domain: Cloud", + "Data Source: AWS", + "Data Source: Amazon Web Services", + "Data Source: AWS Service Quotas", + "Use Case: Threat Detection", + "Tactic: Discovery", + "Resources: Investigation Guide", +] +timestamp_override = "event.ingested" +type = "esql" + +query = ''' +from logs-aws.cloudtrail-* + +// filter for GetServiceQuota API calls +| where + event.dataset == "aws.cloudtrail" + and event.provider == "servicequotas.amazonaws.com" + and event.action == "GetServiceQuota" + +// truncate the timestamp to a 30-second window +| eval Esql.time_window_date_trunc = date_trunc(30 seconds, @timestamp) + +// dissect request parameters to extract service and quota code +| dissect aws.cloudtrail.request_parameters "{%{?Esql.aws_cloudtrail_request_parameters_service_code_key}=%{Esql.aws_cloudtrail_request_parameters_service_code}, %{?quota_code_key}=%{Esql.aws_cloudtrail_request_parameters_quota_code}}" + +// filter for EC2 service quota L-1216C47A (vCPU on-demand instances) +| where Esql.aws_cloudtrail_request_parameters_service_code == "ec2" and Esql.aws_cloudtrail_request_parameters_quota_code == "L-1216C47A" + +// keep only the relevant fields +| keep + Esql.time_window_date_trunc, + aws.cloudtrail.user_identity.arn, + cloud.region, + Esql.aws_cloudtrail_request_parameters_service_code, + Esql.aws_cloudtrail_request_parameters_quota_code + +// count the number of unique regions and total API calls within the time window +| stats + Esql.cloud_region_count_distinct = count_distinct(cloud.region), + Esql.event_count = count(*) + by Esql.time_window_date_trunc, aws.cloudtrail.user_identity.arn + +// filter for API calls in more than 10 regions within the 30-second window +| where + Esql.cloud_region_count_distinct >= 10 + and Esql.event_count >= 10 + +// sort by time window descending +| sort Esql.time_window_date_trunc desc +''' + [[rule.threat]] framework = "MITRE ATT&CK" diff --git a/rules/integrations/aws/exfiltration_ec2_ebs_snapshot_shared_with_another_account.toml b/rules/integrations/aws/exfiltration_ec2_ebs_snapshot_shared_with_another_account.toml index b90644c5578..5a55508c3b8 100644 --- a/rules/integrations/aws/exfiltration_ec2_ebs_snapshot_shared_with_another_account.toml +++ b/rules/integrations/aws/exfiltration_ec2_ebs_snapshot_shared_with_another_account.toml @@ -2,7 +2,7 @@ creation_date = "2024/04/16" integration = ["aws"] maturity = "production" -updated_date = "2025/06/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -21,8 +21,7 @@ interval = "5m" language = "esql" license = "Elastic License v2" name = "AWS EC2 EBS Snapshot Shared or Made Public" -note = """ -## Triage and analysis +note = """## Triage and analysis ### Investigating AWS EC2 EBS Snapshot Shared or Made Public @@ -81,10 +80,31 @@ type = "esql" query = ''' from logs-aws.cloudtrail-* metadata _id, _version, _index -| where event.provider == "ec2.amazonaws.com" and event.action == "ModifySnapshotAttribute" and event.outcome == "success" -| dissect aws.cloudtrail.request_parameters 
"{%{?snapshotId}=%{snapshotId},%{?attributeType}=%{attributeType},%{?createVolumePermission}={%{operationType}={%{?items}=[{%{?userId}=%{userId}}]}}}" -| where operationType == "add" and cloud.account.id != userId -| keep @timestamp, aws.cloudtrail.user_identity.arn, cloud.account.id, event.action, snapshotId, attributeType, operationType, userId, source.address +| where + event.provider == "ec2.amazonaws.com" + and event.action == "ModifySnapshotAttribute" + and event.outcome == "success" + +// Extract snapshotId, attribute type, operation type, and userId +| dissect aws.cloudtrail.request_parameters + "{%{?snapshotId}=%{Esql.aws_cloudtrail_request_parameters_snapshot_id},%{?attributeType}=%{Esql.aws_cloudtrail_request_parameters_attribute_type},%{?createVolumePermission}={%{Esql.aws_cloudtrail_request_parameters_operation_type}={%{?items}=[{%{?userId}=%{Esql_priv.aws_cloudtrail_request_parameters_user_id}}]}}}" + +// Check for snapshot permission added for another AWS account +| where + Esql.aws_cloudtrail_request_parameters_operation_type == "add" + and cloud.account.id != Esql_priv.aws_cloudtrail_request_parameters_user_id + +// keep ECS and derived fields +| keep + @timestamp, + aws.cloudtrail.user_identity.arn, + cloud.account.id, + event.action, + Esql.aws_cloudtrail_request_parameters_snapshot_id, + Esql.aws_cloudtrail_request_parameters_attribute_type, + Esql.aws_cloudtrail_request_parameters_operation_type, + Esql_priv.aws_cloudtrail_request_parameters_user_id, + source.ip ''' diff --git a/rules/integrations/aws/impact_aws_s3_bucket_enumeration_or_brute_force.toml b/rules/integrations/aws/impact_aws_s3_bucket_enumeration_or_brute_force.toml index 45a051a8d72..f437ea406fd 100644 --- a/rules/integrations/aws/impact_aws_s3_bucket_enumeration_or_brute_force.toml +++ b/rules/integrations/aws/impact_aws_s3_bucket_enumeration_or_brute_force.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/05/01" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -86,13 +86,29 @@ type = "esql" query = ''' from logs-aws.cloudtrail* -| where event.provider == "s3.amazonaws.com" and aws.cloudtrail.error_code == "AccessDenied" -// keep only relevant fields -| keep tls.client.server_name, source.address, cloud.account.id -| stats failed_requests = count(*) by tls.client.server_name, source.address, cloud.account.id - // can modify the failed request count or tweak time window to fit environment - // can add `not cloud.account.id in (KNOWN)` or specify in exceptions -| where failed_requests > 40 + +| where + event.provider == "s3.amazonaws.com" + and aws.cloudtrail.error_code == "AccessDenied" + and tls.client.server_name is not null + and cloud.account.id is not null + +// keep only relevant ECS fields +| keep + tls.client.server_name, + source.address, + cloud.account.id + +// count access denied requests per server_name, source, and account +| stats + Esql.event_count = count(*) + by + tls.client.server_name, + source.address, + cloud.account.id + +// Threshold: more than 40 denied requests +| where Esql.event_count > 40 ''' diff --git a/rules/integrations/aws/impact_ec2_ebs_snapshot_access_removed.toml b/rules/integrations/aws/impact_ec2_ebs_snapshot_access_removed.toml index dbffdf0db3e..48fad7798e8 100644 --- a/rules/integrations/aws/impact_ec2_ebs_snapshot_access_removed.toml +++ b/rules/integrations/aws/impact_ec2_ebs_snapshot_access_removed.toml @@ -2,25 +2,26 @@ creation_date = "2024/06/02" integration = ["aws"] maturity = 
"production" -updated_date = "2024/06/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] description = """ -Identifies the removal of access permissions from a shared AWS EC2 EBS snapshot. EBS snapshots are essential for data retention and disaster recovery. Adversaries may revoke or modify snapshot permissions to prevent legitimate users from accessing backups, thereby obstructing recovery efforts after data loss or destructive actions. This tactic can also be used to evade detection or maintain exclusive access to critical backups, ultimately increasing the impact of an attack and complicating incident response. +Identifies the removal of access permissions from a shared AWS EC2 EBS snapshot. EBS snapshots are essential for data +retention and disaster recovery. Adversaries may revoke or modify snapshot permissions to prevent legitimate users from +accessing backups, thereby obstructing recovery efforts after data loss or destructive actions. This tactic can also be +used to evade detection or maintain exclusive access to critical backups, ultimately increasing the impact of an attack +and complicating incident response. """ false_positives = [ - """ - Access removal may be a part of normal operations and should be verified before taking action. - """, + " Access removal may be a part of normal operations and should be verified before taking action.\n ", ] from = "now-6m" interval = "5m" language = "esql" license = "Elastic License v2" name = "AWS EC2 EBS Snapshot Access Removed" -note = """ -## Triage and analysis +note = """## Triage and analysis ### Investigating AWS EC2 EBS Snapshot Access Removed @@ -34,7 +35,7 @@ Restricting snapshot access may help adversaries cover their tracks by making it - **Analyze the Source of the Request**: Investigate the `source.ip` and `source.geo` fields to determine the geographical origin of the request. An external or unexpected location might indicate compromised credentials or unauthorized access. - **Contextualize with Timestamp**: Use the `@timestamp` field to check when the change occurred. Modifications during non-business hours or outside regular maintenance windows might require further scrutiny. - **Correlate with Other Activities**: Search for related CloudTrail events before and after this change to see if the same actor or IP address engaged in other potentially suspicious activities. In particular, use the `snapshotId` to see if this snapshot was shared with an unauthorized account. -- **Review UserID**: Check the `userId` field to identify which user's permissions were removed. Verify if this account should be authorized to access the data or if the access removal is expected. +- **Review UserID**: Check the `userId` field to identify which user's permissions were removed. Verify if this account should be authorized to access the data or if the access removal is expected. 
### False Positive Analysis: @@ -76,25 +77,46 @@ type = "esql" query = ''' from logs-aws.cloudtrail-* metadata _id, _version, _index -| where event.provider == "ec2.amazonaws.com" and event.action == "ModifySnapshotAttribute" and event.outcome == "success" -| dissect aws.cloudtrail.request_parameters "{%{?snapshotId}=%{snapshotId},%{?attributeType}=%{attributeType},%{?createVolumePermission}={%{operationType}={%{?items}=[{%{?userId}=%{userId}}]}}}" -| where operationType == "remove" -| keep @timestamp, aws.cloudtrail.user_identity.arn, cloud.account.id, event.action, snapshotId, attributeType, operationType, userId, source.address + +// Filter for successful snapshot modifications +| where + event.provider == "ec2.amazonaws.com" + and event.action == "ModifySnapshotAttribute" + and event.outcome == "success" + +// dissect parameters to extract key fields +| dissect aws.cloudtrail.request_parameters + "{%{?snapshotId}=%{Esql.aws_cloudtrail_request_parameters_snapshot_id},%{?attributeType}=%{Esql.aws_cloudtrail_request_parameters_attribute_type},%{?createVolumePermission}={%{Esql.aws_cloudtrail_request_parameters_operation_type}={%{?items}=[{%{?userId}=%{Esql_priv.aws_cloudtrail_request_parameters_user_id}}]}}}" + +// Match on snapshot permission **removal** +| where Esql.aws_cloudtrail_request_parameters_operation_type == "remove" + +// keep ECS and derived fields +| keep + @timestamp, + aws.cloudtrail.user_identity.arn, + cloud.account.id, + event.action, + Esql.aws_cloudtrail_request_parameters_snapshot_id, + Esql.aws_cloudtrail_request_parameters_attribute_type, + Esql.aws_cloudtrail_request_parameters_operation_type, + Esql_priv.aws_cloudtrail_request_parameters_user_id, + source.address ''' [[rule.threat]] framework = "MITRE ATT&CK" -[[rule.threat.technique]] -id = "T1490" -name = "Inhibit System Recovery" -reference = "https://attack.mitre.org/techniques/T1490/" - [[rule.threat.technique]] id = "T1485" name = "Data Destruction" reference = "https://attack.mitre.org/techniques/T1485/" +[[rule.threat.technique]] +id = "T1490" +name = "Inhibit System Recovery" +reference = "https://attack.mitre.org/techniques/T1490/" + [rule.threat.tactic] id = "TA0040" diff --git a/rules/integrations/aws/impact_s3_bucket_object_uploaded_with_ransom_extension.toml b/rules/integrations/aws/impact_s3_bucket_object_uploaded_with_ransom_extension.toml index c1ceaf67808..c93d4ab67e8 100644 --- a/rules/integrations/aws/impact_s3_bucket_object_uploaded_with_ransom_extension.toml +++ b/rules/integrations/aws/impact_s3_bucket_object_uploaded_with_ransom_extension.toml @@ -2,7 +2,7 @@ creation_date = "2024/04/17" integration = ["aws"] maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -21,8 +21,7 @@ from = "now-9m" language = "esql" license = "Elastic License v2" name = "Potential AWS S3 Bucket Ransomware Note Uploaded" -note = """ -## Triage and analysis +note = """## Triage and analysis ### Investigating Potential AWS S3 Bucket Ransomware Note Uploaded @@ -84,26 +83,36 @@ query = ''' from logs-aws.cloudtrail-* // any successful uploads via S3 API requests -| where event.dataset == "aws.cloudtrail" - and event.provider == "s3.amazonaws.com" - and event.action == "PutObject" - and event.outcome == "success" - -// abstract object name from API request parameters -| dissect aws.cloudtrail.request_parameters "%{?ignore_values}key=%{object_name}}" - -// regex on common ransomware note extensions -| where object_name rlike 
"(.*)(ransom|lock|crypt|enc|readme|how_to_decrypt|decrypt_instructions|recovery|datarescue)(.*)" - and not object_name rlike "(.*)(AWSLogs|CloudTrail|access-logs)(.*)" - -// keep relevant fields -| keep tls.client.server_name, aws.cloudtrail.user_identity.arn, object_name - -// aggregate by S3 bucket, resource and object name -| stats note_upload_count = count(*) by tls.client.server_name, aws.cloudtrail.user_identity.arn, object_name - -// filter for single occurrence to eliminate common upload operations -| where note_upload_count == 1 +| where + event.dataset == "aws.cloudtrail" + and event.provider == "s3.amazonaws.com" + and event.action == "PutObject" + and event.outcome == "success" + +// extract object key from API request parameters +| dissect aws.cloudtrail.request_parameters "%{?ignore_values}key=%{Esql.aws_cloudtrail_request_parameters_object_key}}" + +// regex match against common ransomware naming patterns +| where + Esql.aws_cloudtrail_request_parameters_object_key rlike "(.*)(ransom|lock|crypt|enc|readme|how_to_decrypt|decrypt_instructions|recovery|datarescue)(.*)" + and not Esql.aws_cloudtrail_request_parameters_object_key rlike "(.*)(AWSLogs|CloudTrail|access-logs)(.*)" + +// keep relevant ECS and derived fields +| keep + tls.client.server_name, + aws.cloudtrail.user_identity.arn, + Esql.aws_cloudtrail_request_parameters_object_key + +// aggregate by server name, actor, and object key +| stats + Esql.event_count = count(*) + by + tls.client.server_name, + aws.cloudtrail.user_identity.arn, + Esql.aws_cloudtrail_request_parameters_object_key + +// filter for rare single uploads (likely test/detonation) +| where Esql.event_count == 1 ''' diff --git a/rules/integrations/aws/impact_s3_object_encryption_with_external_key.toml b/rules/integrations/aws/impact_s3_object_encryption_with_external_key.toml index 58054fcd660..b1eaf50275b 100644 --- a/rules/integrations/aws/impact_s3_object_encryption_with_external_key.toml +++ b/rules/integrations/aws/impact_s3_object_encryption_with_external_key.toml @@ -2,7 +2,7 @@ creation_date = "2024/07/02" integration = ["aws"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -22,8 +22,7 @@ from = "now-9m" language = "esql" license = "Elastic License v2" name = "AWS S3 Object Encryption Using External KMS Key" -note = """ -## Triage and analysis +note = """## Triage and analysis ### Investigating AWS S3 Object Encryption Using External KMS Key @@ -85,21 +84,30 @@ type = "esql" query = ''' from logs-aws.cloudtrail-* metadata _id, _version, _index -// any successful copy event -| where event.dataset == "aws.cloudtrail" - and event.provider == "s3.amazonaws.com" - and event.action == "CopyObject" - and event.outcome == "success" - -// abstract key account id, key id, encrypted object bucket name and object name -| dissect aws.cloudtrail.request_parameters "{%{?bucketName}=%{target.bucketName},%{?x-amz-server-side-encryption-aws-kms-key-id}=%{?arn}:%{?aws}:%{?kms}:%{?region}:%{key.account.id}:%{?key}/%{keyId},%{?Host}=%{?tls.client.server_name},%{?x-amz-server-side-encryption}=%{?server-side-encryption},%{?x-amz-copy-source}=%{?bucket.objectName},%{?key}=%{target.objectName}}" - -// filter for s3 objects whose account id is different from the encryption key's account id -// add exceptions based on key.account.id or keyId for known external accounts or encryption keys -| where cloud.account.id != key.account.id - -// keep relevant fields -| keep @timestamp, 
aws.cloudtrail.user_identity.arn, cloud.account.id, event.action, target.bucketName, key.account.id, keyId, target.objectName +// any successful S3 copy event +| where + event.dataset == "aws.cloudtrail" + and event.provider == "s3.amazonaws.com" + and event.action == "CopyObject" + and event.outcome == "success" + +// dissect request parameters to extract KMS key info and target object info +| dissect aws.cloudtrail.request_parameters + "{%{?bucketName}=%{Esql.aws_cloudtrail_request_parameters_target_bucket_name},%{?x-amz-server-side-encryption-aws-kms-key-id}=%{?arn}:%{?aws}:%{?kms}:%{?region}:%{Esql.aws_cloudtrail_request_parameters_kms_key_account_id}:%{?key}/%{Esql.aws_cloudtrail_request_parameters_kms_key_id},%{?Host}=%{?tls.client.server.name},%{?x-amz-server-side-encryption}=%{?server_side_encryption},%{?x-amz-copy-source}=%{?bucket.object.name},%{?key}=%{Esql.aws_cloudtrail_request_parameters_target_object_key}}" + +// detect cross-account key usage +| where cloud.account.id != Esql.aws_cloudtrail_request_parameters_kms_key_account_id + +// keep ECS and dissected fields +| keep + @timestamp, + aws.cloudtrail.user_identity.arn, + cloud.account.id, + event.action, + Esql.aws_cloudtrail_request_parameters_target_bucket_name, + Esql.aws_cloudtrail_request_parameters_kms_key_account_id, + Esql.aws_cloudtrail_request_parameters_kms_key_id, + Esql.aws_cloudtrail_request_parameters_target_object_key ''' diff --git a/rules/integrations/aws/impact_s3_static_site_js_file_uploaded.toml b/rules/integrations/aws/impact_s3_static_site_js_file_uploaded.toml index 686b020d583..86e4ddd9c1d 100644 --- a/rules/integrations/aws/impact_s3_static_site_js_file_uploaded.toml +++ b/rules/integrations/aws/impact_s3_static_site_js_file_uploaded.toml @@ -2,14 +2,14 @@ creation_date = "2025/04/15" integration = ["aws"] maturity = "production" -updated_date = "2025/04/15" +updated_date = "2025/07/16" [rule] author = ["Elastic"] description = """ This rule detects when a JavaScript file is uploaded or accessed in an S3 static site directory (`static/js/`) by an IAM -user or assumed role. This can indicate suspicious modification of web content hosted on S3, such as injecting malicious scripts into a -static website frontend. +user or assumed role. This can indicate suspicious modification of web content hosted on S3, such as injecting malicious +scripts into a static website frontend. 
""" false_positives = [ """ @@ -71,41 +71,44 @@ type = "esql" query = ''' from logs-aws.cloudtrail* metadata _id, _version, _index -| where - // filter on CloudTrail logs for S3 PutObject actions +| where + // S3 object read/write activity event.dataset == "aws.cloudtrail" and event.provider == "s3.amazonaws.com" - and event.action in ("GetObject","PutObject") + and event.action in ("GetObject", "PutObject") - // filter for IAM users, not federated identities + // IAM users or assumed roles only and aws.cloudtrail.user_identity.type in ("IAMUser", "AssumedRole") - // filter for S3 static site bucket paths from webpack or similar - and aws.cloudtrail.request_parameters LIKE "*static/js/*.js*" + // Requests for static site bundles + and aws.cloudtrail.request_parameters like "*static/js/*.js*" - // exclude common IaC tools and automation scripts + // Exclude IaC and automation tools and not ( - user_agent.original LIKE "*Terraform*" - or user_agent.original LIKE "*Ansible*" - or user_agent.original LIKE "*Pulumni*" + user_agent.original like "*Terraform*" + or user_agent.original like "*Ansible*" + or user_agent.original like "*Pulumni*" ) -// extract bucket and object details from request parameters -| dissect aws.cloudtrail.request_parameters "%{{?bucket.name.key}=%{bucket.name}, %{?host.key}=%{bucket.host}, %{?bucket.object.location.key}=%{bucket.object.location}}" +// Extract fields from request parameters +| dissect aws.cloudtrail.request_parameters + "%{{?bucket.name.key}=%{Esql.aws_cloudtrail_request_parameters_bucket_name}, %{?host.key}=%{Esql_priv.aws_cloudtrail_request_parameters_host}, %{?bucket.object.location.key}=%{Esql.aws_cloudtrail_request_parameters_bucket_object_location}}" + +// Extract file name portion from full object path +| dissect Esql.aws_cloudtrail_request_parameters_object_location "%{}static/js/%{Esql.aws_cloudtrail_request_parameters_object_key}" -// filter for specific bucket and object structure -| dissect bucket.object.location "%{}static/js/%{bucket.object}" +// Match on JavaScript files +| where ends_with(Esql.aws_cloudtrail_request_parameters_object_key, ".js") -// filter for JavaScript files -| where ENDS_WITH(bucket.object, ".js") +// Retain relevant ECS and dissected fields | keep aws.cloudtrail.user_identity.arn, aws.cloudtrail.user_identity.access_key_id, aws.cloudtrail.user_identity.type, aws.cloudtrail.request_parameters, - bucket.name, - bucket.object, + Esql.aws_cloudtrail_request_parameters_bucket_name, + Esql.aws_cloudtrail_request_parameters_object_key, user_agent.original, source.ip, event.action, diff --git a/rules/integrations/aws/initial_access_iam_session_token_used_from_multiple_addresses.toml b/rules/integrations/aws/initial_access_iam_session_token_used_from_multiple_addresses.toml index 124a39fc313..995fd06a2e5 100644 --- a/rules/integrations/aws/initial_access_iam_session_token_used_from_multiple_addresses.toml +++ b/rules/integrations/aws/initial_access_iam_session_token_used_from_multiple_addresses.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/11" integration = ["aws"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -33,12 +33,12 @@ note = """## Triage and Analysis ### Investigating AWS Access Token Used from Multiple Addresses -Access tokens are bound to a single user. Usage from multiple IP addresses may indicate the token was stolen and used elsewhere. 
By correlating this with additional detection criteria like multiple user agents, different cities, and different networks, we can improve the fidelity of the rule and help to eliminate false positives associated with expected behavior, like dual-stack IPV4/IPV6 usage. +Access tokens are bound to a single user. Usage from multiple IP addresses may indicate the token was stolen and used elsewhere. By correlating this with additional detection criteria like multiple user agents, different cities, and different networks, we can improve the fidelity of the rule and help to eliminate false positives associated with expected behavior, like dual-stack IPV4/IPV6 usage. #### Possible Investigation Steps - **Identify the IAM User**: Examine the `aws.cloudtrail.user_identity.arn` stored in `user_id` and correlate with the `source.ips` stored in `ip_list` and `unique_ips` count to determine how widely the token was used. -- **Correlate Additional Detection Context**: Examine `activity_type` and `fidelity_score` to determine additional cities, networks or user agents associated with the token usage. +- **Correlate Additional Detection Context**: Examine `activity_type` and `fidelity_score` to determine additional cities, networks or user agents associated with the token usage. - **Determine Access Key Type**: Examine the `access_key_id` to determine whether the token is short-term (beginning with ASIA) or long-term (beginning with AKIA). - **Check Recent MFA Events**: Determine whether the user recently enabled MFA, registered devices, or assumed a role using this token. - **Review Workload Context**: Confirm whether the user was expected to be active across multiple cities, networks or user agent environments. @@ -80,81 +80,97 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-aws.cloudtrail* metadata _id, _version, _index -| WHERE @timestamp > NOW() - 30 minutes - // filter on CloudTrail logs for STS temporary session tokens used by IAM users - - AND event.dataset == "aws.cloudtrail" - AND aws.cloudtrail.user_identity.arn IS NOT NULL - AND aws.cloudtrail.user_identity.type == "IAMUser" - AND source.ip IS NOT NULL - - // exclude known benign IaC tools and Amazon Network - AND NOT (user_agent.original LIKE "%Terraform%" OR user_agent.original LIKE "%Ansible%" OR user_agent.original LIKE "%Pulumni%") - AND `source.as.organization.name` != "AMAZON-AES" - - // exclude noisy service APIs less indicative of malicous behavior - AND event.provider NOT IN ("health.amazonaws.com", "monitoring.amazonaws.com", "notifications.amazonaws.com", "ce.amazonaws.com", "cost-optimization-hub.amazonaws.com", "servicecatalog-appregistry.amazonaws.com", "securityhub.amazonaws.com") - -| EVAL - // create a time window for aggregation - time_window = DATE_TRUNC(30 minutes, @timestamp), - // capture necessary fields for detection and investigation - user_id = aws.cloudtrail.user_identity.arn, - access_key_id = aws.cloudtrail.user_identity.access_key_id, - ip = source.ip, - user_agent = user_agent.original, - ip_string = TO_STRING(source.ip), // Convert IP to string - ip_user_agent_pair = CONCAT(ip_string, " - ", user_agent.original), // Combine IP and user agent - ip_city_pair = CONCAT(ip_string, " - ", source.geo.city_name), // Combine IP and city - city = source.geo.city_name, - event_time = @timestamp, - network_arn = `source.as.organization.name` - -| STATS - event_actions = VALUES(event.action), - event_providers = VALUES(event.provider), - access_key_id = VALUES(access_key_id), - user_id = 
VALUES(user_id), - ip_list = VALUES(ip), // Collect list of IPs - user_agent_list = VALUES(user_agent), // Collect list of user agents - ip_user_agent_pairs = VALUES(ip_user_agent_pair), // Collect list of IP - user agent pairs - cities_list = VALUES(city), // Collect list of cities - ip_city_pairs = VALUES(ip_city_pair), // Collect list of IP - city pairs - networks_list = VALUES(network_arn), // Collect list of networks - unique_ips = COUNT_DISTINCT(ip), - unique_user_agents = COUNT_DISTINCT(user_agent), - unique_cities = COUNT_DISTINCT(city), - unique_networks = COUNT_DISTINCT(network_arn), - first_seen = MIN(event_time), - last_seen = MAX(event_time), - total_events = COUNT() - BY time_window, access_key_id - -| EVAL - // activity type based on combinations of detection criteria - activity_type = CASE( - unique_ips >= 2 AND unique_networks >= 2 AND unique_cities >= 2 AND unique_user_agents >= 2, "multiple_ip_network_city_user_agent", // high severity - unique_ips >= 2 AND unique_networks >= 2 AND unique_cities >= 2, "multiple_ip_network_city", // high severity - unique_ips >= 2 AND unique_cities >= 2, "multiple_ip_and_city", // medium severity - unique_ips >= 2 AND unique_networks >= 2, "multiple_ip_and_network", // medium severity - unique_ips >= 2 AND unique_user_agents >= 2, "multiple_ip_and_user_agent", // low severity - "normal_activity" - ), - // likelihood of malicious activity based on activity type - fidelity_score = CASE( - activity_type == "multiple_ip_network_city_user_agent", "high", - activity_type == "multiple_ip_network_city", "high", - activity_type == "multiple_ip_and_city", "medium", - activity_type == "multiple_ip_and_network", "medium", - activity_type == "multiple_ip_and_user_agent", "low" - ) - -| KEEP - time_window, activity_type, fidelity_score, total_events, first_seen, last_seen, - user_id, access_key_id, event_actions, event_providers, ip_list, user_agent_list, ip_user_agent_pairs, cities_list, ip_city_pairs, networks_list, unique_ips, unique_user_agents, unique_cities, unique_networks - -| WHERE activity_type != "normal_activity" +from logs-aws.cloudtrail* metadata _id, _version, _index +| where @timestamp > now() - 30 minutes + and event.dataset == "aws.cloudtrail" + and aws.cloudtrail.user_identity.arn is not null + and aws.cloudtrail.user_identity.type == "IAMUser" + and source.ip is not null + and not ( + user_agent.original like "%Terraform%" or + user_agent.original like "%Ansible%" or + user_agent.original like "%Pulumni%" + ) + and `source.as.organization.name` != "AMAZON-AES" + and event.provider not in ( + "health.amazonaws.com", "monitoring.amazonaws.com", "notifications.amazonaws.com", + "ce.amazonaws.com", "cost-optimization-hub.amazonaws.com", + "servicecatalog-appregistry.amazonaws.com", "securityhub.amazonaws.com" + ) + +| eval + Esql.time_window_date_trunc = date_trunc(30 minutes, @timestamp), + Esql.aws_cloudtrail_user_identity_arn = aws.cloudtrail.user_identity.arn, + Esql.aws_cloudtrail_user_identity_access_key_id = aws.cloudtrail.user_identity.access_key_id, + Esql.source_ip = source.ip, + Esql.user_agent_original = user_agent.original, + Esql.source_ip_string = to_string(source.ip), + Esql.source_ip_user_agent_pair = concat(Esql.source_ip_string, " - ", user_agent.original), + Esql.source_ip_city_pair = concat(Esql.source_ip_string, " - ", source.geo.city_name), + Esql.source_geo_city_name = source.geo.city_name, + Esql.event_timestamp = @timestamp, + Esql.source_network_org_name = `source.as.organization.name` + +| stats + 
Esql.event_action_values = values(event.action), + Esql.event_provider_values = values(event.provider), + Esql.aws_cloudtrail_user_identity_access_key_id_values = values(Esql.aws_cloudtrail_user_identity_access_key_id), + Esql.aws_cloudtrail_user_identity_arn_values = values(Esql.aws_cloudtrail_user_identity_arn), + Esql.source_ip_values = values(Esql.source_ip), + Esql.user_agent_original_values = values(Esql.user_agent_original), + Esql.source_ip_user_agent_pair_values = values(Esql.source_ip_user_agent_pair), + Esql.source_geo_city_name_values = values(Esql.source_geo_city_name), + Esql.source_ip_city_pair_values = values(Esql.source_ip_city_pair), + Esql.source_network_org_name_values = values(Esql.source_network_org_name), + Esql.source_ip_count_distinct = count_distinct(Esql.source_ip), + Esql.user_agent_original_count_distinct = count_distinct(Esql.user_agent_original), + Esql.source_geo_city_name_count_distinct = count_distinct(Esql.source_geo_city_name), + Esql.source_network_org_name_count_distinct = count_distinct(Esql.source_network_org_name), + Esql.timestamp_first_seen = min(Esql.event_timestamp), + Esql.timestamp_last_seen = max(Esql.event_timestamp), + Esql.event_count = count() + by Esql.time_window_date_trunc, Esql.aws_cloudtrail_user_identity_access_key_id + +| eval + Esql.activity_type = case( + Esql.source_ip_count_distinct >= 2 and Esql.source_network_org_name_count_distinct >= 2 and Esql.source_geo_city_name_count_distinct >= 2 and Esql.user_agent_original_count_distinct >= 2, "multiple_ip_network_city_user_agent", + Esql.source_ip_count_distinct >= 2 and Esql.source_network_org_name_count_distinct >= 2 and Esql.source_geo_city_name_count_distinct >= 2, "multiple_ip_network_city", + Esql.source_ip_count_distinct >= 2 and Esql.source_geo_city_name_count_distinct >= 2, "multiple_ip_and_city", + Esql.source_ip_count_distinct >= 2 and Esql.source_network_org_name_count_distinct >= 2, "multiple_ip_and_network", + Esql.source_ip_count_distinct >= 2 and Esql.user_agent_original_count_distinct >= 2, "multiple_ip_and_user_agent", + "normal_activity" + ), + Esql.activity_fidelity_score = case( + Esql.activity_type == "multiple_ip_network_city_user_agent", "high", + Esql.activity_type == "multiple_ip_network_city", "high", + Esql.activity_type == "multiple_ip_and_city", "medium", + Esql.activity_type == "multiple_ip_and_network", "medium", + Esql.activity_type == "multiple_ip_and_user_agent", "low" + ) + +| keep + Esql.time_window_date_trunc, + Esql.activity_type, + Esql.activity_fidelity_score, + Esql.event_count, + Esql.timestamp_first_seen, + Esql.timestamp_last_seen, + Esql.aws_cloudtrail_user_identity_arn_values, + Esql.aws_cloudtrail_user_identity_access_key_id_values, + Esql.event_action_values, + Esql.event_provider_values, + Esql.source_ip_values, + Esql.user_agent_original_values, + Esql.source_ip_user_agent_pair_values, + Esql.source_geo_city_name_values, + Esql.source_ip_city_pair_values, + Esql.source_network_org_name_values, + Esql.source_ip_count_distinct, + Esql.user_agent_original_count_distinct, + Esql.source_geo_city_name_count_distinct, + Esql.source_network_org_name_count_distinct + +| where Esql.activity_type != "normal_activity" ''' diff --git a/rules/integrations/aws/initial_access_signin_console_login_no_mfa.toml b/rules/integrations/aws/initial_access_signin_console_login_no_mfa.toml index 946da73f024..ca78ccf8ca6 100644 --- a/rules/integrations/aws/initial_access_signin_console_login_no_mfa.toml +++ 
b/rules/integrations/aws/initial_access_signin_console_login_no_mfa.toml @@ -2,7 +2,7 @@ creation_date = "2024/08/19" integration = ["aws"] maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -69,14 +69,28 @@ type = "esql" query = ''' from logs-aws.cloudtrail-* metadata _id, _version, _index + | where event.provider == "signin.amazonaws.com" and event.action == "GetSigninToken" and aws.cloudtrail.event_type == "AwsConsoleSignIn" and aws.cloudtrail.user_identity.type == "FederatedUser" -| dissect aws.cloudtrail.additional_eventdata "{%{?mobile_version_key}=%{mobile_version}, %{?mfa_used_key}=%{mfa_used}}" -| where mfa_used == "No" -| keep @timestamp, event.action, aws.cloudtrail.event_type, aws.cloudtrail.user_identity.type + +// Extract mobile version and MFA usage +| dissect aws.cloudtrail.additional_eventdata + "{%{?mobile_version_key}=%{Esql.aws_cloudtrail_additional_eventdata_device_version}, %{?mfa_used_key}=%{Esql.aws_cloudtrail_additional_eventdata_auth_mfa_used}}" + +// Only keep events where MFA was not used +| where Esql.aws_cloudtrail_additional_eventdata_auth_mfa_used == "No" + +// keep relevant ECS and dissected fields +| keep + @timestamp, + event.action, + aws.cloudtrail.event_type, + aws.cloudtrail.user_identity.type, + Esql.aws_cloudtrail_additional_eventdata_device_version, + Esql.aws_cloudtrail_additional_eventdata_auth_mfa_used ''' diff --git a/rules/integrations/aws/persistence_iam_create_login_profile_for_root.toml b/rules/integrations/aws/persistence_iam_create_login_profile_for_root.toml index a7d7733a8b6..e1ed51b5c6a 100644 --- a/rules/integrations/aws/persistence_iam_create_login_profile_for_root.toml +++ b/rules/integrations/aws/persistence_iam_create_login_profile_for_root.toml @@ -2,7 +2,7 @@ creation_date = "2024/12/02" integration = ["aws"] maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/26" [rule] author = ["Elastic"] @@ -141,10 +141,10 @@ from logs-aws.cloudtrail* metadata _id, _version, _index and aws.cloudtrail.user_identity.type == "Root" // filter for an access key existing which sources from AssumeRoot - and aws.cloudtrail.user_identity.access_key_id IS NOT NULL + and aws.cloudtrail.user_identity.access_key_id is not null // filter on the request parameters not including UserName which assumes self-assignment - and NOT TO_LOWER(aws.cloudtrail.request_parameters) LIKE "*username*" + and not to_lower(aws.cloudtrail.request_parameters) like "*username*" | keep @timestamp, aws.cloudtrail.request_parameters, diff --git a/rules/integrations/aws/persistence_iam_user_created_access_keys_for_another_user.toml b/rules/integrations/aws/persistence_iam_user_created_access_keys_for_another_user.toml index 36c9ab1caf4..a27c8df0b92 100644 --- a/rules/integrations/aws/persistence_iam_user_created_access_keys_for_another_user.toml +++ b/rules/integrations/aws/persistence_iam_user_created_access_keys_for_another_user.toml @@ -2,7 +2,7 @@ creation_date = "2024/06/13" integration = ["aws"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/24" [rule] author = ["Elastic"] diff --git a/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_group.toml b/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_group.toml index 54972f45093..527258f0714 100644 --- a/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_group.toml +++ 
b/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_group.toml @@ -2,7 +2,7 @@ creation_date = "2024/05/31" integration = ["aws"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -99,10 +99,27 @@ type = "esql" query = ''' from logs-aws.cloudtrail-* metadata _id, _version, _index -| where event.provider == "iam.amazonaws.com" and event.action == "AttachGroupPolicy" and event.outcome == "success" -| dissect aws.cloudtrail.request_parameters "{%{?policyArn}=%{?arn}:%{?aws}:%{?iam}::%{?aws}:%{?policy}/%{policyName},%{?groupName}=%{group.name}}" -| where policyName == "AdministratorAccess" -| keep @timestamp, event.provider, event.action, event.outcome, policyName, group.name + +| where + event.provider == "iam.amazonaws.com" + and event.action == "AttachGroupPolicy" + and event.outcome == "success" + +// Extract policy and group details from request parameters +| dissect aws.cloudtrail.request_parameters + "{%{?policyArn}=%{?arn}:%{?aws}:%{?iam}::%{?aws}:%{?policy}/%{Esql.aws_cloudtrail_request_parameters_policy_name},%{?groupName}=%{Esql.aws_cloudtrail_request_parameters_group_name}}" + +// Filter for attachment of AdministratorAccess policy +| where Esql.aws_cloudtrail_request_parameters_policy_name == "AdministratorAccess" + +// keep ECS and derived fields +| keep + @timestamp, + event.provider, + event.action, + event.outcome, + Esql.aws_cloudtrail_request_parameters_policy_name, + Esql.aws_cloudtrail_request_parameters_group_name ''' diff --git a/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_role.toml b/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_role.toml index 77254acc4c6..930fce7f23c 100644 --- a/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_role.toml +++ b/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_role.toml @@ -2,7 +2,7 @@ creation_date = "2024/05/31" integration = ["aws"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -98,10 +98,27 @@ type = "esql" query = ''' from logs-aws.cloudtrail-* metadata _id, _version, _index -| where event.provider == "iam.amazonaws.com" and event.action == "AttachRolePolicy" and event.outcome == "success" -| dissect aws.cloudtrail.request_parameters "{%{?policyArn}=%{?arn}:%{?aws}:%{?iam}::%{?aws}:%{?policy}/%{policyName},%{?roleName}=%{role.name}}" -| where policyName == "AdministratorAccess" -| keep @timestamp, event.provider, event.action, event.outcome, policyName, role.name + +| where + event.provider == "iam.amazonaws.com" + and event.action == "AttachRolePolicy" + and event.outcome == "success" + +// Extract policy name and role name from request parameters +| dissect aws.cloudtrail.request_parameters + "{%{?policyArn}=%{?arn}:%{?aws}:%{?iam}::%{?aws}:%{?policy}/%{Esql.aws_cloudtrail_request_parameters_policy_name},%{?roleName}=%{Esql.aws_cloudtrail_request_parameters_role_name}}" + +// Filter for AdministratorAccess policy attachment +| where Esql.aws_cloudtrail_request_parameters_policy_name == "AdministratorAccess" + +// keep relevant ECS and dynamic fields +| keep + @timestamp, + event.provider, + event.action, + event.outcome, + Esql.aws_cloudtrail_request_parameters_policy_name, + Esql.aws_cloudtrail_request_parameters_role_name ''' diff --git 
a/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_user.toml b/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_user.toml index 906e2d40b4f..79a79e8c400 100644 --- a/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_user.toml +++ b/rules/integrations/aws/privilege_escalation_iam_administratoraccess_policy_attached_to_user.toml @@ -2,7 +2,7 @@ creation_date = "2024/05/30" integration = ["aws"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -98,17 +98,28 @@ type = "esql" query = ''' from logs-aws.cloudtrail-* metadata _id, _version, _index -| where event.provider == "iam.amazonaws.com" and event.action == "AttachUserPolicy" and event.outcome == "success" -| dissect aws.cloudtrail.request_parameters "{%{?policyArn}=%{?arn}:%{?aws}:%{?iam}::%{?aws}:%{?policy}/%{policyName},%{?userName}=%{target.userName}}" -| where policyName == "AdministratorAccess" + +| where + event.provider == "iam.amazonaws.com" + and event.action == "AttachUserPolicy" + and event.outcome == "success" + +// Extract policy name and user name from request parameters +| dissect aws.cloudtrail.request_parameters + "{%{?policyArn}=%{?arn}:%{?aws}:%{?iam}::%{?aws}:%{?policy}/%{Esql.aws_cloudtrail_request_parameters_policy_name},%{?userName}=%{Esql_priv.aws_cloudtrail_request_parameters_target_user_name}}" + +// Filter for AdministratorAccess policy +| where Esql.aws_cloudtrail_request_parameters_policy_name == "AdministratorAccess" + +// keep ECS and parsed fields | keep @timestamp, cloud.region, event.provider, event.action, event.outcome, - policyName, - target.userName, + Esql.aws_cloudtrail_request_parameters_policy_name, + Esql_priv.aws_cloudtrail_request_parameters_target_user_name, aws.cloudtrail.request_parameters, aws.cloudtrail.user_identity.arn, related.user, diff --git a/rules/integrations/aws/privilege_escalation_sts_role_chaining.toml b/rules/integrations/aws/privilege_escalation_sts_role_chaining.toml index fb1acc26080..4cb39dcbc9c 100644 --- a/rules/integrations/aws/privilege_escalation_sts_role_chaining.toml +++ b/rules/integrations/aws/privilege_escalation_sts_role_chaining.toml @@ -2,19 +2,22 @@ creation_date = "2024/10/23" integration = ["aws"] maturity = "production" -updated_date = "2025/01/15" +updated_date = "2025/07/16" [rule] author = ["Elastic"] description = """ -Identifies role chaining activity. Role chaining is when you use one assumed role to assume a second role through the AWS CLI or API. -While this a recognized functionality in AWS, role chaining can be abused for privilege escalation if the subsequent assumed role provides additional privileges. -Role chaining can also be used as a persistence mechanism as each AssumeRole action results in a refreshed session token with a 1 hour maximum duration. -This rule looks for role chaining activity happening within a single account, to eliminate false positives produced by common cross-account behavior. +Identifies role chaining activity. Role chaining is when you use one assumed role to assume a second role through the +AWS CLI or API. While this is a recognized functionality in AWS, role chaining can be abused for privilege escalation if +the subsequent assumed role provides additional privileges. Role chaining can also be used as a persistence mechanism as +each AssumeRole action results in a refreshed session token with a 1 hour maximum duration. 
This rule looks for role +chaining activity happening within a single account, to eliminate false positives produced by common cross-account +behavior. """ false_positives = [ """ - Role chaining can be used as an access control. Ensure that this behavior is not part of a legitimate operation before taking action. + Role chaining can be used as an access control. Ensure that this behavior is not part of a legitimate operation + before taking action. """, ] from = "now-6m" @@ -117,6 +120,7 @@ name = "Application Access Token" reference = "https://attack.mitre.org/techniques/T1550/001/" + [rule.threat.tactic] id = "TA0008" name = "Lateral Movement" @@ -128,3 +132,4 @@ framework = "MITRE ATT&CK" id = "TA0003" name = "Persistence" reference = "https://attack.mitre.org/tactics/TA0003/" + diff --git a/rules/integrations/aws_bedrock/aws_bedrock_execution_without_guardrails.toml b/rules/integrations/aws_bedrock/aws_bedrock_execution_without_guardrails.toml index ea726f44005..856363b151c 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_execution_without_guardrails.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_execution_without_guardrails.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/11/25" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -80,12 +80,29 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* -// create time window buckets of 1 minute -| eval time_window = date_trunc(1 minute, @timestamp) -| where gen_ai.guardrail_id is NULL -| KEEP @timestamp, time_window, gen_ai.guardrail_id , user.id -| stats model_invocation_without_guardrails = count() by user.id -| where model_invocation_without_guardrails > 5 -| sort model_invocation_without_guardrails desc + +// Create 1-minute time buckets +| eval Esql.time_window_date_trunc = date_trunc(1 minute, @timestamp) + +// Filter for invocations without guardrails +| where gen_ai.guardrail_id is null and user.id is not null + +// keep only relevant fields +| keep + @timestamp, + Esql.time_window_date_trunc, + gen_ai.guardrail_id, + user.id + +// count number of unsafe invocations per user +| stats + Esql.ml_invocations_no_guardrails_count = count() + by user.id + +// Filter for suspicious volume +| where Esql.ml_invocations_no_guardrails_count > 5 + +// sort descending +| sort Esql.ml_invocations_no_guardrails_count desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_by_single_user.toml b/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_by_single_user.toml index 11dcb4124e3..dde3bd8c1b1 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_by_single_user.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_by_single_user.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/05/02" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -81,10 +81,28 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* + +// Filter for compliance violations detected | where gen_ai.compliance.violation_detected -| keep user.id, gen_ai.request.model.id, cloud.account.id -| stats violations = count(*) by user.id, gen_ai.request.model.id, cloud.account.id -| where violations > 1 -| sort violations desc + +// keep relevant ECS + model fields +| keep + user.id, + gen_ai.request.model.id, + cloud.account.id + +// count violations by user, model, and account +| stats + 
Esql.ml_violations_count = count(*) + by + user.id, + gen_ai.request.model.id, + cloud.account.id + +// Filter for repeated violations +| where Esql.ml_violations_count > 1 + +// sort descending by violation volume +| sort Esql.ml_violations_count desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_in_single_request.toml b/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_in_single_request.toml index 9d28ad526c5..ba297172e6f 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_in_single_request.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_guardrails_multiple_violations_in_single_request.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/05/02" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -81,11 +81,34 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* + +// Filter for policy-blocked requests | where gen_ai.policy.action == "BLOCKED" -| eval policy_violations = mv_count(gen_ai.policy.name) -| where policy_violations > 1 -| keep gen_ai.policy.action, policy_violations, user.id, gen_ai.request.model.id, cloud.account.id, user.id -| stats total_unique_request_violations = count(*) by policy_violations, user.id, gen_ai.request.model.id, cloud.account.id -| sort total_unique_request_violations desc + +// count number of policy matches per request (multi-valued) +| eval Esql.ml_policy_violations_mv_count = mv_count(gen_ai.policy.name) + +// Filter for requests with more than one policy match +| where Esql.ml_policy_violations_mv_count > 1 + +// keep relevant fields +| keep + gen_ai.policy.action, + Esql.ml_policy_violations_mv_count, + user.id, + gen_ai.request.model.id, + cloud.account.id + +// Aggregate requests with multiple violations +| stats + Esql.ml_policy_violations_total_unique_requests_count = count(*) + by + Esql.ml_policy_violations_mv_count, + user.id, + gen_ai.request.model.id, + cloud.account.id + +// sort by number of unique requests +| sort Esql.ml_policy_violations_total_unique_requests_count desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_high_confidence_misconduct_blocks_detected.toml b/rules/integrations/aws_bedrock/aws_bedrock_high_confidence_misconduct_blocks_detected.toml index c3cfe44e0e3..eb2d374a199 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_high_confidence_misconduct_blocks_detected.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_high_confidence_misconduct_blocks_detected.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/05/05" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -80,16 +80,41 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* -| MV_EXPAND gen_ai.compliance.violation_code -| MV_EXPAND gen_ai.policy.confidence -| MV_EXPAND gen_ai.policy.name -| where gen_ai.policy.action == "BLOCKED" and gen_ai.policy.name == "content_policy" and gen_ai.policy.confidence LIKE "HIGH" and gen_ai.compliance.violation_code IN ("HATE", "MISCONDUCT", "SEXUAL", "INSULTS", "PROMPT_ATTACK", "VIOLENCE") -| keep user.id, gen_ai.compliance.violation_code -| stats block_count_per_violation = count() by user.id, gen_ai.compliance.violation_code -| SORT block_count_per_violation DESC -| keep user.id, gen_ai.compliance.violation_code, block_count_per_violation -| STATS violation_count = SUM(block_count_per_violation) by user.id -| WHERE violation_count > 5 -| SORT violation_count 
DESC + +// Expand multi-value fields +| mv_expand gen_ai.compliance.violation_code +| mv_expand gen_ai.policy.confidence +| mv_expand gen_ai.policy.name + +// Filter for high-confidence content policy blocks with targeted violations +| where + gen_ai.policy.action == "BLOCKED" + and gen_ai.policy.name == "content_policy" + and gen_ai.policy.confidence like "HIGH" + and gen_ai.compliance.violation_code in ("HATE", "MISCONDUCT", "SEXUAL", "INSULTS", "PROMPT_ATTACK", "VIOLENCE") + +// keep ECS + compliance fields +| keep + user.id, + gen_ai.compliance.violation_code + +// count blocked violations per user per violation type +| stats + Esql.ml_policy_blocked_violation_count = count() + by + user.id, + gen_ai.compliance.violation_code + +// Aggregate all violation types per user +| stats + Esql.ml_policy_blocked_violation_total_count = sum(Esql.ml_policy_blocked_violation_count) + by + user.id + +// Filter for users with more than 5 total violations +| where Esql.ml_policy_blocked_violation_total_count > 5 + +// sort by violation volume +| sort Esql.ml_policy_blocked_violation_total_count desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_high_resource_consumption_detection.toml b/rules/integrations/aws_bedrock/aws_bedrock_high_resource_consumption_detection.toml index e2d5e0deda7..0ba98b55658 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_high_resource_consumption_detection.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_high_resource_consumption_detection.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/05/04" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -80,15 +80,37 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* -| keep user.id, gen_ai.usage.prompt_tokens, gen_ai.usage.completion_tokens -| stats max_tokens = max(gen_ai.usage.prompt_tokens), - total_requests = count(*), - avg_response_size = avg(gen_ai.usage.completion_tokens) - by user.id -// tokens count depends on specific LLM, as is related to how embeddings are generated. 
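// Worked example (hypothetical values, not part of the rule) of how the risk factor below
// combines the three aggregates before the threshold is applied:
row max_tokens = 6000, total_requests = 12, avg_response_size = 600.0
| eval risk_factor = (max_tokens / 1000) * total_requests * (avg_response_size / 500)
// risk_factor = 6 * 12 * 1.2 = 86.4, comfortably above the > 10 cut-off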
-| where max_tokens > 5000 and total_requests > 10 and avg_response_size > 500 -| eval risk_factor = (max_tokens / 1000) * total_requests * (avg_response_size / 500) -| where risk_factor > 10 -| sort risk_factor desc + +// keep token usage data +| keep + user.id, + gen_ai.usage.prompt_tokens, + gen_ai.usage.completion_tokens + +// Aggregate usage metrics +| stats + Esql.ml_usage_prompt_tokens_max = max(gen_ai.usage.prompt_tokens), + Esql.ml_invocations_total_count = count(*), + Esql.ml_usage_completion_tokens_avg = avg(gen_ai.usage.completion_tokens) + by + user.id + +// Filter for suspicious usage patterns +| where + Esql.ml_usage_prompt_tokens_max > 5000 + and Esql.ml_invocations_total_count > 10 + and Esql.ml_usage_completion_tokens_avg > 500 + +// Calculate a custom risk factor +| eval Esql.ml_risk_score = + (Esql.ml_usage_prompt_tokens_max / 1000) * + Esql.ml_invocations_total_count * + (Esql.ml_usage_completion_tokens_avg / 500) + +// Filter on risk score +| where Esql.ml_risk_score > 10 + +// sort high risk users to top +| sort Esql.ml_risk_score desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_multiple_attempts_to_use_denied_models_by_user.toml b/rules/integrations/aws_bedrock/aws_bedrock_multiple_attempts_to_use_denied_models_by_user.toml index 631b76a1e44..402d3bf764f 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_multiple_attempts_to_use_denied_models_by_user.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_multiple_attempts_to_use_denied_models_by_user.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/05/02" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -77,11 +77,30 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* + +// Filter for access denied errors from GenAI responses | where gen_ai.response.error_code == "AccessDeniedException" -| keep user.id, gen_ai.request.model.id, cloud.account.id, gen_ai.response.error_code -| stats total_denials = count(*) by user.id, gen_ai.request.model.id, cloud.account.id -| where total_denials > 3 -| sort total_denials desc + +// keep ECS and response fields +| keep + user.id, + gen_ai.request.model.id, + cloud.account.id, + gen_ai.response.error_code + +// count total denials per user/model/account +| stats + Esql.ml_response_access_denied_count = count(*) + by + user.id, + gen_ai.request.model.id, + cloud.account.id + +// Filter for users with repeated denials +| where Esql.ml_response_access_denied_count > 3 + +// sort by volume of denials +| sort Esql.ml_response_access_denied_count desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_multiple_sensitive_information_policy_blocks_detected.toml b/rules/integrations/aws_bedrock/aws_bedrock_multiple_sensitive_information_policy_blocks_detected.toml index 8d521be1e61..ea99fc671fb 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_multiple_sensitive_information_policy_blocks_detected.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_multiple_sensitive_information_policy_blocks_detected.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/11/20" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -79,11 +79,28 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* -| MV_EXPAND gen_ai.policy.name -| where gen_ai.policy.action == "BLOCKED" and gen_ai.compliance.violation_detected == "true" and gen_ai.policy.name == "sensitive_information_policy" + +// Expand multi-valued policy name field +| 
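// Standalone illustration (hypothetical values, not part of the rule): mv_expand fans a
// multi-valued field out into one row per value, so each policy name is filtered separately:
row policy_name = ["sensitive_information_policy", "word_policy"]
| mv_expand policy_name
// yields two rows, one per policy name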
mv_expand gen_ai.policy.name + +// Filter for blocked actions related to sensitive info policy +| where + gen_ai.policy.action == "BLOCKED" + and gen_ai.compliance.violation_detected == "true" + and gen_ai.policy.name == "sensitive_information_policy" + +// keep only relevant fields | keep user.id -| stats sensitive_information_block = count() by user.id -| where sensitive_information_block > 5 -| sort sensitive_information_block desc + +// count how many times each user triggered a sensitive info block +| stats + Esql.ml_policy_blocked_sensitive_info_count = count() + by user.id + +// Filter for users with more than 5 violations +| where Esql.ml_policy_blocked_sensitive_info_count > 5 + +// sort highest to lowest +| sort Esql.ml_policy_blocked_sensitive_info_count desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_multiple_topic_policy_blocks_detected.toml b/rules/integrations/aws_bedrock/aws_bedrock_multiple_topic_policy_blocks_detected.toml index e5674aaf888..5bcc33cdbb9 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_multiple_topic_policy_blocks_detected.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_multiple_topic_policy_blocks_detected.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/11/20" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -79,11 +79,28 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* -| MV_EXPAND gen_ai.policy.name -| where gen_ai.policy.action == "BLOCKED" and gen_ai.compliance.violation_detected == "true" and gen_ai.policy.name == "topic_policy" + +// Expand multi-value policy name field +| mv_expand gen_ai.policy.name + +// Filter for blocked topic policy violations +| where + gen_ai.policy.action == "BLOCKED" + and gen_ai.compliance.violation_detected == "true" + and gen_ai.policy.name == "topic_policy" + +// keep only user info | keep user.id -| stats denied_topics = count() by user.id -| where denied_topics > 5 -| sort denied_topics desc + +// count how many times each user triggered a blocked topic policy +| stats + Esql.ml_policy_blocked_topic_count = count() + by user.id + +// Filter for excessive violations +| where Esql.ml_policy_blocked_topic_count > 5 + +// sort highest to lowest +| sort Esql.ml_policy_blocked_topic_count desc ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_multiple_validation_exception_errors_by_single_user.toml b/rules/integrations/aws_bedrock/aws_bedrock_multiple_validation_exception_errors_by_single_user.toml index 5839ffdcad4..360900d81b5 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_multiple_validation_exception_errors_by_single_user.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_multiple_validation_exception_errors_by_single_user.toml @@ -2,7 +2,7 @@ creation_date = "2024/09/11" integration = ["aws_bedrock"] maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -83,13 +83,31 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* -// truncate the timestamp to a 1-minute window -| eval target_time_window = DATE_TRUNC(1 minutes, @timestamp) + +// Truncate timestamp to 1-minute window +| eval Esql.time_window_date_trunc = date_trunc(1 minutes, @timestamp) + +// Filter for validation exceptions in responses | where gen_ai.response.error_code == "ValidationException" -| keep user.id, gen_ai.request.model.id, cloud.account.id, gen_ai.response.error_code, target_time_window -// count the number of users causing validation errors 
within a 1 minute window -| stats total_denials = count(*) by target_time_window, user.id, cloud.account.id -| where total_denials > 3 + +// keep relevant ECS and derived fields +| keep + user.id, + gen_ai.request.model.id, + cloud.account.id, + gen_ai.response.error_code, + Esql.time_window_date_trunc + +// count number of denials by user/account/time window +| stats + Esql.ml_response_validation_error_count = count(*) + by + Esql.time_window_date_trunc, + user.id, + cloud.account.id + +// Filter for excessive errors +| where Esql.ml_response_validation_error_count > 3 ''' diff --git a/rules/integrations/aws_bedrock/aws_bedrock_multiple_word_policy_blocks_detected.toml b/rules/integrations/aws_bedrock/aws_bedrock_multiple_word_policy_blocks_detected.toml index ad942edf801..4448076cd26 100644 --- a/rules/integrations/aws_bedrock/aws_bedrock_multiple_word_policy_blocks_detected.toml +++ b/rules/integrations/aws_bedrock/aws_bedrock_multiple_word_policy_blocks_detected.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2024/11/20" maturity = "production" -updated_date = "2025/03/20" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -79,11 +79,28 @@ type = "esql" query = ''' from logs-aws_bedrock.invocation-* -| MV_EXPAND gen_ai.policy.name -| where gen_ai.policy.action == "BLOCKED" and gen_ai.compliance.violation_detected == "true" and gen_ai.policy.name == "word_policy" + +// Expand multivalued policy names +| mv_expand gen_ai.policy.name + +// Filter for blocked profanity-related policy violations +| where + gen_ai.policy.action == "BLOCKED" + and gen_ai.compliance.violation_detected == "true" + and gen_ai.policy.name == "word_policy" + +// keep relevant user field | keep user.id -| stats profanity_words= count() by user.id -| where profanity_words > 5 -| sort profanity_words desc + +// count blocked profanity attempts per user +| stats + Esql.ml_policy_blocked_profanity_count = count() + by user.id + +// Filter for excessive policy violations +| where Esql.ml_policy_blocked_profanity_count > 5 + +// sort by violation volume +| sort Esql.ml_policy_blocked_profanity_count desc ''' diff --git a/rules/integrations/azure/credential_access_azure_entra_suspicious_signin.toml b/rules/integrations/azure/credential_access_azure_entra_suspicious_signin.toml index ec6a05b51f0..09326ac93d4 100644 --- a/rules/integrations/azure/credential_access_azure_entra_suspicious_signin.toml +++ b/rules/integrations/azure/credential_access_azure_entra_suspicious_signin.toml @@ -2,14 +2,15 @@ creation_date = "2025/04/28" integration = ["azure"] maturity = "production" -updated_date = "2025/04/28" +updated_date = "2025/07/16" [rule] author = ["Elastic"] description = """ -Identifies concurrent azure signin events for the same user and from multiple sources, and where one of the authentication -event has some suspicious properties often associated to DeviceCode and OAuth phishing. Adversaries may steal Refresh -Tokens (RTs) via phishing to bypass multi-factor authentication (MFA) and gain unauthorized access to Azure resources. +Identifies concurrent azure signin events for the same user and from multiple sources, and where one of the +authentication event has some suspicious properties often associated to DeviceCode and OAuth phishing. Adversaries may +steal Refresh Tokens (RTs) via phishing to bypass multi-factor authentication (MFA) and gain unauthorized access to +Azure resources. 
""" false_positives = [ """ @@ -45,7 +46,7 @@ references = [ "https://learn.microsoft.com/en-us/entra/identity/", "https://learn.microsoft.com/en-us/entra/identity/monitoring-health/concept-sign-ins", "https://docs.microsoft.com/en-us/azure/active-directory/reports-monitoring/reference-azure-monitor-sign-ins-log-schema", - "https://www.volexity.com/blog/2025/04/22/phishing-for-codes-russian-threat-actors-target-microsoft-365-oauth-workflows/" + "https://www.volexity.com/blog/2025/04/22/phishing-for-codes-russian-threat-actors-target-microsoft-365-oauth-workflows/", ] risk_score = 73 rule_id = "e3bd85e9-7aff-46eb-b60e-20dfc9020d98" @@ -68,20 +69,67 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.signinlogs* metadata _id, _version, _index -// the rule is scheduled to run every hour and looks for events occured during last 1 hour. -| where @timestamp > NOW() - 1 hours -| where event.dataset == "azure.signinlogs" and source.ip is not null and azure.signinlogs.identity is not null and to_lower(event.outcome) == "success" -| keep @timestamp, azure.signinlogs.identity, source.ip, azure.signinlogs.properties.authentication_requirement, azure.signinlogs.properties.app_id, azure.signinlogs.properties.resource_display_name, azure.signinlogs.properties.authentication_protocol, azure.signinlogs.properties.app_display_name -// devicecode authentication no MFA -| eval device_code = case(azure.signinlogs.properties.authentication_protocol == "deviceCode" and azure.signinlogs.properties.authentication_requirement != "multiFactorAuthentication", azure.signinlogs.identity, null), -// potential Visual Studio Code OAuth code phish - sign-in events with client set to Visual Studio Code - visual_studio = case(azure.signinlogs.properties.app_id == "aebc6443-996d-45c2-90f0-388ff96faa56" and azure.signinlogs.properties.resource_display_name == "Microsoft Graph", azure.signinlogs.identity, null), -// Other sign-in events - other = case(azure.signinlogs.properties.authentication_protocol != "deviceCode" and azure.signinlogs.properties.app_id != "aebc6443-996d-45c2-90f0-388ff96faa56", azure.signinlogs.identity, null) -| stats total = COUNT(*), device_code_count = COUNT_DISTINCT(device_code), vsc = count_distinct(visual_studio), other_count = COUNT_DISTINCT(other), src_ip = COUNT_DISTINCT(source.ip), ips = values(source.ip), clients = values(azure.signinlogs.properties.app_display_name), resources = VALUES(azure.signinlogs.properties.resource_display_name), auth_requirement = VALUES(azure.signinlogs.properties.authentication_requirement) by azure.signinlogs.identity -// 2 unique source.ip for same account - which may indicate the presence 2 sign-ins one by the adversary and the other by the victim -| where src_ip >= 2 and (device_code_count > 0 or vsc >0) +from logs-azure.signinlogs* metadata _id, _version, _index + +// Scheduled to run every hour, reviewing events from past hour +| where + @timestamp > now() - 1 hours + and event.dataset == "azure.signinlogs" + and source.ip is not null + and azure.signinlogs.identity is not null + and to_lower(event.outcome) == "success" + +// keep relevant raw fields +| keep + @timestamp, + azure.signinlogs.identity, + source.ip, + azure.signinlogs.properties.authentication_requirement, + azure.signinlogs.properties.app_id, + azure.signinlogs.properties.resource_display_name, + azure.signinlogs.properties.authentication_protocol, + azure.signinlogs.properties.app_display_name + +// case classifications for identity usage +| eval + 
Esql.azure_signinlogs_properties_authentication_device_code_case = case( + azure.signinlogs.properties.authentication_protocol == "deviceCode" + and azure.signinlogs.properties.authentication_requirement != "multiFactorAuthentication", + azure.signinlogs.identity, + null), + + Esql.azure_signinlogs_auth_visual_studio_case = case( + azure.signinlogs.properties.app_id == "aebc6443-996d-45c2-90f0-388ff96faa56" + and azure.signinlogs.properties.resource_display_name == "Microsoft Graph", + azure.signinlogs.identity, + null), + + Esql.azure_signinlogs_auth_other_case = case( + azure.signinlogs.properties.authentication_protocol != "deviceCode" + and azure.signinlogs.properties.app_id != "aebc6443-996d-45c2-90f0-388ff96faa56", + azure.signinlogs.identity, + null) + +// Aggregate metrics by user identity +| stats + Esql.event_count = count(*), + Esql.azure_signinlogs_properties_authentication_device_code_case_count_distinct = count_distinct(Esql.azure_signinlogs_properties_authentication_device_code_case), + Esql.azure_signinlogs_properties_auth_visual_studio_count_distinct = count_distinct(Esql.azure_signinlogs_auth_visual_studio_case), + Esql.azure_signinlogs_properties_auth_other_count_distinct = count_distinct(Esql.azure_signinlogs_auth_other_case), + Esql.azure_signinlogs_properties_source_ip_count_distinct = count_distinct(source.ip), + Esql.azure_signinlogs_properties_source_ip_values = values(source.ip), + Esql.azure_signinlogs_properties_client_app_values = values(azure.signinlogs.properties.app_display_name), + Esql.azure_signinlogs_properties_resource_display_name_values = values(azure.signinlogs.properties.resource_display_name), + Esql.azure_signinlogs_properties_auth_requirement_values = values(azure.signinlogs.properties.authentication_requirement) + by azure.signinlogs.identity + +// Detect multiple unique IPs for one user with signs of deviceCode or VSC OAuth usage +| where + Esql.azure_signinlogs_properties_source_ip_count_distinct >= 2 + and ( + Esql.azure_signinlogs_properties_authentication_device_code_case_count_distinct > 0 + or Esql.azure_signinlogs_properties_auth_visual_studio_count_distinct > 0 + ) ''' diff --git a/rules/integrations/azure/credential_access_azure_entra_totp_brute_force_attempts.toml b/rules/integrations/azure/credential_access_azure_entra_totp_brute_force_attempts.toml index 2d52c2e151d..510fd4c9539 100644 --- a/rules/integrations/azure/credential_access_azure_entra_totp_brute_force_attempts.toml +++ b/rules/integrations/azure/credential_access_azure_entra_totp_brute_force_attempts.toml @@ -2,7 +2,7 @@ creation_date = "2024/12/11" integration = ["azure"] maturity = "production" -updated_date = "2025/07/28" +updated_date = "2025/07/31" [rule] author = ["Elastic"] @@ -88,7 +88,7 @@ query = ''' from logs-azure.signinlogs* metadata _id, _version, _index | where - // filter for Entra Sign-In Logs + // filter for Entra Sign-in Logs event.dataset == "azure.signinlogs" and azure.signinlogs.operation_name == "Sign-in activity" and azure.signinlogs.properties.user_type == "Member" @@ -105,29 +105,29 @@ from logs-azure.signinlogs* metadata _id, _version, _index ) | stats - Esql.event_count = COUNT(*), - Esql.azure_signinlogs_properties.session_id_count_distinct = COUNT_DISTINCT(azure.signinlogs.properties.session_id), - Esql.source_address_values = VALUES(source.address), - Esql.azure_tenant_id_valuues = VALUES(azure.tenant_id), - Esql_priv.azure_identity_values = VALUES(azure.signinlogs.identity), - Esql_priv.azure_signinlogs_properties_user_principal_name_values 
= VALUES(azure.signinlogs.properties.user_principal_name), - Esql.azure_signinlogs_properties_app_id_values = VALUES(azure.signinlogs.properties.app_id), - Esql.azure_signinlogs_properties_app_display_name_values = VALUES(azure.signinlogs.properties.app_display_name), - Esql.azure_signinlogs_properties_authentication_requirement_values = VALUES(azure.signinlogs.properties.authentication_requirement), - Esql.azure_signinlogs_properties_authentication_protocol_values = VALUES(azure.signinlogs.properties.authentication_protocol), - Esql.azure_signinlogs_properties_client_app_used_values = VALUES(azure.signinlogs.properties.client_app_used), - Esql.azure_signinlogs_properties_client_credential_type_values = VALUES(azure.signinlogs.properties.client_credential_type), - Esql.azure_signinlogs_properties_conditional_access_status_values = VALUES(azure.signinlogs.properties.conditional_access_status), - Esql.azure_signinlogs_properties_correlation_id_values = VALUES(azure.signinlogs.properties.correlation_id), - Esql.azure_signinlogs_properties_is_interactive_values = VALUES(azure.signinlogs.properties.is_interactive), - Esql.azure_signinlogs_properties_mfa_detail_auth_method_values = VALUES(azure.signinlogs.properties.mfa_detail.auth_method), - Esql.azure_signinlogs_properties_resource_display_name_values = VALUES(azure.signinlogs.properties.resource_display_name), - Esql.azure_signinlogs_properties_resource_id_values = VALUES(azure.signinlogs.properties.resource_id), - Esql.azure_signinlogs_properties_risk_state_values = VALUES(azure.signinlogs.properties.risk_state), - Esql.azure_signinlogs_properties_risk_detail_values = VALUES(azure.signinlogs.properties.risk_detail), - Esql.azure_signinlogs_properties_status.error_code_values = VALUES(azure.signinlogs.properties.status.error_code), - Esql.azure_signinlogs_properties_original_request_id_values = VALUES(azure.signinlogs.properties.original_request_id), - Esql.user_id_values = VALUES(user.id) + Esql.event_count = count(*), + Esql.azure_signinlogs_properties.session_id_count_distinct = count_distinct(azure.signinlogs.properties.session_id), + Esql.source_address_values = values(source.address), + Esql.azure_tenant_id_valuues = values(azure.tenant_id), + Esql_priv.azure_identity_values = values(azure.signinlogs.identity), + Esql_priv.azure_signinlogs_properties_user_principal_name_values = values(azure.signinlogs.properties.user_principal_name), + Esql.azure_signinlogs_properties_app_id_values = values(azure.signinlogs.properties.app_id), + Esql.azure_signinlogs_properties_app_display_name_values = values(azure.signinlogs.properties.app_display_name), + Esql.azure_signinlogs_properties_authentication_requirement_values = values(azure.signinlogs.properties.authentication_requirement), + Esql.azure_signinlogs_properties_authentication_protocol_values = values(azure.signinlogs.properties.authentication_protocol), + Esql.azure_signinlogs_properties_client_app_used_values = values(azure.signinlogs.properties.client_app_used), + Esql.azure_signinlogs_properties_client_credential_type_values = values(azure.signinlogs.properties.client_credential_type), + Esql.azure_signinlogs_properties_conditional_access_status_values = values(azure.signinlogs.properties.conditional_access_status), + Esql.azure_signinlogs_properties_correlation_id_values = values(azure.signinlogs.properties.correlation_id), + Esql.azure_signinlogs_properties_is_interactive_values = values(azure.signinlogs.properties.is_interactive), + 
Esql.azure_signinlogs_properties_mfa_detail_auth_method_values = values(azure.signinlogs.properties.mfa_detail.auth_method), + Esql.azure_signinlogs_properties_resource_display_name_values = values(azure.signinlogs.properties.resource_display_name), + Esql.azure_signinlogs_properties_resource_id_values = values(azure.signinlogs.properties.resource_id), + Esql.azure_signinlogs_properties_risk_state_values = values(azure.signinlogs.properties.risk_state), + Esql.azure_signinlogs_properties_risk_detail_values = values(azure.signinlogs.properties.risk_detail), + Esql.azure_signinlogs_properties_status.error_code_values = values(azure.signinlogs.properties.status.error_code), + Esql.azure_signinlogs_properties_original_request_id_values = values(azure.signinlogs.properties.original_request_id), + Esql.user_id_values = values(user.id) by user.id | where Esql.event_count >= 20 and Esql.azure_signinlogs_properties.session_id_count_distinct >= 10 diff --git a/rules/integrations/azure/credential_access_azure_key_vault_excessive_retrieval.toml b/rules/integrations/azure/credential_access_azure_key_vault_excessive_retrieval.toml index 135f07d4780..f80f96bc003 100644 --- a/rules/integrations/azure/credential_access_azure_key_vault_excessive_retrieval.toml +++ b/rules/integrations/azure/credential_access_azure_key_vault_excessive_retrieval.toml @@ -2,7 +2,7 @@ creation_date = "2025/07/10" integration = ["azure"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/24" [rule] author = ["Elastic"] @@ -86,11 +86,11 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.platformlogs-* METADATA _id, _index +from logs-azure.platformlogs-* metadata _id, _index // Filter for Azure Key Vault read operations -| WHERE event.dataset == "azure.platformlogs" - AND event.action IN ( +| where event.dataset == "azure.platformlogs" + and event.action in ( "VaultGet", "KeyGet", "KeyList", @@ -114,64 +114,64 @@ FROM logs-azure.platformlogs-* METADATA _id, _index ) // Truncate timestamps into 1-minute windows -| EVAL Esql.time_window.date_trunc = DATE_TRUNC(1 minute, @timestamp) +| eval Esql.time_window_date_trunc = date_trunc(1 minute, @timestamp) // Aggregate identity, geo, resource, and activity info -| STATS - Esql.azure.platformlogs.identity.claim.upn.values = VALUES(azure.platformlogs.identity.claim.upn), - Esql.azure.platformlogs.identity.claim.upn.count_unique = COUNT_DISTINCT(azure.platformlogs.identity.claim.upn), - Esql.azure.platformlogs.identity.claim.appid.values = VALUES(azure.platformlogs.identity.claim.appid), - Esql.azure.platformlogs.identity.claim.objectid.values = VALUES(azure.platformlogs.identity.claim.objectid), - - Esql.source.ip.values = VALUES(source.ip), - Esql.geo.city.values = VALUES(geo.city_name), - Esql.geo.region.values = VALUES(geo.region_name), - Esql.geo.country.values = VALUES(geo.country_name), - Esql.network.as_org.values = VALUES(source.as.organization.name), - - Esql.event.actions.values = VALUES(event.action), - Esql.event.count = COUNT(*), - Esql.event.action.count_distinct = COUNT_DISTINCT(event.action), - Esql.azure.resource.name.count_distinct = COUNT_DISTINCT(azure.resource.name), - Esql.azure.resource.name.values = VALUES(azure.resource.name), - Esql.azure.platformlogs.result_type.values = VALUES(azure.platformlogs.result_type), - Esql.cloud.region.values = VALUES(cloud.region), - - Esql.agent.name.values = VALUES(agent.name), - Esql.azure.subscription_id.values = VALUES(azure.subscription_id), - 
Esql.azure.resource_group.values = VALUES(azure.resource.group), - Esql.azure.resource_id.values = VALUES(azure.resource.id) - -BY Esql.time_window.date_trunc, azure.platformlogs.identity.claim.upn - -// Keep relevant fields -| KEEP - Esql.time_window.date_trunc, - Esql.azure.platformlogs.identity.claim.upn.values, - Esql.azure.platformlogs.identity.claim.upn.count_unique, - Esql.azure.platformlogs.identity.claim.appid.values, - Esql.azure.platformlogs.identity.claim.objectid.values, - Esql.source.ip.values, - Esql.geo.city.values, - Esql.geo.region.values, - Esql.geo.country.values, - Esql.network.as_org.values, - Esql.event.actions.values, - Esql.event.count, - Esql.event.action.count_distinct, - Esql.azure.resource.name.count_distinct, - Esql.azure.resource.name.values, - Esql.azure.platformlogs.result_type.values, - Esql.cloud.region.values, - Esql.agent.name.values, - Esql.azure.subscription_id.values, - Esql.azure.resource_group.values, - Esql.azure.resource_id.values +| stats + Esql_priv.azure_platformlogs_identity_claim_upn_values = values(azure.platformlogs.identity.claim.upn), + Esql.azure_platformlogs_identity_claim_upn_count_distinct = count_distinct(azure.platformlogs.identity.claim.upn), + Esql.azure_platformlogs_identity_claim_appid_values = values(azure.platformlogs.identity.claim.appid), + Esql.azure_platformlogs_identity_claim_objectid_values = values(azure.platformlogs.identity.claim.objectid), + + Esql.source_ip_values = values(source.ip), + Esql.geo_city_values = values(geo.city_name), + Esql.geo_region_values = values(geo.region_name), + Esql.geo_country_values = values(geo.country_name), + Esql.source_as_organization_name_values = values(source.as.organization.name), + + Esql.event_action_values = values(event.action), + Esql.event_count = count(*), + Esql.event_action_count_distinct = count_distinct(event.action), + Esql.azure_resource_name_count_distinct = count_distinct(azure.resource.name), + Esql.azure_resource_name_values = values(azure.resource.name), + Esql.azure_platformlogs_result_type_values = values(azure.platformlogs.result_type), + Esql.cloud_region_values = values(cloud.region), + + Esql.agent_name_values = values(agent.name), + Esql.azure_subscription_id_values = values(azure.subscription_id), + Esql.azure_resource_group_values = values(azure.resource.group), + Esql.azure_resource_id_values = values(azure.resource.id) + +by Esql.time_window_date_trunc, azure.platformlogs.identity.claim.upn + +// keep relevant fields +| keep + Esql.time_window_date_trunc, + Esql_priv.azure_platformlogs_identity_claim_upn_values, + Esql.azure_platformlogs_identity_claim_upn_count_distinct, + Esql.azure_platformlogs_identity_claim_appid_values, + Esql.azure_platformlogs_identity_claim_objectid_values, + Esql.source_ip_values, + Esql.geo_city_values, + Esql.geo_region_values, + Esql.geo_country_values, + Esql.source_as_organization_name_values, + Esql.event_action_values, + Esql.event_count, + Esql.event_action_count_distinct, + Esql.azure_resource_name_count_distinct, + Esql.azure_resource_name_values, + Esql.azure_platformlogs_result_type_values, + Esql.cloud_region_values, + Esql.agent_name_values, + Esql.azure_subscription_id_values, + Esql.azure_resource_group_values, + Esql.azure_resource_id_values // Filter for suspiciously high volume of distinct Key Vault reads by a single actor -| WHERE Esql.azure.platformlogs.identity.claim.upn.count_unique == 1 AND Esql.event.count >= 10 AND Esql.event.action.count_distinct >= 2 +| where 
Esql.azure_platformlogs_identity_claim_upn_count_distinct == 1 and Esql.event_count >= 10 and Esql.event_action_count_distinct >= 2 -| SORT Esql.time_window.date_trunc DESC +| sort Esql.time_window_date_trunc desc ''' diff --git a/rules/integrations/azure/credential_access_entra_id_brute_force_activity.toml b/rules/integrations/azure/credential_access_entra_id_brute_force_activity.toml index bc84da89717..5f664ef65ed 100644 --- a/rules/integrations/azure/credential_access_entra_id_brute_force_activity.toml +++ b/rules/integrations/azure/credential_access_entra_id_brute_force_activity.toml @@ -4,7 +4,7 @@ integration = ["azure"] maturity = "production" min_stack_comments = "Elastic ESQL values aggregation is more performant in 8.16.5 and above." min_stack_version = "8.17.0" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -90,20 +90,18 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.signinlogs* +from logs-azure.signinlogs* // Define a time window for grouping and maintain the original event timestamp -| EVAL - time_window = DATE_TRUNC(15 minutes, @timestamp), - event_time = @timestamp +| eval Esql.time_window_date_trunc = date_trunc(15 minutes, @timestamp) // Filter relevant failed authentication events with specific error codes -| WHERE event.dataset == "azure.signinlogs" - AND event.category == "authentication" - AND azure.signinlogs.category IN ("NonInteractiveUserSignInLogs", "SignInLogs") - AND event.outcome == "failure" - AND azure.signinlogs.properties.authentication_requirement == "singleFactorAuthentication" - AND azure.signinlogs.properties.status.error_code IN ( +| where event.dataset == "azure.signinlogs" + and event.category == "authentication" + and azure.signinlogs.category in ("NonInteractiveUserSignInLogs", "SignInLogs") + and event.outcome == "failure" + and azure.signinlogs.properties.authentication_requirement == "singleFactorAuthentication" + and azure.signinlogs.properties.status.error_code in ( 50034, // UserAccountNotFound 50126, // InvalidUsernameOrPassword 50055, // PasswordExpired @@ -125,89 +123,107 @@ FROM logs-azure.signinlogs* 120002, // PasswordChangeInvalidNewPasswordWeak 120020 // PasswordChangeFailure ) - AND azure.signinlogs.properties.user_principal_name IS NOT NULL AND azure.signinlogs.properties.user_principal_name != "" - AND user_agent.original != "Mozilla/5.0 (compatible; MSAL 1.0) PKeyAuth/1.0" - AND source.`as`.organization.name != "MICROSOFT-CORP-MSN-AS-BLOCK" - -// Aggregate statistics for behavioral pattern analysis -| STATS - authentication_requirement = VALUES(azure.signinlogs.properties.authentication_requirement), - client_app_id = VALUES(azure.signinlogs.properties.app_id), - client_app_display_name = VALUES(azure.signinlogs.properties.app_display_name), - target_resource_id = VALUES(azure.signinlogs.properties.resource_id), - target_resource_display_name = VALUES(azure.signinlogs.properties.resource_display_name), - conditional_access_status = VALUES(azure.signinlogs.properties.conditional_access_status), - device_detail_browser = VALUES(azure.signinlogs.properties.device_detail.browser), - device_detail_device_id = VALUES(azure.signinlogs.properties.device_detail.device_id), - device_detail_operating_system = VALUES(azure.signinlogs.properties.device_detail.operating_system), - incoming_token_type = VALUES(azure.signinlogs.properties.incoming_token_type), - risk_state = VALUES(azure.signinlogs.properties.risk_state), - session_id = 
VALUES(azure.signinlogs.properties.session_id), - user_id = VALUES(azure.signinlogs.properties.user_id), - user_principal_name = VALUES(azure.signinlogs.properties.user_principal_name), - result_description = VALUES(azure.signinlogs.result_description), - result_signature = VALUES(azure.signinlogs.result_signature), - result_type = VALUES(azure.signinlogs.result_type), - - unique_users = COUNT_DISTINCT(azure.signinlogs.properties.user_id), - user_id_list = VALUES(azure.signinlogs.properties.user_id), - login_errors = VALUES(azure.signinlogs.result_description), - unique_login_errors = COUNT_DISTINCT(azure.signinlogs.result_description), - error_codes = VALUES(azure.signinlogs.properties.status.error_code), - unique_error_codes = COUNT_DISTINCT(azure.signinlogs.properties.status.error_code), - request_types = VALUES(azure.signinlogs.properties.incoming_token_type), - app_names = VALUES(azure.signinlogs.properties.app_display_name), - ip_list = VALUES(source.ip), - unique_ips = COUNT_DISTINCT(source.ip), - source_orgs = VALUES(source.`as`.organization.name), - countries = VALUES(source.geo.country_name), - unique_country_count = COUNT_DISTINCT(source.geo.country_name), - unique_asn_orgs = COUNT_DISTINCT(source.`as`.organization.name), - first_seen = MIN(@timestamp), - last_seen = MAX(@timestamp), - total_attempts = COUNT() -BY time_window - -// Determine brute force behavior type based on statistical thresholds -| EVAL - duration_seconds = DATE_DIFF("seconds", first_seen, last_seen), - bf_type = CASE( - // Many users, relatively few distinct login errors, distributed over multiple IPs (but not too many), - // and happens quickly. Often bots using leaked credentials. - unique_users >= 10 AND total_attempts >= 30 AND unique_login_errors <= 3 - AND unique_ips >= 5 - AND duration_seconds <= 600 - AND unique_users > unique_ips, + and azure.signinlogs.properties.user_principal_name is not null and azure.signinlogs.properties.user_principal_name != "" + and user_agent.original != "Mozilla/5.0 (compatible; MSAL 1.0) PKeyAuth/1.0" + and source.`as`.organization.name != "MICROSOFT-CORP-MSN-AS-BLOCK" + +| stats + Esql.azure_signinlogs_properties_authentication_requirement_values = values(azure.signinlogs.properties.authentication_requirement), + Esql.azure_signinlogs_properties_app_id_values = values(azure.signinlogs.properties.app_id), + Esql.azure_signinlogs_properties_app_display_name_values = values(azure.signinlogs.properties.app_display_name), + Esql.azure_signinlogs_properties_resource_id_values = values(azure.signinlogs.properties.resource_id), + Esql.azure_signinlogs_properties_resource_display_name_values = values(azure.signinlogs.properties.resource_display_name), + Esql.azure_signinlogs_properties_conditional_access_status_values = values(azure.signinlogs.properties.conditional_access_status), + Esql.azure_signinlogs_properties_device_detail_browser_values = values(azure.signinlogs.properties.device_detail.browser), + Esql.azure_signinlogs_properties_device_detail_device_id_values = values(azure.signinlogs.properties.device_detail.device_id), + Esql.azure_signinlogs_properties_device_detail_operating_system_values = values(azure.signinlogs.properties.device_detail.operating_system), + Esql.azure_signinlogs_properties_incoming_token_type_values = values(azure.signinlogs.properties.incoming_token_type), + Esql.azure_signinlogs_properties_risk_state_values = values(azure.signinlogs.properties.risk_state), + Esql.azure_signinlogs_properties_session_id_values = 
values(azure.signinlogs.properties.session_id), + Esql.azure_signinlogs_properties_user_id_values = values(azure.signinlogs.properties.user_id), + Esql_priv.azure_signinlogs_properties_user_principal_name_values = values(azure.signinlogs.properties.user_principal_name), + Esql.azure_signinlogs_result_description_values = values(azure.signinlogs.result_description), + Esql.azure_signinlogs_result_signature_values = values(azure.signinlogs.result_signature), + Esql.azure_signinlogs_result_type_values = values(azure.signinlogs.result_type), + + Esql.azure_signinlogs_properties_user_id_count_distinct = count_distinct(azure.signinlogs.properties.user_id), + Esql.azure_signinlogs_properties_user_id_list = values(azure.signinlogs.properties.user_id), + Esql.azure_signinlogs_result_description_values_all = values(azure.signinlogs.result_description), + Esql.azure_signinlogs_result_description_count_distinct = count_distinct(azure.signinlogs.result_description), + Esql.azure_signinlogs_properties_status_error_code_values = values(azure.signinlogs.properties.status.error_code), + Esql.azure_signinlogs_properties_status_error_code_count_distinct = count_distinct(azure.signinlogs.properties.status.error_code), + Esql.azure_signinlogs_properties_incoming_token_type_values_all = values(azure.signinlogs.properties.incoming_token_type), + Esql.azure_signinlogs_properties_app_display_name_values_all = values(azure.signinlogs.properties.app_display_name), + Esql.source_ip_values = values(source.ip), + Esql.source_ip_count_distinct = count_distinct(source.ip), + Esql.source_as_organization_name_values = values(source.`as`.organization.name), + Esql.source_geo_country_name_values = values(source.geo.country_name), + Esql.source_geo_country_name_count_distinct = count_distinct(source.geo.country_name), + Esql.source_as_organization_name_count_distinct = count_distinct(source.`as`.organization.name), + Esql.timestamp_first_seen = min(@timestamp), + Esql.timestamp_last_seen = max(@timestamp), + Esql.event_count = count() +by Esql.time_window_date_trunc + +| eval + Esql.duration_seconds = date_diff("seconds", Esql.timestamp_first_seen, Esql.timestamp_last_seen), + Esql.brute_force_type = case( + Esql.azure_signinlogs_properties_user_id_count_distinct >= 10 and Esql.event_count >= 30 and Esql.azure_signinlogs_result_description_count_distinct <= 3 + and Esql.source_ip_count_distinct >= 5 + and Esql.duration_seconds <= 600 + and Esql.azure_signinlogs_properties_user_id_count_distinct > Esql.source_ip_count_distinct, "credential_stuffing", - // One password against many users. Single error (e.g., "InvalidPassword"), not necessarily fast. - unique_users >= 15 AND unique_login_errors == 1 AND total_attempts >= 15 AND duration_seconds <= 1800, + Esql.azure_signinlogs_properties_user_id_count_distinct >= 15 and Esql.azure_signinlogs_result_description_count_distinct == 1 and Esql.event_count >= 15 and Esql.duration_seconds <= 1800, "password_spraying", - // One user targeted repeatedly (same error), OR extremely noisy pattern from many IPs. 
- (unique_users == 1 AND unique_login_errors == 1 AND total_attempts >= 30 AND duration_seconds <= 300) - OR (unique_users <= 3 AND unique_ips > 30 AND total_attempts >= 100), + (Esql.azure_signinlogs_properties_user_id_count_distinct == 1 and Esql.azure_signinlogs_result_description_count_distinct == 1 and Esql.event_count >= 30 and Esql.duration_seconds <= 300) + or (Esql.azure_signinlogs_properties_user_id_count_distinct <= 3 and Esql.source_ip_count_distinct > 30 and Esql.event_count >= 100), "password_guessing", - // everything else "other" ) -// Only keep columns necessary for detection output/reporting -| KEEP - time_window, bf_type, duration_seconds, total_attempts, first_seen, last_seen, - unique_users, user_id_list, login_errors, unique_login_errors, - unique_error_codes, error_codes, request_types, app_names, - ip_list, unique_ips, source_orgs, countries, - unique_country_count, unique_asn_orgs, - authentication_requirement, client_app_id, client_app_display_name, - target_resource_id, target_resource_display_name, conditional_access_status, - device_detail_browser, device_detail_device_id, device_detail_operating_system, - incoming_token_type, risk_state, session_id, user_id, - user_principal_name, result_description, result_signature, result_type - -// Remove anything not classified as credential attack activity -| WHERE bf_type != "other" +| keep + Esql.time_window_date_trunc, + Esql.brute_force_type, + Esql.duration_seconds, + Esql.event_count, + Esql.timestamp_first_seen, + Esql.timestamp_last_seen, + Esql.azure_signinlogs_properties_user_id_count_distinct, + Esql.azure_signinlogs_properties_user_id_list, + Esql.azure_signinlogs_result_description_values_all, + Esql.azure_signinlogs_result_description_count_distinct, + Esql.azure_signinlogs_properties_status_error_code_count_distinct, + Esql.azure_signinlogs_properties_status_error_code_values, + Esql.azure_signinlogs_properties_incoming_token_type_values_all, + Esql.azure_signinlogs_properties_app_display_name_values_all, + Esql.source_ip_values, + Esql.source_ip_count_distinct, + Esql.source_as_organization_name_values, + Esql.source_geo_country_name_values, + Esql.source_geo_country_name_count_distinct, + Esql.source_as_organization_name_count_distinct, + Esql.azure_signinlogs_properties_authentication_requirement_values, + Esql.azure_signinlogs_properties_app_id_values, + Esql.azure_signinlogs_properties_app_display_name_values, + Esql.azure_signinlogs_properties_resource_id_values, + Esql.azure_signinlogs_properties_resource_display_name_values, + Esql.azure_signinlogs_properties_conditional_access_status_values, + Esql.azure_signinlogs_properties_device_detail_browser_values, + Esql.azure_signinlogs_properties_device_detail_device_id_values, + Esql.azure_signinlogs_properties_device_detail_operating_system_values, + Esql.azure_signinlogs_properties_incoming_token_type_values, + Esql.azure_signinlogs_properties_risk_state_values, + Esql.azure_signinlogs_properties_session_id_values, + Esql.azure_signinlogs_properties_user_id_values, + Esql_priv.azure_signinlogs_properties_user_principal_name_values, + Esql.azure_signinlogs_result_description_values, + Esql.azure_signinlogs_result_signature_values, + Esql.azure_signinlogs_result_type_values + +| where Esql.brute_force_type != "other" ''' diff --git a/rules/integrations/azure/credential_access_entra_id_excessive_account_lockouts.toml b/rules/integrations/azure/credential_access_entra_id_excessive_account_lockouts.toml index 62c22b810e8..8981a92e250 100644 --- 
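The classification thresholds are unchanged by the rename; a standalone sketch with hypothetical window aggregates (the short field names here are illustrative, not the rule's) shows how the case() branches resolve:

row user_count = 20, error_count = 1, attempt_count = 40, ip_count = 3, duration_seconds = 900
| eval bf_type = case(
    user_count >= 10 and attempt_count >= 30 and error_count <= 3 and ip_count >= 5 and duration_seconds <= 600 and user_count > ip_count, "credential_stuffing",
    user_count >= 15 and error_count == 1 and attempt_count >= 15 and duration_seconds <= 1800, "password_spraying",
    (user_count == 1 and error_count == 1 and attempt_count >= 30 and duration_seconds <= 300) or (user_count <= 3 and ip_count > 30 and attempt_count >= 100), "password_guessing",
    "other")
// resolves to "password_spraying": many users, a single error string, 40 attempts inside the window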
a/rules/integrations/azure/credential_access_entra_id_excessive_account_lockouts.toml +++ b/rules/integrations/azure/credential_access_entra_id_excessive_account_lockouts.toml @@ -2,7 +2,7 @@ creation_date = "2025/07/01" integration = ["azure"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -84,79 +84,99 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.signinlogs* - -| EVAL - time_window = DATE_TRUNC(30 minutes, @timestamp), - user_id = TO_LOWER(azure.signinlogs.properties.user_principal_name), - ip = source.ip, - login_error = azure.signinlogs.result_description, - error_code = azure.signinlogs.properties.status.error_code, - request_type = TO_LOWER(azure.signinlogs.properties.incoming_token_type), - app_name = TO_LOWER(azure.signinlogs.properties.app_display_name), - asn_org = source.`as`.organization.name, - country = source.geo.country_name, - user_agent = user_agent.original, - event_time = @timestamp - -| WHERE event.dataset == "azure.signinlogs" - AND event.category == "authentication" - AND azure.signinlogs.category IN ("NonInteractiveUserSignInLogs", "SignInLogs") - AND event.outcome == "failure" - AND azure.signinlogs.properties.authentication_requirement == "singleFactorAuthentication" - AND error_code == 50053 - AND user_id IS NOT NULL AND user_id != "" - AND asn_org != "MICROSOFT-CORP-MSN-AS-BLOCK" - -| STATS - authentication_requirement = VALUES(azure.signinlogs.properties.authentication_requirement), - client_app_id = VALUES(azure.signinlogs.properties.app_id), - client_app_display_name = VALUES(azure.signinlogs.properties.app_display_name), - target_resource_id = VALUES(azure.signinlogs.properties.resource_id), - target_resource_display_name = VALUES(azure.signinlogs.properties.resource_display_name), - conditional_access_status = VALUES(azure.signinlogs.properties.conditional_access_status), - device_detail_browser = VALUES(azure.signinlogs.properties.device_detail.browser), - device_detail_device_id = VALUES(azure.signinlogs.properties.device_detail.device_id), - device_detail_operating_system = VALUES(azure.signinlogs.properties.device_detail.operating_system), - incoming_token_type = VALUES(azure.signinlogs.properties.incoming_token_type), - risk_state = VALUES(azure.signinlogs.properties.risk_state), - session_id = VALUES(azure.signinlogs.properties.session_id), - user_id = VALUES(azure.signinlogs.properties.user_id), - user_principal_name = VALUES(azure.signinlogs.properties.user_principal_name), - result_description = VALUES(azure.signinlogs.result_description), - result_signature = VALUES(azure.signinlogs.result_signature), - result_type = VALUES(azure.signinlogs.result_type), - - unique_users = COUNT_DISTINCT(user_id), - user_id_list = VALUES(user_id), - login_errors = VALUES(login_error), - unique_login_errors = COUNT_DISTINCT(login_error), - error_codes = VALUES(error_code), - unique_error_codes = COUNT_DISTINCT(error_code), - request_types = VALUES(request_type), - app_names = VALUES(app_name), - ip_list = VALUES(ip), - unique_ips = COUNT_DISTINCT(ip), - source_orgs = VALUES(asn_org), - countries = VALUES(country), - unique_country_count = COUNT_DISTINCT(country), - unique_asn_orgs = COUNT_DISTINCT(asn_org), - first_seen = MIN(event_time), - last_seen = MAX(event_time), - total_attempts = COUNT() -BY time_window -| WHERE unique_users >= 15 AND total_attempts >= 20 -| KEEP - time_window, total_attempts, first_seen, last_seen, - unique_users, user_id_list, 
login_errors, unique_login_errors, - unique_error_codes, error_codes, request_types, app_names, - ip_list, unique_ips, source_orgs, countries, - unique_country_count, unique_asn_orgs, - authentication_requirement, client_app_id, client_app_display_name, - target_resource_id, target_resource_display_name, conditional_access_status, - device_detail_browser, device_detail_device_id, device_detail_operating_system, - incoming_token_type, risk_state, session_id, user_id, - user_principal_name, result_description, result_signature, result_type +from logs-azure.signinlogs* + +| eval + Esql.time_window_date_trunc = date_trunc(30 minutes, @timestamp), + Esql_priv.azure_signinlogs_properties_user_principal_name_lower = to_lower(azure.signinlogs.properties.user_principal_name), + Esql.azure_signinlogs_properties_incoming_token_type_lower = to_lower(azure.signinlogs.properties.incoming_token_type), + Esql.azure_signinlogs_properties_app_display_name_lower = to_lower(azure.signinlogs.properties.app_display_name) + +| where event.dataset == "azure.signinlogs" + and event.category == "authentication" + and azure.signinlogs.category in ("NonInteractiveUserSignInLogs", "SignInLogs") + and event.outcome == "failure" + and azure.signinlogs.properties.authentication_requirement == "singleFactorAuthentication" + and azure.signinlogs.properties.status.error_code == 50053 + and azure.signinlogs.properties.user_principal_name is not null + and azure.signinlogs.properties.user_principal_name != "" + and source.`as`.organization.name != "MICROSOFT-CORP-MSN-AS-BLOCK" + +| stats + Esql.azure_signinlogs_properties_authentication_requirement_values = values(azure.signinlogs.properties.authentication_requirement), + Esql.azure_signinlogs_properties_app_id_values = values(azure.signinlogs.properties.app_id), + Esql.azure_signinlogs_properties_app_display_name_values = values(azure.signinlogs.properties.app_display_name), + Esql.azure_signinlogs_properties_resource_id_values = values(azure.signinlogs.properties.resource_id), + Esql.azure_signinlogs_properties_resource_display_name_values = values(azure.signinlogs.properties.resource_display_name), + Esql.azure_signinlogs_properties_conditional_access_status_values = values(azure.signinlogs.properties.conditional_access_status), + Esql.azure_signinlogs_properties_device_detail_browser_values = values(azure.signinlogs.properties.device_detail.browser), + Esql.azure_signinlogs_properties_device_detail_device_id_values = values(azure.signinlogs.properties.device_detail.device_id), + Esql.azure_signinlogs_properties_device_detail_operating_system_values = values(azure.signinlogs.properties.device_detail.operating_system), + Esql.azure_signinlogs_properties_incoming_token_type_values = values(azure.signinlogs.properties.incoming_token_type), + Esql.azure_signinlogs_properties_risk_state_values = values(azure.signinlogs.properties.risk_state), + Esql.azure_signinlogs_properties_session_id_values = values(azure.signinlogs.properties.session_id), + Esql.azure_signinlogs_properties_user_id_values = values(azure.signinlogs.properties.user_id), + Esql_priv.azure_signinlogs_properties_user_principal_name_values = values(azure.signinlogs.properties.user_principal_name), + Esql.azure_signinlogs_result_description_values = values(azure.signinlogs.result_description), + Esql.azure_signinlogs_result_signature_values = values(azure.signinlogs.result_signature), + Esql.azure_signinlogs_result_type_values = values(azure.signinlogs.result_type), + + 
Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct = count_distinct(Esql_priv.azure_signinlogs_properties_user_principal_name_lower), + Esql_priv.azure_signinlogs_properties_user_principal_name_lower_values = values(Esql_priv.azure_signinlogs_properties_user_principal_name_lower), + Esql.azure_signinlogs_result_description_count_distinct = count_distinct(azure.signinlogs.result_description), + Esql.azure_signinlogs_properties_status_error_code_count_distinct = count_distinct(azure.signinlogs.properties.status.error_code), + Esql.azure_signinlogs_properties_status_error_code_values = values(azure.signinlogs.properties.status.error_code), + Esql.azure_signinlogs_properties_incoming_token_type_lower_values = values(Esql.azure_signinlogs_properties_incoming_token_type_lower), + Esql.azure_signinlogs_properties_app_display_name_lower_values = values(Esql.azure_signinlogs_properties_app_display_name_lower), + Esql.source_ip_values = values(source.ip), + Esql.source_ip_count_distinct = count_distinct(source.ip), + Esql.source_as_organization_name_values = values(source.`as`.organization.name), + Esql.source_as_organization_name_count_distinct = count_distinct(source.`as`.organization.name), + Esql.source_geo_country_name_values = values(source.geo.country_name), + Esql.source_geo_country_name_count_distinct = count_distinct(source.geo.country_name), + Esql.@timestamp.min = min(@timestamp), + Esql.@timestamp.max = max(@timestamp), + Esql.event_count = count() +by Esql.time_window_date_trunc + +| where Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct >= 15 and Esql.event_count >= 20 + +| keep + Esql.time_window_date_trunc, + Esql.event_count, + Esql.@timestamp.min, + Esql.@timestamp.max, + Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct, + Esql_priv.azure_signinlogs_properties_user_principal_name_lower_values, + Esql.azure_signinlogs_result_description_count_distinct, + Esql.azure_signinlogs_result_description_values, + Esql.azure_signinlogs_properties_status_error_code_count_distinct, + Esql.azure_signinlogs_properties_status_error_code_values, + Esql.azure_signinlogs_properties_incoming_token_type_lower_values, + Esql.azure_signinlogs_properties_app_display_name_lower_values, + Esql.source_ip_values, + Esql.source_ip_count_distinct, + Esql.source_as_organization_name_values, + Esql.source_as_organization_name_count_distinct, + Esql.source_geo_country_name_values, + Esql.source_geo_country_name_count_distinct, + Esql.azure_signinlogs_properties_authentication_requirement_values, + Esql.azure_signinlogs_properties_app_id_values, + Esql.azure_signinlogs_properties_app_display_name_values, + Esql.azure_signinlogs_properties_resource_id_values, + Esql.azure_signinlogs_properties_resource_display_name_values, + Esql.azure_signinlogs_properties_conditional_access_status_values, + Esql.azure_signinlogs_properties_device_detail_browser_values, + Esql.azure_signinlogs_properties_device_detail_device_id_values, + Esql.azure_signinlogs_properties_device_detail_operating_system_values, + Esql.azure_signinlogs_properties_incoming_token_type_values, + Esql.azure_signinlogs_properties_risk_state_values, + Esql.azure_signinlogs_properties_session_id_values, + Esql.azure_signinlogs_properties_user_id_values, + Esql_priv.azure_signinlogs_properties_user_principal_name_values, + Esql.azure_signinlogs_result_description_values, + Esql.azure_signinlogs_result_signature_values, + Esql.azure_signinlogs_result_type_values ''' diff --git 
a/rules/integrations/azure/credential_access_entra_signin_brute_force_microsoft_365.toml b/rules/integrations/azure/credential_access_entra_signin_brute_force_microsoft_365.toml index a311d6ba5ec..1185de758c1 100644 --- a/rules/integrations/azure/credential_access_entra_signin_brute_force_microsoft_365.toml +++ b/rules/integrations/azure/credential_access_entra_signin_brute_force_microsoft_365.toml @@ -2,7 +2,7 @@ creation_date = "2024/09/06" integration = ["azure"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -88,28 +88,22 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.signinlogs* - -| EVAL - time_window = DATE_TRUNC(15 minutes, @timestamp), - user_id = TO_LOWER(azure.signinlogs.properties.user_principal_name), - ip = source.ip, - login_error = azure.signinlogs.result_description, - error_code = azure.signinlogs.properties.status.error_code, - request_type = TO_LOWER(azure.signinlogs.properties.incoming_token_type), - app_name = TO_LOWER(azure.signinlogs.properties.app_display_name), - asn_org = source.`as`.organization.name, - country = source.geo.country_name, - user_agent = user_agent.original, - event_time = @timestamp - -| WHERE event.dataset == "azure.signinlogs" - AND event.category == "authentication" - AND azure.signinlogs.category IN ("NonInteractiveUserSignInLogs", "SignInLogs") - AND azure.signinlogs.properties.resource_display_name RLIKE "(.*)365|SharePoint|Exchange|Teams|Office(.*)" - AND event.outcome == "failure" - AND error_code != 50053 - AND azure.signinlogs.properties.status.error_code IN ( +from logs-azure.signinlogs* + +| eval + Esql.time_window_date_trunc = date_trunc(15 minutes, @timestamp), + Esql_priv.azure_signinlogs_properties_user_principal_name_lower = to_lower(azure.signinlogs.properties.user_principal_name), + Esql.azure_signinlogs_properties_incoming_token_type_lower = to_lower(azure.signinlogs.properties.incoming_token_type), + Esql.azure_signinlogs_properties_app_display_name_lower = to_lower(azure.signinlogs.properties.app_display_name), + Esql.user_agent_original = user_agent.original + +| where event.dataset == "azure.signinlogs" + and event.category == "authentication" + and azure.signinlogs.category in ("NonInteractiveUserSignInLogs", "SignInLogs") + and azure.signinlogs.properties.resource_display_name rlike "(.*)365|SharePoint|Exchange|Teams|Office(.*)" + and event.outcome == "failure" + and azure.signinlogs.properties.status.error_code != 50053 + and azure.signinlogs.properties.status.error_code in ( 50034, // UserAccountNotFound 50126, // InvalidUsernameOrPassword 50055, // PasswordExpired @@ -131,84 +125,113 @@ FROM logs-azure.signinlogs* 120002, // PasswordChangeInvalidNewPasswordWeak 120020 // PasswordChangeFailure ) - AND user_id IS NOT NULL AND user_id != "" - AND user_agent != "Mozilla/5.0 (compatible; MSAL 1.0) PKeyAuth/1.0" - -| STATS - authentication_requirement = VALUES(azure.signinlogs.properties.authentication_requirement), - client_app_id = VALUES(azure.signinlogs.properties.app_id), - client_app_display_name = VALUES(azure.signinlogs.properties.app_display_name), - target_resource_id = VALUES(azure.signinlogs.properties.resource_id), - target_resource_display_name = VALUES(azure.signinlogs.properties.resource_display_name), - conditional_access_status = VALUES(azure.signinlogs.properties.conditional_access_status), - device_detail_browser = VALUES(azure.signinlogs.properties.device_detail.browser), - device_detail_device_id = 
VALUES(azure.signinlogs.properties.device_detail.device_id), - device_detail_operating_system = VALUES(azure.signinlogs.properties.device_detail.operating_system), - incoming_token_type = VALUES(azure.signinlogs.properties.incoming_token_type), - risk_state = VALUES(azure.signinlogs.properties.risk_state), - session_id = VALUES(azure.signinlogs.properties.session_id), - user_id = VALUES(azure.signinlogs.properties.user_id), - user_principal_name = VALUES(azure.signinlogs.properties.user_principal_name), - result_description = VALUES(azure.signinlogs.result_description), - result_signature = VALUES(azure.signinlogs.result_signature), - result_type = VALUES(azure.signinlogs.result_type), - - unique_users = COUNT_DISTINCT(user_id), - user_id_list = VALUES(user_id), - login_errors = VALUES(login_error), - unique_login_errors = COUNT_DISTINCT(login_error), - error_codes = VALUES(error_code), - unique_error_codes = COUNT_DISTINCT(error_code), - request_types = VALUES(request_type), - app_names = VALUES(app_name), - ip_list = VALUES(ip), - unique_ips = COUNT_DISTINCT(ip), - source_orgs = VALUES(asn_org), - countries = VALUES(country), - unique_country_count = COUNT_DISTINCT(country), - unique_asn_orgs = COUNT_DISTINCT(asn_org), - first_seen = MIN(event_time), - last_seen = MAX(event_time), - total_attempts = COUNT() -BY time_window - -| EVAL - duration_seconds = DATE_DIFF("seconds", first_seen, last_seen), - bf_type = CASE( - // Many users, relatively few distinct login errors, distributed over multiple IPs (but not too many), - // and happens quickly. Often bots using leaked credentials. - unique_users >= 10 AND total_attempts >= 30 AND unique_login_errors <= 3 - AND unique_ips >= 5 - AND duration_seconds <= 600 - AND unique_users > unique_ips, + and azure.signinlogs.properties.user_principal_name is not null + and azure.signinlogs.properties.user_principal_name != "" + and user_agent.original != "Mozilla/5.0 (compatible; MSAL 1.0) PKeyAuth/1.0" + +| stats + Esql.azure_signinlogs_properties_authentication_requirement_values = values(azure.signinlogs.properties.authentication_requirement), + Esql.azure_signinlogs_properties_app_id_values = values(azure.signinlogs.properties.app_id), + Esql.azure_signinlogs_properties_app_display_name_values = values(azure.signinlogs.properties.app_display_name), + Esql.azure_signinlogs_properties_resource_id_values = values(azure.signinlogs.properties.resource_id), + Esql.azure_signinlogs_properties_resource_display_name_values = values(azure.signinlogs.properties.resource_display_name), + Esql.azure_signinlogs_properties_conditional_access_status_values = values(azure.signinlogs.properties.conditional_access_status), + Esql.azure_signinlogs_properties_device_detail_browser_values = values(azure.signinlogs.properties.device_detail.browser), + Esql.azure_signinlogs_properties_device_detail_device_id_values = values(azure.signinlogs.properties.device_detail.device_id), + Esql.azure_signinlogs_properties_device_detail_operating_system_values = values(azure.signinlogs.properties.device_detail.operating_system), + Esql.azure_signinlogs_properties_incoming_token_type_values = values(azure.signinlogs.properties.incoming_token_type), + Esql.azure_signinlogs_properties_risk_state_values = values(azure.signinlogs.properties.risk_state), + Esql.azure_signinlogs_properties_session_id_values = values(azure.signinlogs.properties.session_id), + Esql.azure_signinlogs_properties_user_id_values = values(azure.signinlogs.properties.user_id), + 
Esql_priv.azure_signinlogs_properties_user_principal_name_values = values(azure.signinlogs.properties.user_principal_name), + Esql.azure_signinlogs_result_description_values = values(azure.signinlogs.result_description), + Esql.azure_signinlogs_result_signature_values = values(azure.signinlogs.result_signature), + Esql.azure_signinlogs_result_type_values = values(azure.signinlogs.result_type), + + Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct = count_distinct(Esql_priv.azure_signinlogs_properties_user_principal_name_lower), + Esql_priv.azure_signinlogs_properties_user_principal_name_lower_values = values(Esql_priv.azure_signinlogs_properties_user_principal_name_lower), + Esql.azure_signinlogs_result_description_count_distinct = count_distinct(azure.signinlogs.result_description), + Esql.azure_signinlogs_result_description_values = values(azure.signinlogs.result_description), + Esql.azure_signinlogs_properties_status_error_code_count_distinct = count_distinct(azure.signinlogs.properties.status.error_code), + Esql.azure_signinlogs_properties_status_error_code_values = values(azure.signinlogs.properties.status.error_code), + Esql.azure_signinlogs_properties_incoming_token_type_lower_values = values(Esql.azure_signinlogs_properties_incoming_token_type_lower), + Esql.azure_signinlogs_properties_app_display_name_lower_values = values(Esql.azure_signinlogs_properties_app_display_name_lower), + Esql.source_ip_values = values(source.ip), + Esql.source_ip_count_distinct = count_distinct(source.ip), + Esql.source_as_organization_name_values = values(source.`as`.organization.name), + Esql.source_as_organization_name_count_distinct = count_distinct(source.`as`.organization.name), + Esql.source_geo_country_name_values = values(source.geo.country_name), + Esql.source_geo_country_name_count_distinct = count_distinct(source.geo.country_name), + Esql.@timestamp.min = min(@timestamp), + Esql.@timestamp.max = max(@timestamp), + Esql.event_count = count() +by Esql.time_window_date_trunc + +| eval + Esql.event_duration_seconds = date_diff("seconds", Esql.@timestamp.min, Esql.@timestamp.max), + Esql.event_bf_type = case( + Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct >= 10 + and Esql.event_count >= 30 + and Esql.azure_signinlogs_result_description_count_distinct <= 3 + and Esql.source_ip_count_distinct >= 5 + and Esql.event_duration_seconds <= 600 + and Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct > Esql.source_ip_count_distinct, "credential_stuffing", - // One password against many users. Single error (e.g., "InvalidPassword"), not necessarily fast. - unique_users >= 15 AND unique_login_errors == 1 AND total_attempts >= 15 AND duration_seconds <= 1800, + Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct >= 15 + and Esql.azure_signinlogs_result_description_count_distinct == 1 + and Esql.event_count >= 15 + and Esql.event_duration_seconds <= 1800, "password_spraying", - // One user targeted repeatedly (same error), OR extremely noisy pattern from many IPs. 
- (unique_users == 1 AND unique_login_errors == 1 AND total_attempts >= 30 AND duration_seconds <= 300) - OR (unique_users <= 3 AND unique_ips > 30 AND total_attempts >= 100), + (Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct == 1 + and Esql.azure_signinlogs_result_description_count_distinct == 1 + and Esql.event_count >= 30 + and Esql.event_duration_seconds <= 300) + or (Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct <= 3 + and Esql.source_ip_count_distinct > 30 + and Esql.event_count >= 100), "password_guessing", - // everything else "other" ) -| KEEP - time_window, bf_type, duration_seconds, total_attempts, first_seen, last_seen, - unique_users, user_id_list, login_errors, unique_login_errors, - unique_error_codes, error_codes, request_types, app_names, - ip_list, unique_ips, source_orgs, countries, - unique_country_count, unique_asn_orgs, - authentication_requirement, client_app_id, client_app_display_name, - target_resource_id, target_resource_display_name, conditional_access_status, - device_detail_browser, device_detail_device_id, device_detail_operating_system, - incoming_token_type, risk_state, session_id, user_id, - user_principal_name, result_description, result_signature, result_type - -| WHERE bf_type != "other" +| where Esql.event_bf_type != "other" + +| keep + Esql.time_window_date_trunc, + Esql.event_bf_type, + Esql.event_duration_seconds, + Esql.event_count, + Esql.@timestamp.min, + Esql.@timestamp.max, + Esql.azure_signinlogs_properties_user_principal_name_lower_count_distinct, + Esql_priv.azure_signinlogs_properties_user_principal_name_lower_values, + Esql.azure_signinlogs_result_description_count_distinct, + Esql.azure_signinlogs_result_description_values, + Esql.azure_signinlogs_properties_status_error_code_count_distinct, + Esql.azure_signinlogs_properties_status_error_code_values, + Esql.azure_signinlogs_properties_incoming_token_type_lower_values, + Esql.azure_signinlogs_properties_app_display_name_lower_values, + Esql.source_ip_values, + Esql.source_ip_count_distinct, + Esql.source_as_organization_name_values, + Esql.source_as_organization_name_count_distinct, + Esql.source_geo_country_name_values, + Esql.source_geo_country_name_count_distinct, + Esql.azure_signinlogs_properties_authentication_requirement_values, + Esql.azure_signinlogs_properties_app_id_values, + Esql.azure_signinlogs_properties_app_display_name_values, + Esql.azure_signinlogs_properties_resource_id_values, + Esql.azure_signinlogs_properties_resource_display_name_values, + Esql.azure_signinlogs_properties_conditional_access_status_values, + Esql.azure_signinlogs_properties_device_detail_browser_values, + Esql.azure_signinlogs_properties_device_detail_device_id_values, + Esql.azure_signinlogs_properties_device_detail_operating_system_values, + Esql.azure_signinlogs_properties_incoming_token_type_values, + Esql.azure_signinlogs_properties_risk_state_values, + Esql.azure_signinlogs_properties_session_id_values, + Esql.azure_signinlogs_properties_user_id_values ''' diff --git a/rules/integrations/azure/initial_access_entra_graph_single_session_from_multiple_addresses.toml b/rules/integrations/azure/initial_access_entra_graph_single_session_from_multiple_addresses.toml index 6feacb31af9..58bdd681f8c 100644 --- a/rules/integrations/azure/initial_access_entra_graph_single_session_from_multiple_addresses.toml +++ b/rules/integrations/azure/initial_access_entra_graph_single_session_from_multiple_addresses.toml @@ -2,7 +2,7 @@ creation_date = 
"2025/05/08" integration = ["azure"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -88,49 +88,65 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.* -| WHERE - (event.dataset == "azure.signinlogs" AND source.`as`.organization.name != "MICROSOFT-CORP-MSN-AS-BLOCK" AND azure.signinlogs.properties.session_id IS NOT NULL) - OR - (event.dataset == "azure.graphactivitylogs" AND source.`as`.organization.name != "MICROSOFT-CORP-MSN-AS-BLOCK" AND azure.graphactivitylogs.properties.c_sid IS NOT NULL) -| EVAL - session_id = COALESCE(azure.signinlogs.properties.session_id, azure.graphactivitylogs.properties.c_sid), - user_id = COALESCE(azure.signinlogs.properties.user_id, azure.graphactivitylogs.properties.user_principal_object_id), - client_id = COALESCE(azure.signinlogs.properties.app_id, azure.graphactivitylogs.properties.app_id), - source_ip = source.ip, - event_time = @timestamp, - event_type = CASE( +from logs-azure.* +| where + (event.dataset == "azure.signinlogs" + and source.`as`.organization.name != "MICROSOFT-CORP-MSN-as-BLOCK" + and azure.signinlogs.properties.session_id is not null) + or + (event.dataset == "azure.graphactivitylogs" + and source.`as`.organization.name != "MICROSOFT-CORP-MSN-as-BLOCK" + and azure.graphactivitylogs.properties.c_sid is not null) + +| eval + Esql.azure_signinlogs_properties_session_id_coalesce = coalesce(azure.signinlogs.properties.session_id, azure.graphactivitylogs.properties.c_sid), + Esql.azure_signinlogs_properties_user_id_coalesce = coalesce(azure.signinlogs.properties.user_id, azure.graphactivitylogs.properties.user_principal_object_id), + Esql.azure_signinlogs_properties_app_id_coalesce = coalesce(azure.signinlogs.properties.app_id, azure.graphactivitylogs.properties.app_id), + Esql.source_ip = source.ip, + Esql.@timestamp = @timestamp, + Esql.event_type_case = case( event.dataset == "azure.signinlogs", "signin", event.dataset == "azure.graphactivitylogs", "graph", "other" ), - time_window = DATE_TRUNC(5 minutes, @timestamp) -| KEEP session_id, source_ip, event_time, event_type, time_window, user_id, client_id -| STATS - user_id = VALUES(user_id), - session_id = VALUES(session_id), - source_ip_list = VALUES(source_ip), - source_ip_count = COUNT_DISTINCT(source_ip), - client_id_list = VALUES(client_id), - application_count = COUNT_DISTINCT(client_id), - event_type_list = VALUES(event_type), - event_type_count = COUNT_DISTINCT(event_type), - event_start = MIN(event_time), - event_end = MAX(event_time), - signin_time = MIN(CASE(event_type == "signin", event_time, NULL)), - graph_time = MIN(CASE(event_type == "graph", event_time, NULL)), - document_count = COUNT() - BY session_id, time_window -| EVAL - duration_minutes = DATE_DIFF("minutes", event_start, event_end), - signin_to_graph_delay_minutes = DATE_DIFF("minutes", signin_time, graph_time) -| WHERE - event_type_count > 1 AND - source_ip_count > 1 AND - duration_minutes <= 5 AND - signin_time IS NOT NULL AND - graph_time IS NOT NULL AND - signin_to_graph_delay_minutes >= 0 + Esql.time_window_date_trunc = date_trunc(5 minutes, @timestamp) + +| keep + Esql.azure_signinlogs_properties_session_id_coalesce, + Esql.source_ip, + Esql.@timestamp, + Esql.event_type_case, + Esql.time_window_date_trunc, + Esql.azure_signinlogs_properties_user_id_coalesce, + Esql.azure_signinlogs_properties_app_id_coalesce + +| stats + Esql.azure_signinlogs_properties_user_id_coalesce_values = 
values(Esql.azure_signinlogs_properties_user_id_coalesce), + Esql.azure_signinlogs_properties_session_id_coalesce_values = values(Esql.azure_signinlogs_properties_session_id_coalesce), + Esql.source_ip_values = values(Esql.source_ip), + Esql.source_ip_count_distinct = count_distinct(Esql.source_ip), + Esql.azure_signinlogs_properties_app_id_coalesce_values = values(Esql.azure_signinlogs_properties_app_id_coalesce), + Esql.azure_signinlogs_properties_app_id_coalesce_count_distinct = count_distinct(Esql.azure_signinlogs_properties_app_id_coalesce), + Esql.event_type_case_values = values(Esql.event_type_case), + Esql.event_type_case_count_distinct = count_distinct(Esql.event_type_case), + Esql.@timestamp.min = min(Esql.@timestamp), + Esql.@timestamp.max = max(Esql.@timestamp), + Esql.signin_time_min = min(case(Esql.event_type_case == "signin", Esql.@timestamp, null)), + Esql.graph_time_min = min(case(Esql.event_type_case == "graph", Esql.@timestamp, null)), + Esql.event_count = count() + by Esql.azure_signinlogs_properties_session_id_coalesce, Esql.time_window_date_trunc + +| eval + Esql.event_duration_minutes_date_diff = date_diff("minutes", Esql.@timestamp.min, Esql.@timestamp.max), + Esql.event_signin_to_graph_delay_minutes_date_diff = date_diff("minutes", Esql.signin_time_min, Esql.graph_time_min) + +| where + Esql.event_type_case_count_distinct > 1 and + Esql.source_ip_count_distinct > 1 and + Esql.event_duration_minutes_date_diff <= 5 and + Esql.signin_time_min is not null and + Esql.graph_time_min is not null and + Esql.event_signin_to_graph_delay_minutes_date_diff >= 0 ''' diff --git a/rules/integrations/azure/initial_access_entra_id_suspicious_oauth_flow_via_auth_broker_to_drs.toml b/rules/integrations/azure/initial_access_entra_id_suspicious_oauth_flow_via_auth_broker_to_drs.toml index 61581563aa1..0cd35b7bd96 100644 --- a/rules/integrations/azure/initial_access_entra_id_suspicious_oauth_flow_via_auth_broker_to_drs.toml +++ b/rules/integrations/azure/initial_access_entra_id_suspicious_oauth_flow_via_auth_broker_to_drs.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/30" integration = ["azure"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -89,95 +89,105 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.signinlogs* metadata _id, _version, _index - -// Filter for Microsoft Entra ID sign-in logs -| WHERE event.dataset == "azure.signinlogs" - AND event.outcome == "success" - AND azure.signinlogs.properties.user_type == "Member" - AND azure.signinlogs.identity IS NOT NULL - AND azure.signinlogs.properties.user_principal_name IS NOT NULL - AND source.address IS NOT NULL - - // Filter for MAB as client (app_id) and DRS as resource (resource_id) - AND azure.signinlogs.properties.app_id == "29d9ed98-a469-4536-ade2-f981bc1d605e" // MAB - AND azure.signinlogs.properties.resource_id == "01cb2876-7ebd-4aa4-9cc9-d28bd4d359a9" // DRS - -// Normalize timestamps into 30-minute detection windows -| EVAL target_time_window = DATE_TRUNC(30 minutes, @timestamp) - -// Tag browser-based requests and extract session ID -| EVAL - session_id = azure.signinlogs.properties.session_id, - is_browser = CASE( - TO_LOWER(azure.signinlogs.properties.device_detail.browser) RLIKE "(chrome|firefox|edge|safari).*", 1, 0 +from logs-azure.signinlogs* metadata _id, _version, _index +| where + event.dataset == "azure.signinlogs" and + event.outcome == "success" and + azure.signinlogs.properties.user_type == "Member" and + 
azure.signinlogs.identity is not null and + azure.signinlogs.properties.user_principal_name is not null and + source.address is not null and + azure.signinlogs.properties.app_id == "29d9ed98-a469-4536-ade2-f981bc1d605e" and // MAB + azure.signinlogs.properties.resource_id == "01cb2876-7ebd-4aa4-9cc9-d28bd4d359a9" // DRS + +| eval + Esql.time_window_date_trunc = date_trunc(30 minutes, @timestamp), + Esql.azure_signinlogs_properties_session_id = azure.signinlogs.properties.session_id, + Esql.is_browser_case = case( + to_lower(azure.signinlogs.properties.device_detail.browser) rlike "(chrome|firefox|edge|safari).*", 1, 0 ) -| STATS - // user & session identity - user_display_name = VALUES(azure.signinlogs.properties.user_display_name), - user_principal_name = VALUES(azure.signinlogs.properties.user_principal_name), - session_id = VALUES(azure.signinlogs.properties.session_id), - unique_token_id = VALUES(azure.signinlogs.properties.unique_token_identifier), - - // geolocation - city_name = VALUES(source.geo.city_name), - country_name = VALUES(source.geo.country_name), - region_name = VALUES(source.geo.region_name), - source_ip = VALUES(source.address), - ip_count = COUNT_DISTINCT(source.address), - autonomous_system = VALUES(source.`as`.organization.name), - - // authentication context - auth_protocol = VALUES(azure.signinlogs.properties.authentication_protocol), - auth_requirement = VALUES(azure.signinlogs.properties.authentication_requirement), - is_interactive = VALUES(azure.signinlogs.properties.is_interactive), - - // token & app context - token_type = VALUES(azure.signinlogs.properties.incoming_token_type), - token_session_status = VALUES(azure.signinlogs.properties.token_protection_status_details.sign_in_session_status), - session_id_count = COUNT_DISTINCT(session_id), - client_app_display_name = VALUES(azure.signinlogs.properties.app_display_name), - client_app_ids = VALUES(azure.signinlogs.properties.app_id), - target_resource_ids = VALUES(azure.signinlogs.properties.resource_id), - target_resource_display_name = VALUES(azure.signinlogs.properties.resource_display_name), - - // tenant details - app_owner_tenant_id = VALUES(azure.signinlogs.properties.app_owner_tenant_id), - resource_owner_tenant_id = VALUES(azure.signinlogs.properties.resource_owner_tenant_id), - - // conditional access & risk signals - conditional_access_status = VALUES(azure.signinlogs.properties.conditional_access_status), - risk_state = VALUES(azure.signinlogs.properties.risk_state), - risk_level_aggregated = VALUES(azure.signinlogs.properties.risk_level_aggregated), - - // user agent & device - browser = VALUES(azure.signinlogs.properties.device_detail.browser), - os = VALUES(azure.signinlogs.properties.device_detail.operating_system), - user_agent = VALUES(user_agent.original), - has_browser = MAX(is_browser), - - auth_count = COUNT(*) -BY - target_time_window, +| stats + Esql_priv.azure_signinlogs_properties_user_display_name_values = values(azure.signinlogs.properties.user_display_name), + Esql_priv.azure_signinlogs_properties_user_principal_name_values = values(azure.signinlogs.properties.user_principal_name), + Esql.azure_signinlogs_properties_session_id_values = values(azure.signinlogs.properties.session_id), + Esql.azure_signinlogs_properties_unique_token_identifier_values = values(azure.signinlogs.properties.unique_token_identifier), + + Esql.source_geo_city_name_values = values(source.geo.city_name), + Esql.source_geo_country_name_values = values(source.geo.country_name), + 
Esql.source_geo_region_name_values = values(source.geo.region_name), + Esql.source_address_values = values(source.address), + Esql.source_address_count_distinct = count_distinct(source.address), + Esql.source_as_organization_name_values = values(source.`as`.organization.name), + + Esql.azure_signinlogs_properties_authentication_protocol_values = values(azure.signinlogs.properties.authentication_protocol), + Esql.azure_signinlogs_properties_authentication_requirement_values = values(azure.signinlogs.properties.authentication_requirement), + Esql.azure_signinlogs_properties_is_interactive_values = values(azure.signinlogs.properties.is_interactive), + + Esql.azure_signinlogs_properties_incoming_token_type_values = values(azure.signinlogs.properties.incoming_token_type), + Esql.azure_signinlogs_properties_token_protection_status_details_sign_in_session_status_values = values(azure.signinlogs.properties.token_protection_status_details.sign_in_session_status), + Esql.azure_signinlogs_properties_session_id_count_distinct = count_distinct(azure.signinlogs.properties.session_id), + Esql.azure_signinlogs_properties_app_display_name_values = values(azure.signinlogs.properties.app_display_name), + Esql.azure_signinlogs_properties_app_id_values = values(azure.signinlogs.properties.app_id), + Esql.azure_signinlogs_properties_resource_id_values = values(azure.signinlogs.properties.resource_id), + Esql.azure_signinlogs_properties_resource_display_name_values = values(azure.signinlogs.properties.resource_display_name), + + Esql.azure_signinlogs_properties_app_owner_tenant_id_values = values(azure.signinlogs.properties.app_owner_tenant_id), + Esql.azure_signinlogs_properties_resource_owner_tenant_id_values = values(azure.signinlogs.properties.resource_owner_tenant_id), + + Esql.azure_signinlogs_properties_conditional_access_status_values = values(azure.signinlogs.properties.conditional_access_status), + Esql.azure_signinlogs_properties_risk_state_values = values(azure.signinlogs.properties.risk_state), + Esql.azure_signinlogs_properties_risk_level_aggregated_values = values(azure.signinlogs.properties.risk_level_aggregated), + + Esql.azure_signinlogs_properties_device_detail_browser_values = values(azure.signinlogs.properties.device_detail.browser), + Esql.azure_signinlogs_properties_device_detail_operating_system_values = values(azure.signinlogs.properties.device_detail.operating_system), + Esql.user_agent_original_values = values(user_agent.original), + Esql.is_browser_case_max = max(Esql.is_browser_case), + + Esql.event_count = count(*) + by + Esql.time_window_date_trunc, azure.signinlogs.properties.user_principal_name, - session_id - -| KEEP - target_time_window, user_display_name, user_principal_name, session_id, unique_token_id, - city_name, country_name, region_name, source_ip, ip_count, autonomous_system, - auth_protocol, auth_requirement, is_interactive, - token_type, token_session_status, session_id_count, client_app_display_name, - client_app_ids, target_resource_ids, target_resource_display_name, - app_owner_tenant_id, resource_owner_tenant_id, - conditional_access_status, risk_state, risk_level_aggregated, - browser, os, user_agent, has_browser, auth_count - -| WHERE - ip_count >= 2 AND - session_id_count == 1 AND - has_browser >= 1 AND - auth_count >= 2 + azure.signinlogs.properties.session_id + +| keep + Esql.time_window_date_trunc, + Esql_priv.azure_signinlogs_properties_user_display_name_values, + Esql_priv.azure_signinlogs_properties_user_principal_name_values, + 
Esql.azure_signinlogs_properties_session_id_values, + Esql.azure_signinlogs_properties_unique_token_identifier_values, + Esql.source_geo_city_name_values, + Esql.source_geo_country_name_values, + Esql.source_geo_region_name_values, + Esql.source_address_values, + Esql.source_address_count_distinct, + Esql.source_as_organization_name_values, + Esql.azure_signinlogs_properties_authentication_protocol_values, + Esql.azure_signinlogs_properties_authentication_requirement_values, + Esql.azure_signinlogs_properties_is_interactive_values, + Esql.azure_signinlogs_properties_incoming_token_type_values, + Esql.azure_signinlogs_properties_token_protection_status_details_sign_in_session_status_values, + Esql.azure_signinlogs_properties_session_id_count_distinct, + Esql.azure_signinlogs_properties_app_display_name_values, + Esql.azure_signinlogs_properties_app_id_values, + Esql.azure_signinlogs_properties_resource_id_values, + Esql.azure_signinlogs_properties_resource_display_name_values, + Esql.azure_signinlogs_properties_app_owner_tenant_id_values, + Esql.azure_signinlogs_properties_resource_owner_tenant_id_values, + Esql.azure_signinlogs_properties_conditional_access_status_values, + Esql.azure_signinlogs_properties_risk_state_values, + Esql.azure_signinlogs_properties_risk_level_aggregated_values, + Esql.azure_signinlogs_properties_device_detail_browser_values, + Esql.azure_signinlogs_properties_device_detail_operating_system_values, + Esql.user_agent_original_values, + Esql.is_browser_case_max, + Esql.event_count + +| where + Esql.source_address_count_distinct >= 2 and + Esql.azure_signinlogs_properties_session_id_count_distinct == 1 and + Esql.is_browser_case_max >= 1 and + Esql.event_count >= 2 ''' diff --git a/rules/integrations/azure/persistence_entra_id_oidc_discovery_url_change.toml b/rules/integrations/azure/persistence_entra_id_oidc_discovery_url_change.toml index 9a0b54f4ae9..8612b003fad 100644 --- a/rules/integrations/azure/persistence_entra_id_oidc_discovery_url_change.toml +++ b/rules/integrations/azure/persistence_entra_id_oidc_discovery_url_change.toml @@ -2,7 +2,7 @@ creation_date = "2025/07/14" integration = ["azure"] maturity = "production" -updated_date = "2025/07/22" +updated_date = "2025/07/31" [rule] author = ["Elastic"] @@ -57,15 +57,15 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-azure.auditlogs-* metadata _id, _version, _index -| WHERE event.action == "Authentication Methods Policy Update" -| EVAL Esql.azure.auditlogs.properties.target_resources.modified_properties.new_value.replace = REPLACE(`azure.auditlogs.properties.target_resources.0.modified_properties.0.new_value`, "\\\\", "") -| EVAL Esql.azure.auditlogs.properties.target_resources.modified_properties.old_value.replace = REPLACE(`azure.auditlogs.properties.target_resources.0.modified_properties.0.old_value`, "\\\\", "") -| DISSECT Esql.azure.auditlogs.properties.target_resources.modified_properties.new_value.replace "%{}discoveryUrl\":\"%{Esql.azure.auditlogs.properties.auth.oidc.discovery.url.new}\"}%{}" -| DISSECT Esql.azure.auditlogs.properties.target_resources.modified_properties.old_value.replace "%{}discoveryUrl\":\"%{Esql.azure.auditlogs.properties.auth.oidc.discovery.url.old}\"}%{}" -| WHERE Esql.azure.auditlogs.properties.auth.oidc.discovery.url.new IS NOT NULL and Esql.azure.auditlogs.properties.auth.oidc.discovery.url.old IS NOT NULL -| WHERE Esql.azure.auditlogs.properties.auth.oidc.discovery.url.new != Esql.azure.auditlogs.properties.auth.oidc.discovery.url.old -| KEEP 
+from logs-azure.auditlogs-* metadata _id, _version, _index +| where event.action == "Authentication Methods Policy Update" +| eval Esql.azure_auditlogs_properties_target_resources_modified_properties_new_value_replace = replace(`azure.auditlogs.properties.target_resources.0.modified_properties.0.new_value`, "\\\\", "") +| eval Esql.azure_auditlogs_properties_target_resources_modified_properties_old_value_replace = replace(`azure.auditlogs.properties.target_resources.0.modified_properties.0.old_value`, "\\\\", "") +| dissect Esql.azure_auditlogs_properties_target_resources_modified_properties_new_value_replace "%{}discoveryUrl\":\"%{Esql.azure_auditlogs_properties_auth_oidc_discovery_url_new}\"}%{}" +| dissect Esql.azure_auditlogs_properties_target_resources_modified_properties_old_value_replace "%{}discoveryUrl\":\"%{Esql.azure_auditlogs_properties_auth_oidc_discovery_url_old}\"}%{}" +| where Esql.azure_auditlogs_properties_auth_oidc_discovery_url_new is not null and Esql.azure_auditlogs_properties_auth_oidc_discovery_url_old is not null +| where Esql.azure_auditlogs_properties_auth_oidc_discovery_url_new != Esql.azure_auditlogs_properties_auth_oidc_discovery_url_old +| keep @timestamp, event.action, event.outcome, @@ -79,8 +79,8 @@ FROM logs-azure.auditlogs-* metadata _id, _version, _index source.geo.city_name, source.geo.region_name, source.geo.country_name, - Esql.azure.auditlogs.properties.auth.oidc.discovery.url.new, - Esql.azure.auditlogs.properties.auth.oidc.discovery.url.old + Esql.azure_auditlogs_properties_auth_oidc_discovery_url_new, + Esql.azure_auditlogs_properties_auth_oidc_discovery_url_old ''' diff --git a/rules/integrations/azure_openai/azure_openai_denial_of_ml_service_detection.toml b/rules/integrations/azure_openai/azure_openai_denial_of_ml_service_detection.toml index e470b7e5e87..26e2b388fa3 100644 --- a/rules/integrations/azure_openai/azure_openai_denial_of_ml_service_detection.toml +++ b/rules/integrations/azure_openai/azure_openai_denial_of_ml_service_detection.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2025/02/25" maturity = "production" -updated_date = "2025/04/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -77,12 +77,23 @@ type = "esql" query = ''' from logs-azure_openai.logs-* -// truncate the timestamp to a 1-minute window -| eval target_time_window = DATE_TRUNC(1 minutes, @timestamp) +| eval + Esql.time_window_date_trunc = date_trunc(1 minutes, @timestamp) | where azure.open_ai.operation_name == "ChatCompletions_Create" -| keep azure.open_ai.properties.request_length, azure.resource.name, cloud.account.id,target_time_window -| stats count = count(), avg_request_size = avg(azure.open_ai.properties.request_length) by target_time_window, azure.resource.name -| where count >= 10 and avg_request_size >= 5000 -| sort count desc +| keep + azure.open_ai.properties.request_length, + azure.resource.name, + cloud.account.id, + Esql.time_window_date_trunc +| stats + Esql.event_count = count(*), + Esql.azure_open_ai_properties_request_length_avg = avg(azure.open_ai.properties.request_length) + by + Esql.time_window_date_trunc, + azure.resource.name +| where + Esql.event_count >= 10 and + Esql.azure_open_ai_properties_request_length_avg >= 5000 +| sort Esql.event_count desc ''' diff --git a/rules/integrations/azure_openai/azure_openai_insecure_output_handling_detection.toml b/rules/integrations/azure_openai/azure_openai_insecure_output_handling_detection.toml index 9a77b283ba6..4979da3f494 100644 --- 
a/rules/integrations/azure_openai/azure_openai_insecure_output_handling_detection.toml +++ b/rules/integrations/azure_openai/azure_openai_insecure_output_handling_detection.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2025/02/25" maturity = "production" -updated_date = "2025/04/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -72,10 +72,22 @@ type = "esql" query = ''' from logs-azure_openai.logs-* -| where azure.open_ai.properties.response_length == 0 and azure.open_ai.result_signature == "200" and azure.open_ai.operation_name == "ChatCompletions_Create" -| keep azure.open_ai.properties.request_length, azure.open_ai.result_signature, cloud.account.id, azure.resource.name -| stats count = count() by azure.resource.name -| where count >= 10 -| sort count desc +| where + azure.open_ai.properties.response_length == 0 and + azure.open_ai.result_signature == "200" and + azure.open_ai.operation_name == "ChatCompletions_Create" +| keep + azure.open_ai.properties.request_length, + azure.open_ai.result_signature, + cloud.account.id, + azure.resource.name +| stats + Esql.event_count = count(*) + by + azure.resource.name +| where + Esql.event_count >= 10 +| sort + Esql.event_count desc ''' diff --git a/rules/integrations/azure_openai/azure_openai_model_theft_detection.toml b/rules/integrations/azure_openai/azure_openai_model_theft_detection.toml index b1bb72f1ec6..1e584a3119b 100644 --- a/rules/integrations/azure_openai/azure_openai_model_theft_detection.toml +++ b/rules/integrations/azure_openai/azure_openai_model_theft_detection.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2025/02/25" maturity = "production" -updated_date = "2025/04/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -74,10 +74,26 @@ type = "esql" query = ''' from logs-azure_openai.logs-* -| where azure.open_ai.operation_name == "ListKey" and azure.open_ai.category == "Audit" -| KEEP @timestamp, azure.open_ai.operation_name , azure.open_ai.category, azure.resource.group, azure.resource.name, azure.open_ai.properties.response_length -| stats count = count(), max_data_transferred = max(azure.open_ai.properties.response_length) by azure.resource.group , azure.resource.name -| where count >= 100 or max_data_transferred >= 1000000 -| sort count desc +| where + azure.open_ai.operation_name == "ListKey" and + azure.open_ai.category == "Audit" +| keep + @timestamp, + azure.open_ai.operation_name, + azure.open_ai.category, + azure.resource.group, + azure.resource.name, + azure.open_ai.properties.response_length +| stats + Esql.event_count = count(*), + Esql.azure_open_ai_properties_response_length_max = max(azure.open_ai.properties.response_length) + by + azure.resource.group, + azure.resource.name +| where + Esql.event_count >= 100 or + Esql.azure_open_ai_properties_response_length_max >= 1000000 +| sort + Esql.event_count desc ''' diff --git a/rules/integrations/o365/collection_onedrive_excessive_file_downloads.toml b/rules/integrations/o365/collection_onedrive_excessive_file_downloads.toml index 6976ee1c19a..37d9710d3a5 100644 --- a/rules/integrations/o365/collection_onedrive_excessive_file_downloads.toml +++ b/rules/integrations/o365/collection_onedrive_excessive_file_downloads.toml @@ -2,7 +2,7 @@ creation_date = "2025/02/19" integration = ["o365"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -80,27 +80,30 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-o365.audit-* -| WHERE @timestamp > now() - 14 day -| 
WHERE +from logs-o365.audit-* +| where + @timestamp > now() - 14d and event.dataset == "o365.audit" and - - // filter on files downloaded from OneDrive event.provider == "OneDrive" and event.action == "FileDownloaded" and - - // filter on OAuth authentication which encompasses device code workflow - o365.audit.AuthenticationType == "OAuth" - and event.outcome == "success" -// bucket authentication attempts by 1 minute -| EVAL target_time_window = DATE_TRUNC(1 minutes, @timestamp) -| KEEP target_time_window, o365.audit.UserId, file.name, source.ip - -// aggregate on unique file names and download attempts -| STATS unique_file_count = count_distinct(file.name), download_attempt_count = count(*) BY target_time_window, o365.audit.UserId, source.ip - -// adjustable range for "excessive" unique files that were downloaded -| WHERE unique_file_count >= 25 + o365.audit.AuthenticationType == "OAuth" and + event.outcome == "success" +| eval + Esql.time_window_date_trunc = date_trunc(1 minutes, @timestamp) +| keep + Esql.time_window_date_trunc, + o365.audit.UserId, + file.name, + source.ip +| stats + Esql.file_name_count_distinct = count_distinct(file.name), + Esql.event_count = count(*) + by + Esql.time_window_date_trunc, + o365.audit.UserId, + source.ip +| where + Esql.file_name_count_distinct >= 25 ''' diff --git a/rules/integrations/o365/credential_access_microsoft_365_excessive_account_lockouts.toml b/rules/integrations/o365/credential_access_microsoft_365_excessive_account_lockouts.toml index b2cb4e27195..448eb30ea34 100644 --- a/rules/integrations/o365/credential_access_microsoft_365_excessive_account_lockouts.toml +++ b/rules/integrations/o365/credential_access_microsoft_365_excessive_account_lockouts.toml @@ -2,7 +2,7 @@ creation_date = "2025/05/10" integration = ["o365"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -72,57 +72,55 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-o365.audit-* - -| MV_EXPAND event.category -| EVAL - time_window = DATE_TRUNC(5 minutes, @timestamp), - user_id = TO_LOWER(o365.audit.UserId), - ip = source.ip, - login_error = o365.audit.LogonError, - request_type = TO_LOWER(o365.audit.ExtendedProperties.RequestType), - asn_org = source.`as`.organization.name, - country = source.geo.country_name, - event_time = @timestamp - -| WHERE event.dataset == "o365.audit" - AND event.category == "authentication" - AND event.provider IN ("AzureActiveDirectory", "Exchange") - AND event.action IN ("UserLoginFailed", "PasswordLogonInitialAuthUsingPassword") - AND request_type RLIKE "(oauth.*||.*login.*)" - AND login_error == "IdsLocked" - AND user_id != "not available" - AND o365.audit.Target.Type IN ("0", "2", "6", "10") - AND asn_org != "MICROSOFT-CORP-MSN-AS-BLOCK" - -| STATS - unique_users = COUNT_DISTINCT(user_id), - user_id_list = VALUES(user_id), - ip_list = VALUES(ip), - unique_ips = COUNT_DISTINCT(ip), - source_orgs = VALUES(asn_org), - countries = VALUES(country), - unique_country_count = COUNT_DISTINCT(country), - unique_asn_orgs = COUNT_DISTINCT(asn_org), - request_types = VALUES(request_type), - first_seen = MIN(event_time), - last_seen = MAX(event_time), - total_lockout_responses = COUNT() - BY time_window - -| EVAL - duration_seconds = DATE_DIFF("seconds", first_seen, last_seen) - -| KEEP - time_window, unique_users, user_id_list, ip_list, - unique_ips, source_orgs, countries, unique_country_count, - unique_asn_orgs, request_types, first_seen, last_seen, - 
total_lockout_responses, duration_seconds - -| WHERE - unique_users >= 10 AND - total_lockout_responses >= 10 AND - duration_seconds <= 300 +from logs-o365.audit-* +| mv_expand event.category +| eval + Esql.time_window_date_trunc = date_trunc(5 minutes, @timestamp) +| where + event.dataset == "o365.audit" and + event.category == "authentication" and + event.provider in ("AzureActiveDirectory", "Exchange") and + event.action in ("UserLoginFailed", "PasswordLogonInitialAuthUsingPassword") and + to_lower(o365.audit.ExtendedProperties.RequestType) rlike "(oauth.*||.*login.*)" and + o365.audit.LogonError == "IdsLocked" and + to_lower(o365.audit.UserId) != "not available" and + o365.audit.Target.Type in ("0", "2", "6", "10") and + source.`as`.organization.name != "MICROSOFT-CORP-MSN-AS-BLOCK" +| stats + Esql_priv.o365_audit_UserId_count_distinct = count_distinct(to_lower(o365.audit.UserId)), + Esql_priv.o365_audit_UserId_values = values(to_lower(o365.audit.UserId)), + Esql.source_ip_values = values(source.ip), + Esql.source_ip_count_distinct = count_distinct(source.ip), + Esql.source_as_organization_name_values = values(source.`as`.organization.name), + Esql.source_as_organization_name_count_distinct = count_distinct(source.`as`.organization.name), + Esql.source_geo_country_name_values = values(source.geo.country_name), + Esql.source_geo_country_name_count_distinct = count_distinct(source.geo.country_name), + Esql.o365_audit_ExtendedProperties_RequestType_values = values(to_lower(o365.audit.ExtendedProperties.RequestType)), + Esql.timestamp_first_seen = min(@timestamp), + Esql.timestamp_last_seen = max(@timestamp), + Esql.event_count = count(*) + by Esql.time_window_date_trunc +| eval + Esql.event_duration_seconds = date_diff("seconds", Esql.timestamp_first_seen, Esql.timestamp_last_seen) +| keep + Esql.time_window_date_trunc, + Esql_priv.o365_audit_UserId_count_distinct, + Esql_priv.o365_audit_UserId_values, + Esql.source_ip_values, + Esql.source_ip_count_distinct, + Esql.source_as_organization_name_values, + Esql.source_as_organization_name_count_distinct, + Esql.source_geo_country_name_values, + Esql.source_geo_country_name_count_distinct, + Esql.o365_audit_ExtendedProperties_RequestType_values, + Esql.timestamp_first_seen, + Esql.timestamp_last_seen, + Esql.event_count, + Esql.event_duration_seconds +| where + Esql_priv.o365_audit_UserId_count_distinct >= 10 and + Esql.event_count >= 10 and + Esql.event_duration_seconds <= 300 ''' diff --git a/rules/integrations/o365/credential_access_microsoft_365_potential_user_account_brute_force.toml b/rules/integrations/o365/credential_access_microsoft_365_potential_user_account_brute_force.toml index bb2a93206f3..20413cd6ea4 100644 --- a/rules/integrations/o365/credential_access_microsoft_365_potential_user_account_brute_force.toml +++ b/rules/integrations/o365/credential_access_microsoft_365_potential_user_account_brute_force.toml @@ -2,7 +2,7 @@ creation_date = "2020/11/30" integration = ["o365"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic", "Willem D'Haese", "Austin Songer"] @@ -78,67 +78,76 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-o365.audit-* - -| MV_EXPAND event.category -| EVAL - time_window = DATE_TRUNC(5 minutes, @timestamp), - user_id = TO_LOWER(o365.audit.UserId), - ip = source.ip, - login_error = o365.audit.LogonError, - request_type = TO_LOWER(o365.audit.ExtendedProperties.RequestType), - asn_org = source.`as`.organization.name, - country =
source.geo.country_name, - event_time = @timestamp - -| WHERE event.dataset == "o365.audit" - AND event.category == "authentication" - AND event.provider IN ("AzureActiveDirectory", "Exchange") - AND event.action IN ("UserLoginFailed", "PasswordLogonInitialAuthUsingPassword") - AND request_type RLIKE "(oauth.*||.*login.*)" - AND login_error != "IdsLocked" - AND login_error NOT IN ( - "EntitlementGrantsNotFound", "UserStrongAuthEnrollmentRequired", "UserStrongAuthClientAuthNRequired", - "InvalidReplyTo", "SsoArtifactExpiredDueToConditionalAccess", "PasswordResetRegistrationRequiredInterrupt", - "SsoUserAccountNotFoundInResourceTenant", "UserStrongAuthExpired", "CmsiInterrupt" - ) - AND user_id != "not available" - AND o365.audit.Target.Type IN ("0", "2", "6", "10") - -| STATS - unique_users = COUNT_DISTINCT(user_id), - user_id_list = VALUES(user_id), - login_errors = VALUES(login_error), - unique_login_errors = COUNT_DISTINCT(login_error), - request_types = VALUES(request_type), - ip_list = VALUES(ip), - unique_ips = COUNT_DISTINCT(ip), - source_orgs = VALUES(asn_org), - countries = VALUES(country), - unique_country_count = COUNT_DISTINCT(country), - unique_asn_orgs = COUNT_DISTINCT(asn_org), - first_seen = MIN(event_time), - last_seen = MAX(event_time), - total_attempts = COUNT() - BY time_window - -| EVAL - duration_seconds = DATE_DIFF("seconds", first_seen, last_seen), - bf_type = CASE( - unique_users >= 15 AND unique_login_errors == 1 AND total_attempts >= 10 AND duration_seconds <= 1800, "password_spraying", - unique_users >= 8 AND total_attempts >= 15 AND unique_login_errors <= 3 AND unique_ips <= 5 AND duration_seconds <= 600, "credential_stuffing", - unique_users == 1 AND unique_login_errors == 1 AND total_attempts >= 20 AND duration_seconds <= 300, "password_guessing", +from logs-o365.audit-* +| mv_expand event.category +| eval + Esql.time_window_date_trunc = date_trunc(5 minutes, @timestamp), + Esql_priv.o365_audit_UserId_lower = to_lower(o365.audit.UserId), + Esql.o365_audit_LogonError = o365.audit.LogonError, + Esql.o365_audit_ExtendedProperties_RequestType_lower = to_lower(o365.audit.ExtendedProperties.RequestType) +| where + event.dataset == "o365.audit" and + event.category == "authentication" and + event.provider in ("AzureActiveDirectory", "Exchange") and + event.action in ("UserLoginFailed", "PasswordLogonInitialAuthUsingPassword") and + Esql.o365_audit_ExtendedProperties_RequestType_lower rlike "(oauth.*||.*login.*)" and + Esql.o365_audit_LogonError != "IdsLocked" and + Esql.o365_audit_LogonError not in ( + "EntitlementGrantsNotFound", + "UserStrongAuthEnrollmentRequired", + "UserStrongAuthClientAuthNRequired", + "InvalidReplyTo", + "SsoArtifactExpiredDueToConditionalAccess", + "PasswordResetRegistrationRequiredInterrupt", + "SsoUserAccountNotFoundInResourceTenant", + "UserStrongAuthExpired", + "CmsiInterrupt" + ) and + Esql_priv.o365_audit_UserId_lower != "not available" and + o365.audit.Target.Type in ("0", "2", "6", "10") +| stats + Esql.o365_audit_UserId_lower_count_distinct = count_distinct(Esql_priv.o365_audit_UserId_lower), + Esql_priv.o365_audit_UserId_lower_values = values(Esql_priv.o365_audit_UserId_lower), + Esql.o365_audit_LogonError_values = values(Esql.o365_audit_LogonError), + Esql.o365_audit_LogonError_count_distinct = count_distinct(Esql.o365_audit_LogonError), + Esql.o365_audit_ExtendedProperties_RequestType_values = values(Esql.o365_audit_ExtendedProperties_RequestType_lower), + Esql.source_ip_values = values(source.ip), + Esql.source_ip_count_distinct 
= count_distinct(source.ip), + Esql.source_as_organization_name_values = values(source.`as`.organization.name), + Esql.source_geo_country_name_values = values(source.geo.country_name), + Esql.source_geo_country_name_count_distinct = count_distinct(source.geo.country_name), + Esql.source_as_organization_name_count_distinct = count_distinct(source.`as`.organization.name), + Esql.timestamp_first_seen = min(@timestamp), + Esql.timestamp_last_seen = max(@timestamp), + Esql.event_count = count(*) + by Esql.time_window_date_trunc +| eval + Esql.event_duration_seconds = date_diff("seconds", Esql.timestamp_first_seen, Esql.timestamp_last_seen), + Esql.brute_force_type = case( + Esql.o365_audit_UserId_lower_count_distinct >= 15 and Esql.o365_audit_LogonError_count_distinct == 1 and Esql.event_count >= 10 and Esql.event_duration_seconds <= 1800, "password_spraying", + Esql.o365_audit_UserId_lower_count_distinct >= 8 and Esql.event_count >= 15 and Esql.o365_audit_LogonError_count_distinct <= 3 and Esql.source_ip_count_distinct <= 5 and Esql.event_duration_seconds <= 600, "credential_stuffing", + Esql.o365_audit_UserId_lower_count_distinct == 1 and Esql.o365_audit_LogonError_count_distinct == 1 and Esql.event_count >= 20 and Esql.event_duration_seconds <= 300, "password_guessing", "other" ) - -| KEEP - time_window, unique_users, user_id_list, login_errors, unique_login_errors, - request_types, ip_list, unique_ips, source_orgs, countries, - unique_country_count, unique_asn_orgs, first_seen, last_seen, - duration_seconds, total_attempts, bf_type - -| WHERE - bf_type != "other" +| keep + Esql.time_window_date_trunc, + Esql.o365_audit_UserId_lower_count_distinct, + Esql_priv.o365_audit_UserId_lower_values, + Esql.o365_audit_LogonError_values, + Esql.o365_audit_LogonError_count_distinct, + Esql.o365_audit_ExtendedProperties_RequestType_values, + Esql.source_ip_values, + Esql.source_ip_count_distinct, + Esql.source_as_organization_name_values, + Esql.source_geo_country_name_values, + Esql.source_geo_country_name_count_distinct, + Esql.source_as_organization_name_count_distinct, + Esql.timestamp_first_seen, + Esql.timestamp_last_seen, + Esql.event_duration_seconds, + Esql.event_count, + Esql.brute_force_type +| where Esql.brute_force_type != "other" ''' diff --git a/rules/integrations/o365/defense_evasion_microsoft_365_susp_oauth2_authorization.toml b/rules/integrations/o365/defense_evasion_microsoft_365_susp_oauth2_authorization.toml index a77b4e56c2e..9c247608297 100644 --- a/rules/integrations/o365/defense_evasion_microsoft_365_susp_oauth2_authorization.toml +++ b/rules/integrations/o365/defense_evasion_microsoft_365_susp_oauth2_authorization.toml @@ -2,7 +2,7 @@ creation_date = "2025/05/01" integration = ["o365"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -69,43 +69,51 @@ type = "esql" query = ''' from logs-o365.audit-* -| WHERE event.dataset == "o365.audit" and event.action == "UserLoggedIn" and - - // ensure source, application and user are not null - source.ip is not null and - o365.audit.UserId is not null and - o365.audit.ApplicationId is not null and - - // filter for user principals that are not service accounts - o365.audit.UserType in ("0", "2", "3", "10") and - - // filter for successful logon to Microsoft Graph and from the Microsoft Authentication Broker or Visual Studio Code - o365.audit.ApplicationId in ("aebc6443-996d-45c2-90f0-388ff96faa56", "29d9ed98-a469-4536-ade2-f981bc1d605e") and - o365.audit.ObjectId in 
("00000003-0000-0000-c000-000000000000") - -// keep relevant fields only -| keep @timestamp, o365.audit.UserId, source.ip, o365.audit.ApplicationId, o365.audit.ObjectId, o365.audit.ExtendedProperties.RequestType, source.as.organization.name, o365.audit.ExtendedProperties.ResultStatusDetail - -// case statements to track which are OAuth2 authorization request via redirect and which are related to OAuth2 code to token conversion +| where + event.dataset == "o365.audit" and + event.action == "UserLoggedIn" and + source.ip is not null and + o365.audit.UserId is not null and + o365.audit.ApplicationId is not null and + o365.audit.UserType in ("0", "2", "3", "10") and + o365.audit.ApplicationId in ("aebc6443-996d-45c2-90f0-388ff96faa56", "29d9ed98-a469-4536-ade2-f981bc1d605e") and + o365.audit.ObjectId in ("00000003-0000-0000-c000-000000000000") | eval - oauth_authorize = case(o365.audit.ExtendedProperties.RequestType == "OAuth2:Authorize" and o365.audit.ExtendedProperties.ResultStatusDetail == "Redirect", o365.audit.UserId, null), - oauth_token = case(o365.audit.ExtendedProperties.RequestType == "OAuth2:Token", o365.audit.UserId, null) - -// split time to 30 minutes intervals -| eval target_time_window = DATE_TRUNC(30 minutes, @timestamp) - -// aggregate by principal, applicationId, objectId and time window + Esql.time_window_date_trunc = date_trunc(30 minutes, @timestamp), + Esql.oauth_authorize_user_id_case = case( + o365.audit.ExtendedProperties.RequestType == "OAuth2:Authorize" and o365.audit.ExtendedProperties.ResultStatusDetail == "Redirect", + o365.audit.UserId, + null + ), + Esql.oauth_token_user_id_case = case( + o365.audit.ExtendedProperties.RequestType == "OAuth2:Token", + o365.audit.UserId, + null + ) | stats - unique_ips = COUNT_DISTINCT(source.ip), - source_ips = VALUES(source.ip), - appIds = VALUES(o365.audit.ApplicationId), - asn = values(`source.as.organization.name`), - is_oauth_token = COUNT_DISTINCT(oauth_token), - is_oauth_authorize = COUNT_DISTINCT(oauth_authorize) -by o365.audit.UserId, target_time_window, o365.audit.ApplicationId, o365.audit.ObjectId - -// filter for cases where the same appId is used by the same principal user to access the same object and from multiple addresses via OAuth2 token -| where unique_ips >= 2 and is_oauth_authorize > 0 and is_oauth_token > 0 + Esql.source_ip_count_distinct = count_distinct(source.ip), + Esql.source_ip_values = values(source.ip), + Esql.o365_audit_ApplicationId_values = values(o365.audit.ApplicationId), + Esql.source_as_organization_name_values = values(source.`as`.organization.name), + Esql.oauth_token_count_distinct = count_distinct(Esql.oauth_token_user_id_case), + Esql.oauth_authorize_count_distinct = count_distinct(Esql.oauth_authorize_user_id_case) + by + o365.audit.UserId, + Esql.time_window_date_trunc, + o365.audit.ApplicationId, + o365.audit.ObjectId +| keep + Esql.time_window_date_trunc, + Esql.source_ip_values, + Esql.source_ip_count_distinct, + Esql.o365_audit_ApplicationId_values, + Esql.source_as_organization_name_values, + Esql.oauth_token_count_distinct, + Esql.oauth_authorize_count_distinct +| where + Esql.source_ip_count_distinct >= 2 and + Esql.oauth_token_count_distinct > 0 and + Esql.oauth_authorize_count_distinct > 0 ''' diff --git a/rules/integrations/okta/credential_access_multiple_device_token_hashes_for_single_okta_session.toml b/rules/integrations/okta/credential_access_multiple_device_token_hashes_for_single_okta_session.toml index f1109e5cde7..080f35e67dd 100644 --- 
a/rules/integrations/okta/credential_access_multiple_device_token_hashes_for_single_okta_session.toml +++ b/rules/integrations/okta/credential_access_multiple_device_token_hashes_for_single_okta_session.toml @@ -2,7 +2,7 @@ creation_date = "2023/11/08" integration = ["okta"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -77,30 +77,31 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-okta* -| WHERE - event.dataset == "okta.system" - // ignore authentication events where session and device token hash change often - AND NOT event.action IN ( +from logs-okta* +| where + event.dataset == "okta.system" and + not event.action in ( "policy.evaluate_sign_on", "user.session.start", "user.authentication.sso" - ) - // ignore Okta system events and only allow registered users - AND ( - okta.actor.alternate_id != "system@okta.com" - AND okta.actor.alternate_id RLIKE "[^@\\s]+\\@[^@\\s]+" - ) - AND okta.authentication_context.external_session_id != "unknown" -| KEEP event.action, okta.actor.alternate_id, okta.authentication_context.external_session_id, okta.debug_context.debug_data.dt_hash -| STATS - dt_hash_counts = COUNT_DISTINCT(okta.debug_context.debug_data.dt_hash) BY - okta.actor.alternate_id, - okta.authentication_context.external_session_id -| WHERE - dt_hash_counts >= 2 -| SORT - dt_hash_counts DESC + ) and + okta.actor.alternate_id != "system@okta.com" and + okta.actor.alternate_id rlike "[^@\s]+\@[^@\s]+" and + okta.authentication_context.external_session_id != "unknown" +| keep + event.action, + okta.actor.alternate_id, + okta.authentication_context.external_session_id, + okta.debug_context.debug_data.dt_hash +| stats + Esql.okta_debug_context_debug_data_dt_hash_count_distinct = count_distinct(okta.debug_context.debug_data.dt_hash) + by + okta.actor.alternate_id, + okta.authentication_context.external_session_id +| where + Esql.okta_debug_context_debug_data_dt_hash_count_distinct >= 2 +| sort + Esql.okta_debug_context_debug_data_dt_hash_count_distinct desc ''' diff --git a/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_from_single_source.toml b/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_from_single_source.toml index dd686b8570f..3dd856caa1b 100644 --- a/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_from_single_source.toml +++ b/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_from_single_source.toml @@ -2,7 +2,7 @@ creation_date = "2024/06/17" integration = ["okta"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -90,19 +90,26 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-okta* -| WHERE - event.dataset == "okta.system" - AND (event.action == "user.session.start" OR event.action RLIKE "user\\.authentication(.*)") - AND okta.outcome.reason == "INVALID_CREDENTIALS" -| KEEP okta.client.ip, okta.actor.alternate_id, okta.actor.id, event.action, okta.outcome.reason -| STATS - source_auth_count = COUNT_DISTINCT(okta.actor.id) - BY okta.client.ip, okta.actor.alternate_id -| WHERE - source_auth_count > 5 -| SORT - source_auth_count DESC +from logs-okta* +| where + event.dataset == "okta.system" and + (event.action == "user.session.start" or event.action rlike "user\.authentication(.*)") and + okta.outcome.reason == "INVALID_CREDENTIALS" +| keep + okta.client.ip, + 
okta.actor.alternate_id, + okta.actor.id, + event.action, + okta.outcome.reason +| stats + Esql.okta_actor_id_count_distinct = count_distinct(okta.actor.id) + by + okta.client.ip, + okta.actor.alternate_id +| where + Esql.okta_actor_id_count_distinct > 5 +| sort + Esql.okta_actor_id_count_distinct desc ''' diff --git a/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_with_the_same_device_token_hash.toml b/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_with_the_same_device_token_hash.toml index 799578c7650..1bc4dfd5c00 100644 --- a/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_with_the_same_device_token_hash.toml +++ b/rules/integrations/okta/credential_access_okta_authentication_for_multiple_users_with_the_same_device_token_hash.toml @@ -2,7 +2,7 @@ creation_date = "2024/06/17" integration = ["okta"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -87,20 +87,27 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-okta* -| WHERE - event.dataset == "okta.system" - AND (event.action RLIKE "user\\.authentication(.*)" OR event.action == "user.session.start") - AND okta.debug_context.debug_data.dt_hash != "-" - AND okta.outcome.reason == "INVALID_CREDENTIALS" -| KEEP event.action, okta.debug_context.debug_data.dt_hash, okta.actor.id, okta.actor.alternate_id, okta.outcome.reason -| STATS - target_auth_count = COUNT_DISTINCT(okta.actor.id) - BY okta.debug_context.debug_data.dt_hash, okta.actor.alternate_id -| WHERE - target_auth_count > 20 -| SORT - target_auth_count DESC +from logs-okta* +| where + event.dataset == "okta.system" and + (event.action rlike "user\.authentication(.*)" or event.action == "user.session.start") and + okta.debug_context.debug_data.dt_hash != "-" and + okta.outcome.reason == "INVALID_CREDENTIALS" +| keep + event.action, + okta.debug_context.debug_data.dt_hash, + okta.actor.id, + okta.actor.alternate_id, + okta.outcome.reason +| stats + Esql.okta_actor_id_count_distinct = count_distinct(okta.actor.id) + by + okta.debug_context.debug_data.dt_hash, + okta.actor.alternate_id +| where + Esql.okta_actor_id_count_distinct > 20 +| sort + Esql.okta_actor_id_count_distinct desc ''' diff --git a/rules/integrations/okta/credential_access_okta_multiple_device_token_hashes_for_single_user.toml b/rules/integrations/okta/credential_access_okta_multiple_device_token_hashes_for_single_user.toml index a5f018981af..3d1a722f10f 100644 --- a/rules/integrations/okta/credential_access_okta_multiple_device_token_hashes_for_single_user.toml +++ b/rules/integrations/okta/credential_access_okta_multiple_device_token_hashes_for_single_user.toml @@ -2,7 +2,7 @@ creation_date = "2024/06/17" integration = ["okta"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -91,20 +91,28 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-okta* -| WHERE - event.dataset == "okta.system" - AND (event.action RLIKE "user\\.authentication(.*)" OR event.action == "user.session.start") - AND okta.debug_context.debug_data.request_uri == "/api/v1/authn" - AND okta.outcome.reason == "INVALID_CREDENTIALS" -| KEEP event.action, okta.debug_context.debug_data.dt_hash, okta.client.ip, okta.actor.alternate_id, okta.debug_context.debug_data.request_uri, okta.outcome.reason -| STATS - source_auth_count = COUNT_DISTINCT(okta.debug_context.debug_data.dt_hash) 
- BY okta.client.ip, okta.actor.alternate_id -| WHERE - source_auth_count >= 30 -| SORT - source_auth_count DESC +from logs-okta* +| where + event.dataset == "okta.system" and + (event.action rlike "user\.authentication(.*)" or event.action == "user.session.start") and + okta.debug_context.debug_data.request_uri == "/api/v1/authn" and + okta.outcome.reason == "INVALID_CREDENTIALS" +| keep + event.action, + okta.debug_context.debug_data.dt_hash, + okta.client.ip, + okta.actor.alternate_id, + okta.debug_context.debug_data.request_uri, + okta.outcome.reason +| stats + Esql.okta_debug_context_debug_data_dt_hash_count_distinct = count_distinct(okta.debug_context.debug_data.dt_hash) + by + okta.client.ip, + okta.actor.alternate_id +| where + Esql.okta_debug_context_debug_data_dt_hash_count_distinct >= 30 +| sort + Esql.okta_debug_context_debug_data_dt_hash_count_distinct desc ''' diff --git a/rules/integrations/okta/initial_access_okta_user_sessions_started_from_different_geolocations.toml b/rules/integrations/okta/initial_access_okta_user_sessions_started_from_different_geolocations.toml index d4487daff7c..c871fd86c0d 100644 --- a/rules/integrations/okta/initial_access_okta_user_sessions_started_from_different_geolocations.toml +++ b/rules/integrations/okta/initial_access_okta_user_sessions_started_from_different_geolocations.toml @@ -2,7 +2,7 @@ creation_date = "2023/11/18" integration = ["okta"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -16,8 +16,7 @@ interval = "15m" language = "esql" license = "Elastic License v2" name = "Okta User Sessions Started from Different Geolocations" -note = """ -## Triage and analysis +note = """## Triage and analysis ### Investigating Okta User Sessions Started from Different Geolocations @@ -78,18 +77,27 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-okta* -| WHERE - event.dataset == "okta.system" - AND (event.action RLIKE "user\\.authentication(.*)" OR event.action == "user.session.start") - AND okta.security_context.is_proxy != true and okta.actor.id != "unknown" - AND event.outcome == "success" -| KEEP event.action, okta.security_context.is_proxy, okta.actor.id, event.outcome, client.geo.country_name, okta.actor.alternate_id -| STATS - geo_auth_counts = COUNT_DISTINCT(client.geo.country_name) - BY okta.actor.id, okta.actor.alternate_id -| WHERE - geo_auth_counts >= 2 +from logs-okta* +| where + event.dataset == "okta.system" and + (event.action rlike "user\.authentication(.*)" or event.action == "user.session.start") and + okta.security_context.is_proxy != true and + okta.actor.id != "unknown" and + event.outcome == "success" +| keep + event.action, + okta.security_context.is_proxy, + okta.actor.id, + okta.actor.alternate_id, + event.outcome, + client.geo.country_name +| stats + Esql.client_geo_country_name_count_distinct = count_distinct(client.geo.country_name) + by okta.actor.id, okta.actor.alternate_id +| where + Esql.client_geo_country_name_count_distinct >= 2 +| sort + Esql.client_geo_country_name_count_distinct desc ''' diff --git a/rules/linux/command_and_control_frequent_egress_netcon_from_sus_executable.toml b/rules/linux/command_and_control_frequent_egress_netcon_from_sus_executable.toml index c62b74db0fa..18e98e3d18f 100644 --- a/rules/linux/command_and_control_frequent_egress_netcon_from_sus_executable.toml +++ b/rules/linux/command_and_control_frequent_egress_netcon_from_sus_executable.toml @@ -2,7 +2,7 @@ creation_date = "2025/02/20" 
integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -94,30 +94,50 @@ type = "esql" query = ''' from logs-endpoint.events.network-* -| keep @timestamp, host.os.type, event.type, event.action, process.name, process.executable, destination.ip, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "connection_attempted" and ( - ( - process.executable like "/tmp/*" or - process.executable like "/var/tmp/*" or - process.executable like "/dev/shm/*" - ) or - (process.name like ".*") -) and not ( - CIDR_MATCH( - destination.ip, "10.0.0.0/8", "127.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", "192.0.0.0/24", "192.0.0.0/29", "192.0.0.8/32", "192.0.0.9/32", - "192.0.0.10/32", "192.0.0.170/32", "192.0.0.171/32", "192.0.2.0/24", "192.31.196.0/24", "192.52.193.0/24", "192.168.0.0/16", "192.88.99.0/24", - "224.0.0.0/4", "100.64.0.0/10", "192.175.48.0/24","198.18.0.0/15", "198.51.100.0/24", "203.0.113.0/24", "224.0.0.0/4", "240.0.0.0/4", "::1", - "FE80::/10", "FF00::/8" - ) or - process.executable like "/nix/store/*" or - process.executable like "/tmp/newroot/*" or - process.executable like "/tmp/.mount*" or - process.executable like "/tmp/go-build*" - ) -| stats cc = count(), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.executable -| where agent_count == 1 and cc > 15 -| sort cc asc +| where + @timestamp > now() - 1h and + host.os.type == "linux" and + event.type == "start" and + event.action == "connection_attempted" and + ( + process.executable like "/tmp/*" or + process.executable like "/var/tmp/*" or + process.executable like "/dev/shm/*" or + process.name rlike ".*" + ) and not ( + cidr_match(destination.ip, + "10.0.0.0/8", "127.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", + "192.0.0.0/24", "192.0.0.29/32", "192.0.0.8/32", "192.0.0.9/32", + "192.0.0.10/32", "192.0.0.170/32", "192.0.0.171/32", "192.0.2.0/24", + "192.31.196.0/24", "192.52.193.0/24", "192.168.0.0/16", "192.88.99.0/24", + "224.0.0.0/4", "100.64.0.0/10", "192.175.48.0/24", "198.18.0.0/15", + "198.51.100.0/24", "203.0.113.0/24", "240.0.0.0/4", "::1", "FE80::/10", "FF00::/8" + ) or + process.executable like "/nix/store/*" or + process.executable like "/tmp/newroot/*" or + process.executable like "/tmp/.mount*" or + process.executable like "/tmp/go-build*" + ) +| keep + @timestamp, + host.os.type, + event.type, + event.action, + process.name, + process.executable, + destination.ip, + agent.id, + host.name +| stats + Esql.event_count = count(), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.executable +| where + Esql.agent_id_count_distinct == 1 and + Esql.event_count > 15 +| sort Esql.event_count asc | limit 100 ''' diff --git a/rules/linux/defense_evasion_base64_decoding_activity.toml b/rules/linux/defense_evasion_base64_decoding_activity.toml index 074373a3404..ffd98181da9 100644 --- a/rules/linux/defense_evasion_base64_decoding_activity.toml +++ b/rules/linux/defense_evasion_base64_decoding_activity.toml @@ -2,7 +2,7 @@ creation_date = "2025/02/21" integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -95,21 +95,63 @@ type = "esql" query = ''' from logs-endpoint.events.process-* -| keep @timestamp, host.os.type, 
event.type, event.action, process.name, process.args, process.command_line, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "exec" and ( - (process.name in ("base64", "base64plain", "base64url", "base64mime", "base64pem", "base32", "base16") and process.command_line like "*-*d*") or - (process.name == "openssl" and process.args == "enc" and process.args in ("-d", "-base64", "-a")) or - (process.name like "python*" and - (process.args == "base64" and process.args in ("-d", "-u", "-t")) or - (process.args == "-c" and process.command_line like "*base64*" and process.command_line like "*b64decode*") - ) or - (process.name like "perl*" and process.command_line like "*decode_base64*") or - (process.name like "ruby*" and process.args == "-e" and process.command_line like "*Base64.decode64*") -) -| stats cc = count(), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.name, process.command_line -| where agent_count == 1 and cc < 15 -| sort cc asc +| where + @timestamp > now() - 1h and + host.os.type == "linux" and + event.type == "start" and + event.action == "exec" and ( + ( + process.name in ("base64", "base64plain", "base64url", "base64mime", "base64pem", "base32", "base16") and + process.command_line like "*-*d*" + ) or + ( + process.name == "openssl" and + process.args == "enc" and + process.args in ("-d", "-base64", "-a") + ) or + ( + process.name like "python*" and ( + ( + process.args == "base64" and + process.args in ("-d", "-u", "-t") + ) or + ( + process.args == "-c" and + process.command_line like "*base64*" and + process.command_line like "*b64decode*" + ) + ) + ) or + ( + process.name like "perl*" and + process.command_line like "*decode_base64*" + ) or + ( + process.name like "ruby*" and + process.args == "-e" and + process.command_line like "*Base64.decode64*" + ) + ) +| keep + @timestamp, + host.os.type, + event.type, + event.action, + process.name, + process.args, + process.command_line, + agent.id, + host.name +| stats + Esql.event_count = count(), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.name, process.command_line +| where + Esql.agent_id_count_distinct == 1 and + Esql.event_count < 15 +| sort Esql.event_count asc | limit 100 ''' diff --git a/rules/linux/discovery_port_scanning_activity_from_compromised_host.toml b/rules/linux/discovery_port_scanning_activity_from_compromised_host.toml index b31aa56d377..e9867a5acb4 100644 --- a/rules/linux/discovery_port_scanning_activity_from_compromised_host.toml +++ b/rules/linux/discovery_port_scanning_activity_from_compromised_host.toml @@ -2,7 +2,7 @@ creation_date = "2025/03/04" integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -96,12 +96,32 @@ type = "esql" query = ''' from logs-endpoint.events.network-* -| keep @timestamp, host.os.type, event.type, event.action, destination.port, process.executable, destination.ip, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "connection_attempted" -| stats cc = count(), port_count = count_distinct(destination.port), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.executable, destination.ip -| where 
agent_count == 1 and port_count > 100 -| sort cc asc +| where + @timestamp > now() - 1h and + host.os.type == "linux" and + event.type == "start" and + event.action == "connection_attempted" +| keep + @timestamp, + host.os.type, + event.type, + event.action, + destination.port, + process.executable, + destination.ip, + agent.id, + host.name +| stats + Esql.event_count = count(), + Esql.destination_port_count_distinct = count_distinct(destination.port), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.executable, destination.ip +| where + Esql.agent_id_count_distinct == 1 and + Esql.destination_port_count_distinct > 100 +| sort Esql.event_count asc | limit 100 ''' diff --git a/rules/linux/discovery_subnet_scanning_activity_from_compromised_host.toml b/rules/linux/discovery_subnet_scanning_activity_from_compromised_host.toml index 4e5da31b19f..ae377495dcc 100644 --- a/rules/linux/discovery_subnet_scanning_activity_from_compromised_host.toml +++ b/rules/linux/discovery_subnet_scanning_activity_from_compromised_host.toml @@ -2,7 +2,7 @@ creation_date = "2025/03/04" integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -96,11 +96,22 @@ type = "esql" query = ''' from logs-endpoint.events.network-* | keep @timestamp, host.os.type, event.type, event.action, process.executable, destination.ip, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "connection_attempted" -| stats cc = count(), dest_count = count_distinct(destination.ip), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.executable -| where agent_count == 1 and dest_count > 250 -| sort cc asc +| where + @timestamp > now() - 1 hours and + host.os.type == "linux" and + event.type == "start" and + event.action == "connection_attempted" +| stats + Esql.event_count = count(), + Esql.destination_ip_count_distinct = count_distinct(destination.ip), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.executable +| where + Esql.agent_id_count_distinct == 1 and + Esql.destination_ip_count_distinct > 250 +| sort Esql.event_count asc | limit 100 ''' diff --git a/rules/linux/exfiltration_unusual_file_transfer_utility_launched.toml b/rules/linux/exfiltration_unusual_file_transfer_utility_launched.toml index 0ec94a90c4c..4fdf884f8a2 100644 --- a/rules/linux/exfiltration_unusual_file_transfer_utility_launched.toml +++ b/rules/linux/exfiltration_unusual_file_transfer_utility_launched.toml @@ -2,7 +2,7 @@ creation_date = "2025/02/21" integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -95,12 +95,22 @@ type = "esql" query = ''' from logs-endpoint.events.process-* | keep @timestamp, host.os.type, event.type, event.action, process.name, process.executable, process.parent.executable, process.command_line, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "exec" and - process.name in ("scp", "ftp", "sftp", "vsftpd", "sftp-server", "rsync") -| stats cc = count(), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = 
VALUES(agent.id) by process.executable, process.parent.executable, process.command_line -| where agent_count == 1 and cc < 5 -| sort cc asc +| where + @timestamp > now() - 1 hours and + host.os.type == "linux" and + event.type == "start" and + event.action == "exec" and + process.name in ("scp", "ftp", "sftp", "vsftpd", "sftp-server", "rsync") +| stats + Esql.event_count = count(), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.executable, process.parent.executable, process.command_line +| where + Esql.agent_id_count_distinct == 1 and + Esql.event_count < 5 +| sort Esql.event_count asc | limit 100 ''' diff --git a/rules/linux/impact_potential_bruteforce_malware_infection.toml b/rules/linux/impact_potential_bruteforce_malware_infection.toml index 859f717dfa9..71b42747d35 100644 --- a/rules/linux/impact_potential_bruteforce_malware_infection.toml +++ b/rules/linux/impact_potential_bruteforce_malware_infection.toml @@ -2,7 +2,7 @@ creation_date = "2025/02/20" integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -99,18 +99,31 @@ type = "esql" query = ''' from logs-endpoint.events.network-* | keep @timestamp, host.os.type, event.type, event.action, destination.port, process.executable, destination.ip, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "connection_attempted" and - destination.port in (22, 222, 2222, 10022, 2022, 2200, 62612, 8022) and not - CIDR_MATCH( - destination.ip, "10.0.0.0/8", "127.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", "192.0.0.0/24", "192.0.0.0/29", "192.0.0.8/32", "192.0.0.9/32", - "192.0.0.10/32", "192.0.0.170/32", "192.0.0.171/32", "192.0.2.0/24", "192.31.196.0/24", "192.52.193.0/24", "192.168.0.0/16", "192.88.99.0/24", - "224.0.0.0/4", "100.64.0.0/10", "192.175.48.0/24","198.18.0.0/15", "198.51.100.0/24", "203.0.113.0/24", "224.0.0.0/4", "240.0.0.0/4", "::1", - "FE80::/10", "FF00::/8" - ) -| stats cc = count(), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.executable, destination.port -| where agent_count == 1 and cc > 15 -| sort cc asc +| where + @timestamp > now() - 1 hours and + host.os.type == "linux" and + event.type == "start" and + event.action == "connection_attempted" and + destination.port in (22, 222, 2222, 10022, 2022, 2200, 62612, 8022) and + not cidr_match( + destination.ip, + "10.0.0.0/8", "127.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", + "192.0.0.0/24", "192.0.0.0/29", "192.0.0.8/32", "192.0.0.9/32", + "192.0.0.10/32", "192.0.0.170/32", "192.0.0.171/32", "192.0.2.0/24", + "192.31.196.0/24", "192.52.193.0/24", "192.168.0.0/16", "192.88.99.0/24", + "224.0.0.0/4", "100.64.0.0/10", "192.175.48.0/24", "198.18.0.0/15", + "198.51.100.0/24", "203.0.113.0/24", "240.0.0.0/4", "::1", "FE80::/10", "FF00::/8" + ) +| stats + Esql.event_count = count(), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.executable, destination.port +| where + Esql.agent_id_count_distinct == 1 and + Esql.event_count > 15 +| sort Esql.event_count asc | limit 100 ''' diff --git a/rules/linux/persistence_web_server_sus_child_spawned.toml b/rules/linux/persistence_web_server_sus_child_spawned.toml index 01e65c1d014..bf3b75fb4ab 
100644 --- a/rules/linux/persistence_web_server_sus_child_spawned.toml +++ b/rules/linux/persistence_web_server_sus_child_spawned.toml @@ -2,7 +2,7 @@ creation_date = "2025/03/04" integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -97,32 +97,55 @@ type = "esql" query = ''' from logs-endpoint.events.process-* -| keep @timestamp, host.os.type, event.type, event.action, process.parent.name, user.name, user.id, process.working_directory, process.name, process.executable, process.command_line, process.parent.executable, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "exec" and ( - process.parent.name in ( - "apache", "nginx", "apache2", "httpd", "lighttpd", "caddy", "node", "mongrel_rails", "java", "gunicorn", - "uwsgi", "openresty", "cherokee", "h2o", "resin", "puma", "unicorn", "traefik", "tornado", "hypercorn", - "daphne", "twistd", "yaws", "webfsd", "httpd.worker", "flask", "rails", "mongrel" - ) or - process.parent.name like "php-*" or - process.parent.name like "python*" or - process.parent.name like "ruby*" or - process.parent.name like "perl*" or - user.name in ( - "apache", "www-data", "httpd", "nginx", "lighttpd", "tomcat", "tomcat8", "tomcat9", "ftp", "ftpuser", "ftpd" - ) or - user.id in ("99", "33", "498", "48") or - process.working_directory like "/var/www/*" -) and -not ( - process.working_directory like "/home/*" or - process.working_directory like "/" or - process.parent.executable like "/vscode/vscode-server/*" -) -| stats cc = count(), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.executable, process.working_directory, process.parent.executable -| where agent_count == 1 and cc < 5 -| sort cc asc +| keep + @timestamp, + host.os.type, + event.type, + event.action, + process.parent.name, + user.name, + user.id, + process.working_directory, + process.name, + process.executable, + process.command_line, + process.parent.executable, + agent.id, + host.name +| where + @timestamp > now() - 1 hours and + host.os.type == "linux" and + event.type == "start" and + event.action == "exec" and ( + process.parent.name in ( + "apache", "nginx", "apache2", "httpd", "lighttpd", "caddy", "node", "mongrel_rails", "java", "gunicorn", + "uwsgi", "openresty", "cherokee", "h2o", "resin", "puma", "unicorn", "traefik", "tornado", "hypercorn", + "daphne", "twistd", "yaws", "webfsd", "httpd.worker", "flask", "rails", "mongrel" + ) or + process.parent.name like "php-*" or + process.parent.name like "python*" or + process.parent.name like "ruby*" or + process.parent.name like "perl*" or + user.name in ( + "apache", "www-data", "httpd", "nginx", "lighttpd", "tomcat", "tomcat8", "tomcat9", "ftp", "ftpuser", "ftpd" + ) or + user.id in ("99", "33", "498", "48") or + process.working_directory like "/var/www/*" + ) and not ( + process.working_directory like "/home/*" or + process.working_directory == "/" or + process.parent.executable like "/vscode/vscode-server/*" + ) +| stats + Esql.event_count = count(), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.executable, process.working_directory, process.parent.executable +| where + Esql.agent_id_count_distinct == 1 and + Esql.event_count < 5 +| sort Esql.event_count asc | limit 100 ''' diff --git 
a/rules/linux/persistence_web_server_sus_command_execution.toml b/rules/linux/persistence_web_server_sus_command_execution.toml index 2c796ccc7cf..e75c582d1d6 100644 --- a/rules/linux/persistence_web_server_sus_command_execution.toml +++ b/rules/linux/persistence_web_server_sus_command_execution.toml @@ -2,7 +2,7 @@ creation_date = "2025/03/04" integration = ["endpoint"] maturity = "production" -updated_date = "2025/07/10" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -104,35 +104,58 @@ type = "esql" query = ''' from logs-endpoint.events.process-* -| keep @timestamp, host.os.type, event.type, event.action, process.parent.name, user.name, user.id, process.working_directory, process.name, process.command_line, process.parent.executable, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "start" and event.action == "exec" and ( - process.parent.name in ( - "apache", "nginx", "apache2", "httpd", "lighttpd", "caddy", "node", "mongrel_rails", "java", "gunicorn", - "uwsgi", "openresty", "cherokee", "h2o", "resin", "puma", "unicorn", "traefik", "tornado", "hypercorn", - "daphne", "twistd", "yaws", "webfsd", "httpd.worker", "flask", "rails", "mongrel" - ) or - process.parent.name like "php-*" or - process.parent.name like "python*" or - process.parent.name like "ruby*" or - process.parent.name like "perl*" or - user.name in ( - "apache", "www-data", "httpd", "nginx", "lighttpd", "tomcat", "tomcat8", "tomcat9", "ftp", "ftpuser", "ftpd" - ) or - user.id in ("99", "33", "498", "48") or - process.working_directory like "/var/www/*" -) and - process.name in ("bash", "dash", "sh", "tcsh", "csh", "zsh", "ksh", "fish") and process.command_line like "* -c *" and - not ( - process.working_directory like "/home/*" or - process.working_directory like "/" or - process.working_directory like "/vscode/vscode-server/*" or - process.parent.executable like "/vscode/vscode-server/*" or - process.parent.executable == "/usr/bin/xfce4-terminal" -) -| stats cc = count(), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.command_line, process.working_directory, process.parent.executable -| where agent_count == 1 and cc < 5 -| sort cc asc +| keep + @timestamp, + host.os.type, + event.type, + event.action, + process.parent.name, + user.name, + user.id, + process.working_directory, + process.name, + process.command_line, + process.parent.executable, + agent.id, + host.name +| where + @timestamp > now() - 1 hours and + host.os.type == "linux" and + event.type == "start" and + event.action == "exec" and ( + process.parent.name in ( + "apache", "nginx", "apache2", "httpd", "lighttpd", "caddy", "node", "mongrel_rails", "java", "gunicorn", + "uwsgi", "openresty", "cherokee", "h2o", "resin", "puma", "unicorn", "traefik", "tornado", "hypercorn", + "daphne", "twistd", "yaws", "webfsd", "httpd.worker", "flask", "rails", "mongrel" + ) or + process.parent.name like "php-*" or + process.parent.name like "python*" or + process.parent.name like "ruby*" or + process.parent.name like "perl*" or + user.name in ( + "apache", "www-data", "httpd", "nginx", "lighttpd", "tomcat", "tomcat8", "tomcat9", "ftp", "ftpuser", "ftpd" + ) or + user.id in ("99", "33", "498", "48") or + process.working_directory like "/var/www/*" + ) and + process.name in ("bash", "dash", "sh", "tcsh", "csh", "zsh", "ksh", "fish") and + process.command_line like "* -c *" and not ( + process.working_directory like "/home/*" or + 
process.working_directory == "/" or + process.working_directory like "/vscode/vscode-server/*" or + process.parent.executable like "/vscode/vscode-server/*" or + process.parent.executable == "/usr/bin/xfce4-terminal" + ) +| stats + Esql.event_count = count(), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.command_line, process.working_directory, process.parent.executable +| where + Esql.agent_id_count_distinct == 1 and + Esql.event_count < 5 +| sort Esql.event_count asc | limit 100 ''' diff --git a/rules/windows/credential_access_rare_webdav_destination.toml b/rules/windows/credential_access_rare_webdav_destination.toml index e7bc190e789..072af969828 100644 --- a/rules/windows/credential_access_rare_webdav_destination.toml +++ b/rules/windows/credential_access_rare_webdav_destination.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/28" integration = ["endpoint", "system", "windows", "m365_defender", "crowdstrike"] maturity = "production" -updated_date = "2025/07/02" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -54,15 +54,32 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-* -| where @timestamp > NOW() - 8 hours -| WHERE event.category == "process" and event.type == "start" and process.name == "rundll32.exe" and process.command_line like "*DavSetCookie*" +from logs-* +| where + @timestamp > now() - 8 hours and + event.category == "process" and + event.type == "start" and + process.name == "rundll32.exe" and + process.command_line like "*DavSetCookie*" | keep host.id, process.command_line, user.name -| grok process.command_line """(?<target>DavSetCookie .* http)""" -| eval webdav_target = REPLACE(target, "(DavSetCookie | http)", "") -| where webdav_target is not null and webdav_target rlike """(([a-zA-Z0-9-]+\.)+[a-zA-Z]{2,3}(@SSL.*)*|(\d{1,3}\.){3}\d{1,3})""" and not webdav_target in ("www.google.com@SSL", "www.elastic.co@SSL") and not webdav_target rlike """(10\.(\d{1,3}\.){2}\d{1,3}|172\.(1[6-9]|2\d|3[0-1])\.(\d{1,3}\.)\d{1,3}|192\.168\.(\d{1,3}\.)\d{1,3})""" -| stats total = count(*), unique_count_host = count_distinct(host.id), hosts = VALUES(host.id), users = VALUES(user.name) by webdav_target -| where unique_count_host == 1 and total <= 3 +| grok + process.command_line """(?<Esql.server_webdav_cookie>DavSetCookie .* http)""" +| eval + Esql.server_webdav_cookie_replace = replace(Esql.server_webdav_cookie, "(DavSetCookie | http)", "") +| where + Esql.server_webdav_cookie_replace is not null and + Esql.server_webdav_cookie_replace rlike """(([a-zA-Z0-9-]+\.)+[a-zA-Z]{2,3}(@SSL.*)*|(\d{1,3}\.){3}\d{1,3})""" and + not Esql.server_webdav_cookie_replace in ("www.google.com@SSL", "www.elastic.co@SSL") and + not Esql.server_webdav_cookie_replace rlike """(10\.(\d{1,3}\.){2}\d{1,3}|172\.(1[6-9]|2\d|3[0-1])\.(\d{1,3}\.)\d{1,3}|192\.168\.(\d{1,3}\.)\d{1,3})""" +| stats + Esql.event_count = count(*), + Esql.host_id_count_distinct = count_distinct(host.id), + Esql.host_id_values = values(host.id), + Esql.user_name_values = values(user.name) + by Esql.server_webdav_cookie_replace +| where + Esql.host_id_count_distinct == 1 and + Esql.event_count <= 3 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_backtick.toml b/rules/windows/defense_evasion_posh_obfuscation_backtick.toml index 307279c76f5..8a4a8498d40 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_backtick.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_backtick.toml @@ -2,7 +2,7 @@
creation_date = "2025/04/15" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -90,28 +90,43 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" and powershell.file.script_block_text LIKE "*`*" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" and powershell.file.script_block_text like "*`*" -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """[A-Za-z0-9_-]`(?![rntb]|\r|\n|\d)[A-Za-z0-9_-]""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.name, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 10 - -// Filter FPs, and due to the behavior of the LIKE operator, allow null values -| WHERE (file.name NOT LIKE "TSS_*.psm1" or file.name IS NULL) - -| WHERE - // VSCode Shell integration - NOT powershell.file.script_block_text LIKE "*$([char]0x1b)]633*" +| eval Esql.script_block_tmp = replace(powershell.file.script_block_text, """[A-Za-z0-9_-]`(?![rntb]|\r|\n|\d)[A-Za-z0-9_-]""", "🔥") + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.name, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least 10 times +| where Esql.script_block_pattern_count >= 10 + +// Filter FPs, and due to the behavior of the like operator, allow null values +| where (file.name not like "TSS_*.psm1" or file.name is null) + +// VSCode Shell integration +| where not powershell.file.script_block_text like "*$([char]0x1b)]633*" ''' + [[rule.threat]] framework = "MITRE ATT&CK" [[rule.threat.technique]] diff --git a/rules/windows/defense_evasion_posh_obfuscation_backtick_var.toml b/rules/windows/defense_evasion_posh_obfuscation_backtick_var.toml index 0b59c441508..63ba68fd0e1 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_backtick_var.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_backtick_var.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/16" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -84,23 +84,39 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM 
logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Look for scripts with more than 500 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 500 +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 500 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """\$\{(\w++`){2,}\w++\}""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, file.name, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 1 +| eval Esql.script_block_tmp = replace(powershell.file.script_block_text, """\$\{(\w++`){2,}\w++\}""", "🔥") + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_length, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + file.name, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least once +| where Esql.script_block_pattern_count >= 1 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_char_arrays.toml b/rules/windows/defense_evasion_posh_obfuscation_char_arrays.toml index 5cdc9ed0231..81bee0a4af1 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_char_arrays.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_char_arrays.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/14" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -84,22 +84,40 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" // Filter for scripts that contain the "char" keyword using MATCH, boosts the query performance -| WHERE powershell.file.script_block_text : "char" +| where powershell.file.script_block_text : "char" -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting 
them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """(char\[\]\]\(\d+,\d+[^)]+|(\s?\(\[char\]\d+\s?\)\+){2,})""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 1 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + """(char\[\]\]\(\d+,\d+[^)]+|(\s?\(\[char\]\d+\s?\)\+){2,})""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least once +| where Esql.script_block_pattern_count >= 1 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_concat_dynamic.toml b/rules/windows/defense_evasion_posh_obfuscation_concat_dynamic.toml index 2f8107596b0..3f7653156b8 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_concat_dynamic.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_concat_dynamic.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/15" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -83,19 +83,37 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" and powershell.file.script_block_text LIKE "*+*" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" and powershell.file.script_block_text like "*+*" -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """[.&]\(\s*(['"][A-Za-z0-9.-]+['"]\s*\+\s*)+['"][A-Za-z0-9.-]+['"]\s*\)""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 1 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + 
"""[.&]\(\s*(['"][A-Za-z0-9.-]+['"]\s*\+\s*)+['"][A-Za-z0-9.-]+['"]\s*\)""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least once +| where Esql.script_block_pattern_count >= 1 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_high_number_proportion.toml b/rules/windows/defense_evasion_posh_obfuscation_high_number_proportion.toml index e1aa44cc221..c6f0ec60d18 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_high_number_proportion.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_high_number_proportion.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/16" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -83,30 +83,45 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Look for scripts with more than 1000 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 1000 +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 1000 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """[0-9]""", "🔥") - -// Count the occurrence of numbers and their proportion to the total chars in the script -| EVAL special_count = script_len - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) -| EVAL proportion = special_count::double / script_len::double - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP special_count, script_len, proportion, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id - -// Filter for scripts with a 30%+ proportion of numbers -| WHERE proportion > 0.30 - -// Exclude noisy patterns -| WHERE - NOT powershell.file.script_block_text RLIKE """.*\"[a-fA-F0-9]{64}\"\,.*""" +| eval Esql.script_block_tmp = replace(powershell.file.script_block_text, """[0-9]""", "🔥") + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = Esql.script_block_length - length(replace(Esql.script_block_tmp, "🔥", "")) + +// Calculate the ratio of special characters to total length +| 
eval Esql.script_block_ratio = Esql.script_block_pattern_count::double / Esql.script_block_length::double + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_ratio, + Esql.script_block_length, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts with high numeric character ratio +| where Esql.script_block_ratio > 0.30 + +// Exclude noisy patterns such as 64-character hash lists +| where not powershell.file.script_block_text rlike """.*\"[a-fA-F0-9]{64}\"\,.*""" ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_iex_env_vars_reconstruction.toml b/rules/windows/defense_evasion_posh_obfuscation_iex_env_vars_reconstruction.toml index 81549f41ef6..dcf423bacba 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_iex_env_vars_reconstruction.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_iex_env_vars_reconstruction.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/16" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -83,23 +83,42 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Look for scripts with more than 500 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 500 +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 500 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """(?i)(\$(?:\w+|\w+\:\w+)\[\d++\]\+\$(?:\w+|\w+\:\w+)\[\d++\]\+['"]x['"]|\$(?:\w+\:\w+)\[\d++,\d++,\d++\]|\.name\[\d++,\d++,\d++\])""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 1 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + """(?i)(\$(?:\w+|\w+\:\w+)\[\d++\]\+\$(?:\w+|\w+\:\w+)\[\d++\]\+['"]x['"]|\$(?:\w+\:\w+)\[\d++,\d++,\d++\]|\.name\[\d++,\d++,\d++\])""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is 
not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_length, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least once +| where Esql.script_block_pattern_count >= 1 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_iex_string_reconstruction.toml b/rules/windows/defense_evasion_posh_obfuscation_iex_string_reconstruction.toml index 6c82e783d2f..5d5aaeae975 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_iex_string_reconstruction.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_iex_string_reconstruction.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/16" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -84,23 +84,42 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Look for scripts with more than 500 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 500 +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 500 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """(?i)['"]['"].(Insert|Normalize|Chars|SubString|Remove|LastIndexOfAny|LastIndexOf|IsNormalized|IndexOfAny|IndexOf)[^\[]+\[\d+,\d+,\d+\]""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 1 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + """(?i)['"]['"].(Insert|Normalize|Chars|substring|Remove|LastIndexOfAny|LastIndexOf|IsNormalized|IndexOfAny|IndexOf)[^\[]+\[\d+,\d+,\d+\]""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_length, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// 
Filter for scripts that match the pattern at least once +| where Esql.script_block_pattern_count >= 1 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_index_reversal.toml b/rules/windows/defense_evasion_posh_obfuscation_index_reversal.toml index 98b4ca3d450..666e6990313 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_index_reversal.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_index_reversal.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/14" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -85,26 +85,45 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Look for scripts with more than 500 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 500 +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 500 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """\$\w+\[\-\s?1\.\.""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 1 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + """\$\w+\[\-\s?1\.\.""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_length, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least once +| where Esql.script_block_pattern_count >= 1 // FP Patterns -| WHERE NOT powershell.file.script_block_text LIKE "*GENESIS-5654*" +| where not powershell.file.script_block_text like "*GENESIS-5654*" ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_reverse_keyword.toml b/rules/windows/defense_evasion_posh_obfuscation_reverse_keyword.toml index 1b91b3f7c92..eb83ae996f0 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_reverse_keyword.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_reverse_keyword.toml @@ -2,7 +2,7 
@@ creation_date = "2025/04/14" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -82,22 +82,39 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Filter for scripts that contains these keywords using MATCH, boosts the query performance, match will ignore the | and look for the individual words -| WHERE powershell.file.script_block_text : "rahc|metsys|stekcos|tcejboimw|ecalper|ecnerferpe|noitcennoc|nioj|eman|vne|gnirts|tcejbo-wen|_23niw|noisserpxe|ekovni|daolnwod" +// Filter for scripts that contain these keywords using MATCH, which boosts query performance; +// MATCH will ignore the | and look for the individual words +| where powershell.file.script_block_text : "rahc|metsys|stekcos|tcejboimw|ecalper|ecnerferpe|noitcennoc|nioj|eman|vne|gnirts|tcejbo-wen|_23niw|noisserpxe|ekovni|daolnwod" -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """(?i)(rahc|metsys|stekcos|tcejboimw|ecalper|ecnerferpe|noitcennoc|nioj|eman\.|:vne|gnirts|tcejbo-wen|_23niw|noisserpxe|ekovni|daolnwod)""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, agent.id -| WHERE count >= 2 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + """(?i)(rahc|metsys|stekcos|tcejboimw|ecalper|ecnerferpe|noitcennoc|nioj|eman\.|:vne|gnirts|tcejbo-wen|_23niw|noisserpxe|ekovni|daolnwod)""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + agent.id + +// Filter for scripts that match the pattern at least twice +| where Esql.script_block_pattern_count >= 2 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_string_concat.toml b/rules/windows/defense_evasion_posh_obfuscation_string_concat.toml index eb11f0bd646..390b3bff774 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_string_concat.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_string_concat.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/14" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -83,23 +83,42 @@
timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Look for scripts with more than 500 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 500 +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 500 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """['"][A-Za-z0-9.]+['"](\s?\+\s?['"][A-Za-z0-9.,\-\s]+['"]){2,}""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count >= 2 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + """['"][A-Za-z0-9.]+['"](\s?\+\s?['"][A-Za-z0-9.,\-\s]+['"]){2,}""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_length, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least twice +| where Esql.script_block_pattern_count >= 2 ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_string_format.toml b/rules/windows/defense_evasion_posh_obfuscation_string_format.toml index 913377a3d04..61edfb1081f 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_string_format.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_string_format.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/03" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -82,37 +82,57 @@ timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" and powershell.file.script_block_text like "*{0}*" -// Look for scripts with more than 500 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 500 -| WHERE 
powershell.file.script_block_text LIKE "*{0}*" +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 500 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """((\{\d+\}){2,}["']\s?-f|::Format[^\{]+(\{\d+\}){2,})""", "🔥") - -// Count how many patterns were detected by calculating the number of 🔥 characters inserted -| EVAL count = LENGTH(replaced_with_fire) - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP count, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id -| WHERE count > 3 +| eval Esql.script_block_tmp = replace( + powershell.file.script_block_text, + """((\{\d+\}){2,}["']\s?-f|::Format[^\{]+(\{\d+\}){2,})""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", "")) + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_length, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + file.name, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts that match the pattern at least four times +| where Esql.script_block_pattern_count >= 4 // Exclude Noisy Patterns // Icinga Framework -| WHERE (file.name NOT LIKE "framework_cache.psm1" or file.name IS NULL) -| WHERE NOT +| where (file.name not like "framework_cache.psm1" or file.name is null) + +| where not // https://wtfbins.wtf/17 ( - (powershell.file.script_block_text LIKE "*sentinelbreakpoints*" OR - powershell.file.script_block_text LIKE "*:::::\\\\windows\\\\sentinel*") - AND - (powershell.file.script_block_text LIKE "*$local:Bypassed*" OR - powershell.file.script_block_text LIKE "*origPSExecutionPolicyPreference*") + (powershell.file.script_block_text like "*sentinelbreakpoints*" or + powershell.file.script_block_text like "*:::::\\\\windows\\\\sentinel*") + and + (powershell.file.script_block_text like "*$local:Bypassed*" or + powershell.file.script_block_text like "*origPSExecutionPolicyPreference*") ) ''' diff --git a/rules/windows/defense_evasion_posh_obfuscation_whitespace_special_proportion.toml b/rules/windows/defense_evasion_posh_obfuscation_whitespace_special_proportion.toml index ee8e7621bae..1598e42b6ba 100644 --- a/rules/windows/defense_evasion_posh_obfuscation_whitespace_special_proportion.toml +++ b/rules/windows/defense_evasion_posh_obfuscation_whitespace_special_proportion.toml @@ -2,7 +2,7 @@ creation_date = "2025/04/16" integration = ["windows"] maturity = "production" -updated_date = "2025/07/07" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -83,29 +83,49 @@ 
timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Replace repeated spaces used for formatting after a new line with a single space to reduce FPs -| EVAL dedup_space_script_block = REPLACE(powershell.file.script_block_text, """\n\s+""", "\n ") +// replace repeated spaces used for formatting after a new line with a single space to reduce FPs +| eval Esql.script_block_tmp = replace(powershell.file.script_block_text, """\n\s+""", "\n ") -// Look for scripts with more than 1000 chars that contain a related keyword -| EVAL script_len = LENGTH(dedup_space_script_block) -| WHERE script_len > 1000 +// Look for scripts with more than 1000 chars +| eval Esql.script_block_length = length(Esql.script_block_tmp) +| where Esql.script_block_length > 1000 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 -| EVAL replaced_with_fire = REPLACE(dedup_space_script_block, """[\s\$\{\}\+\@\=\(\)\^\\\"~\[\]\?\.]""", "🔥") - -// Count the occurrence of numbers and their proportion to the total chars in the script -| EVAL special_count = script_len - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) -| EVAL proportion = special_count::double / script_len::double - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP special_count, script_len, proportion, dedup_space_script_block, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id - -// Filter for scripts with a 75%+ proportion of numbers -| WHERE proportion > 0.75 +| eval Esql.script_block_tmp = replace( + Esql.script_block_tmp, + """[\s\$\{\}\+\@\=\(\)\^\\\"~\[\]\?\.]""", + "🔥" +) + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_count = Esql.script_block_length - length(replace(Esql.script_block_tmp, "🔥", "")) + +// Calculate the ratio of special characters to total length +| eval Esql.script_block_ratio = Esql.script_block_count::double / Esql.script_block_length::double + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_count, + Esql.script_block_length, + Esql.script_block_ratio, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts with high whitespace and special character ratio +| where Esql.script_block_ratio > 0.75 ''' diff --git a/rules/windows/discovery_command_system_account.toml b/rules/windows/discovery_command_system_account.toml index 03d5ecdf531..b2a19004a01 100644 --- a/rules/windows/discovery_command_system_account.toml +++ b/rules/windows/discovery_command_system_account.toml @@ -2,7 +2,7 @@ creation_date = "2020/03/18" integration = ["endpoint", "windows"] maturity = "production" -updated_date = "2025/05/20" +updated_date = "2025/07/24" [rule] author = ["Elastic"] 
@@ -77,19 +77,19 @@ process where host.os.type == "windows" and event.type == "start" and ( process.name : "net1.exe" and not process.parent.name : "net.exe" and not process.args : ("start", "stop", "/active:*") ) - ) and -process.parent.executable != null and -not (process.name : "net1.exe" and process.working_directory : "C:\\ProgramData\\Microsoft\\Windows Defender Advanced Threat Protection\\Downloads\\") and -not process.parent.executable : - ("C:\\Program Files\\Microsoft Monitoring Agent\\Agent\\MonitoringHost.exe", - "C:\\Program Files\\Dell\\SupportAssistAgent\\SRE\\SRE.exe", - "C:\\Program Files\\Obkio Agent\\main.dist\\ObkioAgentSoftware.exe", - "C:\\Windows\\Temp\\WinGet\\defaultState\\PostgreSQL.PostgreSQL*\\postgresql-*-windows-x64.exe", - "C:\\Program Files\\Obkio Agent\\main.dist\\ObkioAgentSoftware.exe", - "C:\\Program Files (x86)\\SolarWinds\\Agent\\Plugins\\JobEngine\\SWJobEngineWorker2.exe") and -not (process.parent.executable : "C:\\Windows\\Sys?????\\WindowsPowerShell\\v1.0\\powershell.exe" and - process.parent.args : ("C:\\Program Files (x86)\\Microsoft Intune Management Extension\\*.ps1", - "Agent\\Modules\\AdHealthConfiguration\\AdHealthConfiguration.psd1'")) and + ) and +process.parent.executable != null and +not (process.name : "net1.exe" and process.working_directory : "C:\\ProgramData\\Microsoft\\Windows Defender Advanced Threat Protection\\Downloads\\") and +not process.parent.executable : + ("C:\\Program Files\\Microsoft Monitoring Agent\\Agent\\MonitoringHost.exe", + "C:\\Program Files\\Dell\\SupportAssistAgent\\SRE\\SRE.exe", + "C:\\Program Files\\Obkio Agent\\main.dist\\ObkioAgentSoftware.exe", + "C:\\Windows\\Temp\\WinGet\\defaultState\\PostgreSQL.PostgreSQL*\\postgresql-*-windows-x64.exe", + "C:\\Program Files\\Obkio Agent\\main.dist\\ObkioAgentSoftware.exe", + "C:\\Program Files (x86)\\SolarWinds\\Agent\\Plugins\\JobEngine\\SWJobEngineWorker2.exe") and +not (process.parent.executable : "C:\\Windows\\Sys?????\\WindowsPowerShell\\v1.0\\powershell.exe" and + process.parent.args : ("C:\\Program Files (x86)\\Microsoft Intune Management Extension\\*.ps1", + "Agent\\Modules\\AdHealthConfiguration\\AdHealthConfiguration.psd1'")) and not (process.parent.name : "cmd.exe" and process.working_directory : "C:\\Program Files\\Infraon Corp\\SecuraAgent\\") ''' diff --git a/rules/windows/execution_posh_malicious_script_agg.toml b/rules/windows/execution_posh_malicious_script_agg.toml index 56c3b796b68..ee48f94d869 100644 --- a/rules/windows/execution_posh_malicious_script_agg.toml +++ b/rules/windows/execution_posh_malicious_script_agg.toml @@ -1,7 +1,7 @@ [metadata] creation_date = "2025/04/16" maturity = "production" -updated_date = "2025/04/16" +updated_date = "2025/07/16" [transform] [[transform.osquery]] @@ -28,11 +28,10 @@ services.path FROM services JOIN authenticode ON services.path = authenticode.pa authenticode.path JOIN hash ON services.path = hash.path WHERE authenticode.result != 'trusted' """ + [rule] author = ["Elastic"] -description = """ -Identifies PowerShell script blocks associated with multiple distinct detections, indicating likely malicious behavior.
-""" +description = "Identifies PowerShell script blocks associated with multiple distinct detections, indicating likely malicious behavior.\n" from = "now-9m" language = "esql" license = "Elastic License v2" @@ -101,26 +100,33 @@ tags = [ "Use Case: Threat Detection", "Tactic: Execution", "Rule Type: Higher-Order Rule", - "Resources: Investigation Guide" + "Resources: Investigation Guide", ] timestamp_override = "event.ingested" type = "esql" query = ''' -FROM .alerts-security.* metadata _id +from .alerts-security.* metadata _id // Filter for PowerShell related alerts -| WHERE kibana.alert.rule.name LIKE "*PowerShell*" +| where kibana.alert.rule.name like "*PowerShell*" -// As alerts don't have non-ECS fields, parse the script block ID using GROK -| GROK message "ScriptBlock ID: (?<powershell.file.script_block_id>.+)" -| WHERE powershell.file.script_block_id IS NOT NULL +// as alerts don't have non-ECS fields, parse the script block ID using grok +| grok message "ScriptBlock ID: (?<Esql.script_block_id>.+)" +| where Esql.script_block_id is not null -| KEEP kibana.alert.rule.name, powershell.file.script_block_id, _id +// keep relevant fields for further processing +| keep kibana.alert.rule.name, Esql.script_block_id, _id -// Count distinct alerts and filter for matches above the threshold -| STATS distinct_alerts = COUNT_DISTINCT(kibana.alert.rule.name), rules_triggered = VALUES(kibana.alert.rule.name), alert_ids = VALUES(_id) BY powershell.file.script_block_id -| WHERE distinct_alerts >= 5 +// count distinct alerts and filter for matches above the threshold
| stats + Esql.kibana_alert_rule_name_count_distinct = count_distinct(kibana.alert.rule.name), + Esql.kibana_alert_rule_name_values = values(kibana.alert.rule.name), + Esql._id_values = values(_id) + by Esql.script_block_id + +// Apply detection threshold +| where Esql.kibana_alert_rule_name_count_distinct >= 5 ''' @@ -142,4 +148,3 @@ id = "TA0002" name = "Execution" reference = "https://attack.mitre.org/tactics/TA0002/" - diff --git a/rules_building_block/defense_evasion_posh_obfuscation_proportion_special_chars.toml b/rules_building_block/defense_evasion_posh_obfuscation_proportion_special_chars.toml index 689ebc37b84..930c6d16cfd 100644 --- a/rules_building_block/defense_evasion_posh_obfuscation_proportion_special_chars.toml +++ b/rules_building_block/defense_evasion_posh_obfuscation_proportion_special_chars.toml @@ -3,7 +3,7 @@ bypass_bbr_timing = true creation_date = "2025/04/16" integration = ["windows"] maturity = "production" -updated_date = "2025/04/16" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -43,33 +43,49 @@ tags = [ "Use Case: Threat Detection", "Tactic: Defense Evasion", "Data Source: PowerShell Logs", - "Rule Type: BBR" + "Rule Type: BBR", ] timestamp_override = "event.ingested" type = "esql" query = ''' -FROM logs-windows.powershell_operational* metadata _id, _version, _index -| WHERE event.code == "4104" +from logs-windows.powershell_operational* metadata _id, _version, _index +| where event.code == "4104" -// Look for scripts with more than 1000 chars that contain a related keyword -| EVAL script_len = LENGTH(powershell.file.script_block_text) -| WHERE script_len > 1000 +// Filter out smaller scripts that are unlikely to implement obfuscation using the patterns we are looking for +| eval Esql.script_block_length = length(powershell.file.script_block_text) +| where Esql.script_block_length > 1000 -// Replace string format expressions with 🔥 to enable counting the occurrence of the patterns we are looking for +// replace the patterns we are looking for
with the 🔥 emoji to enable counting them // The emoji is used because it's unlikely to appear in scripts and has a consistent character length of 1 // Excludes spaces, #, = and - as they are heavily used in scripts for formatting -| EVAL replaced_with_fire = REPLACE(powershell.file.script_block_text, """[^0-9A-Za-z\s#=-]""", "🔥") - -// Count the occurrence of special chars and their proportion to the total chars in the script -| EVAL special_count = script_len - LENGTH(REPLACE(replaced_with_fire, "🔥", "")) -| EVAL proportion = special_count::double / script_len::double - -// Keep the fields relevant to the query, although this is not needed as the alert is populated using _id -| KEEP special_count, script_len, proportion, replaced_with_fire, powershell.file.script_block_text, powershell.file.script_block_id, file.path, powershell.sequence, powershell.total, _id, _index, host.name, agent.id, user.id - -// Filter for scripts with a 25%+ proportion of special chars -| WHERE proportion > 0.25 +| eval Esql.script_block_tmp = replace(powershell.file.script_block_text, """[^0-9A-Za-z\s#=-]""", "🔥") + +// count how many patterns were detected by calculating the number of 🔥 characters inserted +| eval Esql.script_block_pattern_count = Esql.script_block_length - length(replace(Esql.script_block_tmp, "🔥", "")) + +// Calculate the ratio of special characters to total length +| eval Esql.script_block_ratio = Esql.script_block_pattern_count::double / Esql.script_block_length::double + +// keep the fields relevant to the query, although this is not needed as the alert is populated using _id +| keep + Esql.script_block_pattern_count, + Esql.script_block_length, + Esql.script_block_ratio, + Esql.script_block_tmp, + powershell.file.script_block_text, + powershell.file.script_block_id, + file.path, + powershell.sequence, + powershell.total, + _id, + _index, + host.name, + agent.id, + user.id + +// Filter for scripts with high special character ratio +| where Esql.script_block_ratio > 0.25 ''' diff --git a/rules_building_block/persistence_web_server_sus_file_creation.toml b/rules_building_block/persistence_web_server_sus_file_creation.toml index 9aaeb1a79ee..231104566c4 100644 --- a/rules_building_block/persistence_web_server_sus_file_creation.toml +++ b/rules_building_block/persistence_web_server_sus_file_creation.toml @@ -3,7 +3,7 @@ bypass_bbr_timing = true creation_date = "2025/03/06" integration = ["endpoint"] maturity = "production" -updated_date = "2025/04/03" +updated_date = "2025/07/16" [rule] author = ["Elastic"] @@ -62,27 +62,48 @@ type = "esql" query = ''' from logs-endpoint.events.file-* -| keep @timestamp, host.os.type, event.type, event.action, user.name, user.id, process.name, process.executable, file.path, agent.id, host.name -| where @timestamp > now() - 1 hours -| where host.os.type == "linux" and event.type == "change" and event.action in ("rename", "creation") and ( - user.name in ( - "apache", "www-data", "httpd", "nginx", "lighttpd", "tomcat", "tomcat8", "tomcat9", "ftp", "ftpuser", "ftpd" - ) or - user.id in ("99", "33", "498", "48") - ) and ( - process.name in ( - "apache", "nginx", "apache2", "httpd", "lighttpd", "caddy", "node", "mongrel_rails", "java", "gunicorn", - "uwsgi", "openresty", "cherokee", "h2o", "resin", "puma", "unicorn", "traefik", "tornado", "hypercorn", - "daphne", "twistd", "yaws", "webfsd", "httpd.worker", "flask", "rails", "mongrel" - ) or - process.name like "php-*" or - process.name like "python*" or - process.name like "ruby*" or - process.name like "perl*" - ) 
-| stats cc = count(), agent_count = count_distinct(agent.id), host.name = VALUES(host.name), agent.id = VALUES(agent.id) by process.executable, file.path -| where agent_count == 1 and cc < 5 -| sort cc asc +| keep + @timestamp, + host.os.type, + event.type, + event.action, + user.name, + user.id, + process.name, + process.executable, + file.path, + agent.id, + host.name +| where + @timestamp > now() - 1 hours and + host.os.type == "linux" and + event.type == "change" and + event.action in ("rename", "creation") and ( + user.name in ( + "apache", "www-data", "httpd", "nginx", "lighttpd", "tomcat", "tomcat8", "tomcat9", "ftp", "ftpuser", "ftpd" + ) or + user.id in ("99", "33", "498", "48") + ) and ( + process.name in ( + "apache", "nginx", "apache2", "httpd", "lighttpd", "caddy", "node", "mongrel_rails", "java", "gunicorn", + "uwsgi", "openresty", "cherokee", "h2o", "resin", "puma", "unicorn", "traefik", "tornado", "hypercorn", + "daphne", "twistd", "yaws", "webfsd", "httpd.worker", "flask", "rails", "mongrel" + ) or + process.name like "php-*" or + process.name like "python*" or + process.name like "ruby*" or + process.name like "perl*" + ) +| stats + Esql.event_count = count(), + Esql.agent_id_count_distinct = count_distinct(agent.id), + Esql.host_name_values = values(host.name), + Esql.agent_id_values = values(agent.id) + by process.executable, file.path +| where + Esql.agent_id_count_distinct == 1 and + Esql.event_count < 5 +| sort Esql.event_count asc | limit 100 '''
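Most of the PowerShell rules touched above share the same replace-and-count idiom, so a minimal, standalone ES|QL sketch of that idiom follows for reference. It assumes the same index pattern and ECS fields as the rules above; the regex is a simplified placeholder (not one of the rule patterns), the Esql.* field names are illustrative only, and the threshold is arbitrary.
from logs-windows.powershell_operational* metadata _id
| where event.code == "4104"
// substitute every match of the target pattern with a single 🔥 character
| eval Esql.script_block_tmp = replace(powershell.file.script_block_text, """\{\d+\}""", "🔥")
// every 🔥 counts as one character, so stripping them out shortens the string by exactly the number of matches
| eval Esql.script_block_pattern_count = length(Esql.script_block_tmp) - length(replace(Esql.script_block_tmp, "🔥", ""))
// keep only script blocks where the pattern occurs often enough to be interesting
| where Esql.script_block_pattern_count >= 2
| keep powershell.file.script_block_text, Esql.script_block_pattern_count
The same trick backs the ratio-based rules as well: divide the count by the script block length to get the proportion of matching characters.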