5 changes: 3 additions & 2 deletions packages/github/_dev/build/docs/README.md
@@ -23,13 +23,14 @@ For Organizations:
- You must be using GitHub Enterprise Cloud.
- The organization must be part of an enterprise plan that includes audit log functionality.

Github integration can collect audit logs from 2 sources: [Github API](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/using-the-audit-log-api-for-your-enterprise) and [Azure Event Hubs](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-streaming-to-azure-event-hubs).
The GitHub integration can collect audit logs from three sources: the [GitHub API](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/using-the-audit-log-api-for-your-enterprise), [Azure Event Hubs](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-streaming-to-azure-event-hubs), and [AWS S3 or AWS SQS](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-streaming-to-amazon-s3).

When using the GitHub API to collect audit log events, the following requirements must be met for the Personal Access Token (PAT):
- You must use a Personal Access Token with `read:audit_log` scope. This applies to both organization and enterprise admins.
- If you're an enterprise admin, ensure your token also includes `admin:enterprise` scope to access enterprise-wide logs.

To collect audit log events from Azure Event Hubs, follow the [guide](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-streaming-to-azure-event-hubs) to setup audit log streaming. For more details, see [documentation](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise).
To collect audit log events from Azure Event Hubs, follow the [guide](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-streaming-to-azure-event-hubs) to set up audit log streaming.
To collect audit log events from AWS S3 or AWS SQS, follow the [guide](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise#setting-up-streaming-to-amazon-s3) to set up audit log streaming. For more details, see the [documentation](https://docs.github.com/en/enterprise-cloud@latest/admin/monitoring-activity-in-your-enterprise/reviewing-audit-logs-for-your-enterprise/streaming-the-audit-log-for-your-enterprise).
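
As a rough sketch, SQS-based collection only needs the queue URL plus AWS credentials. The variable names below are the ones exposed by this integration's `aws-s3` stream; the queue URL and credential values are placeholders:

```yaml
# Placeholder values for illustration only.
queue_url: https://sqs.us-east-1.amazonaws.com/123456789012/github-audit-stream
access_key_id: <YOUR_AWS_ACCESS_KEY_ID>
secret_access_key: <YOUR_AWS_SECRET_ACCESS_KEY>
session_token: <YOUR_AWS_SESSION_TOKEN> # only required for temporary credentials
```

For direct S3 polling instead of SQS, set `bucket_arn` (and optionally `bucket_list_prefix`) rather than `queue_url`.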

*This integration is not compatible with GitHub Enterprise Server.*

9 changes: 9 additions & 0 deletions packages/github/_dev/deploy/tf/env.yml
@@ -0,0 +1,9 @@
version: '2.3'
services:
  terraform:
    environment:
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
      - AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN}
      - AWS_DEFAULT_PROFILE=${AWS_DEFAULT_PROFILE}
      - AWS_REGION=${AWS_REGION:-us-east-1}
2 changes: 2 additions & 0 deletions packages/github/_dev/deploy/tf/files/test-audit.log
@@ -0,0 +1,2 @@
{"@timestamp": 1698579600000, "action": "user.login", "active": true, "actor": "john_doe", "actor_id": 12345, "actor_location": {"country_name": "USA", "ip": "192.168.1.1"}, "org_id": 67890, "org": "tech-corp", "user_id": 12345, "business_id": 56789, "business": "tech-enterprise", "message": "User logged in successfully.", "name": "John Doe", "device": "laptop", "login_method": "password"}
{"@timestamp": 1698579660000, "action": "user.logout", "active": false, "actor": "jane_doe", "actor_id": 23456, "actor_location": {"country_name": "UK", "ip": "192.168.2.1"}, "org_id": 67890, "org": "tech-corp", "user_id": 23456, "business_id": 56789, "business": "tech-enterprise", "message": "User logged out.", "name": "Jane Doe", "device": "mobile", "logout_reason": "user_initiated"}
57 changes: 57 additions & 0 deletions packages/github/_dev/deploy/tf/main.tf
@@ -0,0 +1,57 @@
provider "aws" {
region = "us-east-1"
default_tags {
tags = {
environment = var.ENVIRONMENT
repo = var.REPO
branch = var.BRANCH
build = var.BUILD_ID
created_date = var.CREATED_DATE
}
}
}

resource "aws_s3_bucket" "bucket" {
bucket = "elastic-package-github-audit-bucket-${var.TEST_RUN_ID}"
}

resource "aws_sqs_queue" "queue" {
name = "elastic-package-github-audit-queue-${var.TEST_RUN_ID}"
policy = <<POLICY
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": "*",
"Action": "sqs:SendMessage",
"Resource": "arn:aws:sqs:*:*:elastic-package-github-audit-queue-${var.TEST_RUN_ID}",
"Condition": {
"ArnEquals": { "aws:SourceArn": "${aws_s3_bucket.bucket.arn}" }
}
}
]
}
POLICY
}

resource "aws_s3_bucket_notification" "bucket_notification" {
bucket = aws_s3_bucket.bucket.id

queue {
queue_arn = aws_sqs_queue.queue.arn
events = ["s3:ObjectCreated:*"]
}
}

resource "aws_s3_object" "object" {
bucket = aws_s3_bucket.bucket.id
key = "test-audit.log"
source = "./files/test-audit.log"

depends_on = [aws_sqs_queue.queue]
}

output "queue_url" {
value = aws_sqs_queue.queue.url
}
26 changes: 26 additions & 0 deletions packages/github/_dev/deploy/tf/variables.tf
@@ -0,0 +1,26 @@
variable "BRANCH" {
description = "Branch name or pull request for tagging purposes"
default = "unknown-branch"
}

variable "BUILD_ID" {
description = "Build ID in the CI for tagging purposes"
default = "unknown-build"
}

variable "CREATED_DATE" {
description = "Creation date in epoch time for tagging purposes"
default = "unknown-date"
}

variable "ENVIRONMENT" {
default = "unknown-environment"
}

variable "REPO" {
default = "unknown-repo-name"
}

variable "TEST_RUN_ID" {
default = "detached"
}
5 changes: 5 additions & 0 deletions packages/github/changelog.yml
@@ -1,4 +1,9 @@
# newer versions go on top
- version: "2.11.0"
  changes:
    - description: Add support for AWS S3 and AWS SQS inputs in the audit data stream.
      type: enhancement
      link: https://github.com/elastic/integrations/pull/14635
- version: "2.10.1"
  changes:
    - description: Fix "Top users with assigned issues [GitHub Issues]" lens visual
@@ -8065,7 +8065,7 @@
"name": "Windows",
"version": "10"
},
"version": "116.0."
"version": "116.0"
}
},
{
@@ -0,0 +1,12 @@
input: aws-s3
deployer: tf
wait_for_data_timeout: 20m
data_stream:
  vars:
    access_key_id: "{{AWS_ACCESS_KEY_ID}}"
    secret_access_key: "{{AWS_SECRET_ACCESS_KEY}}"
    session_token: "{{AWS_SESSION_TOKEN}}"
    queue_url: "{{TF_OUTPUT_queue_url}}"
    preserve_original_event: true
assert:
  hit_count: 2
@@ -1,4 +1,5 @@
input: httpjson
deployer: docker
service: github
vars: ~
data_stream:
@@ -1,4 +1,5 @@
input: httpjson
deployer: docker
service: github
vars: ~
data_stream:
91 changes: 91 additions & 0 deletions packages/github/data_stream/audit/agent/stream/aws-s3.yml.hbs
@@ -0,0 +1,91 @@
{{#if collect_s3_logs}}
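{{!-- S3 polling mode: list and read objects from the bucket directly. --}}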

{{#if bucket_arn}}
bucket_arn: {{bucket_arn}}
{{/if}}
{{#if interval}}
bucket_list_interval: {{interval}}
{{/if}}
{{#if bucket_list_prefix}}
bucket_list_prefix: {{bucket_list_prefix}}
{{/if}}

{{else}}
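{{!-- SQS mode: consume S3 event notifications from the queue. --}}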

{{#if queue_url}}
queue_url: {{queue_url}}
{{/if}}
{{#if region}}
region: {{region}}
{{/if}}
{{#if visibility_timeout}}
visibility_timeout: {{visibility_timeout}}
{{/if}}
{{#if api_timeout}}
api_timeout: {{api_timeout}}
{{/if}}
{{#if file_selectors}}
file_selectors:
{{file_selectors}}
{{/if}}

{{/if}}

{{#if access_key_id}}
access_key_id: {{access_key_id}}
{{/if}}
{{#if secret_access_key}}
secret_access_key: {{secret_access_key}}
{{/if}}
{{#if session_token}}
session_token: {{session_token}}
{{/if}}
{{#if shared_credential_file}}
shared_credential_file: {{shared_credential_file}}
{{/if}}
{{#if credential_profile_name}}
credential_profile_name: {{credential_profile_name}}
{{/if}}
{{#if number_of_workers}}
number_of_workers: {{number_of_workers}}
{{/if}}
{{#if role_arn}}
role_arn: {{role_arn}}
{{/if}}
{{#if external_id}}
external_id: {{external_id}}
{{/if}}
{{#if default_region}}
default_region: {{default_region}}
{{/if}}
{{#if fips_enabled}}
fips_enabled: {{fips_enabled}}
{{/if}}
{{#if proxy_url}}
proxy_url: {{proxy_url}}
{{/if}}
{{#if ssl}}
ssl: {{ssl}}
{{/if}}
tags:
{{#if collect_s3_logs}}
- collect_s3_logs
{{else}}
- collect_sqs_logs
{{/if}}
{{#if preserve_original_event}}
- preserve_original_event
{{/if}}
{{#if preserve_duplicate_custom_fields}}
- preserve_duplicate_custom_fields
{{/if}}
{{#each tags as |tag|}}
- {{tag}}
{{/each}}
{{#contains "forwarded" tags}}
publisher_pipeline.disable_host: true
{{/contains}}
{{#if processors}}
processors:
{{processors}}
{{/if}}
@@ -414,6 +414,13 @@ processors:
        - github.created_at
        - github.@timestamp
      ignore_missing: true
  - remove:
      field:
        - log.file.path
        - log.offset
      tag: remove_log_fields
      if: ctx.aws?.s3?.bucket != null && ctx.aws.s3.object != null
      ignore_missing: true
  - script:
      lang: painless
      description: This script processor iterates over the whole document to remove fields with null values.
21 changes: 21 additions & 0 deletions packages/github/data_stream/audit/fields/beats.yml
@@ -0,0 +1,21 @@
- name: aws.s3
  type: group
  fields:
    - name: bucket
      type: group
      fields:
        - name: name
          type: keyword
          description: The AWS S3 bucket name.
        - name: arn
          type: keyword
          description: The AWS S3 bucket ARN.
    - name: object
      type: group
      fields:
        - name: key
          type: keyword
          description: The AWS S3 Object key.
- name: log.offset
  type: long
  description: Log offset.