Skip to content

Commit

Permalink
Merge pull request #374 from Azure/logstash-parsers
Browse files Browse the repository at this point in the history
logstash-parsers-for-blog-reference
  • Loading branch information
shainw committed Nov 15, 2019
2 parents 673c851 + 64ce50b commit 083781f
Show file tree
Hide file tree
Showing 7 changed files with 245 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
id: 0db42a94-e7c8-4bf1-99a7-1a2fb4158212
name: Privileged role attached to Instance
description: |
'Identity and Access Management (IAM) securely manages access to AWS services and resources.
Identifies when a Privileged role is attached to an existing instance or new instance at deployment. This instance may be used by an adversary to escalate a normal user's privileges to an administrative level.
Read more about ingesting custom logs using Logstash at https://github.com/Azure/Azure-Sentinel/wiki/Ingest-Custom-Logs-LogStash
and AWS API AddRoleToInstanceProfile at https://docs.aws.amazon.com/IAM/latest/APIReference/API_AddRoleToInstanceProfile.html'
requiredDataConnectors:
- connectorId: AWS
dataTypes:
- AWSCloudTrail
tactics:
- PrivilegeEscalation
relevantTechniques:
- T1078
query: |
let EventNameList = dynamic(["AttachUserPolicy","AttachRolePolicy"]);
let PolicyArnList = dynamic(["arn:aws:iam::aws:policy/AdministratorAccess","arn:aws:iam::aws:policy/DatabaseAdministrator","arn:aws:iam::aws:policy/NetworkAdministrator","arn:aws:iam::aws:policy/SystemAdministrator","arn:aws:iam::aws:policy/AmazonS3FullAccess"]);
let timeframe = 1d;
let lookback = 14d;
//Creating a temp table of events creating privileged roles or users which can later be correlated with suspicious operations.
let PrivilegedRoleorUsers = AWSCloudTrail
| where TimeGenerated >= ago(lookback)
| where EventName in (EventNameList)
| extend PolicyArn = tostring(parse_json(RequestParameters).policyArn), RoleName = tostring(parse_json(RequestParameters).roleName)
| where PolicyArn in (PolicyArnList)
| distinct PolicyArn, UserIdentityType, UserIdentityUserName,RoleName;
// Joining the list of identities having Privileged roles with the API call AddRoleToInstanceProfile to identify the instances which may be used by adversaries as pivot points for privilege escalation.
PrivilegedRoleorUsers
| join (
AWSCloudTrail
| where TimeGenerated >= ago(timeframe)
| where EventName in ("AddRoleToInstanceProfile")
| extend InstanceProfileName = tostring(parse_json(RequestParameters).InstanceProfileName), RoleName = tostring(parse_json(RequestParameters).roleName)
| summarize EventCount=count(), StartTimeUtc = min(TimeGenerated), EndTimeUtc = max(TimeGenerated) by EventSource, EventName, UserIdentityType , UserIdentityArn , UserIdentityUserName, SourceIpAddress, RoleName
) on RoleName
| extend timestamp = StartTimeUtc, IPCustomEntity = SourceIpAddress, AccountCustomEntity = RoleName
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
id: 0ef8dee1-eb94-44c8-b59b-2eb096a4b983
name: S3 Bucket outbound Data transfer anomaly
description: |
'Identifies when an anomalous spike occurs in data transfer from an S3 bucket based on GetObject API call and the BytesTransferredOut field.
The query leverages KQL built-in anomaly detection algorithms to find large deviations from baseline patterns.
Sudden increases in execution frequency of sensitive actions should be further investigated for malicious activity.
Manually change scorethreshold from 1.5 to 3 or higher to reduce the noise based on outliers flagged from the query criteria.
Read more about ingesting custom logs using Logstash at https://github.com/Azure/Azure-Sentinel/wiki/Ingest-Custom-Logs-LogStash
and AWS S3 API GetObject at https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html'
severity: Medium
requiredDataConnectors:
- connectorId: Logstash
dataTypes:
- AwsBucketAPILogs
queryFrequency: 1h
queryPeriod: 14d
triggerOperator: gt
triggerThreshold: 0
tactics:
- Exfiltration
relevantTechniques:
- T1020
query: |
let starttime = 14d;
let endtime = 1d;
let timeframe = 1h;
let scorethreshold = 1.5;
// Preparing the time series data aggregated on BytesTransferredOut column in the form of multi-value array so that it can be used with time series anomaly function.
let TimeSeriesData=
AWSS3BucketAPILogParsed
| where EventTime between (startofday(ago(starttime))..startofday(ago(endtime)))
| where EventName == "GetObject"
| make-series Total=sum(BytesTransferredOut) on EventTime from startofday(ago(starttime)) to startofday(ago(endtime)) step timeframe;
// Use the time series data prepared in the previous step with the time series anomaly function to generate a baseline pattern and flag outliers based on the scorethreshold value.
let TimeSeriesAlerts = TimeSeriesData
| extend (anomalies, score, baseline) = series_decompose_anomalies(Total, scorethreshold, -1, 'linefit')
| mv-expand Total to typeof(double), EventTime to typeof(datetime), anomalies to typeof(double), score to typeof(double), baseline to typeof(long)
| where anomalies > 0
| project EventTime, Total, baseline, anomalies, score;
// Joining the flagged outliers from the previous step with the original dataset to present contextual information during the anomaly hour, so analysts can conduct investigations or make informed decisions.
TimeSeriesAlerts
| join (
AWSS3BucketAPILogParsed
| where EventTime between (startofday(ago(starttime))..startofday(ago(endtime)))
| where EventName == "GetObject"
| summarize Total = sum(BytesTransferredOut), Files= makeset(Key) , max(EventTime) by bin(EventTime, 1h), EventSource,EventName, SourceIPAddress, UserIdentityType, UserIdentityArn, UserIdentityUserName, BucketName, Host, AuthenticationMethod, SessionMfaAuthenticated, SessionUserName
) on EventTime
| project AnomalyTime = max_EventTime, SourceIPAddress, UserIdentityType,UserIdentityUserName,SessionUserName, BucketName, Host, AuthenticationMethod, Files, Total, baseline, anomalies, score
| extend timestamp = AnomalyTime, AccountCustomEntity = SessionUserName , HostCustomEntity = Host, IPCustomEntity = SourceIPAddress
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
id: 5b6ee21d-da53-46eb-827c-eab2a9ba3d2f
name: Suspicious credential token access of valid IAM Roles
description: |
'Adversaries may generate temporary credentials of existing privileged IAM roles to access AWS resources that were not previously accessible to perform malicious actions. The credentials may be generated by trusted IAM user or via AWS Cloud Instance Metadata API.
This query will look for AWS STS API Assume Role operations for RoleArn (Role Amazon Resource Names) which was not historically seen.
You can also limit the query to only sensitive IAM Roles which needs to be monitored.
Read more about ingesting custom logs using Logstash at https://github.com/Azure/Azure-Sentinel/wiki/Ingest-Custom-Logs-LogStash
, AWS API AssumeRole at https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html and AWS Instance Metadata API at https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html'
requiredDataConnectors:
- connectorId: Logstash
dataTypes:
- AwsBucketAPILogs
tactics:
- InitialAccess
- DefenseEvasion
relevantTechniques:
- T1078
query: |
let starttime = 14d;
let midtime = 2d;
let endtime = 1d;
// Generating a historical table of AssumeRole operations for IAM Roles to be compared with the last 24 hours
AWSCloudTrail
| where TimeGenerated >= ago(endtime)
| where EventName == "AssumeRole" | extend RoleArn = tostring(parse_json(RequestParameters).roleArn)
| project TimeGenerated, EventSource, EventName, UserIdentityType, UserIdentityInvokedBy , SourceIpAddress, RoleArn
// Doing Leftanti join to find new AssumeRole operation for IAM role which was not seen historically generated from previous table.
| join kind= leftanti (
AWSCloudTrail
| where TimeGenerated between (ago(starttime)..ago(midtime))
| where EventName == "AssumeRole" | extend RoleArn = tostring(parse_json(RequestParameters).roleArn)
| project TimeGenerated, EventSource, EventName, UserIdentityType, UserIdentityInvokedBy , SourceIpAddress, RoleArn
) on RoleArn, UserIdentityInvokedBy
| summarize EventCount = count(), StartTimeUtc = min(TimeGenerated), EndTimeUtc = max(TimeGenerated) by RoleArn, EventSource, EventName, UserIdentityType, UserIdentityInvokedBy, SourceIpAddress
| extend timestamp = StartTimeUtc, IPCustomEntity = SourceIpAddress, AccountCustomEntity = tostring(split(RoleArn, "/")[1])
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
id: 669e1338-b1a2-4d73-b720-a1e60d5d1474
name: Suspicious Data Access to S3 Bucket from Unknown IP
description: |
'Adversaries may access data objects from improperly secured cloud storage. This query will identify any access originating from a Source IP which was not seen historically accessing the bucket or downloading files from it.
You can also limit the query to only private buckets with sensitive files by setting the value or list of values to BucketName column.
Read more about ingesting custom logs using Logstash at https://github.com/Azure/Azure-Sentinel/wiki/Ingest-Custom-Logs-LogStash
and AWS S3 API GetObject at https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html and ListObject at https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjects.html
and ListBucket at https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListBuckets.html'
requiredDataConnectors:
- connectorId: Logstash
dataTypes:
- AwsBucketAPILogs
tactics:
- Collection
relevantTechniques:
- T1530
query: |
let EventNameList = dynamic(["ListBucket","ListObjects","GetObject"]);
let starttime = 14d;
let midtime = 2d;
let endtime = 1d;
AWSS3BucketAPILogParsed
| where EventTime >= ago(endtime)
| where EventName in (EventNameList)
| project EventTime, EventSource,EventName, SourceIPAddress, UserIdentityType, UserIdentityArn, UserIdentityUserName, BucketName, Host, AuthenticationMethod, SessionMfaAuthenticated, SessionUserName, Key
| join kind=leftanti
(
AWSS3BucketAPILogParsed
| where EventTime between (ago(starttime)..ago(midtime))
| where EventName in (EventNameList)
) on SourceIPAddress
| summarize EventCount=count(), StartTimeUtc = min(EventTime), EndTimeUtc = max(EventTime), Files= makeset(Key), EventNames = makeset(EventName) by EventSource, SourceIPAddress, UserIdentityType, UserIdentityArn, UserIdentityUserName, BucketName, Host, AuthenticationMethod, SessionMfaAuthenticated, SessionUserName
| project StartTimeUtc, EndTimeUtc, EventSource, Host, SourceIPAddress, UserIdentityType, BucketName, EventNames, Files, AuthenticationMethod, SessionMfaAuthenticated, SessionUserName, EventCount
| extend timestamp = StartTimeUtc, HostCustomEntity = Host, AccountCustomEntity = SessionUserName, IPCustomEntity = SourceIPAddress
Loading

0 comments on commit 083781f

Please sign in to comment.