Compare commits

...

7 Commits

Author SHA1 Message Date
Ming Di Leom 23e3238c2b
release: 0.2.0 2024-01-26 04:04:51 +00:00
Ming Di Leom 521012f9cd
refactor(savedsearches): move action.lookup to outputlookup
enables on-demand lookup update
override_if_empty=false prevents lookup from being overwritten with empty result
2024-01-26 03:55:22 +00:00
Ming Di Leom 716f9a521f
fix(transforms): leave batch_index_query to default 2024-01-26 03:48:37 +00:00
Ming Di Leom da853d5e9b
docs: example usage 2024-01-26 02:12:01 +00:00
Ming Di Leom 36fd29f277
chore(vscode): code action
https://code.visualstudio.com/updates/v1_85#_code-actions-on-save-and-auto
2024-01-26 02:08:42 +00:00
Ming Di Leom 313ee66590
release: 0.1.1 2023-11-14 07:30:07 +00:00
Ming Di Leom 1787e5e2de
fix: schedule_window should be less than cron frequency 2023-11-14 07:28:06 +00:00
6 changed files with 43 additions and 40 deletions

View File

@@ -3,14 +3,13 @@
"files.trimTrailingWhitespace": true,
"files.insertFinalNewline": true,
"files.trimFinalNewlines": true,
"python.formatting.provider": "none",
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter",
"editor.formatOnSave": true,
"editor.tabSize": 4,
"editor.codeActionsOnSave": {
"source.fixAll": true,
"source.organizeImports": true
"source.fixAll": "explicit",
"source.organizeImports": "explicit"
}
},
"[markdown]": {

View File

@@ -156,6 +156,27 @@ Recommend to update the lookup file "opendbl_ip.csv" every 15 minutes (cron `*/1
Source: https://opendbl.net/
## Example usage
```
| tstats summariesonly=true allow_old_summaries=true count FROM datamodel=Web WHERE Web.action="allowed"
BY Web.user, Web.src, Web.dest, Web.site, Web.url, Web.category, Web.action, index, _time span=1s
| rename Web.* AS *
| lookup urlhaus-filter-splunk-online host AS site, host AS dest OUTPUT message AS description, updated
| lookup urlhaus-filter-splunk-online path_wildcard_prefix AS vendor_url, host AS site, host AS dest OUTPUT message AS description2, updated AS updated2
| lookup phishing-filter-splunk host AS site, host AS dest OUTPUT message AS description3, updated AS updated3
| lookup phishing-filter-splunk path_wildcard_prefix AS vendor_url, host AS site, host AS dest OUTPUT message AS description4, updated AS updated4
| lookup pup-filter-splunk host AS site, host AS dest OUTPUT message AS description5, updated AS updated5
| lookup vn-badsite-filter-splunk host AS site, host AS dest OUTPUT message AS description6, updated AS updated6
| lookup botnet_ip dst_ip AS dest OUTPUT malware AS description7, updated AS updated7
| eval Description=coalesce(description, description2, description3, description4, description5, description6, description7)
| search Description=*
| eval updated=coalesce(updated, updated2, updated3, updated4, updated5, updated6, updated7), "Signature Last Updated"=strftime(strptime(updated." +0000","%Y-%m-%dT%H:%M:%SZ %z"),"%Y-%m-%d %H:%M:%S %z"), Time=strftime(_time, "%Y-%m-%d %H:%M:%S %z"), "Source IP"=src, Username=user, Domain=site, "Destination IP"=dest, URL=url, Action=action
| table Time, index, "Signature Last Updated", "Source IP", Username, Domain, "Destination IP", Description, Action, URL
```
It is not recommended to use subsearch (e.g. `[| inputlookup urlhaus-filter-splunk-online.csv | fields host ]`) for these [lookup tables](./lookups/) especially [urlhaus-filter](./lookups/urlhaus-filter-splunk-online.csv) and [phishing-filter](./lookups/phishing-filter-splunk.csv) because they usually have more than 30,000 rows, which exceed the soft-limit of [10,000 rows](https://docs.splunk.com/Documentation/SplunkCloud/latest/Search/Aboutsubsearches#Subsearch_performance_considerations) returned by subsearch.
## Disable individual commands
Settings -> All configurations -> filter by "malware_filter" app

View File

@@ -5,7 +5,7 @@
"id": {
"group": null,
"name": "TA-malware-filter",
"version": "0.0.13"
"version": "0.2.0"
},
"author": [
{

View File

@@ -1,6 +1,3 @@
#
# App configuration file
#
[install]
is_configured = false
@@ -9,7 +6,7 @@ id = TA-malware-filter
[id]
name = TA-malware-filter
version = 0.1.0
version = 0.2.0
[ui]
is_visible = false
@@ -18,4 +15,4 @@ label = malware-filter Add-on
[launcher]
author = Ming Di Leom
description = Update malware-filter lookups. https://gitlab.com/malware-filter
version = 0.1.0
version = 0.2.0

View File

@@ -1,70 +1,63 @@
[malware-filter Update botnet_ip.csv]
action.lookup = 1
action.lookup.filename = botnet_ip.csv
cron_schedule = */15 * * * *
description = Update lookup every 15 minutes from 00:00
# https://docs.splunk.com/Documentation/Splunk/9.1.1/SearchReference/Collect#Events_without_timestamps
# https://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Collect#Events_without_timestamps
dispatch.earliest_time = 0
enableSched = 0
schedule_window = 60
search = | getbotnetip
schedule_window = 5
search = | getbotnetip\
| outputlookup override_if_empty=false botnet_ip.csv
[malware-filter Update botnet-filter-splunk.csv]
action.lookup = 1
action.lookup.filename = botnet-filter-splunk.csv
cron_schedule = 0 */12 * * *
description = Update lookup every 12 hours from 00:00
dispatch.earliest_time = 0
enableSched = 0
schedule_window = 60
search = | getbotnetfilter
search = | getbotnetfilter\
| outputlookup override_if_empty=false botnet-filter-splunk.csv
[malware-filter Update opendbl_ip.csv]
action.lookup = 1
action.lookup.filename = opendbl_ip.csv
cron_schedule = */15 * * * *
description = Update lookup every 15 minutes from 00:00
dispatch.earliest_time = 0
enableSched = 0
schedule_window = 60
search = | getopendbl
schedule_window = 5
search = | getopendbl\
| outputlookup override_if_empty=false opendbl_ip.csv
[malware-filter Update phishing-filter-splunk.csv]
action.lookup = 1
action.lookup.filename = phishing-filter-splunk.csv
cron_schedule = 0 */12 * * *
description = Update lookup every 12 hours from 00:00
dispatch.earliest_time = 0
enableSched = 0
schedule_window = 60
search = | getphishingfilter
search = | getphishingfilter\
| outputlookup override_if_empty=false phishing-filter-splunk.csv
[malware-filter Update pup-filter-splunk.csv]
action.lookup = 1
action.lookup.filename = pup-filter-splunk.csv
cron_schedule = 0 */12 * * *
description = Update lookup every 12 hours from 00:00
dispatch.earliest_time = 0
enableSched = 0
schedule_window = 60
search = | getpupfilter
search = | getpupfilter\
| outputlookup override_if_empty=false pup-filter-splunk.csv
[malware-filter Update urlhaus-filter-splunk-online.csv]
action.lookup = 1
action.lookup.filename = urlhaus-filter-splunk-online.csv
cron_schedule = 0 */12 * * *
description = Update lookup every 12 hours from 00:00
dispatch.earliest_time = 0
enableSched = 0
schedule_window = 60
search = | geturlhausfilter
search = | geturlhausfilter\
| outputlookup override_if_empty=false urlhaus-filter-splunk-online.csv
[malware-filter Update vn-badsite-filter-splunk.csv]
action.lookup = 1
action.lookup.filename = vn-badsite-filter-splunk.csv
cron_schedule = 0 */12 * * *
description = Update lookup every 12 hours from 00:00
dispatch.earliest_time = 0
enableSched = 0
schedule_window = 60
search = | getvnbadsitefilter
search = | getvnbadsitefilter\
| outputlookup override_if_empty=false vn-badsite-filter-splunk.csv

View File

@@ -1,39 +1,32 @@
[urlhaus-filter-splunk-online]
batch_index_query = 0
case_sensitive_match = 1
filename = urlhaus-filter-splunk-online.csv
max_matches = 1
[phishing-filter-splunk]
batch_index_query = 0
case_sensitive_match = 1
filename = phishing-filter-splunk.csv
max_matches = 1
[pup-filter-splunk]
batch_index_query = 0
case_sensitive_match = 1
filename = pup-filter-splunk.csv
max_matches = 1
[vn-badsite-filter-splunk]
batch_index_query = 0
case_sensitive_match = 1
filename = vn-badsite-filter-splunk.csv
max_matches = 1
[botnet-filter-splunk]
batch_index_query = 0
case_sensitive_match = 1
filename = botnet-filter-splunk.csv
[botnet_ip]
batch_index_query = 0
case_sensitive_match = 1
filename = botnet_ip.csv
[opendbl_ip]
batch_index_query = 0
case_sensitive_match = 1
filename = opendbl_ip.csv
min_matches = 1