feat: download mirrors
parent 60dedb650e
commit 368fa19db8

README.md | 14
@@ -12,6 +12,7 @@
 - [getopendbl](#getopendbl)
 - [Disable individual commands](#disable-individual-commands)
 - [Build](#build)
+- [Download failover](#download-failover)

 Provide custom search commands to update [malware-filter](https://gitlab.com/malware-filter) lookups. Each command downloads from a source CSV and emits rows as events, which can then be piped to a lookup file or used as a subsearch. Each command is exported globally and can be used in any app. This add-on currently does not have any UI.
@@ -162,6 +163,19 @@ cd splunk-malware-filter
 python build.py
 ```

+## Download failover
+
+For `get*filter` search commands, the script will attempt to download from the following domains in sequence (check out the `DOWNLOAD_URLS` constant in each script):
+
+- malware-filter.gitlab.io
+- curbengh.github.io
+- curbengh.github.io
+- malware-filter.gitlab.io
+- malware-filter.pages.dev
+- \*-filter.pages.dev
+
+If your corporate proxy admin balks at having to allow more than one domain, allowing any one of them will do. Since the script doesn't know the proxy ruleset, it will still attempt those domains in sequence until it finds a reachable one.
+
 ## Disclaimer

 `getbotnetip.py` and `getopendbl.py` are included simply for convenience; their upstream sources are not affiliated with malware-filter.
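The failover described in the new README section can be illustrated with a short standalone sketch. This is not the add-on's code (the real implementation is the `download()` method changed in `bin/utils.py` at the end of this commit); it simply walks one of the mirror tuples added below and returns the first response that arrives with HTTP 200, which is why allowing any single domain through the proxy is enough:

```python
# Standalone illustration of the mirror order (botnet-filter shown as the example);
# proxy handling and Splunk integration are deliberately omitted.
import requests

BOTNET_MIRRORS = (
    "https://malware-filter.gitlab.io/malware-filter/botnet-filter-splunk.csv",
    "https://curbengh.github.io/malware-filter/botnet-filter-splunk.csv",
    "https://curbengh.github.io/botnet-filter/botnet-filter-splunk.csv",
    "https://malware-filter.gitlab.io/botnet-filter/botnet-filter-splunk.csv",
    "https://malware-filter.pages.dev/botnet-filter-splunk.csv",
    "https://botnet-filter.pages.dev/botnet-filter-splunk.csv",
)


def first_reachable(urls):
    """Return the body of the first mirror that answers with HTTP 200."""
    last_error = None
    for url in urls:
        try:
            res = requests.get(url, timeout=5)
            if res.ok:
                return res.text  # this mirror is allowed/reachable: stop here
            last_error = requests.HTTPError(f"{res.status_code} from {url}")
        except requests.RequestException as err:
            last_error = err  # blocked or unreachable: try the next domain
    if last_error is None:
        raise ValueError("no mirrors given")
    raise last_error  # every mirror failed


if __name__ == "__main__":
    print(first_reachable(BOTNET_MIRRORS)[:200])
```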
@@ -12,8 +12,13 @@ from utils import Utility
 sys.path.insert(0, path.join(path.dirname(__file__), "..", "lib"))
 from splunklib.searchcommands import Configuration, GeneratingCommand, Option, dispatch

-DOWNLOAD_URL = (
-    "https://malware-filter.gitlab.io/malware-filter/botnet-filter-splunk.csv"
+DOWNLOAD_URLS = (
+    "https://malware-filter.gitlab.io/malware-filter/botnet-filter-splunk.csv",
+    "https://curbengh.github.io/malware-filter/botnet-filter-splunk.csv",
+    "https://curbengh.github.io/botnet-filter/botnet-filter-splunk.csv",
+    "https://malware-filter.gitlab.io/botnet-filter/botnet-filter-splunk.csv",
+    "https://malware-filter.pages.dev/botnet-filter-splunk.csv",
+    "https://botnet-filter.pages.dev/botnet-filter-splunk.csv",
 )

@@ -24,7 +29,7 @@ class GetBotnetFilter(Utility, GeneratingCommand):
     custom_message = Option(name="message")

     def generate(self):
-        dl_csv = self.download(DOWNLOAD_URL)
+        dl_csv = self.download(DOWNLOAD_URLS)
         for row in self.csv_reader(dl_csv):
             if isinstance(self.custom_message, str) and len(self.custom_message) >= 1:
                 row["custom_message"] = self.custom_message
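For context, here is a hedged sketch of how such a generating command is typically wired up with splunklib end to end. It is not the repository's file: the class name is made up, only two mirrors are listed, and `download()`/`csv_reader()` are reduced to trivial stand-ins (the add-on gets both from its `Utility` mixin, whose real `download()` is shown in the `bin/utils.py` hunk further down):

```python
# Simplified sketch of a splunklib generating command in the spirit of GetBotnetFilter;
# names and structure follow the hunks above, but this is not the add-on's source.
import csv
import io
import sys

import requests
from splunklib.searchcommands import Configuration, GeneratingCommand, Option, dispatch

DOWNLOAD_URLS = (
    "https://malware-filter.gitlab.io/malware-filter/botnet-filter-splunk.csv",
    "https://malware-filter.pages.dev/botnet-filter-splunk.csv",
)


@Configuration()
class GetBotnetFilterSketch(GeneratingCommand):
    """Emit one event per row of the downloaded CSV."""

    custom_message = Option(name="message")

    def download(self, urls):
        # Failover intentionally omitted here (see the sketch after the README hunk
        # and the bin/utils.py hunk below); this just fetches the first mirror.
        res = requests.get(urls[0], timeout=5)
        res.raise_for_status()
        return res.text

    def csv_reader(self, csv_text):
        # Parse the CSV body into dicts keyed by the header row.
        return csv.DictReader(io.StringIO(csv_text))

    def generate(self):
        dl_csv = self.download(DOWNLOAD_URLS)
        for row in self.csv_reader(dl_csv):
            if isinstance(self.custom_message, str) and len(self.custom_message) >= 1:
                row["custom_message"] = self.custom_message
            yield row


dispatch(GetBotnetFilterSketch, sys.argv, sys.stdin, sys.stdout, __name__)
```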

@@ -12,8 +12,13 @@ from utils import Utility
 sys.path.insert(0, path.join(path.dirname(__file__), "..", "lib"))
 from splunklib.searchcommands import Configuration, GeneratingCommand, Option, dispatch

-DOWNLOAD_URL = (
-    "https://malware-filter.gitlab.io/malware-filter/phishing-filter-splunk.csv"
+DOWNLOAD_URLS = (
+    "https://malware-filter.gitlab.io/malware-filter/phishing-filter-splunk.csv",
+    "https://curbengh.github.io/malware-filter/phishing-filter-splunk.csv",
+    "https://curbengh.github.io/phishing-filter/phishing-filter-splunk.csv",
+    "https://malware-filter.gitlab.io/phishing-filter/phishing-filter-splunk.csv",
+    "https://malware-filter.pages.dev/phishing-filter-splunk.csv",
+    "https://phishing-filter.pages.dev/phishing-filter-splunk.csv",
 )

@@ -27,7 +32,7 @@ class GetPhishingFilter(Utility, GeneratingCommand):
     custom_message = Option(name="message")

     def generate(self):
-        dl_csv = self.download(DOWNLOAD_URL)
+        dl_csv = self.download(DOWNLOAD_URLS)
         for row in self.csv_reader(dl_csv):
             if isinstance(self.custom_message, str) and len(self.custom_message) >= 1:
                 row["custom_message"] = self.custom_message

@@ -12,7 +12,14 @@ from utils import Utility
 sys.path.insert(0, path.join(path.dirname(__file__), "..", "lib"))
 from splunklib.searchcommands import Configuration, GeneratingCommand, Option, dispatch

-DOWNLOAD_URL = "https://malware-filter.gitlab.io/malware-filter/pup-filter-splunk.csv"
+DOWNLOAD_URLS = (
+    "https://malware-filter.gitlab.io/malware-filter/pup-filter-splunk.csv",
+    "https://curbengh.github.io/malware-filter/pup-filter-splunk.csv",
+    "https://curbengh.github.io/pup-filter/pup-filter-splunk.csv",
+    "https://malware-filter.gitlab.io/pup-filter/pup-filter-splunk.csv",
+    "https://malware-filter.pages.dev/pup-filter-splunk.csv",
+    "https://pup-filter.pages.dev/pup-filter-splunk.csv",
+)


 @Configuration()

@@ -25,7 +32,7 @@ class GetPupFilter(Utility, GeneratingCommand):
     custom_message = Option(name="message")

     def generate(self):
-        dl_csv = self.download(DOWNLOAD_URL)
+        dl_csv = self.download(DOWNLOAD_URLS)
         for row in self.csv_reader(dl_csv):
             if isinstance(self.custom_message, str) and len(self.custom_message) >= 1:
                 row["custom_message"] = self.custom_message

@@ -12,8 +12,13 @@ from utils import Utility
 sys.path.insert(0, path.join(path.dirname(__file__), "..", "lib"))
 from splunklib.searchcommands import Configuration, GeneratingCommand, Option, dispatch

-DOWNLOAD_URL = (
-    "https://malware-filter.gitlab.io/malware-filter/urlhaus-filter-splunk-online.csv"
+DOWNLOAD_URLS = (
+    "https://malware-filter.gitlab.io/malware-filter/urlhaus-filter-splunk-online.csv",
+    "https://curbengh.github.io/malware-filter/urlhaus-filter-splunk-online.csv",
+    "https://curbengh.github.io/urlhaus-filter/urlhaus-filter-splunk-online.csv",
+    "https://malware-filter.gitlab.io/urlhaus-filter/urlhaus-filter-splunk-online.csv",
+    "https://malware-filter.pages.dev/urlhaus-filter-splunk-online.csv",
+    "https://urlhaus-filter.pages.dev/urlhaus-filter-splunk-online.csv",
 )

@@ -27,7 +32,7 @@ class GetUrlhausFilter(Utility, GeneratingCommand):
     custom_message = Option(name="message")

     def generate(self):
-        dl_csv = self.download(DOWNLOAD_URL)
+        dl_csv = self.download(DOWNLOAD_URLS)
         for row in self.csv_reader(dl_csv):
             if isinstance(self.custom_message, str) and len(self.custom_message) >= 1:
                 row["custom_message"] = self.custom_message

@@ -12,8 +12,13 @@ from utils import Utility
 sys.path.insert(0, path.join(path.dirname(__file__), "..", "lib"))
 from splunklib.searchcommands import Configuration, GeneratingCommand, Option, dispatch

-DOWNLOAD_URL = (
-    "https://malware-filter.gitlab.io/malware-filter/vn-badsite-filter-splunk.csv"
+DOWNLOAD_URLS = (
+    "https://malware-filter.gitlab.io/malware-filter/vn-badsite-filter-splunk.csv",
+    "https://curbengh.github.io/malware-filter/vn-badsite-filter-splunk.csv",
+    "https://curbengh.github.io/vn-badsite-filter/vn-badsite-filter-splunk.csv",
+    "https://malware-filter.gitlab.io/vn-badsite-filter/vn-badsite-filter-splunk.csv",
+    "https://malware-filter.pages.dev/vn-badsite-filter-splunk.csv",
+    "https://vn-badsite-filter.pages.dev/vn-badsite-filter-splunk.csv",
 )

@@ -27,7 +32,7 @@ class GetVNBadsiteFilter(Utility, GeneratingCommand):
     custom_message = Option(name="message")

     def generate(self):
-        dl_csv = self.download(DOWNLOAD_URL)
+        dl_csv = self.download(DOWNLOAD_URLS)
         for row in self.csv_reader(dl_csv):
             if isinstance(self.custom_message, str) and len(self.custom_message) >= 1:
                 row["custom_message"] = self.custom_message

bin/utils.py | 23
@@ -58,13 +58,30 @@ class Utility:

         return {}

-    def download(self, url):
-        """Send a GET request to the URL and return content of the response."""
+    # pylint: disable=inconsistent-return-statements
+    def download(self, urls, index=0):
+        """
+        Send a GET request to the URL and return content of the response.
+
+        Arguments:
+            urls {list/tuple/string} -- A list of URLs to try in sequence
+            index -- List's index to start
+        """
+        if isinstance(urls, str):
+            urls = (urls,)
+
+        url = urls[index]
         proxy_config = self.__get_proxy(url)
         try:
             res = requests.get(url, timeout=5, **proxy_config)
             # pylint: disable=no-member
             if res.status_code == requests.codes.ok:
                 return res.text
+
+            if index < len(urls) - 1:
+                return self.download(urls, index + 1)
+
             res.raise_for_status()
-            return res.text
         except requests.exceptions.HTTPError as errh:
             raise errh
         except requests.exceptions.ConnectionError as errc:
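The hunk is cut off at the connection-error handler, so the following network-free toy only models the call contract visible above: a bare string is wrapped into a 1-tuple, the optional `index` argument drives the recursion through the mirrors, and an error only surfaces once the last entry has failed. The names are made up, and it assumes (not shown in the truncated hunk) that connection errors also advance to the next mirror:

```python
# Toy model of the download(urls, index=0) contract; `fetch` stands in for the
# real HTTP request so the failover logic can be exercised without any network access.
def fetch_with_failover(urls, index=0, fetch=None):
    if isinstance(urls, str):  # backward compatible: a bare URL becomes a 1-tuple
        urls = (urls,)
    try:
        return fetch(urls[index])
    except OSError:
        if index < len(urls) - 1:  # more mirrors left: recurse into the next one
            return fetch_with_failover(urls, index + 1, fetch=fetch)
        raise  # the last mirror failed too: propagate the error


def fake_fetch(url):
    if "pages.dev" not in url:  # pretend only *.pages.dev is reachable
        raise OSError(f"blocked: {url}")
    return f"csv body from {url}"


print(fetch_with_failover(
    (
        "https://malware-filter.gitlab.io/malware-filter/botnet-filter-splunk.csv",
        "https://malware-filter.pages.dev/botnet-filter-splunk.csv",
    ),
    fetch=fake_fetch,
))
# -> csv body from https://malware-filter.pages.dev/botnet-filter-splunk.csv
```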