Compare commits
23 Commits
SHA1
---
c94a7e8a39
ed9b77e0ed
b91f3bfde5
65f4aa9e4c
9ed2beacec
530a813bc2
c4463482db
23e3238c2b
521012f9cd
716f9a521f
da853d5e9b
36fd29f277
313ee66590
1787e5e2de
93b7e2a44c
9b04a100db
444b92a837
1cd2ec36a8
097ba9a3df
81ee292f1d
4e084978b5
b9d7dae295
d677af89d9
.gitlab-ci.yml

```diff
@@ -1,4 +1,4 @@
-image: python:slim
+image: python:3.11-slim

 variables:
   PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
@@ -15,19 +15,24 @@ stages:
 lint:
   stage: test

+  variables:
+    PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
   cache:
     paths:
       - .cache/pip
       - .venv/
+      - ${PRE_COMMIT_HOME}

   before_script:
+    - apt-get update && apt-get install -y --no-install-recommends git
    - python --version
    - python -m venv .venv
    - source .venv/bin/activate
    - pip install -r requirements-dev.txt -U

   script:
    - pylint $(find -type f -name "*.py" ! -path "./.venv/**" ! -path "./lib/**")
+    - pre-commit run --all-files

 build:
   stage: build
@@ -56,7 +61,8 @@ release_job:
   stage: release
   image: registry.gitlab.com/gitlab-org/release-cli:latest
   rules:
-    - if: $CI_COMMIT_TAG # Run this job when a tag is created manually
+    # Run this job when a tag is created manually
+    - if: $CI_COMMIT_TAG
   script:
    - echo "Running the release job."
   release:
```
.pre-commit-config.yaml

```diff
@@ -13,20 +13,20 @@ repos:
         "s|^#\\!.*|#\\!/usr/bin/env python|",
       ]
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: "v4.4.0"
+    rev: "v4.6.0"
    hooks:
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: "v0.0.254"
+    rev: "v0.5.5"
    hooks:
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
  - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 24.4.2
    hooks:
      - id: black
  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v2.7.1"
+    rev: "v3.1.0"
    hooks:
      - id: prettier
```
.vscode/settings.json

```diff
@@ -3,14 +3,13 @@
   "files.trimTrailingWhitespace": true,
   "files.insertFinalNewline": true,
   "files.trimFinalNewlines": true,
-  "python.formatting.provider": "none",
   "[python]": {
     "editor.defaultFormatter": "ms-python.black-formatter",
     "editor.formatOnSave": true,
     "editor.tabSize": 4,
     "editor.codeActionsOnSave": {
-      "source.fixAll": true,
-      "source.organizeImports": true
+      "source.fixAll": "explicit",
+      "source.organizeImports": "explicit"
     }
   },
   "[markdown]": {
```
README.md (29 changed lines)
````diff
@@ -15,9 +15,9 @@

 Provide custom search commands to update [malware-filter](https://gitlab.com/malware-filter) lookups. Each command downloads from a source CSV and emit rows as events which can then be piped to a lookup file or used as a subsearch. Each command is exported globally and can be used in any app. This add-on currently does not have any UI.

-Source CSVs will be downloaded via a proxy if configured in "$SPLUNK_HOME/etc/system/local/[server.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Serverconf#Splunkd_http_proxy_configuration)".
+[Lookup files](./lookups/) can be updated using the bundled scheduled reports every 12 hours, every 15 minutes for botnet_ip.csv and opendbl_ip.csv. The scheduled reports are disabled by default. Enable the relevant schedule that corresponds to the required lookup file. Modify the search string to add [optional arguments](#usage).

-By default, [lookup files](./lookups/) will be updated using scheduled reports every 12 hours, every 15 minutes for botnet_ip.csv and opendbl_ip.csv. Modify the relevant saved searches to add [optional arguments](#usage).
+Source CSVs will be downloaded via a proxy if configured in "$SPLUNK_HOME/etc/system/local/[server.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Serverconf#Splunkd_http_proxy_configuration)".

 Refer to [this article](https://mdleom.com/blog/2023/04/16/splunk-lookup-malware-filter/) for a more comprehensive guide on detecting malicious domain, URL, IP and CIDR range.

@@ -25,7 +25,7 @@ Tested on Splunk 9.x.

 ## Installation

-Releases are available at https://gitlab.com/malware-filter/splunk-malware-filter/-/releases
+Releases are available at [Splunkbase](https://splunkbase.splunk.com/app/6970) and [GitLab](https://gitlab.com/malware-filter/splunk-malware-filter/-/releases)

 Instruction to build the main branch is available at the [Build](#build) section.

@@ -156,6 +156,27 @@ Recommend to update the lookup file "opendbl_ip.csv" every 15 minutes (cron `*/15 * * * *`)

 Source: https://opendbl.net/

+## Example usage
+
+```
+| tstats summariesonly=true allow_old_summaries=true count FROM datamodel=Web WHERE Web.action="allowed"
+    BY Web.user, Web.src, Web.dest, Web.site, Web.url, Web.category, Web.action, index, _time span=1s
+| rename Web.* AS *
+| lookup urlhaus-filter-splunk-online host AS site, host AS dest OUTPUT message AS description, updated
+| lookup urlhaus-filter-splunk-online path_wildcard_prefix AS vendor_url, host AS site, host AS dest OUTPUTNEW message AS description, updated
+| lookup phishing-filter-splunk host AS site, host AS dest OUTPUTNEW message AS description, updated
+| lookup phishing-filter-splunk path_wildcard_prefix AS vendor_url, host AS site, host AS dest OUTPUTNEW message AS description, updated
+| lookup pup-filter-splunk host AS site, host AS dest OUTPUTNEW message AS description, updated
+| lookup vn-badsite-filter-splunk host AS site, host AS dest OUTPUTNEW message AS description, updated
+| lookup botnet_ip dst_ip AS dest OUTPUTNEW malware AS description, updated
+| eval Description=description
+| search Description=*
+| eval updated=coalesce(updated, updated2, updated3, updated4, updated5, updated6, updated7), "Signature Last Updated"=strftime(strptime(updated." +0000","%Y-%m-%dT%H:%M:%SZ %z"),"%Y-%m-%d %H:%M:%S %z"), Time=strftime(_time, "%Y-%m-%d %H:%M:%S %z"), "Source IP"=src, Username=user, Domain=site, "Destination IP"=dest, URL=url, Action=action
+| table Time, index, "Signature Last Updated", "Source IP", Username, Domain, "Destination IP", Description, Action, URL
+```
+
+It is not recommended to use subsearch (e.g. `[| inputlookup urlhaus-filter-splunk-online.csv | fields host ]`) for these [lookup tables](./lookups/) especially [urlhaus-filter](./lookups/urlhaus-filter-splunk-online.csv) and [phishing-filter](./lookups/phishing-filter-splunk.csv) because they usually have more than 30,000 rows, which exceed the soft-limit of [10,000 rows](https://docs.splunk.com/Documentation/SplunkCloud/latest/Search/Aboutsubsearches#Subsearch_performance_considerations) returned by subsearch.
+
 ## Disable individual commands

 Settings -> All configurations -> filter by "malware_filter" app

@@ -192,3 +213,5 @@ https://gitlab.com/curben/blog#repository-mirrors

 ## License

 [Creative Commons Zero v1.0 Universal](LICENSE-CC0.md) and [MIT License](LICENSE)
+
+[splunk-sdk-python](https://github.com/splunk/splunk-sdk-python) bundled with the package under lib folder: [Apache License 2.0](https://choosealicense.com/licenses/apache-2.0/)
````
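The new Example usage section chains `lookup` commands against the bundled lookup files. For completeness, a rough sketch of invoking one of the exported commands remotely with the bundled splunk-sdk-python (connection values are placeholders, not part of the add-on):

```python
import splunklib.client as client
import splunklib.results as results

# Placeholder credentials; point these at a real Splunk management port.
service = client.connect(
    host="localhost", port=8089, username="admin", password="changeme"
)

# Run a bundled command and refresh its lookup file, then print any output.
job = service.jobs.oneshot(
    "| geturlhausfilter"
    " | outputlookup override_if_empty=false urlhaus-filter-splunk-online.csv",
    output_mode="json",
)
for result in results.JSONResultsReader(job):
    print(result)
```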
app.manifest

```diff
@@ -5,7 +5,7 @@
   "id": {
     "group": null,
     "name": "TA-malware-filter",
-    "version": "0.0.12"
+    "version": "0.2.0"
   },
   "author": [
     {
```
bin/utils.py (42 changed lines)
```diff
@@ -4,6 +4,8 @@
 Common functions used in this add-on
 """

+from __future__ import annotations
+
 from configparser import ConfigParser
 from csv import QUOTE_ALL, DictReader
 from os import environ, path
@@ -15,7 +17,7 @@ import requests
 class Utility:
     """Provide common functions"""

-    def __get_proxy(self, url):
+    def __get_proxy(self, url: str) -> dict[str, dict[str, str]] | str:
         """
         Determine http proxy setting of a URL according to Splunk server configuration.
         Return {dict} of http/https proxy value if a URL should be proxied.
@@ -42,7 +44,6 @@ class Utility:
         https_proxy = proxy_config.get("https_proxy", "")

-        # https://docs.splunk.com/Documentation/Splunk/9.0.3/Admin/Serverconf#Splunkd_http_proxy_configuration
         # pylint: disable=too-many-boolean-expressions
         if (
             # either configs should not be empty
             (len(http_proxy) >= 1 or len(https_proxy) >= 1)
```
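`__get_proxy()` reads Splunkd's proxy settings and returns keyword arguments for `requests`. A minimal standalone sketch of that flow, assuming the documented `[proxyConfig]` stanza in server.conf (the path and URL below are illustrative; the add-on resolves the real path from $SPLUNK_HOME):

```python
from configparser import ConfigParser

import requests

# Illustrative server.conf path under a default $SPLUNK_HOME.
config = ConfigParser()
config.read("/opt/splunk/etc/system/local/server.conf")
section = config["proxyConfig"] if config.has_section("proxyConfig") else {}

http_proxy = section.get("http_proxy", "")
https_proxy = section.get("https_proxy", "")

# Only pass a proxies mapping when at least one value is set, mirroring
# the "either configs should not be empty" check above.
proxy_kwargs = (
    {"proxies": {"http": http_proxy, "https": https_proxy}}
    if http_proxy or https_proxy
    else {}
)
response = requests.get("https://example.com", timeout=5, **proxy_kwargs)
```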
```diff
@@ -58,14 +59,12 @@ class Utility:

         return {}

-    # pylint: disable=inconsistent-return-statements
-    def download(self, urls, index=0):
+    def download(self, urls: list | tuple | str, index: int = 0) -> str:
         """
         Send a GET request to the URL and return content of the response.

-        Arguments:
-            urls {list/tuple/string} -- A list of URLs to try in sequence
-            index -- List's index to start
+        :param urls: A list of URLs to try in sequence
+        :param index: List's index to start
         """
         if isinstance(urls, str):
             urls = (urls,)
@@ -74,7 +73,6 @@ class Utility:
             proxy_config = self.__get_proxy(url)
             try:
                 res = requests.get(url, timeout=5, **proxy_config)
-                # pylint: disable=no-member
                 if res.status_code == requests.codes.ok:
                     return res.text

```
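`download()` walks a list of mirror URLs and returns the body of the first one that responds with 200 OK. An iterative sketch of the same fallback pattern (the add-on's version is recursive via the `index` parameter and applies the proxy settings above):

```python
import requests


def fetch_first(urls: tuple) -> str:
    """Return the response body of the first URL that answers 200 OK."""
    last_err = None
    for url in urls:
        try:
            res = requests.get(url, timeout=5)
            if res.status_code == requests.codes.ok:
                return res.text
        except requests.exceptions.RequestException as err:
            # Remember the failure and fall through to the next mirror.
            last_err = err
    raise RuntimeError(f"all mirrors failed: {last_err}")


print(fetch_first(("https://example.com", "https://example.org"))[:80])
```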
```diff
@@ -91,7 +89,7 @@ class Utility:
             except requests.exceptions.RequestException as err:
                 raise err

-    def __split_column(self, input_str=None):
+    def __split_column(self, input_str: str | list | None = None) -> list[str] | list:
         """Split {string} into {list} using comma separator"""
         if isinstance(input_str, str):
             return [x.strip() for x in input_str.split(",")]
@@ -99,24 +97,30 @@ class Utility:
             return input_str
         return []

-    def insert_affix(self, row, prefix_opt=None, suffix_opt=None, affix_opt=None):
+    def insert_affix(
+        self,
+        row: dict,
+        prefix_opt: str | list | None = None,
+        suffix_opt: str | list | None = None,
+        affix_opt: str | list | None = None,
+    ) -> dict:
         """
         Affix wildcard "*" character to existing values

-        Arguments:
-            row {dict} -- A row of an array-parsed CSV
-            prefix_opt {string/list} -- A column name or a comma-separated list of column names to have
-            wildcard prefixed to their non-empty value.
-            suffix_opt {string/list} -- Same as prefix_opt but have the wildcard suffixed instead.
-            affix_opt {string/list} -- Same as prefix_opt but have the wildcard prefixed and suffixed.
+        :param row: A row of an array-parsed CSV
+        :param prefix_opt: A column name or a comma-separated list of column names to have
+            wildcard prefixed to their non-empty value.
+        :param suffix_opt: Same as prefix_opt but have the wildcard suffixed instead.
+        :param affix_opt: Same as prefix_opt but have the wildcard prefixed and suffixed.

-        Return:
-            A new row with prefix/suffix columns appended
+        Return a new row with prefix/suffix columns appended
         """

         prefix_opt_list = self.__split_column(prefix_opt)
         suffix_opt_list = self.__split_column(suffix_opt)
         affix_opt_list = self.__split_column(affix_opt)
         new_column = {}

         for column in prefix_opt_list:
             if column in row and len(row[column]) >= 1:
                 new_column = {
@@ -138,7 +142,7 @@ class Utility:

         return {**row, **new_column}

-    def csv_reader(self, csv_str):
+    def csv_reader(self, csv_str: str) -> DictReader:
         """Parse an CSV input string into an interable of {dict} rows whose keys correspond to column names"""
         return DictReader(
             filter(lambda row: row[0] != "#", csv_str.splitlines()), quoting=QUOTE_ALL
         )
```
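`csv_reader()` filters out `#` comment lines before parsing, so the first surviving line becomes the header row. A self-contained sketch of the same filter-then-DictReader pattern, with made-up CSV content:

```python
from csv import QUOTE_ALL, DictReader

# Hypothetical lookup-style CSV with a leading comment line.
csv_str = '# updated 2024-01-01\nhost,message\nexample.com,"malware site"\n'

# Drop lines that start with "#" before DictReader sees them, so
# "host,message" is treated as the header row.
reader = DictReader(
    filter(lambda row: row[0] != "#", csv_str.splitlines()), quoting=QUOTE_ALL
)
for row in reader:
    print(row["host"], "->", row["message"])  # example.com -> malware site
```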
build.py (37 changed lines)
```diff
@@ -5,12 +5,14 @@
 import tarfile
 from configparser import ConfigParser
 from os import environ, path
-from re import search, sub
+from pathlib import PurePath
+from posixpath import join as posixjoin
+from re import search
 from subprocess import check_call
 from sys import executable


-def version():
+def version() -> str:
     """
     Return version number from app.conf or commit hash if in CI
     """
@@ -44,24 +46,37 @@ def version():
     return launcher.get("version", "")


-def exclusion(tarinfo):
+def exclusion(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo | None:
     """Exclude dev files and cache, and reset file stats"""

     # exclude certain folders/files
     pathname = tarinfo.name
     if search(
-        r"/\.|\\\.|__pycache__|pyproject\.toml|requirements|build\.py|tar\.gz", pathname
+        r"/\.|\\\.|__pycache__|pyproject\.toml|requirements|build\.py|\.tar\.gz|\.tgz",
+        pathname,
     ):
         return None

-    # rename parent folder as "TA-malware-filter"
-    tarinfo.name = sub(r"^.", "TA-malware-filter", pathname)
+    app = PurePath(pathname).parts[0]

     # reset file stats
     # based on https://splunkbase.splunk.com/app/833
-    tarinfo.uid = 1001
-    tarinfo.gid = 123
+    tarinfo.uid = 0
+    tarinfo.gid = 0
     tarinfo.uname = tarinfo.gname = ""
     if tarinfo.isfile():
         # remove execution permission
         tarinfo.mode = 0o644
+
+        # except for scripts
+        # tarinfo uses posix (not nt)
+        if (
+            tarinfo.name.startswith(posixjoin(app, "bin"))
+            and path.splitext(tarinfo.name)[-1] == ".py"
+        ):
+            tarinfo.mode = 0o744
     if tarinfo.isdir():
         # remove write permission from group & world
         tarinfo.mode = 0o755

     return tarinfo
@@ -74,7 +89,7 @@ check_call(
         "pip",
         "install",
         "--quiet",
-        "splunk-sdk == 1.*",
+        "splunk-sdk == 2.*",
         "-t",
         "lib",
         "--upgrade",
@@ -84,4 +99,4 @@ check_call(
 pkg_file = f"TA-malware-filter-{version()}.tar.gz"
 print(f"Creating {pkg_file}...")
 with tarfile.open(pkg_file, "w:gz") as tar:
-    tar.add(".", filter=exclusion)
+    tar.add(".", filter=exclusion, arcname="TA-malware-filter")
```
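`exclusion()` is a `tarfile` add filter: returning `None` drops a member, while returning the (mutated) `TarInfo` keeps it with rewritten metadata. A minimal standalone sketch of the pattern, using hypothetical file and archive names:

```python
from __future__ import annotations

import tarfile


def demo_filter(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo | None:
    """Drop caches and normalize file stats, like exclusion() above."""
    if "__pycache__" in tarinfo.name:
        return None  # excluded from the archive
    # Reset ownership so the archive does not leak the build user/group.
    tarinfo.uid = tarinfo.gid = 0
    tarinfo.uname = tarinfo.gname = ""
    if tarinfo.isfile():
        tarinfo.mode = 0o644  # rw-r--r--
    if tarinfo.isdir():
        tarinfo.mode = 0o755  # rwxr-xr-x
    return tarinfo


# arcname places everything under one top-level folder, as build.py now does.
with tarfile.open("demo.tar.gz", "w:gz") as tar:
    tar.add(".", filter=demo_filter, arcname="demo-app")
```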
default/app.conf

```diff
@@ -1,6 +1,3 @@
-#
-# App configuration file
-#
 [install]
 is_configured = false

@@ -9,7 +6,7 @@ id = TA-malware-filter
 [id]
 name = TA-malware-filter
-version = 0.0.12
+version = 0.2.0

 [ui]
 is_visible = false
@@ -18,4 +15,4 @@ label = malware-filter Add-on
 [launcher]
 author = Ming Di Leom
 description = Update malware-filter lookups. https://gitlab.com/malware-filter
-version = 0.0.12
+version = 0.2.0
```
default/savedsearches.conf

```diff
@@ -1,69 +1,63 @@
 [malware-filter Update botnet_ip.csv]
-action.lookup = 1
-action.lookup.filename = botnet_ip.csv
 cron_schedule = */15 * * * *
 description = Update lookup every 15 minutes from 00:00
-dispatch.earliest_time = -1h
-enableSched = 1
-schedule_window = 60
-search = | getbotnetip
+# https://docs.splunk.com/Documentation/Splunk/latest/SearchReference/Collect#Events_without_timestamps
+dispatch.earliest_time = 0
+enableSched = 0
+schedule_window = 5
+search = | getbotnetip\
+| outputlookup override_if_empty=false botnet_ip.csv

 [malware-filter Update botnet-filter-splunk.csv]
-action.lookup = 1
-action.lookup.filename = botnet-filter-splunk.csv
 cron_schedule = 0 */12 * * *
 description = Update lookup every 12 hours from 00:00
-dispatch.earliest_time = -12h
-enableSched = 1
+dispatch.earliest_time = 0
+enableSched = 0
 schedule_window = 60
-search = | getbotnetfilter
+search = | getbotnetfilter\
+| outputlookup override_if_empty=false botnet-filter-splunk.csv

 [malware-filter Update opendbl_ip.csv]
-action.lookup = 1
-action.lookup.filename = opendbl_ip.csv
 cron_schedule = */15 * * * *
 description = Update lookup every 15 minutes from 00:00
-dispatch.earliest_time = -1h
-enableSched = 1
-schedule_window = 60
-search = | getopendbl
+dispatch.earliest_time = 0
+enableSched = 0
+schedule_window = 5
+search = | getopendbl\
+| outputlookup override_if_empty=false opendbl_ip.csv

 [malware-filter Update phishing-filter-splunk.csv]
-action.lookup = 1
-action.lookup.filename = phishing-filter-splunk.csv
 cron_schedule = 0 */12 * * *
 description = Update lookup every 12 hours from 00:00
-dispatch.earliest_time = -12h
-enableSched = 1
+dispatch.earliest_time = 0
+enableSched = 0
 schedule_window = 60
-search = | getphishingfilter
+search = | getphishingfilter\
+| outputlookup override_if_empty=false phishing-filter-splunk.csv

 [malware-filter Update pup-filter-splunk.csv]
-action.lookup = 1
-action.lookup.filename = pup-filter-splunk.csv
 cron_schedule = 0 */12 * * *
 description = Update lookup every 12 hours from 00:00
-dispatch.earliest_time = -12h
-enableSched = 1
+dispatch.earliest_time = 0
+enableSched = 0
 schedule_window = 60
-search = | getpupfilter
+search = | getpupfilter\
+| outputlookup override_if_empty=false pup-filter-splunk.csv

 [malware-filter Update urlhaus-filter-splunk-online.csv]
-action.lookup = 1
-action.lookup.filename = urlhaus-filter-splunk-online.csv
 cron_schedule = 0 */12 * * *
 description = Update lookup every 12 hours from 00:00
-dispatch.earliest_time = -12h
-enableSched = 1
+dispatch.earliest_time = 0
+enableSched = 0
 schedule_window = 60
-search = | geturlhausfilter
+search = | geturlhausfilter\
+| outputlookup override_if_empty=false urlhaus-filter-splunk-online.csv

 [malware-filter Update vn-badsite-filter-splunk.csv]
-action.lookup = 1
-action.lookup.filename = vn-badsite-filter-splunk.csv
 cron_schedule = 0 */12 * * *
 description = Update lookup every 12 hours from 00:00
-dispatch.earliest_time = -12h
-enableSched = 1
+dispatch.earliest_time = 0
+enableSched = 0
 schedule_window = 60
-search = | getvnbadsitefilter
+search = | getvnbadsitefilter\
+| outputlookup override_if_empty=false vn-badsite-filter-splunk.csv
```
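With `enableSched = 0` the shipped reports no longer run on their own, matching the README change above. One way to switch a schedule back on programmatically, sketched with the bundled splunk-sdk-python (credentials are placeholders; Splunk Web works just as well):

```python
import splunklib.client as client

# Placeholder connection values for a Splunk management port.
service = client.connect(
    host="localhost", port=8089, username="admin", password="changeme"
)

# Stanza names come from savedsearches.conf above.
report = service.saved_searches[
    "malware-filter Update urlhaus-filter-splunk-online.csv"
]
report.update(is_scheduled=True)  # flip the schedule on, i.e. enableSched = 1
```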
default/transforms.conf

```diff
@@ -1,39 +1,32 @@
 [urlhaus-filter-splunk-online]
 batch_index_query = 0
 case_sensitive_match = 1
 filename = urlhaus-filter-splunk-online.csv
 max_matches = 1

 [phishing-filter-splunk]
 batch_index_query = 0
 case_sensitive_match = 1
 filename = phishing-filter-splunk.csv
 max_matches = 1

 [pup-filter-splunk]
 batch_index_query = 0
 case_sensitive_match = 1
 filename = pup-filter-splunk.csv
 max_matches = 1

 [vn-badsite-filter-splunk]
 batch_index_query = 0
 case_sensitive_match = 1
 filename = vn-badsite-filter-splunk.csv
 max_matches = 1

 [botnet-filter-splunk]
 batch_index_query = 0
 case_sensitive_match = 1
 filename = botnet-filter-splunk.csv

 [botnet_ip]
 batch_index_query = 0
 case_sensitive_match = 1
 filename = botnet_ip.csv

 [opendbl_ip]
 batch_index_query = 0
 case_sensitive_match = 1
 filename = opendbl_ip.csv
 min_matches = 1
```
pyproject.toml

```diff
@@ -1,28 +1,8 @@
 [tool.pylint.'MASTER']
-py-version = "3.10"
+# https://docs.splunk.com/Documentation/Splunk/9.3.0/Python3Migration/PythonCompatibility
+py-version = "3.7"
 init-hook='import sys; sys.path.append("./bin")'

 [tool.pylint.'MESSAGES CONTROL']
 disable = [
-    "raw-checker-failed",
-    "bad-inline-option",
-    "locally-disabled",
-    "file-ignored",
-    "suppressed-message",
-    "useless-suppression",
-    "deprecated-pragma",
-    "use-symbolic-message-instead",
-    "invalid-name",
     "unspecified-encoding", # assume UTF-8
     "line-too-long",
-    "too-many-nested-blocks",
-    "too-many-branches",
-    "duplicate-code",
-    "redefined-outer-name",
-    "fixme",
-    "wrong-import-position"
 ]
-
-[tool.pylint.'FORMAT']
-indent-after-paren = 4
-indent-string = "    "
```
requirements.txt

```diff
@@ -1,2 +1,2 @@
 requests == 2.*
-splunk-sdk == 1.*
+splunk-sdk == 2.*
```