#!/bin/sh

# Generate URLhaus-based malicious-URL blocklists.
# -e: abort immediately on any command failure;
# -x: trace each command, useful in CI build logs.
set -e -x
## Create a temporary working folder
# All intermediate files live in tmp/; it is removed at the end of the build.
mkdir -p tmp/ && cd tmp/
## Prepare datasets

# Latest URLhaus database dump (CSV); stored under src/ alongside the repo
wget https://urlhaus.abuse.ch/downloads/csv/ -O ../src/URLhaus.csv

# Cisco Umbrella Popularity List (top 1 million domains)
wget https://s3-us-west-1.amazonaws.com/umbrella-static/top-1m.csv.zip -O top-1m.csv.zip

# Self-maintained list of additional well-known domains to exclude
cp ../src/exclude.txt .
## Clean up URLhaus.csv into a sorted, de-duplicated list of URLs
dos2unix < ../src/URLhaus.csv |
  # Remove comment lines
  sed '/^#/d' |
  # Parse URLs (6th double-quote-delimited field of the CSV)
  cut -f 6 -d '"' |
  # Strip the scheme ("http://" / "https://")
  cut -f 3- -d '/' |
  # Remove leading "www."
  sed 's/^www\.//g' |
  sort -u > urlhaus.txt
## Parse domain and IP address only
# Drop any path ("/...") and port (":...") so only the host part remains.
cut -f 1 -d '/' < urlhaus.txt |
  cut -f 1 -d ':' |
  sort -u > urlhaus-domains.txt
## Same clean-up, restricted to URLs still marked "online" by URLhaus
dos2unix < ../src/URLhaus.csv |
  # Remove comment lines
  sed '/^#/d' |
  # Parse online URLs only (rows whose status field is "online")
  grep '"online"' |
  # Parse URLs (6th double-quote-delimited field)
  cut -f 6 -d '"' |
  # Strip the scheme
  cut -f 3- -d '/' |
  # Remove leading "www."
  sed 's/^www\.//g' |
  sort -u > urlhaus-online.txt
## Domains/IPs of the online URLs only (path and port stripped)
cut -f 1 -d '/' < urlhaus-online.txt |
  cut -f 1 -d ':' |
  sort -u > urlhaus-domains-online.txt
## Parse the Cisco Umbrella 1 Million
unzip -p top-1m.csv.zip |
  dos2unix |
  # Parse domains only (CSV rows are "rank,domain"; take the 2nd field)
  cut -f 2 -d ',' |
  # Domain must have at least a 'dot'
  grep -F '.' |
  # Remove leading "www."
  sed 's/^www\.//g' |
  sort -u > top-1m.txt
# Merge Umbrella and self-maintained top domains
# sort(1) accepts multiple input files, so no cat is needed.
sort -u top-1m.txt exclude.txt > top-1m-well-known.txt
## Parse popular domains from URLhaus
# -Fx: fixed-string match of whole lines against top-1m-well-known.txt.
# grep exits 1 when nothing matches, which would kill the build under
# `set -e`; treat status 1 (no matches) as success, real errors (>=2) still abort.
grep -Fx -f top-1m-well-known.txt urlhaus-domains.txt > urlhaus-top-domains.txt \
  || [ $? -eq 1 ]
## Parse domains from URLhaus excluding popular domains
|
|
|
|
cat urlhaus-domains.txt | \
|
|
|
|
grep -F -vf urlhaus-top-domains.txt > malware-domains.txt
|
|
|
|
|
2019-06-13 06:04:13 +00:00
|
|
|
## Same exclusion, for the online-only domain list
# Status 1 (nothing selected) is not an error.
grep -F -vf urlhaus-top-domains.txt urlhaus-domains-online.txt > malware-domains-online.txt \
  || [ $? -eq 1 ]
## Parse malware URLs from popular domains
# If urlhaus-top-domains.txt is empty, grep -f matches nothing and exits 1,
# which would abort the build under `set -e`; tolerate status 1 only.
grep -F -f urlhaus-top-domains.txt urlhaus.txt > malware-url-top-domains.txt \
  || [ $? -eq 1 ]
## Same extraction, for the online-only URL list
# Status 1 (nothing selected) is not an error.
grep -F -f urlhaus-top-domains.txt urlhaus-online.txt > malware-url-top-domains-online.txt \
  || [ $? -eq 1 ]
## Merge malware domains and URLs

# Filter-list header (Adblock-Plus-style "!" comment lines)
CURRENT_TIME="$(date -R -u)"
FIRST_LINE="! Title: abuse.ch URLhaus Malicious URL Blocklist"
SECOND_LINE="! Updated: $CURRENT_TIME"
THIRD_LINE="! Expires: 1 day (update frequency)"
FOURTH_LINE="! Repo: https://gitlab.com/curben/urlhaus-filter"
FIFTH_LINE="! License: https://creativecommons.org/publicdomain/zero/1.0/"
SIXTH_LINE="! Source: https://urlhaus.abuse.ch/api/"
# "\n" is kept literal here; the `sed '1 i\...'` insertion below expands each
# one into a real newline when the header is prepended.
COMMENT="$FIRST_LINE\n$SECOND_LINE\n$THIRD_LINE\n$FOURTH_LINE\n$FIFTH_LINE\n$SIXTH_LINE"
# Combine domains and URLs, prepend the header, write the full filter
sort malware-domains.txt malware-url-top-domains.txt |
  sed '1 i\'"$COMMENT"'' > ../urlhaus-filter.txt
# Online-only variant of the filter
sort malware-domains-online.txt malware-url-top-domains-online.txt |
  sed '1 i\'"$COMMENT"'' |
  # Distinguish this list in its title line
  sed '1s/Malicious/Online Malicious/' > ../urlhaus-filter-online.txt
## Remove the temporary working folder
cd ../ && rm -r tmp/