02-24-2020 01:17 AM
We host a lot of externally reachable resources, and we are hit by a huge number of flooding attacks.
These attacks are blocked by security policies, but since the firewall evaluates every packet in CPU, our cluster's CPUs sit at 100% all day long.
So we built a DoS protection workflow.
First we created a custom report that selects the top 25 external source addresses whose traffic is allowed and whose session end reason is aged-out.
We collect this information and export it through the API to our InfluxDB. We turn the results into a list and update our external dynamic list (EDL) using MineMeld. (The scripts are available below.)
Then we created a DoS Protection policy that uses this EDL.
We run these scripts every 5 minutes and synchronize our EDL every 5 minutes as well; a minimal cron sketch follows.
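For scheduling, here is a minimal crontab sketch of that 5-minute cycle; the script path and log file are assumptions for illustration, not taken from our actual setup:

# Run the collector every 5 minutes (hypothetical paths).
*/5 * * * * /usr/bin/python /opt/scripts/dos_protect.py >> /var/log/dos_protect.log 2>&1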
This script is currently used with PAN-OS 8.1 (the API responds with XML content).
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# *************************************************************************
# **** Script DoS Protection PALOALTO                                  ****
# **** Get the source IPs that try to flood our network                ****
# **** and put them in a DoS protection rule through MineMeld          ****
# *************************************************************************
# **** AMU - DOSI-Pôle réseau                                          ****
# **** Contact: cyril.rimbault@univ-amu.fr                             ****
# *************************************************************************
import xml.etree.ElementTree as ET
import requests
import time
import json
from influxdb import InfluxDBClient
import dns.reversename
import dns.resolver
from dns.exception import DNSException
import subprocess

#############################
# Get an API key from each firewall
#############################
def GetApiKey(url, user, password, *args):
    querystring = {"type": "keygen", "user": user, "password": password}
    header = {'Accept': "application/json"}
    response = requests.request("GET", url, headers=header, params=querystring)
    tree = ET.fromstring(response.content)
    for child in tree.iter('result'):
        for key in child:
            key = key.text
            # print(key)
    return key

#############################
# Launch the custom report and get its job ID
#############################
def GetReportID(url, key, reportname, *args):
    querystring = {"type": "report", "async": "yes", "reporttype": "custom",
                   "vsys": "vsys2", "reportname": reportname, "key": key}
    header = {'Accept': "application/json"}
    response = requests.request("GET", url, headers=header, params=querystring)
    tree = ET.fromstring(response.content)
    for child in tree.iter('result'):
        for job in child:
            job = job.text
            # print(job)
    return job

#############################
# Get the XML result of the report job
#############################
def GetResponse(url, key, job, report, aged, *args):
    # The aged-out report takes longer to compute, so wait longer for it.
    n = 5
    if aged:
        n = 20
    file = "./tmp/data"
    if url == "https://pa-5260-stj/api":
        file = file + "-STJ"
    else:
        file = file + "-STC"
    job = str(job)
    cmd = "<show><report><id>" + job + "</id></report></show>"
    querystring = {"type": "op", "cmd": cmd, "key": key}
    header = {'Accept': "application/json"}
    time.sleep(n)
    response = requests.request("GET", url, headers=header, params=querystring)
    time.sleep(n)
    data = open(file, "wb")
    data.write(response.content)
    data.close()
    return file

#############################
# Parse the report and send the information to InfluxDB
#############################
def Sendfile(url, report, file, aged, *args):
    if url == "https://pa-5260-stj/api":
        report = report + "-STJ"
    else:
        report = report + "-STC"
    user = "InfluxDB-User"  # replace with your InfluxDB user
    pwd = "DB-Password"     # replace with your InfluxDB password
    client = InfluxDBClient(host="YourDB", port=8086, username=user, password=pwd)
    client.switch_database('PALOALTO')
    arbre = ET.parse(file)
    root = arbre.getroot()
    tag = root.tag
    dict = {}
    # Each report entry carries a source IP plus a session or repeat count.
    for child in root.iter():
        if child.tag == 'src':
            src = child.text.strip()
            # print(src)
        if child.tag == 'resolved-src':
            ressrc = child.text.strip()
        if child.tag == 'sessions':
            session = child.text.strip()
            dict[src] = int(session)
        if child.tag == 'repeatcnt':
            count = child.text.strip()
            dict[src] = int(count)
    for src, value in sorted(dict.items(), key=lambda item: item[1], reverse=True):
        json_body = [{
            "measurement": report,
            "tags": {
                "src": src,
            },
            "fields": {
                "count": dict[src]
                # "resolve-name": ressrc
            }
        }]
        try:
            client.write_points(json_body)
        except Exception as e:
            print(e)
        # print("SEND OK" + ":" + report)
    return report, aged

#############################
# Query the source IPs that are over the threshold
#############################
def Querydb(dbname, aged, *args):
    user = "InfluxDB-User"  # replace with your InfluxDB user
    pwd = "DB-Password"     # replace with your InfluxDB password
    client = InfluxDBClient(host='YourDB', port=8086, username=user, password=pwd)
    client.switch_database('PALOALTO')
    # Aged-out sessions get a higher threshold than the generic report.
    if aged:
        query = "SELECT \"src\",\"count\" FROM \"" + dbname + "\" WHERE (\"count\" > 500 AND time >= now() - 15m)"
    else:
        query = "SELECT \"src\",\"count\" FROM \"" + dbname + "\" WHERE (\"count\" > 200 AND time >= now() - 15m)"
    rs = client.query(query)
    result = list(rs.get_points(measurement=dbname))
    data = open("./tmp/IP2ban", "a")
    # print(json.dumps(result, indent=4))
    for item in result:
        data.write(item["src"])
        data.write("\n")
    data.close()

#############################
# Aggregate host IPs into subnets
#############################
def AggregIP():
    command = ["perl", "./AgregRzo", "./tmp/IP2ban", "./tmp/listeAGGR"]
    try:
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        process.communicate()
    except Exception as e:
        print("Aggregation script error", e)

def main():
    url = ["https://pa-5260-stj/api"]  # list your firewall API URLs here
    user = "Api-Username"              # replace with your API username
    password = "PassWordSECURE"        # replace with your API password
    reportname = ["TOP_AGED-OUT"]      # your custom reports: Monitor > Manage Custom Reports > Add
    data = open("./tmp/IP2ban", "w")   # truncate the temporary file
    data.close()
    for site in url:
        key = GetApiKey(site, user, password)
        for report in reportname:
            # The aged-out report uses a longer wait and a higher threshold.
            aged = (report == "TOP_AGED-OUT")
            job = GetReportID(site, key, report)
            file = GetResponse(site, key, job, report, aged)
            name = Sendfile(site, report, file, aged)
            Querydb(name[0], name[1])
    AggregIP()

if __name__ == '__main__':
    main()
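For context, this is roughly the XML shape that Sendfile expects to parse. The tag names (src, resolved-src, sessions, repeatcnt) come from the script itself; the wrapper elements and sample values are illustrative, not captured from a live PAN-OS 8.1 firewall:

<response status="success">
  <result>
    <report name="TOP_AGED-OUT">
      <entry>
        <src>203.0.113.10</src>
        <resolved-src>203.0.113.10</resolved-src>
        <sessions>1342</sessions>
      </entry>
    </report>
  </result>
</response>

Each <entry> becomes one InfluxDB point tagged with its src, so the thresholds in Querydb (count > 200, or > 500 for the aged-out report, over the last 15 minutes) decide which sources end up in ./tmp/IP2ban and, after aggregation, in the EDL.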